blob: e892844ea0db0cd5c8d6c8f60c7903add24c34b2 [file] [log] [blame]
Caesar Wang2831bc32016-10-27 01:13:16 +08001/*
2 * Copyright (c) 2016, ARM Limited and Contributors. All rights reserved.
3 *
dp-arm82cb2c12017-05-03 09:38:09 +01004 * SPDX-License-Identifier: BSD-3-Clause
Caesar Wang2831bc32016-10-27 01:13:16 +08005 */
6#include <debug.h>
7#include <arch_helpers.h>
8#include <platform_def.h>
9#include <plat_private.h>
10#include <dram.h>
11#include <pmu_regs.h>
12#include <rk3399_def.h>
Xing Zhenge3525112017-02-24 14:56:41 +080013#include <secure.h>
Caesar Wang2831bc32016-10-27 01:13:16 +080014#include <soc.h>
15#include <suspend.h>
16
17#define PMUGRF_OS_REG0 0x300
18#define PMUGRF_OS_REG1 0x304
19#define PMUGRF_OS_REG2 0x308
20#define PMUGRF_OS_REG3 0x30c
21
22#define CRU_SFTRST_DDR_CTRL(ch, n) ((0x1 << (8 + 16 + (ch) * 4)) | \
23 ((n) << (8 + (ch) * 4)))
24#define CRU_SFTRST_DDR_PHY(ch, n) ((0x1 << (9 + 16 + (ch) * 4)) | \
25 ((n) << (9 + (ch) * 4)))
26
27#define FBDIV_ENC(n) ((n) << 16)
28#define FBDIV_DEC(n) (((n) >> 16) & 0xfff)
29#define POSTDIV2_ENC(n) ((n) << 12)
30#define POSTDIV2_DEC(n) (((n) >> 12) & 0x7)
31#define POSTDIV1_ENC(n) ((n) << 8)
32#define POSTDIV1_DEC(n) (((n) >> 8) & 0x7)
33#define REFDIV_ENC(n) (n)
34#define REFDIV_DEC(n) ((n) & 0x3f)
35
36/* PMU CRU */
37#define PMUCRU_RSTNHOLD_CON0 0x120
38#define PMUCRU_RSTNHOLD_CON1 0x124
39
40#define PRESET_GPIO0_HOLD(n) (((n) << 7) | WMSK_BIT(7))
41#define PRESET_GPIO1_HOLD(n) (((n) << 8) | WMSK_BIT(8))
42
43#define SYS_COUNTER_FREQ_IN_MHZ (SYS_COUNTER_FREQ_IN_TICKS / 1000000)
44
45/*
46 * Copy @num registers from @src to @dst
47 */
48__sramfunc void sram_regcpy(uintptr_t dst, uintptr_t src, uint32_t num)
49{
50 while (num--) {
51 mmio_write_32(dst, mmio_read_32(src));
52 dst += sizeof(uint32_t);
53 src += sizeof(uint32_t);
54 }
55}
56
/*
 * Return the architected counter value inverted, so callers can treat
 * it as a 32-bit down counter.
 */
static __sramfunc uint32_t sram_get_timer_value(void)
{
	/*
	 * Generic delay timer implementation expects the timer to be a down
	 * counter. We apply bitwise NOT operator to the tick values returned
	 * by read_cntpct_el0() to simulate the down counter.
	 */
	return (uint32_t)(~read_cntpct_el0());
}
66
67static __sramfunc void sram_udelay(uint32_t usec)
68{
69 uint32_t start, cnt, delta, delta_us;
70
71 /* counter is decreasing */
72 start = sram_get_timer_value();
73 do {
74 cnt = sram_get_timer_value();
75 if (cnt > start) {
76 delta = UINT32_MAX - cnt;
77 delta += start;
78 } else
79 delta = start - cnt;
80 delta_us = (delta * SYS_COUNTER_FREQ_IN_MHZ);
81 } while (delta_us < usec);
82}
83
/*
 * Re-program the secure DDR region controls in the SGRF; these are lost
 * across a PD_CENTER suspend/resume (see comment below).
 */
static __sramfunc void configure_sgrf(void)
{
	/*
	 * SGRF_DDR_RGN_DPLL_CLK and SGRF_DDR_RGN_RTC_CLK:
	 * IC ECO bug, need to set this register.
	 *
	 * SGRF_DDR_RGN_BYPS:
	 * After the PD_CENTER suspend/resume, the DDR region
	 * related registers in the SGRF will be reset, we
	 * need to re-initialize them.
	 */
	mmio_write_32(SGRF_BASE + SGRF_DDRRGN_CON0_16(16),
		      SGRF_DDR_RGN_DPLL_CLK |
		      SGRF_DDR_RGN_RTC_CLK |
		      SGRF_DDR_RGN_BYPS);
}
100
101static __sramfunc void rkclk_ddr_reset(uint32_t channel, uint32_t ctl,
102 uint32_t phy)
103{
104 channel &= 0x1;
105 ctl &= 0x1;
106 phy &= 0x1;
107 mmio_write_32(CRU_BASE + CRU_SOFTRST_CON(4),
108 CRU_SFTRST_DDR_CTRL(channel, ctl) |
109 CRU_SFTRST_DDR_PHY(channel, phy));
110}
111
/*
 * Pulse the DDR resets on channel @ch in the required order: hold both
 * controller and PHY in reset, release the PHY first, then release the
 * controller, with a 10us settle delay after each step.
 */
static __sramfunc void phy_pctrl_reset(uint32_t ch)
{
	rkclk_ddr_reset(ch, 1, 1);
	sram_udelay(10);
	rkclk_ddr_reset(ch, 1, 0);
	sram_udelay(10);
	rkclk_ddr_reset(ch, 0, 0);
	sram_udelay(10);
}
121
Caesar Wang2831bc32016-10-27 01:13:16 +0800122static __sramfunc void set_cs_training_index(uint32_t ch, uint32_t rank)
123{
124 /* PHY_8/136/264/392 phy_per_cs_training_index_X 1bit offset_24 */
125 mmio_clrsetbits_32(PHY_REG(ch, 8), 0x1 << 24, rank << 24);
126 mmio_clrsetbits_32(PHY_REG(ch, 136), 0x1 << 24, rank << 24);
127 mmio_clrsetbits_32(PHY_REG(ch, 264), 0x1 << 24, rank << 24);
128 mmio_clrsetbits_32(PHY_REG(ch, 392), 0x1 << 24, rank << 24);
129}
130
131static __sramfunc void select_per_cs_training_index(uint32_t ch, uint32_t rank)
132{
133 /* PHY_84 PHY_PER_CS_TRAINING_EN_0 1bit offset_16 */
134 if ((mmio_read_32(PHY_REG(ch, 84)) >> 16) & 1)
135 set_cs_training_index(ch, rank);
136}
137
/*
 * Override the trained write-leveling values on channel @ch: enable
 * multicast so all ranks are written at once, force a fixed 0x200 value
 * into each byte lane, then request a controller update (ctrlupd) so
 * the new values take effect.
 */
static void override_write_leveling_value(uint32_t ch)
{
	uint32_t byte;

	/*
	 * PHY_8/136/264/392
	 * phy_per_cs_training_multicast_en_X 1bit offset_16
	 */
	for (byte = 0; byte < 4; byte++)
		mmio_clrsetbits_32(PHY_REG(ch, 8 + 128 * byte),
				   0x1 << 16, 1 << 16);

	for (byte = 0; byte < 4; byte++)
		mmio_clrsetbits_32(PHY_REG(ch, 63 + (128 * byte)),
				   0xffff << 16,
				   0x200 << 16);

	/* CTL_200 ctrlupd_req 1bit offset_8 */
	mmio_clrsetbits_32(CTL_REG(ch, 200), 0x1 << 8, 0x1 << 8);
}
159
/*
 * Run the PI-controlled trainings selected by @training_flag on channel
 * @ch.  PI_FULL_TRAINING is first expanded into the set of trainings
 * the DRAM type supports.  Each training is requested per rank (or per
 * rank_mask slot for CA/WDQ leveling), then the PI interrupt status
 * (PI_174) and the relevant PHY observation registers are polled until
 * either the done bits are set or an error bit appears.
 *
 * Returns 0 on success, -1 on any training error.
 */
static __sramfunc int data_training(uint32_t ch,
		struct rk3399_sdram_params *sdram_params,
		uint32_t training_flag)
{
	uint32_t obs_0, obs_1, obs_2, obs_3, obs_err = 0;
	uint32_t rank = sdram_params->ch[ch].rank;
	uint32_t rank_mask;
	uint32_t i, tmp;

	/*
	 * LPDDR4 uses rank slots 0/2 (single rank) or 0..3; other types
	 * use slot 0 or slots 0/1.
	 */
	if (sdram_params->dramtype == LPDDR4)
		rank_mask = (rank == 1) ? 0x5 : 0xf;
	else
		rank_mask = (rank == 1) ? 0x1 : 0x3;

	/* PHY_927 PHY_PAD_DQS_DRIVE RPULL offset_22 */
	mmio_setbits_32(PHY_REG(ch, 927), (1 << 22));

	/* expand PI_FULL_TRAINING per DRAM type */
	if (training_flag == PI_FULL_TRAINING) {
		if (sdram_params->dramtype == LPDDR4) {
			training_flag = PI_WRITE_LEVELING |
					PI_READ_GATE_TRAINING |
					PI_READ_LEVELING |
					PI_WDQ_LEVELING;
		} else if (sdram_params->dramtype == LPDDR3) {
			training_flag = PI_CA_TRAINING | PI_WRITE_LEVELING |
					PI_READ_GATE_TRAINING;
		} else if (sdram_params->dramtype == DDR3) {
			training_flag = PI_WRITE_LEVELING |
					PI_READ_GATE_TRAINING |
					PI_READ_LEVELING;
		}
	}

	/* ca training(LPDDR4,LPDDR3 support) */
	if ((training_flag & PI_CA_TRAINING) == PI_CA_TRAINING) {
		for (i = 0; i < 4; i++) {
			if (!(rank_mask & (1 << i)))
				continue;

			select_per_cs_training_index(ch, i);
			/* PI_100 PI_CALVL_EN:RW:8:2 */
			mmio_clrsetbits_32(PI_REG(ch, 100), 0x3 << 8, 0x2 << 8);

			/* PI_92 PI_CALVL_REQ:WR:16:1,PI_CALVL_CS:RW:24:2 */
			mmio_clrsetbits_32(PI_REG(ch, 92),
					   (0x1 << 16) | (0x3 << 24),
					   (0x1 << 16) | (i << 24));
			/* poll until CA leveling is done or errors out */
			while (1) {
				/* PI_174 PI_INT_STATUS:RD:8:18 */
				tmp = mmio_read_32(PI_REG(ch, 174)) >> 8;

				/*
				 * check status obs
				 * PHY_532/660/788 phy_adr_calvl_obs1_:0:32
				 */
				obs_0 = mmio_read_32(PHY_REG(ch, 532));
				obs_1 = mmio_read_32(PHY_REG(ch, 660));
				obs_2 = mmio_read_32(PHY_REG(ch, 788));
				if (((obs_0 >> 30) & 0x3) ||
				    ((obs_1 >> 30) & 0x3) ||
				    ((obs_2 >> 30) & 0x3))
					obs_err = 1;
				if ((((tmp >> 11) & 0x1) == 0x1) &&
				    (((tmp >> 13) & 0x1) == 0x1) &&
				    (((tmp >> 5) & 0x1) == 0x0) &&
				    (obs_err == 0))
					break;
				else if ((((tmp >> 5) & 0x1) == 0x1) ||
					 (obs_err == 1))
					return -1;
			}
			/* clear interrupt,PI_175 PI_INT_ACK:WR:0:17 */
			mmio_write_32(PI_REG(ch, 175), 0x00003f7c);
		}
		mmio_clrbits_32(PI_REG(ch, 100), 0x3 << 8);
	}

	/* write leveling(LPDDR4,LPDDR3,DDR3 support) */
	if ((training_flag & PI_WRITE_LEVELING) == PI_WRITE_LEVELING) {
		for (i = 0; i < rank; i++) {
			select_per_cs_training_index(ch, i);
			/* PI_60 PI_WRLVL_EN:RW:8:2 */
			mmio_clrsetbits_32(PI_REG(ch, 60), 0x3 << 8, 0x2 << 8);
			/* PI_59 PI_WRLVL_REQ:WR:8:1,PI_WRLVL_CS:RW:16:2 */
			mmio_clrsetbits_32(PI_REG(ch, 59),
					   (0x1 << 8) | (0x3 << 16),
					   (0x1 << 8) | (i << 16));

			/* poll until write leveling is done or errors out */
			while (1) {
				/* PI_174 PI_INT_STATUS:RD:8:18 */
				tmp = mmio_read_32(PI_REG(ch, 174)) >> 8;

				/*
				 * check status obs, if error maybe can not
				 * get leveling done PHY_40/168/296/424
				 * phy_wrlvl_status_obs_X:0:13
				 */
				obs_0 = mmio_read_32(PHY_REG(ch, 40));
				obs_1 = mmio_read_32(PHY_REG(ch, 168));
				obs_2 = mmio_read_32(PHY_REG(ch, 296));
				obs_3 = mmio_read_32(PHY_REG(ch, 424));
				if (((obs_0 >> 12) & 0x1) ||
				    ((obs_1 >> 12) & 0x1) ||
				    ((obs_2 >> 12) & 0x1) ||
				    ((obs_3 >> 12) & 0x1))
					obs_err = 1;
				if ((((tmp >> 10) & 0x1) == 0x1) &&
				    (((tmp >> 13) & 0x1) == 0x1) &&
				    (((tmp >> 4) & 0x1) == 0x0) &&
				    (obs_err == 0))
					break;
				else if ((((tmp >> 4) & 0x1) == 0x1) ||
					 (obs_err == 1))
					return -1;
			}

			/* clear interrupt,PI_175 PI_INT_ACK:WR:0:17 */
			mmio_write_32(PI_REG(ch, 175), 0x00003f7c);
		}
		override_write_leveling_value(ch);
		mmio_clrbits_32(PI_REG(ch, 60), 0x3 << 8);
	}

	/* read gate training(LPDDR4,LPDDR3,DDR3 support) */
	if ((training_flag & PI_READ_GATE_TRAINING) == PI_READ_GATE_TRAINING) {
		for (i = 0; i < rank; i++) {
			select_per_cs_training_index(ch, i);
			/* PI_80 PI_RDLVL_GATE_EN:RW:24:2 */
			mmio_clrsetbits_32(PI_REG(ch, 80), 0x3 << 24,
					   0x2 << 24);
			/*
			 * PI_74 PI_RDLVL_GATE_REQ:WR:16:1
			 * PI_RDLVL_CS:RW:24:2
			 */
			mmio_clrsetbits_32(PI_REG(ch, 74),
					   (0x1 << 16) | (0x3 << 24),
					   (0x1 << 16) | (i << 24));

			/* poll until gate training is done or errors out */
			while (1) {
				/* PI_174 PI_INT_STATUS:RD:8:18 */
				tmp = mmio_read_32(PI_REG(ch, 174)) >> 8;

				/*
				 * check status obs
				 * PHY_43/171/299/427
				 * PHY_GTLVL_STATUS_OBS_x:16:8
				 */
				obs_0 = mmio_read_32(PHY_REG(ch, 43));
				obs_1 = mmio_read_32(PHY_REG(ch, 171));
				obs_2 = mmio_read_32(PHY_REG(ch, 299));
				obs_3 = mmio_read_32(PHY_REG(ch, 427));
				if (((obs_0 >> (16 + 6)) & 0x3) ||
				    ((obs_1 >> (16 + 6)) & 0x3) ||
				    ((obs_2 >> (16 + 6)) & 0x3) ||
				    ((obs_3 >> (16 + 6)) & 0x3))
					obs_err = 1;
				if ((((tmp >> 9) & 0x1) == 0x1) &&
				    (((tmp >> 13) & 0x1) == 0x1) &&
				    (((tmp >> 3) & 0x1) == 0x0) &&
				    (obs_err == 0))
					break;
				else if ((((tmp >> 3) & 0x1) == 0x1) ||
					 (obs_err == 1))
					return -1;
			}
			/* clear interrupt,PI_175 PI_INT_ACK:WR:0:17 */
			mmio_write_32(PI_REG(ch, 175), 0x00003f7c);
		}
		mmio_clrbits_32(PI_REG(ch, 80), 0x3 << 24);
	}

	/* read leveling(LPDDR4,LPDDR3,DDR3 support) */
	if ((training_flag & PI_READ_LEVELING) == PI_READ_LEVELING) {
		for (i = 0; i < rank; i++) {
			select_per_cs_training_index(ch, i);
			/* PI_80 PI_RDLVL_EN:RW:16:2 */
			mmio_clrsetbits_32(PI_REG(ch, 80), 0x3 << 16,
					   0x2 << 16);
			/* PI_74 PI_RDLVL_REQ:WR:8:1,PI_RDLVL_CS:RW:24:2 */
			mmio_clrsetbits_32(PI_REG(ch, 74),
					   (0x1 << 8) | (0x3 << 24),
					   (0x1 << 8) | (i << 24));
			/* poll until read leveling is done or errors out */
			while (1) {
				/* PI_174 PI_INT_STATUS:RD:8:18 */
				tmp = mmio_read_32(PI_REG(ch, 174)) >> 8;

				/*
				 * make sure status obs not report error bit
				 * PHY_46/174/302/430
				 * phy_rdlvl_status_obs_X:16:8
				 */
				if ((((tmp >> 8) & 0x1) == 0x1) &&
				    (((tmp >> 13) & 0x1) == 0x1) &&
				    (((tmp >> 2) & 0x1) == 0x0))
					break;
				else if (((tmp >> 2) & 0x1) == 0x1)
					return -1;
			}
			/* clear interrupt,PI_175 PI_INT_ACK:WR:0:17 */
			mmio_write_32(PI_REG(ch, 175), 0x00003f7c);
		}
		mmio_clrbits_32(PI_REG(ch, 80), 0x3 << 16);
	}

	/* wdq leveling(LPDDR4 support) */
	if ((training_flag & PI_WDQ_LEVELING) == PI_WDQ_LEVELING) {
		for (i = 0; i < 4; i++) {
			if (!(rank_mask & (1 << i)))
				continue;

			select_per_cs_training_index(ch, i);
			/*
			 * disable PI_WDQLVL_VREF_EN before wdq leveling?
			 * PI_181 PI_WDQLVL_VREF_EN:RW:8:1
			 */
			mmio_clrbits_32(PI_REG(ch, 181), 0x1 << 8);
			/* PI_124 PI_WDQLVL_EN:RW:16:2 */
			mmio_clrsetbits_32(PI_REG(ch, 124), 0x3 << 16,
					   0x2 << 16);
			/* PI_121 PI_WDQLVL_REQ:WR:8:1,PI_WDQLVL_CS:RW:16:2 */
			mmio_clrsetbits_32(PI_REG(ch, 121),
					   (0x1 << 8) | (0x3 << 16),
					   (0x1 << 8) | (i << 16));
			/* poll until wdq leveling is done or errors out */
			while (1) {
				/* PI_174 PI_INT_STATUS:RD:8:18 */
				tmp = mmio_read_32(PI_REG(ch, 174)) >> 8;
				if ((((tmp >> 12) & 0x1) == 0x1) &&
				    (((tmp >> 13) & 0x1) == 0x1) &&
				    (((tmp >> 6) & 0x1) == 0x0))
					break;
				else if (((tmp >> 6) & 0x1) == 0x1)
					return -1;
			}
			/* clear interrupt,PI_175 PI_INT_ACK:WR:0:17 */
			mmio_write_32(PI_REG(ch, 175), 0x00003f7c);
		}
		mmio_clrbits_32(PI_REG(ch, 124), 0x3 << 16);
	}

	/* PHY_927 PHY_PAD_DQS_DRIVE RPULL offset_22 */
	mmio_clrbits_32(PHY_REG(ch, 927), (1 << 22));

	return 0;
}
404
405static __sramfunc void set_ddrconfig(struct rk3399_sdram_params *sdram_params,
406 unsigned char channel, uint32_t ddrconfig)
407{
408 /* only need to set ddrconfig */
409 struct rk3399_sdram_channel *ch = &sdram_params->ch[channel];
410 unsigned int cs0_cap = 0;
411 unsigned int cs1_cap = 0;
412
413 cs0_cap = (1 << (ch->cs0_row + ch->col + ch->bk + ch->bw - 20));
414 if (ch->rank > 1)
415 cs1_cap = cs0_cap >> (ch->cs0_row - ch->cs1_row);
416 if (ch->row_3_4) {
417 cs0_cap = cs0_cap * 3 / 4;
418 cs1_cap = cs1_cap * 3 / 4;
419 }
420
421 mmio_write_32(MSCH_BASE(channel) + MSCH_DEVICECONF,
422 ddrconfig | (ddrconfig << 6));
423 mmio_write_32(MSCH_BASE(channel) + MSCH_DEVICESIZE,
424 ((cs0_cap / 32) & 0xff) | (((cs1_cap / 32) & 0xff) << 8));
425}
426
/*
 * Apply the saved NOC/MSCH timing registers for every populated
 * channel, program the DDR address stride, and set the reboot-hold
 * controls so GPIO state survives a reset.
 */
static __sramfunc void dram_all_config(struct rk3399_sdram_params *sdram_params)
{
	unsigned int i;

	for (i = 0; i < 2; i++) {
		struct rk3399_sdram_channel *info = &sdram_params->ch[i];
		struct rk3399_msch_timings *noc = &info->noc_timings;

		/* col == 0 is used elsewhere in this file as "channel unpopulated" */
		if (sdram_params->ch[i].col == 0)
			continue;

		mmio_write_32(MSCH_BASE(i) + MSCH_DDRTIMINGA0,
			      noc->ddrtiminga0.d32);
		mmio_write_32(MSCH_BASE(i) + MSCH_DDRTIMINGB0,
			      noc->ddrtimingb0.d32);
		mmio_write_32(MSCH_BASE(i) + MSCH_DDRTIMINGC0,
			      noc->ddrtimingc0.d32);
		mmio_write_32(MSCH_BASE(i) + MSCH_DEVTODEV0,
			      noc->devtodev0.d32);
		mmio_write_32(MSCH_BASE(i) + MSCH_DDRMODE, noc->ddrmode.d32);

		/* rank 1 memory clock disable (dfi_dram_clk_disable = 1) */
		if (sdram_params->ch[i].rank == 1)
			mmio_setbits_32(CTL_REG(i, 276), 1 << 17);
	}

	DDR_STRIDE(sdram_params->stride);

	/* reboot hold register set */
	mmio_write_32(PMUCRU_BASE + CRU_PMU_RSTHOLD_CON(1),
		      CRU_PMU_SGRF_RST_RLS |
		      PRESET_GPIO0_HOLD(1) |
		      PRESET_GPIO1_HOLD(1));
	mmio_clrsetbits_32(CRU_BASE + CRU_GLB_RST_CON, 0x3, 0x3);
}
462
/*
 * Restore the saved DDR controller (CTL), PI, and PHY register images
 * for channel @ch, start the PI and controller, and wait for the PHY
 * PLL to lock before writing back the remaining PHY banks.
 *
 * The images come from sdram_params, which dmc_save() filled before
 * suspend.
 */
static __sramfunc void pctl_cfg(uint32_t ch,
				struct rk3399_sdram_params *sdram_params)
{
	const uint32_t *params_ctl = sdram_params->pctl_regs.denali_ctl;
	const uint32_t *params_pi = sdram_params->pi_regs.denali_pi;
	const struct rk3399_ddr_publ_regs *phy_regs = &sdram_params->phy_regs;
	uint32_t tmp, tmp1, tmp2, i;

	/*
	 * Workaround controller bug:
	 * Do not program DRAM_CLASS until NO_PHY_IND_TRAIN_INT is programmed
	 */
	sram_regcpy(CTL_REG(ch, 1), (uintptr_t)&params_ctl[1],
		    CTL_REG_NUM - 1);
	mmio_write_32(CTL_REG(ch, 0), params_ctl[0]);
	sram_regcpy(PI_REG(ch, 0), (uintptr_t)&params_pi[0],
		    PI_REG_NUM);

	/*
	 * Restore PHY_910..912 early; phy896[] holds the saved PHY_896+
	 * bank (see dmc_save()), so index 910-896 addresses PHY_910.
	 */
	sram_regcpy(PHY_REG(ch, 910), (uintptr_t)&phy_regs->phy896[910 - 896],
		    3);

	mmio_clrsetbits_32(CTL_REG(ch, 68), PWRUP_SREFRESH_EXIT,
			   PWRUP_SREFRESH_EXIT);

	/* PHY_DLL_RST_EN */
	mmio_clrsetbits_32(PHY_REG(ch, 957), 0x3 << 24, 1 << 24);
	dmbst();

	mmio_setbits_32(PI_REG(ch, 0), START);
	mmio_setbits_32(CTL_REG(ch, 0), START);

	/* wait lock */
	while (1) {
		tmp = mmio_read_32(PHY_REG(ch, 920));
		tmp1 = mmio_read_32(PHY_REG(ch, 921));
		tmp2 = mmio_read_32(PHY_REG(ch, 922));
		if ((((tmp >> 16) & 0x1) == 0x1) &&
		    (((tmp1 >> 16) & 0x1) == 0x1) &&
		    (((tmp1 >> 0) & 0x1) == 0x1) &&
		    (((tmp2 >> 0) & 0x1) == 0x1))
			break;
		/* if PLL bypass,don't need wait lock */
		if (mmio_read_32(PHY_REG(ch, 911)) & 0x1)
			break;
	}

	/* write back the PHY banks captured by dmc_save() */
	sram_regcpy(PHY_REG(ch, 896), (uintptr_t)&phy_regs->phy896[0], 63);

	for (i = 0; i < 4; i++)
		sram_regcpy(PHY_REG(ch, 128 * i),
			    (uintptr_t)&phy_regs->phy0[i][0], 91);

	for (i = 0; i < 3; i++)
		sram_regcpy(PHY_REG(ch, 512 + 128 * i),
			    (uintptr_t)&phy_regs->phy512[i][0], 38);
}
519
/*
 * Request a DDR frequency-index switch through the CIC and retrain.
 * The target index is the current one from DENALI_CTL_111 plus one,
 * modulo 2.  Returns 0 on success, -1 if data training fails on any
 * channel.
 */
static __sramfunc int dram_switch_to_next_index(
		struct rk3399_sdram_params *sdram_params)
{
	uint32_t ch, ch_count;
	uint32_t fn = ((mmio_read_32(CTL_REG(0, 111)) >> 16) + 1) & 0x1;

	mmio_write_32(CIC_BASE + CIC_CTRL0,
		      (((0x3 << 4) | (1 << 2) | 1) << 16) |
		      (fn << 4) | (1 << 2) | 1);
	/* wait for the CIC to accept the request (status bit 2) */
	while (!(mmio_read_32(CIC_BASE + CIC_STATUS0) & (1 << 2)))
		;

	mmio_write_32(CIC_BASE + CIC_CTRL0, 0x20002);
	/* wait for the switch to complete (status bit 0) */
	while (!(mmio_read_32(CIC_BASE + CIC_STATUS0) & (1 << 0)))
		;

	ch_count = sdram_params->num_channels;

	/* LPDDR4 f2 cann't do training, all training will fail */
	for (ch = 0; ch < ch_count; ch++) {
		/* NOTE(review): bits 9:8 of PHY_896 appear to select the
		 * frequency set; matches the clearing done in dmc_save() */
		mmio_clrsetbits_32(PHY_REG(ch, 896), (0x3 << 8) | 1,
				   fn << 8);

		/* data_training failed */
		if (data_training(ch, sdram_params, PI_FULL_TRAINING))
			return -1;
	}

	return 0;
}
550
551/*
552 * Needs to be done for both channels at once in case of a shared reset signal
553 * between channels.
554 */
555static __sramfunc int pctl_start(uint32_t channel_mask,
556 struct rk3399_sdram_params *sdram_params)
557{
558 uint32_t count;
Derek Basehore951752d2017-01-31 00:20:19 -0800559 uint32_t byte;
Caesar Wang2831bc32016-10-27 01:13:16 +0800560
561 mmio_setbits_32(CTL_REG(0, 68), PWRUP_SREFRESH_EXIT);
562 mmio_setbits_32(CTL_REG(1, 68), PWRUP_SREFRESH_EXIT);
563
564 /* need de-access IO retention before controller START */
565 if (channel_mask & (1 << 0))
566 mmio_setbits_32(PMU_BASE + PMU_PWRMODE_CON, (1 << 19));
567 if (channel_mask & (1 << 1))
568 mmio_setbits_32(PMU_BASE + PMU_PWRMODE_CON, (1 << 23));
569
570 /* PHY_DLL_RST_EN */
571 if (channel_mask & (1 << 0))
572 mmio_clrsetbits_32(PHY_REG(0, 957), 0x3 << 24,
573 0x2 << 24);
574 if (channel_mask & (1 << 1))
575 mmio_clrsetbits_32(PHY_REG(1, 957), 0x3 << 24,
576 0x2 << 24);
577
578 /* check ERROR bit */
579 if (channel_mask & (1 << 0)) {
580 count = 0;
581 while (!(mmio_read_32(CTL_REG(0, 203)) & (1 << 3))) {
582 /* CKE is low, loop 10ms */
583 if (count > 100)
584 return -1;
585
586 sram_udelay(100);
587 count++;
588 }
589
590 mmio_clrbits_32(CTL_REG(0, 68), PWRUP_SREFRESH_EXIT);
Derek Basehore951752d2017-01-31 00:20:19 -0800591
592 /* Restore the PHY_RX_CAL_DQS value */
593 for (byte = 0; byte < 4; byte++)
594 mmio_clrsetbits_32(PHY_REG(0, 57 + 128 * byte),
595 0xfff << 16,
596 sdram_params->rx_cal_dqs[0][byte]);
Caesar Wang2831bc32016-10-27 01:13:16 +0800597 }
598 if (channel_mask & (1 << 1)) {
599 count = 0;
600 while (!(mmio_read_32(CTL_REG(1, 203)) & (1 << 3))) {
601 /* CKE is low, loop 10ms */
602 if (count > 100)
603 return -1;
604
605 sram_udelay(100);
606 count++;
607 }
608
609 mmio_clrbits_32(CTL_REG(1, 68), PWRUP_SREFRESH_EXIT);
Derek Basehore951752d2017-01-31 00:20:19 -0800610
611 /* Restore the PHY_RX_CAL_DQS value */
612 for (byte = 0; byte < 4; byte++)
613 mmio_clrsetbits_32(PHY_REG(1, 57 + 128 * byte),
614 0xfff << 16,
615 sdram_params->rx_cal_dqs[1][byte]);
Caesar Wang2831bc32016-10-27 01:13:16 +0800616 }
617
618 return 0;
619}
620
/*
 * Capture the DRAM controller/PI/PHY state of channel 0 (plus the
 * per-channel RX_CAL_DQS values) into sdram_config before suspend, so
 * dmc_restore() can replay it from SRAM on resume.  Also records the
 * current DDR frequency decoded from the DPLL dividers, and pre-edits
 * the saved images (START bits masked, PHY_DLL_RST_EN set) so they can
 * be written back directly.
 */
void dmc_save(void)
{
	struct rk3399_sdram_params *sdram_params = &sdram_config;
	struct rk3399_ddr_publ_regs *phy_regs;
	uint32_t *params_ctl;
	uint32_t *params_pi;
	uint32_t refdiv, postdiv2, postdiv1, fbdiv;
	uint32_t tmp, ch, byte, i;

	phy_regs = &sdram_params->phy_regs;
	params_ctl = sdram_params->pctl_regs.denali_ctl;
	params_pi = sdram_params->pi_regs.denali_pi;

	/* reconstruct the DDR frequency from the DPLL divider fields */
	fbdiv = mmio_read_32(CRU_BASE + CRU_PLL_CON(DPLL_ID, 0)) & 0xfff;
	tmp = mmio_read_32(CRU_BASE + CRU_PLL_CON(DPLL_ID, 1));
	postdiv2 = POSTDIV2_DEC(tmp);
	postdiv1 = POSTDIV1_DEC(tmp);
	refdiv = REFDIV_DEC(tmp);

	sdram_params->ddr_freq = ((fbdiv * 24) /
				  (refdiv * postdiv1 * postdiv2)) * MHz;

	INFO("sdram_params->ddr_freq = %d\n", sdram_params->ddr_freq);
	/* ODT is considered enabled if PHY_5 bits 18:16 are non-zero */
	sdram_params->odt = (((mmio_read_32(PHY_REG(0, 5)) >> 16) &
			      0x7) != 0) ? 1 : 0;

	/* copy the registers CTL PI and PHY */
	sram_regcpy((uintptr_t)&params_ctl[0], CTL_REG(0, 0), CTL_REG_NUM);

	/* mask DENALI_CTL_00_DATA.START, only copy here, will trigger later */
	params_ctl[0] &= ~(0x1 << 0);

	sram_regcpy((uintptr_t)&params_pi[0], PI_REG(0, 0),
		    PI_REG_NUM);

	/* mask DENALI_PI_00_DATA.START, only copy here, will trigger later*/
	params_pi[0] &= ~(0x1 << 0);

	/* save the four 91-register lane banks (PHY_0/128/256/384) */
	for (i = 0; i < 4; i++)
		sram_regcpy((uintptr_t)&phy_regs->phy0[i][0],
			    PHY_REG(0, 128 * i), 91);

	/* save the three 38-register address banks (PHY_512/640/768) */
	for (i = 0; i < 3; i++)
		sram_regcpy((uintptr_t)&phy_regs->phy512[i][0],
			    PHY_REG(0, 512 + 128 * i), 38);

	/* save the control bank (PHY_896..958) */
	sram_regcpy((uintptr_t)&phy_regs->phy896[0], PHY_REG(0, 896), 63);

	/* save PHY_RX_CAL_DQS (PHY_57 + 128*byte, bits 27:16) per channel */
	for (ch = 0; ch < sdram_params->num_channels; ch++) {
		for (byte = 0; byte < 4; byte++)
			sdram_params->rx_cal_dqs[ch][byte] = (0xfff << 16) &
				mmio_read_32(PHY_REG(ch, 57 + byte * 128));
	}

	/* set DENALI_PHY_957_DATA.PHY_DLL_RST_EN = 0x1 */
	phy_regs->phy896[957 - 896] &= ~(0x3 << 24);
	phy_regs->phy896[957 - 896] |= 1 << 24;
	/* pre-set bit 0 and clear bits 9:8 of the saved PHY_896 image */
	phy_regs->phy896[0] |= 1;
	phy_regs->phy896[0] &= ~(0x3 << 8);
}
681
/*
 * Re-initialize the DRAM controllers/PHYs on resume from the state
 * captured by dmc_save(): reset each controller/PHY, replay the saved
 * register images, bring the channels out of self-refresh, retrain,
 * then restore the NOC timings and switch frequency index.
 *
 * NOTE(review): on persistent pctl_start()/training failure this loops
 * forever via the retry label — there is no retry limit.
 */
__sramfunc void dmc_restore(void)
{
	struct rk3399_sdram_params *sdram_params = &sdram_config;
	uint32_t channel_mask = 0;
	uint32_t channel;

	configure_sgrf();

retry:
	for (channel = 0; channel < sdram_params->num_channels; channel++) {
		phy_pctrl_reset(channel);
		pctl_cfg(channel, sdram_params);
	}

	/* a non-zero col marks the channel as populated */
	for (channel = 0; channel < 2; channel++) {
		if (sdram_params->ch[channel].col)
			channel_mask |= 1 << channel;
	}

	if (pctl_start(channel_mask, sdram_params) < 0)
		goto retry;

	for (channel = 0; channel < sdram_params->num_channels; channel++) {
		/* LPDDR2/LPDDR3 need to wait DAI complete, max 10us */
		if (sdram_params->dramtype == LPDDR3)
			sram_udelay(10);

		/* If traning fail, retry to do it again. */
		if (data_training(channel, sdram_params, PI_FULL_TRAINING))
			goto retry;

		set_ddrconfig(sdram_params, channel,
			      sdram_params->ch[channel].ddrconfig);
	}

	dram_all_config(sdram_params);

	/* Switch to index 1 and prepare for DDR frequency switch. */
	dram_switch_to_next_index(sdram_params);
}
Caesar Wang2831bc32016-10-27 01:13:16 +0800721}