/*
 * Copyright (C) 2018-2019, STMicroelectronics - All Rights Reserved
 *
 * SPDX-License-Identifier: GPL-2.0+ OR BSD-3-Clause
 */

#include <errno.h>
#include <stddef.h>

#include <platform_def.h>

#include <arch.h>
#include <arch_helpers.h>
#include <common/debug.h>
#include <drivers/delay_timer.h>
#include <drivers/st/stm32mp_pmic.h>
#include <drivers/st/stm32mp1_ddr.h>
#include <drivers/st/stm32mp1_ddr_regs.h>
#include <drivers/st/stm32mp1_pwr.h>
#include <drivers/st/stm32mp1_ram.h>
#include <lib/mmio.h>
#include <plat/common/platform.h>

struct reg_desc {
	const char *name;
	uint16_t offset;	/* Offset from base address */
	uint8_t par_offset;	/* Offset in parameter structure */
};

#define INVALID_OFFSET	0xFFU

#define TIMEOUT_US_1S	1000000U

#define DDRCTL_REG(x, y) \
	{ \
		.name = #x, \
		.offset = offsetof(struct stm32mp1_ddrctl, x), \
		.par_offset = offsetof(struct y, x) \
	}

#define DDRPHY_REG(x, y) \
	{ \
		.name = #x, \
		.offset = offsetof(struct stm32mp1_ddrphy, x), \
		.par_offset = offsetof(struct y, x) \
	}

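/*
 * Each table below pairs a controller/PHY register with the field of the
 * same name in the matching parameter structure. For instance,
 * DDRCTL_REG_REG(mstr) expands to:
 *
 *	{
 *		.name = "mstr",
 *		.offset = offsetof(struct stm32mp1_ddrctl, mstr),
 *		.par_offset = offsetof(struct stm32mp1_ddrctrl_reg, mstr)
 *	}
 *
 * so set_reg() can copy every parameter of a group to its register by
 * offset, without naming the registers in the init code itself.
 */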
#define DDRCTL_REG_REG(x)	DDRCTL_REG(x, stm32mp1_ddrctrl_reg)
static const struct reg_desc ddr_reg[] = {
	DDRCTL_REG_REG(mstr),
	DDRCTL_REG_REG(mrctrl0),
	DDRCTL_REG_REG(mrctrl1),
	DDRCTL_REG_REG(derateen),
	DDRCTL_REG_REG(derateint),
	DDRCTL_REG_REG(pwrctl),
	DDRCTL_REG_REG(pwrtmg),
	DDRCTL_REG_REG(hwlpctl),
	DDRCTL_REG_REG(rfshctl0),
	DDRCTL_REG_REG(rfshctl3),
	DDRCTL_REG_REG(crcparctl0),
	DDRCTL_REG_REG(zqctl0),
	DDRCTL_REG_REG(dfitmg0),
	DDRCTL_REG_REG(dfitmg1),
	DDRCTL_REG_REG(dfilpcfg0),
	DDRCTL_REG_REG(dfiupd0),
	DDRCTL_REG_REG(dfiupd1),
	DDRCTL_REG_REG(dfiupd2),
	DDRCTL_REG_REG(dfiphymstr),
	DDRCTL_REG_REG(odtmap),
	DDRCTL_REG_REG(dbg0),
	DDRCTL_REG_REG(dbg1),
	DDRCTL_REG_REG(dbgcmd),
	DDRCTL_REG_REG(poisoncfg),
	DDRCTL_REG_REG(pccfg),
};

#define DDRCTL_REG_TIMING(x)	DDRCTL_REG(x, stm32mp1_ddrctrl_timing)
static const struct reg_desc ddr_timing[] = {
	DDRCTL_REG_TIMING(rfshtmg),
	DDRCTL_REG_TIMING(dramtmg0),
	DDRCTL_REG_TIMING(dramtmg1),
	DDRCTL_REG_TIMING(dramtmg2),
	DDRCTL_REG_TIMING(dramtmg3),
	DDRCTL_REG_TIMING(dramtmg4),
	DDRCTL_REG_TIMING(dramtmg5),
	DDRCTL_REG_TIMING(dramtmg6),
	DDRCTL_REG_TIMING(dramtmg7),
	DDRCTL_REG_TIMING(dramtmg8),
	DDRCTL_REG_TIMING(dramtmg14),
	DDRCTL_REG_TIMING(odtcfg),
};

#define DDRCTL_REG_MAP(x)	DDRCTL_REG(x, stm32mp1_ddrctrl_map)
static const struct reg_desc ddr_map[] = {
	DDRCTL_REG_MAP(addrmap1),
	DDRCTL_REG_MAP(addrmap2),
	DDRCTL_REG_MAP(addrmap3),
	DDRCTL_REG_MAP(addrmap4),
	DDRCTL_REG_MAP(addrmap5),
	DDRCTL_REG_MAP(addrmap6),
	DDRCTL_REG_MAP(addrmap9),
	DDRCTL_REG_MAP(addrmap10),
	DDRCTL_REG_MAP(addrmap11),
};

#define DDRCTL_REG_PERF(x)	DDRCTL_REG(x, stm32mp1_ddrctrl_perf)
static const struct reg_desc ddr_perf[] = {
	DDRCTL_REG_PERF(sched),
	DDRCTL_REG_PERF(sched1),
	DDRCTL_REG_PERF(perfhpr1),
	DDRCTL_REG_PERF(perflpr1),
	DDRCTL_REG_PERF(perfwr1),
	DDRCTL_REG_PERF(pcfgr_0),
	DDRCTL_REG_PERF(pcfgw_0),
	DDRCTL_REG_PERF(pcfgqos0_0),
	DDRCTL_REG_PERF(pcfgqos1_0),
	DDRCTL_REG_PERF(pcfgwqos0_0),
	DDRCTL_REG_PERF(pcfgwqos1_0),
	DDRCTL_REG_PERF(pcfgr_1),
	DDRCTL_REG_PERF(pcfgw_1),
	DDRCTL_REG_PERF(pcfgqos0_1),
	DDRCTL_REG_PERF(pcfgqos1_1),
	DDRCTL_REG_PERF(pcfgwqos0_1),
	DDRCTL_REG_PERF(pcfgwqos1_1),
};

#define DDRPHY_REG_REG(x)	DDRPHY_REG(x, stm32mp1_ddrphy_reg)
static const struct reg_desc ddrphy_reg[] = {
	DDRPHY_REG_REG(pgcr),
	DDRPHY_REG_REG(aciocr),
	DDRPHY_REG_REG(dxccr),
	DDRPHY_REG_REG(dsgcr),
	DDRPHY_REG_REG(dcr),
	DDRPHY_REG_REG(odtcr),
	DDRPHY_REG_REG(zq0cr1),
	DDRPHY_REG_REG(dx0gcr),
	DDRPHY_REG_REG(dx1gcr),
	DDRPHY_REG_REG(dx2gcr),
	DDRPHY_REG_REG(dx3gcr),
};

#define DDRPHY_REG_TIMING(x)	DDRPHY_REG(x, stm32mp1_ddrphy_timing)
static const struct reg_desc ddrphy_timing[] = {
	DDRPHY_REG_TIMING(ptr0),
	DDRPHY_REG_TIMING(ptr1),
	DDRPHY_REG_TIMING(ptr2),
	DDRPHY_REG_TIMING(dtpr0),
	DDRPHY_REG_TIMING(dtpr1),
	DDRPHY_REG_TIMING(dtpr2),
	DDRPHY_REG_TIMING(mr0),
	DDRPHY_REG_TIMING(mr1),
	DDRPHY_REG_TIMING(mr2),
	DDRPHY_REG_TIMING(mr3),
};

#define DDRPHY_REG_CAL(x)	DDRPHY_REG(x, stm32mp1_ddrphy_cal)
static const struct reg_desc ddrphy_cal[] = {
	DDRPHY_REG_CAL(dx0dllcr),
	DDRPHY_REG_CAL(dx0dqtr),
	DDRPHY_REG_CAL(dx0dqstr),
	DDRPHY_REG_CAL(dx1dllcr),
	DDRPHY_REG_CAL(dx1dqtr),
	DDRPHY_REG_CAL(dx1dqstr),
	DDRPHY_REG_CAL(dx2dllcr),
	DDRPHY_REG_CAL(dx2dqtr),
	DDRPHY_REG_CAL(dx2dqstr),
	DDRPHY_REG_CAL(dx3dllcr),
	DDRPHY_REG_CAL(dx3dqtr),
	DDRPHY_REG_CAL(dx3dqstr),
};

#define DDR_REG_DYN(x) \
	{ \
		.name = #x, \
		.offset = offsetof(struct stm32mp1_ddrctl, x), \
		.par_offset = INVALID_OFFSET \
	}

static const struct reg_desc ddr_dyn[] = {
	DDR_REG_DYN(stat),
	DDR_REG_DYN(init0),
	DDR_REG_DYN(dfimisc),
	DDR_REG_DYN(dfistat),
	DDR_REG_DYN(swctl),
	DDR_REG_DYN(swstat),
	DDR_REG_DYN(pctrl_0),
	DDR_REG_DYN(pctrl_1),
};

#define DDRPHY_REG_DYN(x) \
	{ \
		.name = #x, \
		.offset = offsetof(struct stm32mp1_ddrphy, x), \
		.par_offset = INVALID_OFFSET \
	}

static const struct reg_desc ddrphy_dyn[] = {
	DDRPHY_REG_DYN(pir),
	DDRPHY_REG_DYN(pgsr),
};

enum reg_type {
	REG_REG,
	REG_TIMING,
	REG_PERF,
	REG_MAP,
	REGPHY_REG,
	REGPHY_TIMING,
	REGPHY_CAL,
	/*
	 * Dynamic registers: managed by the driver or left unchanged;
	 * they can be dumped in interactive mode.
	 */
	REG_DYN,
	REGPHY_DYN,
	REG_TYPE_NB
};

enum base_type {
	DDR_BASE,
	DDRPHY_BASE,
	NONE_BASE
};

struct ddr_reg_info {
	const char *name;
	const struct reg_desc *desc;
	uint8_t size;
	enum base_type base;
};

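/*
 * Dispatch table: maps each reg_type to its descriptor array, the number of
 * entries in that array, and the base (controller or PHY) to apply them to.
 */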
static const struct ddr_reg_info ddr_registers[REG_TYPE_NB] = {
	[REG_REG] = {
		.name = "static",
		.desc = ddr_reg,
		.size = ARRAY_SIZE(ddr_reg),
		.base = DDR_BASE
	},
	[REG_TIMING] = {
		.name = "timing",
		.desc = ddr_timing,
		.size = ARRAY_SIZE(ddr_timing),
		.base = DDR_BASE
	},
	[REG_PERF] = {
		.name = "perf",
		.desc = ddr_perf,
		.size = ARRAY_SIZE(ddr_perf),
		.base = DDR_BASE
	},
	[REG_MAP] = {
		.name = "map",
		.desc = ddr_map,
		.size = ARRAY_SIZE(ddr_map),
		.base = DDR_BASE
	},
	[REGPHY_REG] = {
		.name = "static",
		.desc = ddrphy_reg,
		.size = ARRAY_SIZE(ddrphy_reg),
		.base = DDRPHY_BASE
	},
	[REGPHY_TIMING] = {
		.name = "timing",
		.desc = ddrphy_timing,
		.size = ARRAY_SIZE(ddrphy_timing),
		.base = DDRPHY_BASE
	},
	[REGPHY_CAL] = {
		.name = "cal",
		.desc = ddrphy_cal,
		.size = ARRAY_SIZE(ddrphy_cal),
		.base = DDRPHY_BASE
	},
	[REG_DYN] = {
		.name = "dyn",
		.desc = ddr_dyn,
		.size = ARRAY_SIZE(ddr_dyn),
		.base = DDR_BASE
	},
	[REGPHY_DYN] = {
		.name = "dyn",
		.desc = ddrphy_dyn,
		.size = ARRAY_SIZE(ddrphy_dyn),
		.base = DDRPHY_BASE
	},
};

static uintptr_t get_base_addr(const struct ddr_info *priv,
			       enum base_type base)
{
	if (base == DDRPHY_BASE) {
		return (uintptr_t)priv->phy;
	} else {
		return (uintptr_t)priv->ctl;
	}
}

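/*
 * Program a whole register group from its parameter structure: for each
 * descriptor entry, read the value at par_offset in *param and write it to
 * the register at offset from the group base. Only groups that carry
 * parameters may be passed here; dynamic groups (par_offset ==
 * INVALID_OFFSET) cause a panic.
 */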
static void set_reg(const struct ddr_info *priv,
		    enum reg_type type,
		    const void *param)
{
	unsigned int i;
	unsigned int value;
	enum base_type base = ddr_registers[type].base;
	uintptr_t base_addr = get_base_addr(priv, base);
	const struct reg_desc *desc = ddr_registers[type].desc;

	VERBOSE("init %s\n", ddr_registers[type].name);
	for (i = 0; i < ddr_registers[type].size; i++) {
		uintptr_t ptr = base_addr + desc[i].offset;

		if (desc[i].par_offset == INVALID_OFFSET) {
			ERROR("invalid parameter offset for %s\n",
			      desc[i].name);
			panic();
		} else {
			value = *((uint32_t *)((uintptr_t)param +
					       desc[i].par_offset));
			mmio_write_32(ptr, value);
		}
	}
}

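/*
 * Poll PGSR.IDONE until the PHY init or training sequence completes.
 * Training errors reported in PGSR end the polling early; a missing IDONE
 * within TIMEOUT_US_1S ends in panic().
 */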
static void stm32mp1_ddrphy_idone_wait(struct stm32mp1_ddrphy *phy)
{
	uint32_t pgsr;
	int error = 0;
	uint64_t timeout = timeout_init_us(TIMEOUT_US_1S);

	do {
		pgsr = mmio_read_32((uintptr_t)&phy->pgsr);

		VERBOSE(" > [0x%lx] pgsr = 0x%x &\n",
			(uintptr_t)&phy->pgsr, pgsr);

		if (timeout_elapsed(timeout)) {
			panic();
		}

		if ((pgsr & DDRPHYC_PGSR_DTERR) != 0U) {
			VERBOSE("DQS Gate Training Error\n");
			error++;
		}

		if ((pgsr & DDRPHYC_PGSR_DTIERR) != 0U) {
			VERBOSE("DQS Gate Training Intermittent Error\n");
			error++;
		}

		if ((pgsr & DDRPHYC_PGSR_DFTERR) != 0U) {
			VERBOSE("DQS Drift Error\n");
			error++;
		}

		if ((pgsr & DDRPHYC_PGSR_RVERR) != 0U) {
			VERBOSE("Read Valid Training Error\n");
			error++;
		}

		if ((pgsr & DDRPHYC_PGSR_RVEIRR) != 0U) {
			VERBOSE("Read Valid Training Intermittent Error\n");
			error++;
		}
	} while (((pgsr & DDRPHYC_PGSR_IDONE) == 0U) && (error == 0));
	VERBOSE("\n[0x%lx] pgsr = 0x%x\n",
		(uintptr_t)&phy->pgsr, pgsr);
}

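/*
 * Launch a PHY sequence: write the requested step mask together with
 * PIR.INIT, wait through the configuration-clock synchronization delay,
 * then poll for completion.
 */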
static void stm32mp1_ddrphy_init(struct stm32mp1_ddrphy *phy, uint32_t pir)
{
	uint32_t pir_init = pir | DDRPHYC_PIR_INIT;

	mmio_write_32((uintptr_t)&phy->pir, pir_init);
	VERBOSE("[0x%lx] pir = 0x%x -> 0x%x\n",
		(uintptr_t)&phy->pir, pir_init,
		mmio_read_32((uintptr_t)&phy->pir));

	/* Need to wait for 10 configuration clock cycles before polling */
	udelay(10);

	/* Wait for DRAM initialization and gate training evaluation to complete */
	stm32mp1_ddrphy_idone_wait(phy);
}

/* Start quasi-dynamic register update */
static void stm32mp1_start_sw_done(struct stm32mp1_ddrctl *ctl)
{
	mmio_clrbits_32((uintptr_t)&ctl->swctl, DDRCTRL_SWCTL_SW_DONE);
	VERBOSE("[0x%lx] swctl = 0x%x\n",
		(uintptr_t)&ctl->swctl, mmio_read_32((uintptr_t)&ctl->swctl));
}

/* Wait for a quasi-dynamic register update to be acknowledged */
static void stm32mp1_wait_sw_done_ack(struct stm32mp1_ddrctl *ctl)
{
	uint64_t timeout;
	uint32_t swstat;

	mmio_setbits_32((uintptr_t)&ctl->swctl, DDRCTRL_SWCTL_SW_DONE);
	VERBOSE("[0x%lx] swctl = 0x%x\n",
		(uintptr_t)&ctl->swctl, mmio_read_32((uintptr_t)&ctl->swctl));

	timeout = timeout_init_us(TIMEOUT_US_1S);
	do {
		swstat = mmio_read_32((uintptr_t)&ctl->swstat);
		VERBOSE("[0x%lx] swstat = 0x%x ",
			(uintptr_t)&ctl->swstat, swstat);
		if (timeout_elapsed(timeout)) {
			panic();
		}
	} while ((swstat & DDRCTRL_SWSTAT_SW_DONE_ACK) == 0U);

	VERBOSE("[0x%lx] swstat = 0x%x\n",
		(uintptr_t)&ctl->swstat, swstat);
}

/* Wait for the controller to reach the requested operating mode */
static void stm32mp1_wait_operating_mode(struct ddr_info *priv, uint32_t mode)
{
	uint64_t timeout;
	uint32_t stat;
	int break_loop = 0;

	timeout = timeout_init_us(TIMEOUT_US_1S);
	for ( ; ; ) {
		uint32_t operating_mode;
		uint32_t selref_type;

		stat = mmio_read_32((uintptr_t)&priv->ctl->stat);
		operating_mode = stat & DDRCTRL_STAT_OPERATING_MODE_MASK;
		selref_type = stat & DDRCTRL_STAT_SELFREF_TYPE_MASK;
		VERBOSE("[0x%lx] stat = 0x%x\n",
			(uintptr_t)&priv->ctl->stat, stat);
		if (timeout_elapsed(timeout)) {
			panic();
		}

		if (mode == DDRCTRL_STAT_OPERATING_MODE_SR) {
			/*
			 * Self-refresh due to software
			 * => also check STAT.selfref_type.
			 */
			if ((operating_mode ==
			     DDRCTRL_STAT_OPERATING_MODE_SR) &&
			    (selref_type == DDRCTRL_STAT_SELFREF_TYPE_SR)) {
				break_loop = 1;
			}
		} else if (operating_mode == mode) {
			break_loop = 1;
		} else if ((mode == DDRCTRL_STAT_OPERATING_MODE_NORMAL) &&
			   (operating_mode == DDRCTRL_STAT_OPERATING_MODE_SR) &&
			   (selref_type == DDRCTRL_STAT_SELFREF_TYPE_ASR)) {
			/* Normal mode: also accept automatic self-refresh */
			break_loop = 1;
		}

		if (break_loop == 1) {
			break;
		}
	}

	VERBOSE("[0x%lx] stat = 0x%x\n",
		(uintptr_t)&priv->ctl->stat, stat);
}

/* Mode Register Writes (MRW or MRS) */
static void stm32mp1_mode_register_write(struct ddr_info *priv, uint8_t addr,
					 uint32_t data)
{
	uint32_t mrctrl0;

	VERBOSE("MRS: %d = %x\n", addr, data);

	/*
	 * 1. Poll MRSTAT.mr_wr_busy until it is '0'.
	 *    This checks that there is no outstanding MR transaction.
	 *    No write should be performed to MRCTRL0 and MRCTRL1
	 *    if MRSTAT.mr_wr_busy = 1.
	 */
	while ((mmio_read_32((uintptr_t)&priv->ctl->mrstat) &
		DDRCTRL_MRSTAT_MR_WR_BUSY) != 0U) {
		;
	}

	/*
	 * 2. Write the MRCTRL0.mr_type, MRCTRL0.mr_addr, MRCTRL0.mr_rank
	 *    and (for MRWs) MRCTRL1.mr_data to define the MR transaction.
	 */
	mrctrl0 = DDRCTRL_MRCTRL0_MR_TYPE_WRITE |
		  DDRCTRL_MRCTRL0_MR_RANK_ALL |
		  (((uint32_t)addr << DDRCTRL_MRCTRL0_MR_ADDR_SHIFT) &
		   DDRCTRL_MRCTRL0_MR_ADDR_MASK);
	mmio_write_32((uintptr_t)&priv->ctl->mrctrl0, mrctrl0);
	VERBOSE("[0x%lx] mrctrl0 = 0x%x (0x%x)\n",
		(uintptr_t)&priv->ctl->mrctrl0,
		mmio_read_32((uintptr_t)&priv->ctl->mrctrl0), mrctrl0);
	mmio_write_32((uintptr_t)&priv->ctl->mrctrl1, data);
	VERBOSE("[0x%lx] mrctrl1 = 0x%x\n",
		(uintptr_t)&priv->ctl->mrctrl1,
		mmio_read_32((uintptr_t)&priv->ctl->mrctrl1));

	/*
	 * 3. In a separate APB transaction, write the MRCTRL0.mr_wr to 1.
	 *    This bit is self-clearing, and triggers the MR transaction.
	 *    The uMCTL2 then asserts the MRSTAT.mr_wr_busy while it performs
	 *    the MR transaction to SDRAM, and no further access can be
	 *    initiated until it is deasserted.
	 */
	mrctrl0 |= DDRCTRL_MRCTRL0_MR_WR;
	mmio_write_32((uintptr_t)&priv->ctl->mrctrl0, mrctrl0);

	while ((mmio_read_32((uintptr_t)&priv->ctl->mrstat) &
		DDRCTRL_MRSTAT_MR_WR_BUSY) != 0U) {
		;
	}

	VERBOSE("[0x%lx] mrctrl0 = 0x%x\n",
		(uintptr_t)&priv->ctl->mrctrl0, mrctrl0);
}

/* Switch DDR3 from DLL-on to DLL-off */
static void stm32mp1_ddr3_dll_off(struct ddr_info *priv)
{
	uint32_t mr1 = mmio_read_32((uintptr_t)&priv->phy->mr1);
	uint32_t mr2 = mmio_read_32((uintptr_t)&priv->phy->mr2);
	uint32_t dbgcam;

	VERBOSE("mr1: 0x%x\n", mr1);
	VERBOSE("mr2: 0x%x\n", mr2);

	/*
	 * 1. Set the DBG1.dis_hif = 1.
	 *    This prevents further reads/writes being received on the HIF.
	 */
	mmio_setbits_32((uintptr_t)&priv->ctl->dbg1, DDRCTRL_DBG1_DIS_HIF);
	VERBOSE("[0x%lx] dbg1 = 0x%x\n",
		(uintptr_t)&priv->ctl->dbg1,
		mmio_read_32((uintptr_t)&priv->ctl->dbg1));

	/*
	 * 2. Ensure all commands have been flushed from the uMCTL2 by polling
	 *    DBGCAM.wr_data_pipeline_empty = 1,
	 *    DBGCAM.rd_data_pipeline_empty = 1,
	 *    DBGCAM.dbg_wr_q_depth = 0,
	 *    DBGCAM.dbg_lpr_q_depth = 0, and
	 *    DBGCAM.dbg_hpr_q_depth = 0.
	 */
	do {
		dbgcam = mmio_read_32((uintptr_t)&priv->ctl->dbgcam);
		VERBOSE("[0x%lx] dbgcam = 0x%x\n",
			(uintptr_t)&priv->ctl->dbgcam, dbgcam);
	} while ((((dbgcam & DDRCTRL_DBGCAM_DATA_PIPELINE_EMPTY) ==
		   DDRCTRL_DBGCAM_DATA_PIPELINE_EMPTY)) &&
		 ((dbgcam & DDRCTRL_DBGCAM_DBG_Q_DEPTH) == 0U));

	/*
	 * 3. Perform an MRS command (using MRCTRL0 and MRCTRL1 registers)
	 *    to disable RTT_NOM:
	 *    a. DDR3: write to MR1[9], MR1[6] and MR1[2]
	 *    b. DDR4: write to MR1[10:8]
	 */
	mr1 &= ~(BIT(9) | BIT(6) | BIT(2));
	stm32mp1_mode_register_write(priv, 1, mr1);

	/*
	 * 4. For DDR4 only: perform an MRS command
	 *    (using MRCTRL0 and MRCTRL1 registers) to write to MR5[8:6]
	 *    to disable RTT_PARK.
	 */

	/*
	 * 5. Perform an MRS command (using MRCTRL0 and MRCTRL1 registers)
	 *    to write to MR2[10:9], to disable RTT_WR
	 *    (and therefore disable dynamic ODT).
	 *    This applies for both DDR3 and DDR4.
	 */
	mr2 &= ~GENMASK(10, 9);
	stm32mp1_mode_register_write(priv, 2, mr2);

	/*
	 * 6. Perform an MRS command (using MRCTRL0 and MRCTRL1 registers)
	 *    to disable the DLL. The timing of this MRS is automatically
	 *    handled by the uMCTL2.
	 *    a. DDR3: write to MR1[0]
	 *    b. DDR4: write to MR1[0]
	 */
	mr1 |= BIT(0);
	stm32mp1_mode_register_write(priv, 1, mr1);

	/*
	 * 7. Put the SDRAM into self-refresh mode by setting
	 *    PWRCTL.selfref_sw = 1, and polling STAT.operating_mode to ensure
	 *    the DDRC has entered self-refresh.
	 */
	mmio_setbits_32((uintptr_t)&priv->ctl->pwrctl,
			DDRCTRL_PWRCTL_SELFREF_SW);
	VERBOSE("[0x%lx] pwrctl = 0x%x\n",
		(uintptr_t)&priv->ctl->pwrctl,
		mmio_read_32((uintptr_t)&priv->ctl->pwrctl));

	/*
	 * 8. Wait until STAT.operating_mode[1:0] == 11, indicating that the
	 *    DWC_ddr_umctl2 core is in self-refresh mode.
	 *    Ensure the transition to self-refresh was due to software
	 *    by checking that STAT.selfref_type[1:0] = 2.
	 */
	stm32mp1_wait_operating_mode(priv, DDRCTRL_STAT_OPERATING_MODE_SR);

	/*
	 * 9. Set the MSTR.dll_off_mode = 1.
	 *    Warning: MSTR.dll_off_mode is a quasi-dynamic type 2 field.
	 */
	stm32mp1_start_sw_done(priv->ctl);

	mmio_setbits_32((uintptr_t)&priv->ctl->mstr, DDRCTRL_MSTR_DLL_OFF_MODE);
	VERBOSE("[0x%lx] mstr = 0x%x\n",
		(uintptr_t)&priv->ctl->mstr,
		mmio_read_32((uintptr_t)&priv->ctl->mstr));

	stm32mp1_wait_sw_done_ack(priv->ctl);

	/* 10. Change the clock frequency to the desired value. */

	/*
	 * 11. Update any registers which may be required to change for the
	 *     new frequency. This includes static and dynamic registers,
	 *     both uMCTL2 registers and PHY registers.
	 */

	/* Change bypass mode frequency range */
	if (stm32mp_clk_get_rate(DDRPHYC) < 100000000U) {
		mmio_clrbits_32((uintptr_t)&priv->phy->dllgcr,
				DDRPHYC_DLLGCR_BPS200);
	} else {
		mmio_setbits_32((uintptr_t)&priv->phy->dllgcr,
				DDRPHYC_DLLGCR_BPS200);
	}

	mmio_setbits_32((uintptr_t)&priv->phy->acdllcr, DDRPHYC_ACDLLCR_DLLDIS);

	mmio_setbits_32((uintptr_t)&priv->phy->dx0dllcr,
			DDRPHYC_DXNDLLCR_DLLDIS);
	mmio_setbits_32((uintptr_t)&priv->phy->dx1dllcr,
			DDRPHYC_DXNDLLCR_DLLDIS);
	mmio_setbits_32((uintptr_t)&priv->phy->dx2dllcr,
			DDRPHYC_DXNDLLCR_DLLDIS);
	mmio_setbits_32((uintptr_t)&priv->phy->dx3dllcr,
			DDRPHYC_DXNDLLCR_DLLDIS);

	/* 12. Exit the self-refresh state by setting PWRCTL.selfref_sw = 0. */
	mmio_clrbits_32((uintptr_t)&priv->ctl->pwrctl,
			DDRCTRL_PWRCTL_SELFREF_SW);
	stm32mp1_wait_operating_mode(priv, DDRCTRL_STAT_OPERATING_MODE_NORMAL);

	/*
	 * 13. If ZQCTL0.dis_srx_zqcl = 0, the uMCTL2 performs a ZQCL command
	 *     at this point.
	 */

	/*
	 * 14. Perform MRS commands as required to re-program timing registers
	 *     in the SDRAM for the new frequency
	 *     (in particular, CL, CWL and WR may need to be changed).
	 */

	/* 15. Write DBG1.dis_hif = 0 to re-enable reads and writes. */
	mmio_clrbits_32((uintptr_t)&priv->ctl->dbg1, DDRCTRL_DBG1_DIS_HIF);
	VERBOSE("[0x%lx] dbg1 = 0x%x\n",
		(uintptr_t)&priv->ctl->dbg1,
		mmio_read_32((uintptr_t)&priv->ctl->dbg1));
}

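/*
 * Disable auto-refresh, power-down and the DFI init-complete handshake so
 * that PHY training (DDR init sequence step 8) can run undisturbed.
 */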
static void stm32mp1_refresh_disable(struct stm32mp1_ddrctl *ctl)
{
	stm32mp1_start_sw_done(ctl);
	/* Quasi-dynamic register update */
	mmio_setbits_32((uintptr_t)&ctl->rfshctl3,
			DDRCTRL_RFSHCTL3_DIS_AUTO_REFRESH);
	mmio_clrbits_32((uintptr_t)&ctl->pwrctl, DDRCTRL_PWRCTL_POWERDOWN_EN);
	mmio_clrbits_32((uintptr_t)&ctl->dfimisc,
			DDRCTRL_DFIMISC_DFI_INIT_COMPLETE_EN);
	stm32mp1_wait_sw_done_ack(ctl);
}

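/*
 * Restore the refresh and power-down settings that were saved before
 * training, and re-enable the DFI init-complete handshake (DDR init
 * sequence step 12).
 */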
static void stm32mp1_refresh_restore(struct stm32mp1_ddrctl *ctl,
				     uint32_t rfshctl3, uint32_t pwrctl)
{
	stm32mp1_start_sw_done(ctl);
	if ((rfshctl3 & DDRCTRL_RFSHCTL3_DIS_AUTO_REFRESH) == 0U) {
		mmio_clrbits_32((uintptr_t)&ctl->rfshctl3,
				DDRCTRL_RFSHCTL3_DIS_AUTO_REFRESH);
	}
	if ((pwrctl & DDRCTRL_PWRCTL_POWERDOWN_EN) != 0U) {
		mmio_setbits_32((uintptr_t)&ctl->pwrctl,
				DDRCTRL_PWRCTL_POWERDOWN_EN);
	}
	mmio_setbits_32((uintptr_t)&ctl->dfimisc,
			DDRCTRL_DFIMISC_DFI_INIT_COMPLETE_EN);
	stm32mp1_wait_sw_done_ack(ctl);
}

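/*
 * Set up the DDR power rails through the PMIC when one is described in the
 * device tree; boards without a PMIC have nothing to do here.
 */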
static int board_ddr_power_init(enum ddr_type ddr_type)
{
	if (dt_pmic_status() > 0) {
		return pmic_ddr_power_init(ddr_type);
	}

	return 0;
}

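/*
 * Full DDR initialization sequence: power rails, resets and clocks,
 * controller and PHY register programming, PHY init and DQS training, and
 * finally AXI port enable. Any failure ends in panic(), since the boot
 * cannot continue without DDR.
 */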
void stm32mp1_ddr_init(struct ddr_info *priv,
		       struct stm32mp1_ddr_config *config)
{
	uint32_t pir;
	int ret = -EINVAL;

	if ((config->c_reg.mstr & DDRCTRL_MSTR_DDR3) != 0U) {
		ret = board_ddr_power_init(STM32MP_DDR3);
	} else if ((config->c_reg.mstr & DDRCTRL_MSTR_LPDDR2) != 0U) {
		ret = board_ddr_power_init(STM32MP_LPDDR2);
	} else if ((config->c_reg.mstr & DDRCTRL_MSTR_LPDDR3) != 0U) {
		ret = board_ddr_power_init(STM32MP_LPDDR3);
	} else {
		ERROR("DDR type not supported\n");
	}

	if (ret != 0) {
		panic();
	}

	VERBOSE("name = %s\n", config->info.name);
	VERBOSE("speed = %d kHz\n", config->info.speed);
	VERBOSE("size = 0x%x\n", config->info.size);

	/* DDR INIT SEQUENCE */

	/*
	 * 1. Program the DWC_ddr_umctl2 registers
	 *    Note: check that DFIMISC.dfi_init_complete = 0
	 */

	/* 1.1 RESETS: presetn, core_ddrc_rstn, aresetn */
	mmio_setbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCAPBRST);
	mmio_setbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCAXIRST);
	mmio_setbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCORERST);
	mmio_setbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYAPBRST);
	mmio_setbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYRST);
	mmio_setbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYCTLRST);

	/* 1.2. start CLOCK */
	if (stm32mp1_ddr_clk_enable(priv, config->info.speed) != 0) {
		panic();
	}

	/* 1.3. deassert reset */
	/* De-assert PHY rstn and ctl_rstn via DPHYRST and DPHYCTLRST. */
	mmio_clrbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYRST);
	mmio_clrbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYCTLRST);
	/*
	 * De-assert presetn once the clocks are active
	 * and stable, via the DDRCAPBRST bit.
	 */
	mmio_clrbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCAPBRST);

	/* 1.4. wait 128 cycles to permit initialization of end logic */
	udelay(2);
	/* For PCLK = 133 MHz, 1 us is enough; 2 us allows lower frequencies */

	/* 1.5. initialize registers ddr_umctl2 */
	/* Stop uMCTL2 before PHY is ready */
	mmio_clrbits_32((uintptr_t)&priv->ctl->dfimisc,
			DDRCTRL_DFIMISC_DFI_INIT_COMPLETE_EN);
	VERBOSE("[0x%lx] dfimisc = 0x%x\n",
		(uintptr_t)&priv->ctl->dfimisc,
		mmio_read_32((uintptr_t)&priv->ctl->dfimisc));

	set_reg(priv, REG_REG, &config->c_reg);

	/* DDR3: do not set DLL-off for init mode */
	if ((config->c_reg.mstr &
	     (DDRCTRL_MSTR_DDR3 | DDRCTRL_MSTR_DLL_OFF_MODE))
	    == (DDRCTRL_MSTR_DDR3 | DDRCTRL_MSTR_DLL_OFF_MODE)) {
		VERBOSE("deactivate DLL OFF in mstr\n");
		mmio_clrbits_32((uintptr_t)&priv->ctl->mstr,
				DDRCTRL_MSTR_DLL_OFF_MODE);
		VERBOSE("[0x%lx] mstr = 0x%x\n",
			(uintptr_t)&priv->ctl->mstr,
			mmio_read_32((uintptr_t)&priv->ctl->mstr));
	}

	set_reg(priv, REG_TIMING, &config->c_timing);
	set_reg(priv, REG_MAP, &config->c_map);

	/* Skip CTRL init, SDRAM init is done by the PHY PUBL */
	mmio_clrsetbits_32((uintptr_t)&priv->ctl->init0,
			   DDRCTRL_INIT0_SKIP_DRAM_INIT_MASK,
			   DDRCTRL_INIT0_SKIP_DRAM_INIT_NORMAL);
	VERBOSE("[0x%lx] init0 = 0x%x\n",
		(uintptr_t)&priv->ctl->init0,
		mmio_read_32((uintptr_t)&priv->ctl->init0));

	set_reg(priv, REG_PERF, &config->c_perf);

	/* 2. deassert reset signals core_ddrc_rstn, aresetn and presetn */
	mmio_clrbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCORERST);
	mmio_clrbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCAXIRST);
	mmio_clrbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYAPBRST);

	/*
	 * 3. start PHY init by accessing relevant PUBL registers
	 *    (DXGCR, DCR, PTR*, MR*, DTPR*)
	 */
	set_reg(priv, REGPHY_REG, &config->p_reg);
	set_reg(priv, REGPHY_TIMING, &config->p_timing);
	set_reg(priv, REGPHY_CAL, &config->p_cal);

	/* DDR3: do not set DLL-off for init mode */
	if ((config->c_reg.mstr &
	     (DDRCTRL_MSTR_DDR3 | DDRCTRL_MSTR_DLL_OFF_MODE))
	    == (DDRCTRL_MSTR_DDR3 | DDRCTRL_MSTR_DLL_OFF_MODE)) {
		VERBOSE("deactivate DLL OFF in mr1\n");
		mmio_clrbits_32((uintptr_t)&priv->phy->mr1, BIT(0));
		VERBOSE("[0x%lx] mr1 = 0x%x\n",
			(uintptr_t)&priv->phy->mr1,
			mmio_read_32((uintptr_t)&priv->phy->mr1));
	}

	/*
	 * 4. Monitor PHY init status by polling PUBL register PGSR.IDONE
	 *    Perform DDR PHY DRAM initialization and gate training evaluation
	 */
	stm32mp1_ddrphy_idone_wait(priv->phy);

	/*
	 * 5. Indicate to the PUBL that the controller performs SDRAM
	 *    initialization by setting PIR.INIT and PIR.CTLDINIT, and poll
	 *    PGSR.IDONE. DRAM init is done by the PHY, with
	 *    init0.skip_dram_init = 1.
	 */

	pir = DDRPHYC_PIR_DLLSRST | DDRPHYC_PIR_DLLLOCK | DDRPHYC_PIR_ZCAL |
	      DDRPHYC_PIR_ITMSRST | DDRPHYC_PIR_DRAMINIT | DDRPHYC_PIR_ICPC;

	if ((config->c_reg.mstr & DDRCTRL_MSTR_DDR3) != 0U) {
		pir |= DDRPHYC_PIR_DRAMRST; /* Only for DDR3 */
	}

	stm32mp1_ddrphy_init(priv->phy, pir);

	/*
	 * 6. Set DFIMISC.dfi_init_complete_en to 1
	 *    Enable quasi-dynamic register programming.
	 */
	stm32mp1_start_sw_done(priv->ctl);

	mmio_setbits_32((uintptr_t)&priv->ctl->dfimisc,
			DDRCTRL_DFIMISC_DFI_INIT_COMPLETE_EN);
	VERBOSE("[0x%lx] dfimisc = 0x%x\n",
		(uintptr_t)&priv->ctl->dfimisc,
		mmio_read_32((uintptr_t)&priv->ctl->dfimisc));

	stm32mp1_wait_sw_done_ack(priv->ctl);

	/*
	 * 7. Wait for DWC_ddr_umctl2 to move to normal operating mode
	 *    by monitoring the STAT.operating_mode signal
	 */

	/* Wait for uMCTL2 to be ready */
	stm32mp1_wait_operating_mode(priv, DDRCTRL_STAT_OPERATING_MODE_NORMAL);

	/* Switch to DLL-off mode */
	if ((config->c_reg.mstr & DDRCTRL_MSTR_DLL_OFF_MODE) != 0U) {
		stm32mp1_ddr3_dll_off(priv);
	}

	VERBOSE("DDR DQS training : ");

	/*
	 * 8. Disable auto-refresh and power-down by setting
	 *    - RFSHCTL3.dis_auto_refresh = 1
	 *    - PWRCTL.powerdown_en = 0
	 *    - DFIMISC.dfi_init_complete_en = 0
	 */
	stm32mp1_refresh_disable(priv->ctl);

	/*
	 * 9. Program PUBL PGCR to enable refresh during training
	 *    and the rank to train
	 *    not done => keep the programmed value in PGCR
	 */

	/*
	 * 10. Configure the PUBL PIR register to specify which training step
	 *     to run
	 *     Warning: RVTRN is not supported by this PUBL
	 */
	stm32mp1_ddrphy_init(priv->phy, DDRPHYC_PIR_QSTRN);

	/* 11. Monitor PUBL PGSR.IDONE to poll completion of the training */
	stm32mp1_ddrphy_idone_wait(priv->phy);

	/*
	 * 12. Set back the registers of step 8 to their original values
	 *     if desired
	 */
	stm32mp1_refresh_restore(priv->ctl, config->c_reg.rfshctl3,
				 config->c_reg.pwrctl);

	/* Enable uMCTL2 AXI port 0 */
	mmio_setbits_32((uintptr_t)&priv->ctl->pctrl_0,
			DDRCTRL_PCTRL_N_PORT_EN);
	VERBOSE("[0x%lx] pctrl_0 = 0x%x\n",
		(uintptr_t)&priv->ctl->pctrl_0,
		mmio_read_32((uintptr_t)&priv->ctl->pctrl_0));

	/* Enable uMCTL2 AXI port 1 */
	mmio_setbits_32((uintptr_t)&priv->ctl->pctrl_1,
			DDRCTRL_PCTRL_N_PORT_EN);
	VERBOSE("[0x%lx] pctrl_1 = 0x%x\n",
		(uintptr_t)&priv->ctl->pctrl_1,
		mmio_read_32((uintptr_t)&priv->ctl->pctrl_1));
}
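
/*
 * Minimal usage sketch, for illustration only (the actual caller lives in
 * the stm32mp1_ram driver; the base-address helpers below are assumed to be
 * provided by the stm32mp1 platform code):
 *
 *	struct ddr_info priv = {
 *		.ctl = (struct stm32mp1_ddrctl *)stm32mp_ddrctrl_base(),
 *		.phy = (struct stm32mp1_ddrphy *)stm32mp_ddrphyc_base(),
 *		.rcc = stm32mp_rcc_base(),
 *	};
 *	struct stm32mp1_ddr_config config;
 *
 *	... fill config (c_reg, c_timing, c_map, c_perf, p_reg, p_timing,
 *	p_cal, info) from the board device tree, then:
 *
 *	stm32mp1_ddr_init(&priv, &config);
 */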