// SPDX-License-Identifier: GPL-2.0+
/*
 * (c) 2015 Paul Thacker <paul.thacker@microchip.com>
 *
 */
#include <common.h>
#include <wait_bit.h>
#include <linux/kernel.h>
#include <linux/bitops.h>
#include <mach/pic32.h>
#include <mach/ddr.h>

#include "ddr2_regs.h"
#include "ddr2_timing.h"

/* init DDR2 PHY */
void ddr2_phy_init(void)
{
	struct ddr2_phy_regs *ddr2_phy;
	u32 pad_ctl;

	ddr2_phy = ioremap(PIC32_DDR2P_BASE, sizeof(*ddr2_phy));

	/* PHY_DLL_RECALIB */
	writel(DELAY_START_VAL(3) | DISABLE_RECALIB(0) |
	       RECALIB_CNT(0x10), &ddr2_phy->dll_recalib);

	/* PHY_PAD_CTRL */
	pad_ctl = ODT_SEL | ODT_EN | DRIVE_SEL(0) |
		  ODT_PULLDOWN(2) | ODT_PULLUP(3) |
		  EXTRA_OEN_CLK(0) | NOEXT_DLL |
		  DLR_DFT_WRCMD | HALF_RATE |
		  DRVSTR_PFET(0xe) | DRVSTR_NFET(0xe) |
		  RCVR_EN | PREAMBLE_DLY(2);
	writel(pad_ctl, &ddr2_phy->pad_ctrl);

	/* SCL_CONFIG_0 */
	writel(SCL_BURST8 | SCL_DDR_CONNECTED | SCL_RCAS_LAT(RL) |
	       SCL_ODTCSWW, &ddr2_phy->scl_config_1);

	/* SCL_CONFIG_1 */
	writel(SCL_CSEN | SCL_WCAS_LAT(WL), &ddr2_phy->scl_config_2);

	/* SCL_LAT */
	writel(SCL_CAPCLKDLY(3) | SCL_DDRCLKDLY(4), &ddr2_phy->scl_latency);
}

/* start PHY self-calibration logic */
static int ddr2_phy_calib_start(void)
{
	struct ddr2_phy_regs *ddr2_phy;

	ddr2_phy = ioremap(PIC32_DDR2P_BASE, sizeof(*ddr2_phy));

	/* DDR PHY SCL start */
	writel(SCL_START | SCL_EN, &ddr2_phy->scl_start);

	/* wait for the data byte SCL test to pass */
	return wait_for_bit_le32(&ddr2_phy->scl_start, SCL_LUBPASS,
				 true, CONFIG_SYS_HZ, false);
}

/* DDR2 Controller initialization */

/* Target Agent Arbiter */
static void ddr_set_arbiter(struct ddr2_ctrl_regs *ctrl,
			    const struct ddr2_arbiter_params *const param)
{
	int i;

	for (i = 0; i < NUM_AGENTS; i++) {
		/* set min burst size */
		writel(i * MIN_LIM_WIDTH, &ctrl->tsel);
		writel(param->min_limit, &ctrl->minlim);

		/* set request period (4 * req_period clocks) */
		writel(i * RQST_PERIOD_WIDTH, &ctrl->tsel);
		writel(param->req_period, &ctrl->reqprd);

		/* set number of bursts accepted */
		writel(i * MIN_CMDACPT_WIDTH, &ctrl->tsel);
		writel(param->min_cmd_acpt, &ctrl->mincmd);
	}
}

const struct ddr2_arbiter_params *__weak board_get_ddr_arbiter_params(void)
{
	/* default arbiter parameters */
	static const struct ddr2_arbiter_params arb_params[] = {
		{ .min_limit = 0x1f, .req_period = 0xff, .min_cmd_acpt = 0x04,},
		{ .min_limit = 0x1f, .req_period = 0xff, .min_cmd_acpt = 0x10,},
		{ .min_limit = 0x1f, .req_period = 0xff, .min_cmd_acpt = 0x10,},
		{ .min_limit = 0x04, .req_period = 0xff, .min_cmd_acpt = 0x04,},
		{ .min_limit = 0x04, .req_period = 0xff, .min_cmd_acpt = 0x04,},
	};

	return &arb_params[0];
}

static void host_load_cmd(struct ddr2_ctrl_regs *ctrl, u32 cmd_idx,
			  u32 hostcmd2, u32 hostcmd1, u32 delay)
{
	u32 hc_delay;

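	/*
	 * Note: the delay argument appears to be in picoseconds (the
	 * 400 ns power-up wait below is passed as 400000).  Round it up
	 * to whole T_CK periods with a floor of 2, then subtract 2,
	 * presumably because the controller adds two clocks of its own
	 * to every host command.
	 */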
	hc_delay = max_t(u32, DIV_ROUND_UP(delay, T_CK), 2) - 2;
	writel(hostcmd1, &ctrl->cmd10[cmd_idx]);
	writel((hostcmd2 & 0x7ff) | (hc_delay << 11), &ctrl->cmd20[cmd_idx]);
}

/* init DDR2 Controller */
void ddr2_ctrl_init(void)
{
	u32 wr2prech, rd2prech, wr2rd, wr2rd_cs;
	u32 ras2ras, ras2cas, prech2ras, temp;
	const struct ddr2_arbiter_params *arb_params;
	struct ddr2_ctrl_regs *ctrl;

	ctrl = ioremap(PIC32_DDR2C_BASE, sizeof(*ctrl));

	/* the PIC32 DDR2 controller always works in HALF_RATE mode */
	writel(HALF_RATE_MODE, &ctrl->memwidth);

	/* Set arbiter configuration per target */
	arb_params = board_get_ddr_arbiter_params();
	ddr_set_arbiter(ctrl, arb_params);

	/* Address Configuration, model {CS, ROW, BA, COL} */
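	/*
	 * Each *_RSHIFT value below positions its field within the
	 * decoded address, while the matching *_MASK registers
	 * (memcfg1..memcfg4) select the address bits that belong to
	 * that field; the actual geometry comes from the headers
	 * included above.
	 */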
	writel((ROW_ADDR_RSHIFT | (BA_RSHFT << 8) | (CS_ADDR_RSHIFT << 16) |
	       (COL_HI_RSHFT << 24) | (SB_PRI << 29)  |
	       (EN_AUTO_PRECH << 30)), &ctrl->memcfg0);

	writel(ROW_ADDR_MASK, &ctrl->memcfg1);
	writel(COL_HI_MASK, &ctrl->memcfg2);
	writel(COL_LO_MASK, &ctrl->memcfg3);
	writel(BA_MASK | (CS_ADDR_MASK << 8), &ctrl->memcfg4);

	/* Refresh Config */
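	/*
	 * Illustrative arithmetic (example numbers, not necessarily the
	 * values from ddr2_timing.h): with a 7.8 us average refresh
	 * interval and a 5 ns controller clock, REFCNT_CLK becomes
	 * 7800000 / 5000 - 2 = 1558 controller clocks between
	 * auto-refresh requests; REFDLY_CLK is derived the same way
	 * from tRFC(min).
	 */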
	writel(REFCNT_CLK(DIV_ROUND_UP(T_RFI, T_CK_CTRL) - 2) |
	       REFDLY_CLK(DIV_ROUND_UP(T_RFC_MIN, T_CK_CTRL) - 2) |
	       MAX_PEND_REF(7),
	       &ctrl->refcfg);

	/* Power Config */
	writel(ECC_EN(0) | ERR_CORR_EN(0) | EN_AUTO_PWR_DN(0) |
	       EN_AUTO_SELF_REF(3) | PWR_DN_DLY(8) |
	       SELF_REF_DLY(17) | PRECH_PWR_DN_ONLY(0),
	       &ctrl->pwrcfg);

	/* Delay Config */
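	/*
	 * The turnaround terms below combine analog parameters (T_*,
	 * rounded up to T_CK_CTRL controller clocks) with clock-count
	 * parameters (T_*_TCK, halved since the controller runs at half
	 * the DDR rate, see HALF_RATE_MODE above); WL and BL are added
	 * where a DRAM timing is measured from the end of a burst.
	 */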
	wr2rd = max_t(u32, DIV_ROUND_UP(T_WTR, T_CK_CTRL),
		      DIV_ROUND_UP(T_WTR_TCK, 2)) + WL + BL;
	wr2rd_cs = max_t(u32, wr2rd - 1, 3);
	wr2prech = DIV_ROUND_UP(T_WR, T_CK_CTRL) + WL + BL;
	rd2prech = max_t(u32, DIV_ROUND_UP(T_RTP, T_CK_CTRL),
			 DIV_ROUND_UP(T_RTP_TCK, 2)) + BL - 2;
	ras2ras = max_t(u32, DIV_ROUND_UP(T_RRD, T_CK_CTRL),
			DIV_ROUND_UP(T_RRD_TCK, 2)) - 1;
	ras2cas = DIV_ROUND_UP(T_RCD, T_CK_CTRL) - 1;
	prech2ras = DIV_ROUND_UP(T_RP, T_CK_CTRL) - 1;

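	/*
	 * dlycfg0 and dlycfg2 hold only the low four bits of the wr2rd,
	 * wr2rd_cs and wr2prech turnarounds; any bits above bit 3 spill
	 * into the high end of dlycfg1.
	 */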
	writel(((wr2rd & 0x0f) |
	       ((wr2rd_cs & 0x0f) << 4) |
	       ((BL - 1) << 8) |
	       (BL << 12) |
	       ((BL - 1) << 16) |
	       ((BL - 1) << 20) |
	       ((BL + 2) << 24) |
	       ((RL - WL + 3) << 28)), &ctrl->dlycfg0);

	writel(((T_CKE_TCK - 1) |
	       (((DIV_ROUND_UP(T_DLLK, 2) - 2) & 0xff) << 8) |
	       ((T_CKE_TCK - 1) << 16) |
	       ((max_t(u32, T_XP_TCK, T_CKE_TCK) - 1) << 20) |
	       ((wr2prech >> 4) << 26) |
	       ((wr2rd >> 4) << 27) |
	       ((wr2rd_cs >> 4) << 28) |
	       (((RL + 5) >> 4) << 29) |
	       ((DIV_ROUND_UP(T_DLLK, 2) >> 8) << 30)), &ctrl->dlycfg1);

	writel((DIV_ROUND_UP(T_RP, T_CK_CTRL) |
	       (rd2prech << 8) |
	       ((wr2prech & 0x0f) << 12) |
	       (ras2ras << 16) |
	       (ras2cas << 20) |
	       (prech2ras << 24) |
	       ((RL + 3) << 28)), &ctrl->dlycfg2);

	writel(((DIV_ROUND_UP(T_RAS_MIN, T_CK_CTRL) - 1) |
	       ((DIV_ROUND_UP(T_RC, T_CK_CTRL) - 1) << 8) |
	       ((DIV_ROUND_UP(T_FAW, T_CK_CTRL) - 1) << 16)),
	       &ctrl->dlycfg3);

	/* ODT Config */
	writel(0x0, &ctrl->odtcfg);
	writel(BIT(16), &ctrl->odtencfg);
	writel(ODTRDLY(RL - 3) | ODTWDLY(WL - 3) | ODTRLEN(2) | ODTWLEN(3),
	       &ctrl->odtcfg);

	/* Transfer Configuration */
	writel(NXTDATRQDLY(2) | NXDATAVDLY(4) | RDATENDLY(2) |
	       MAX_BURST(3) | (7 << 28) | BIG_ENDIAN(0),
	       &ctrl->xfercfg);

	/* DRAM Initialization */
	/* CKE high after reset and wait 400 nsec */
	host_load_cmd(ctrl, 0, 0, IDLE_NOP, 400000);

	/* issue precharge all command */
	host_load_cmd(ctrl, 1, 0x04, PRECH_ALL_CMD, T_RP + T_CK);

	/* initialize EMR2 */
	host_load_cmd(ctrl, 2, 0x200, LOAD_MODE_CMD, T_MRD_TCK * T_CK);

	/* initialize EMR3 */
	host_load_cmd(ctrl, 3, 0x300, LOAD_MODE_CMD, T_MRD_TCK * T_CK);

	/*
	 * RDQS disable, DQSB enable, OCD exit, 150 ohm termination,
	 * AL=0, DLL enable
	 */
	host_load_cmd(ctrl, 4, 0x100,
		      LOAD_MODE_CMD | (0x40 << 24), T_MRD_TCK * T_CK);
	/*
	 * PD fast exit, WR REC = T_WR in clocks -1,
	 * DLL reset, CAS = RL, burst = 4
	 */
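	/*
	 * For example, with tWR = 15 ns and a 2.5 ns DDR clock
	 * (illustrative values only), the WR REC field is programmed as
	 * DIV_ROUND_UP(15000, 2500) - 1 = 5.
	 */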
	temp = ((DIV_ROUND_UP(T_WR, T_CK) - 1) << 1) | 1;
	host_load_cmd(ctrl, 5, temp, LOAD_MODE_CMD | (RL << 28) | (2 << 24),
		      T_MRD_TCK * T_CK);

	/* issue precharge all command */
	host_load_cmd(ctrl, 6, 4, PRECH_ALL_CMD, T_RP + T_CK);

	/* issue refresh command */
	host_load_cmd(ctrl, 7, 0, REF_CMD, T_RFC_MIN);

	/* issue refresh command */
	host_load_cmd(ctrl, 8, 0, REF_CMD, T_RFC_MIN);

	/* Mode register programming as before without DLL reset */
	host_load_cmd(ctrl, 9, temp, LOAD_MODE_CMD | (RL << 28) | (3 << 24),
		      T_MRD_TCK * T_CK);

	/* extended mode register same as before with OCD default */
	host_load_cmd(ctrl, 10, 0x103, LOAD_MODE_CMD | (0xc << 24),
		      T_MRD_TCK * T_CK);

	/* extended mode register same as before with OCD exit */
	host_load_cmd(ctrl, 11, 0x100, LOAD_MODE_CMD | (0x4 << 28),
		      140 * T_CK);

	writel(CMD_VALID | NUMHOSTCMD(11), &ctrl->cmdissue);

	/* start memory initialization */
	writel(INIT_START, &ctrl->memcon);

	/* wait for all host cmds to be transmitted */
	wait_for_bit_le32(&ctrl->cmdissue, CMD_VALID, false,
			  CONFIG_SYS_HZ, false);

	/* inform all cmds issued, ready for normal operation */
	writel(INIT_START | INIT_DONE, &ctrl->memcon);

	/* perform PHY calibration */
	if (ddr2_phy_calib_start())
		printf("ddr2: phy calib failed\n");
}

phys_size_t ddr2_calculate_size(void)
{
	u32 temp;

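	/*
	 * The module spans 2^(columns + banks + rows) locations per chip
	 * select, each 16 bits wide.  As an illustration (not necessarily
	 * this board's geometry): 9 column, 3 bank and 13 row bits with a
	 * single chip select give 2^25 * 1 * 2 bytes = 64 MiB.
	 */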
	temp = 1 << (COL_BITS + BA_BITS + ROW_BITS);
	/* 16-bit data width between controller and DIMM */
	temp = temp * CS_BITS * (16 / 8);
	return (phys_size_t)temp;
}
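
/*
 * Typical call sequence (an illustrative sketch, not code from this
 * driver): a PIC32 board's dram_init() would bring up the PHY and the
 * controller and then report the size to U-Boot, roughly:
 *
 *	int dram_init(void)
 *	{
 *		ddr2_phy_init();
 *		ddr2_ctrl_init();
 *		gd->ram_size = ddr2_calculate_size();
 *		return 0;
 *	}
 */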