// SPDX-License-Identifier: GPL-2.0
/*
 * Copyright (C) Marvell International Ltd. and its affiliates
 */

#include "ddr3_init.h"
#include "mv_ddr_regs.h"
#include "ddr_training_ip_db.h"

#define PATTERN_1	0x55555555
#define PATTERN_2	0xaaaaaaaa

#define VALIDATE_TRAINING_LIMIT(e1, e2) \
	((((e2) - (e1) + 1) > 33) && ((e1) < 67))
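
/*
 * Example (illustrative): a window whose edges were found at e1 = 20 and
 * e2 = 60 has a width of 60 - 20 + 1 = 41 taps (> 33) and a left edge
 * below 67, so VALIDATE_TRAINING_LIMIT(20, 60) evaluates to true; a narrow
 * window such as (50, 70) fails the width check (21 <= 33).
 */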

u32 phy_reg_bk[MAX_INTERFACE_NUM][MAX_BUS_NUM][BUS_WIDTH_IN_BITS];

u32 training_res[MAX_INTERFACE_NUM * MAX_BUS_NUM * BUS_WIDTH_IN_BITS *
		 HWS_SEARCH_DIR_LIMIT];
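/*
 * Layout note (derived from ddr3_tip_get_buf_ptr() below): training_res is
 * a flat array of HWS_SEARCH_DIR_LIMIT blocks, one per search direction;
 * within a block each interface owns MAX_BUS_NUM * BUS_WIDTH_IN_BITS
 * consecutive entries, i.e.
 *	index = search * MAX_INTERFACE_NUM * MAX_BUS_NUM * BUS_WIDTH_IN_BITS +
 *		if_id * MAX_BUS_NUM * BUS_WIDTH_IN_BITS +
 *		subphy * BUS_WIDTH_IN_BITS + bit
 */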
u8 byte_status[MAX_INTERFACE_NUM][MAX_BUS_NUM]; /* holds the bit status in the byte in wrapper function */

u16 mask_results_dq_reg_map[] = {
	RESULT_CONTROL_PUP_0_BIT_0_REG, RESULT_CONTROL_PUP_0_BIT_1_REG,
	RESULT_CONTROL_PUP_0_BIT_2_REG, RESULT_CONTROL_PUP_0_BIT_3_REG,
	RESULT_CONTROL_PUP_0_BIT_4_REG, RESULT_CONTROL_PUP_0_BIT_5_REG,
	RESULT_CONTROL_PUP_0_BIT_6_REG, RESULT_CONTROL_PUP_0_BIT_7_REG,
	RESULT_CONTROL_PUP_1_BIT_0_REG, RESULT_CONTROL_PUP_1_BIT_1_REG,
	RESULT_CONTROL_PUP_1_BIT_2_REG, RESULT_CONTROL_PUP_1_BIT_3_REG,
	RESULT_CONTROL_PUP_1_BIT_4_REG, RESULT_CONTROL_PUP_1_BIT_5_REG,
	RESULT_CONTROL_PUP_1_BIT_6_REG, RESULT_CONTROL_PUP_1_BIT_7_REG,
	RESULT_CONTROL_PUP_2_BIT_0_REG, RESULT_CONTROL_PUP_2_BIT_1_REG,
	RESULT_CONTROL_PUP_2_BIT_2_REG, RESULT_CONTROL_PUP_2_BIT_3_REG,
	RESULT_CONTROL_PUP_2_BIT_4_REG, RESULT_CONTROL_PUP_2_BIT_5_REG,
	RESULT_CONTROL_PUP_2_BIT_6_REG, RESULT_CONTROL_PUP_2_BIT_7_REG,
	RESULT_CONTROL_PUP_3_BIT_0_REG, RESULT_CONTROL_PUP_3_BIT_1_REG,
	RESULT_CONTROL_PUP_3_BIT_2_REG, RESULT_CONTROL_PUP_3_BIT_3_REG,
	RESULT_CONTROL_PUP_3_BIT_4_REG, RESULT_CONTROL_PUP_3_BIT_5_REG,
	RESULT_CONTROL_PUP_3_BIT_6_REG, RESULT_CONTROL_PUP_3_BIT_7_REG,
	RESULT_CONTROL_PUP_4_BIT_0_REG, RESULT_CONTROL_PUP_4_BIT_1_REG,
	RESULT_CONTROL_PUP_4_BIT_2_REG, RESULT_CONTROL_PUP_4_BIT_3_REG,
	RESULT_CONTROL_PUP_4_BIT_4_REG, RESULT_CONTROL_PUP_4_BIT_5_REG,
	RESULT_CONTROL_PUP_4_BIT_6_REG, RESULT_CONTROL_PUP_4_BIT_7_REG,
#if MAX_BUS_NUM == 9
	RESULT_CONTROL_PUP_5_BIT_0_REG, RESULT_CONTROL_PUP_5_BIT_1_REG,
	RESULT_CONTROL_PUP_5_BIT_2_REG, RESULT_CONTROL_PUP_5_BIT_3_REG,
	RESULT_CONTROL_PUP_5_BIT_4_REG, RESULT_CONTROL_PUP_5_BIT_5_REG,
	RESULT_CONTROL_PUP_5_BIT_6_REG, RESULT_CONTROL_PUP_5_BIT_7_REG,
	RESULT_CONTROL_PUP_6_BIT_0_REG, RESULT_CONTROL_PUP_6_BIT_1_REG,
	RESULT_CONTROL_PUP_6_BIT_2_REG, RESULT_CONTROL_PUP_6_BIT_3_REG,
	RESULT_CONTROL_PUP_6_BIT_4_REG, RESULT_CONTROL_PUP_6_BIT_5_REG,
	RESULT_CONTROL_PUP_6_BIT_6_REG, RESULT_CONTROL_PUP_6_BIT_7_REG,
	RESULT_CONTROL_PUP_7_BIT_0_REG, RESULT_CONTROL_PUP_7_BIT_1_REG,
	RESULT_CONTROL_PUP_7_BIT_2_REG, RESULT_CONTROL_PUP_7_BIT_3_REG,
	RESULT_CONTROL_PUP_7_BIT_4_REG, RESULT_CONTROL_PUP_7_BIT_5_REG,
	RESULT_CONTROL_PUP_7_BIT_6_REG, RESULT_CONTROL_PUP_7_BIT_7_REG,
	RESULT_CONTROL_PUP_8_BIT_0_REG, RESULT_CONTROL_PUP_8_BIT_1_REG,
	RESULT_CONTROL_PUP_8_BIT_2_REG, RESULT_CONTROL_PUP_8_BIT_3_REG,
	RESULT_CONTROL_PUP_8_BIT_4_REG, RESULT_CONTROL_PUP_8_BIT_5_REG,
	RESULT_CONTROL_PUP_8_BIT_6_REG, RESULT_CONTROL_PUP_8_BIT_7_REG,
#endif
	0xffff
};

u16 mask_results_pup_reg_map[] = {
	RESULT_CONTROL_BYTE_PUP_0_REG, RESULT_CONTROL_BYTE_PUP_1_REG,
	RESULT_CONTROL_BYTE_PUP_2_REG, RESULT_CONTROL_BYTE_PUP_3_REG,
	RESULT_CONTROL_BYTE_PUP_4_REG,
#if MAX_BUS_NUM == 9
	RESULT_CONTROL_BYTE_PUP_5_REG, RESULT_CONTROL_BYTE_PUP_6_REG,
	RESULT_CONTROL_BYTE_PUP_7_REG, RESULT_CONTROL_BYTE_PUP_8_REG,
#endif
	0xffff
};

#if MAX_BUS_NUM == 5
u16 mask_results_dq_reg_map_pup3_ecc[] = {
	RESULT_CONTROL_PUP_0_BIT_0_REG, RESULT_CONTROL_PUP_0_BIT_1_REG,
	RESULT_CONTROL_PUP_0_BIT_2_REG, RESULT_CONTROL_PUP_0_BIT_3_REG,
	RESULT_CONTROL_PUP_0_BIT_4_REG, RESULT_CONTROL_PUP_0_BIT_5_REG,
	RESULT_CONTROL_PUP_0_BIT_6_REG, RESULT_CONTROL_PUP_0_BIT_7_REG,
	RESULT_CONTROL_PUP_1_BIT_0_REG, RESULT_CONTROL_PUP_1_BIT_1_REG,
	RESULT_CONTROL_PUP_1_BIT_2_REG, RESULT_CONTROL_PUP_1_BIT_3_REG,
	RESULT_CONTROL_PUP_1_BIT_4_REG, RESULT_CONTROL_PUP_1_BIT_5_REG,
	RESULT_CONTROL_PUP_1_BIT_6_REG, RESULT_CONTROL_PUP_1_BIT_7_REG,
	RESULT_CONTROL_PUP_2_BIT_0_REG, RESULT_CONTROL_PUP_2_BIT_1_REG,
	RESULT_CONTROL_PUP_2_BIT_2_REG, RESULT_CONTROL_PUP_2_BIT_3_REG,
	RESULT_CONTROL_PUP_2_BIT_4_REG, RESULT_CONTROL_PUP_2_BIT_5_REG,
	RESULT_CONTROL_PUP_2_BIT_6_REG, RESULT_CONTROL_PUP_2_BIT_7_REG,
	RESULT_CONTROL_PUP_4_BIT_0_REG, RESULT_CONTROL_PUP_4_BIT_1_REG,
	RESULT_CONTROL_PUP_4_BIT_2_REG, RESULT_CONTROL_PUP_4_BIT_3_REG,
	RESULT_CONTROL_PUP_4_BIT_4_REG, RESULT_CONTROL_PUP_4_BIT_5_REG,
	RESULT_CONTROL_PUP_4_BIT_6_REG, RESULT_CONTROL_PUP_4_BIT_7_REG,
	RESULT_CONTROL_PUP_3_BIT_0_REG, RESULT_CONTROL_PUP_3_BIT_1_REG,
	RESULT_CONTROL_PUP_3_BIT_2_REG, RESULT_CONTROL_PUP_3_BIT_3_REG,
	RESULT_CONTROL_PUP_3_BIT_4_REG, RESULT_CONTROL_PUP_3_BIT_5_REG,
	RESULT_CONTROL_PUP_3_BIT_6_REG, RESULT_CONTROL_PUP_3_BIT_7_REG
};
#endif

#if MAX_BUS_NUM == 5
u16 mask_results_pup_reg_map_pup3_ecc[] = {
	RESULT_CONTROL_BYTE_PUP_0_REG, RESULT_CONTROL_BYTE_PUP_1_REG,
	RESULT_CONTROL_BYTE_PUP_2_REG, RESULT_CONTROL_BYTE_PUP_4_REG,
	RESULT_CONTROL_BYTE_PUP_4_REG
};
#endif

struct pattern_info pattern_table_64[] = {
	/*
	 * num_of_phases_tx, tx_burst_size;
	 * delay_between_bursts, num_of_phases_rx,
	 * start_addr, pattern_len
	 */
	{0x7, 0x7, 2, 0x7, 0x00000, 8},		/* PATTERN_PBS1 */
	{0x7, 0x7, 2, 0x7, 0x00080, 8},		/* PATTERN_PBS2 */
	{0x7, 0x7, 2, 0x7, 0x00100, 8},		/* PATTERN_PBS3 */
	{0x7, 0x7, 2, 0x7, 0x00030, 8},		/* PATTERN_TEST */
	{0x7, 0x7, 2, 0x7, 0x00100, 8},		/* PATTERN_RL */
	{0x7, 0x7, 2, 0x7, 0x00100, 8},		/* PATTERN_RL2 */
	{0x1f, 0xf, 2, 0xf, 0x00680, 32},	/* PATTERN_STATIC_PBS */
	{0x1f, 0xf, 2, 0xf, 0x00a80, 32},	/* PATTERN_KILLER_DQ0 */
	{0x1f, 0xf, 2, 0xf, 0x01280, 32},	/* PATTERN_KILLER_DQ1 */
	{0x1f, 0xf, 2, 0xf, 0x01a80, 32},	/* PATTERN_KILLER_DQ2 */
	{0x1f, 0xf, 2, 0xf, 0x02280, 32},	/* PATTERN_KILLER_DQ3 */
	{0x1f, 0xf, 2, 0xf, 0x02a80, 32},	/* PATTERN_KILLER_DQ4 */
	{0x1f, 0xf, 2, 0xf, 0x03280, 32},	/* PATTERN_KILLER_DQ5 */
	{0x1f, 0xf, 2, 0xf, 0x03a80, 32},	/* PATTERN_KILLER_DQ6 */
	{0x1f, 0xf, 2, 0xf, 0x04280, 32},	/* PATTERN_KILLER_DQ7 */
	{0x1f, 0xf, 2, 0xf, 0x00e80, 32},	/* PATTERN_KILLER_DQ0_64 */
	{0x1f, 0xf, 2, 0xf, 0x01680, 32},	/* PATTERN_KILLER_DQ1_64 */
	{0x1f, 0xf, 2, 0xf, 0x01e80, 32},	/* PATTERN_KILLER_DQ2_64 */
	{0x1f, 0xf, 2, 0xf, 0x02680, 32},	/* PATTERN_KILLER_DQ3_64 */
	{0x1f, 0xf, 2, 0xf, 0x02e80, 32},	/* PATTERN_KILLER_DQ4_64 */
	{0x1f, 0xf, 2, 0xf, 0x03680, 32},	/* PATTERN_KILLER_DQ5_64 */
	{0x1f, 0xf, 2, 0xf, 0x03e80, 32},	/* PATTERN_KILLER_DQ6_64 */
	{0x1f, 0xf, 2, 0xf, 0x04680, 32},	/* PATTERN_KILLER_DQ7_64 */
	{0x1f, 0xf, 2, 0xf, 0x04a80, 32},	/* PATTERN_KILLER_DQ0_INV */
	{0x1f, 0xf, 2, 0xf, 0x05280, 32},	/* PATTERN_KILLER_DQ1_INV */
	{0x1f, 0xf, 2, 0xf, 0x05a80, 32},	/* PATTERN_KILLER_DQ2_INV */
	{0x1f, 0xf, 2, 0xf, 0x06280, 32},	/* PATTERN_KILLER_DQ3_INV */
	{0x1f, 0xf, 2, 0xf, 0x06a80, 32},	/* PATTERN_KILLER_DQ4_INV */
	{0x1f, 0xf, 2, 0xf, 0x07280, 32},	/* PATTERN_KILLER_DQ5_INV */
	{0x1f, 0xf, 2, 0xf, 0x07a80, 32},	/* PATTERN_KILLER_DQ6_INV */
	{0x1f, 0xf, 2, 0xf, 0x08280, 32},	/* PATTERN_KILLER_DQ7_INV */
	{0x1f, 0xf, 2, 0xf, 0x04e80, 32},	/* PATTERN_KILLER_DQ0_INV_64 */
	{0x1f, 0xf, 2, 0xf, 0x05680, 32},	/* PATTERN_KILLER_DQ1_INV_64 */
	{0x1f, 0xf, 2, 0xf, 0x05e80, 32},	/* PATTERN_KILLER_DQ2_INV_64 */
	{0x1f, 0xf, 2, 0xf, 0x06680, 32},	/* PATTERN_KILLER_DQ3_INV_64 */
	{0x1f, 0xf, 2, 0xf, 0x06e80, 32},	/* PATTERN_KILLER_DQ4_INV_64 */
	{0x1f, 0xf, 2, 0xf, 0x07680, 32},	/* PATTERN_KILLER_DQ5_INV_64 */
	{0x1f, 0xf, 2, 0xf, 0x07e80, 32},	/* PATTERN_KILLER_DQ6_INV_64 */
	{0x1f, 0xf, 2, 0xf, 0x08680, 32},	/* PATTERN_KILLER_DQ7_INV_64 */
	{0x1f, 0xf, 2, 0xf, 0x08a80, 32},	/* PATTERN_SSO_FULL_XTALK_DQ0 */
	{0x1f, 0xf, 2, 0xf, 0x09280, 32},	/* PATTERN_SSO_FULL_XTALK_DQ1 */
	{0x1f, 0xf, 2, 0xf, 0x09a80, 32},	/* PATTERN_SSO_FULL_XTALK_DQ2 */
	{0x1f, 0xf, 2, 0xf, 0x0a280, 32},	/* PATTERN_SSO_FULL_XTALK_DQ3 */
	{0x1f, 0xf, 2, 0xf, 0x0aa80, 32},	/* PATTERN_SSO_FULL_XTALK_DQ4 */
	{0x1f, 0xf, 2, 0xf, 0x0b280, 32},	/* PATTERN_SSO_FULL_XTALK_DQ5 */
	{0x1f, 0xf, 2, 0xf, 0x0ba80, 32},	/* PATTERN_SSO_FULL_XTALK_DQ6 */
	{0x1f, 0xf, 2, 0xf, 0x0c280, 32},	/* PATTERN_SSO_FULL_XTALK_DQ7 */
	{0x1f, 0xf, 2, 0xf, 0x08e80, 32},	/* PATTERN_SSO_FULL_XTALK_DQ0_64 */
	{0x1f, 0xf, 2, 0xf, 0x09680, 32},	/* PATTERN_SSO_FULL_XTALK_DQ1_64 */
	{0x1f, 0xf, 2, 0xf, 0x09e80, 32},	/* PATTERN_SSO_FULL_XTALK_DQ2_64 */
	{0x1f, 0xf, 2, 0xf, 0x0a680, 32},	/* PATTERN_SSO_FULL_XTALK_DQ3_64 */
	{0x1f, 0xf, 2, 0xf, 0x0ae80, 32},	/* PATTERN_SSO_FULL_XTALK_DQ4_64 */
	{0x1f, 0xf, 2, 0xf, 0x0b680, 32},	/* PATTERN_SSO_FULL_XTALK_DQ5_64 */
	{0x1f, 0xf, 2, 0xf, 0x0be80, 32},	/* PATTERN_SSO_FULL_XTALK_DQ6_64 */
	{0x1f, 0xf, 2, 0xf, 0x0c680, 32},	/* PATTERN_SSO_FULL_XTALK_DQ7_64 */
	{0x1f, 0xf, 2, 0xf, 0x0ca80, 32},	/* PATTERN_SSO_XTALK_FREE_DQ0 */
	{0x1f, 0xf, 2, 0xf, 0x0d280, 32},	/* PATTERN_SSO_XTALK_FREE_DQ1 */
	{0x1f, 0xf, 2, 0xf, 0x0da80, 32},	/* PATTERN_SSO_XTALK_FREE_DQ2 */
	{0x1f, 0xf, 2, 0xf, 0x0e280, 32},	/* PATTERN_SSO_XTALK_FREE_DQ3 */
	{0x1f, 0xf, 2, 0xf, 0x0ea80, 32},	/* PATTERN_SSO_XTALK_FREE_DQ4 */
	{0x1f, 0xf, 2, 0xf, 0x0f280, 32},	/* PATTERN_SSO_XTALK_FREE_DQ5 */
	{0x1f, 0xf, 2, 0xf, 0x0fa80, 32},	/* PATTERN_SSO_XTALK_FREE_DQ6 */
	{0x1f, 0xf, 2, 0xf, 0x10280, 32},	/* PATTERN_SSO_XTALK_FREE_DQ7 */
	{0x1f, 0xf, 2, 0xf, 0x0ce80, 32},	/* PATTERN_SSO_XTALK_FREE_DQ0_64 */
	{0x1f, 0xf, 2, 0xf, 0x0d680, 32},	/* PATTERN_SSO_XTALK_FREE_DQ1_64 */
	{0x1f, 0xf, 2, 0xf, 0x0de80, 32},	/* PATTERN_SSO_XTALK_FREE_DQ2_64 */
	{0x1f, 0xf, 2, 0xf, 0x0e680, 32},	/* PATTERN_SSO_XTALK_FREE_DQ3_64 */
	{0x1f, 0xf, 2, 0xf, 0x0ee80, 32},	/* PATTERN_SSO_XTALK_FREE_DQ4_64 */
	{0x1f, 0xf, 2, 0xf, 0x0f680, 32},	/* PATTERN_SSO_XTALK_FREE_DQ5_64 */
	{0x1f, 0xf, 2, 0xf, 0x0fe80, 32},	/* PATTERN_SSO_XTALK_FREE_DQ6_64 */
	{0x1f, 0xf, 2, 0xf, 0x10680, 32},	/* PATTERN_SSO_XTALK_FREE_DQ7_64 */
	{0x1f, 0xf, 2, 0xf, 0x10a80, 32},	/* PATTERN_ISI_XTALK_FREE */
	{0x1f, 0xf, 2, 0xf, 0x10e80, 32},	/* PATTERN_ISI_XTALK_FREE_64 */
	{0x1f, 0xf, 2, 0xf, 0x11280, 32},	/* PATTERN_VREF */
	{0x1f, 0xf, 2, 0xf, 0x11680, 32},	/* PATTERN_VREF_64 */
	{0x1f, 0xf, 2, 0xf, 0x11a80, 32},	/* PATTERN_VREF_INV */
	{0x1f, 0xf, 2, 0xf, 0x11e80, 32},	/* PATTERN_FULL_SSO_0T */
	{0x1f, 0xf, 2, 0xf, 0x12280, 32},	/* PATTERN_FULL_SSO_1T */
	{0x1f, 0xf, 2, 0xf, 0x12680, 32},	/* PATTERN_FULL_SSO_2T */
	{0x1f, 0xf, 2, 0xf, 0x12a80, 32},	/* PATTERN_FULL_SSO_3T */
	{0x1f, 0xf, 2, 0xf, 0x12e80, 32},	/* PATTERN_RESONANCE_1T */
	{0x1f, 0xf, 2, 0xf, 0x13280, 32},	/* PATTERN_RESONANCE_2T */
	{0x1f, 0xf, 2, 0xf, 0x13680, 32},	/* PATTERN_RESONANCE_3T */
	{0x1f, 0xf, 2, 0xf, 0x13a80, 32},	/* PATTERN_RESONANCE_4T */
	{0x1f, 0xf, 2, 0xf, 0x13e80, 32},	/* PATTERN_RESONANCE_5T */
	{0x1f, 0xf, 2, 0xf, 0x14280, 32},	/* PATTERN_RESONANCE_6T */
	{0x1f, 0xf, 2, 0xf, 0x14680, 32},	/* PATTERN_RESONANCE_7T */
	{0x1f, 0xf, 2, 0xf, 0x14a80, 32},	/* PATTERN_RESONANCE_8T */
	{0x1f, 0xf, 2, 0xf, 0x14e80, 32},	/* PATTERN_RESONANCE_9T */
	{0x1f, 0xf, 2, 0xf, 0x15280, 32},	/* PATTERN_ZERO */
	{0x1f, 0xf, 2, 0xf, 0x15680, 32}	/* PATTERN_ONE */
	/* Note: actual start_address is "<< 3" of defined address */
};
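
/*
 * Example: PATTERN_KILLER_DQ0 above is defined with start_addr 0x00a80;
 * per the note, its effective start address is 0x00a80 << 3 = 0x5400.
 */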

struct pattern_info pattern_table_16[] = {
	/*
	 * num tx phases, tx burst, delay between, rx pattern,
	 * start_address, pattern_len
	 */
	{1, 1, 2, 1, 0x0080, 2},	/* PATTERN_PBS1 */
	{1, 1, 2, 1, 0x00c0, 2},	/* PATTERN_PBS2 */
	{1, 1, 2, 1, 0x0380, 2},	/* PATTERN_PBS3 */
	{1, 1, 2, 1, 0x0040, 2},	/* PATTERN_TEST */
	{1, 1, 2, 1, 0x0100, 2},	/* PATTERN_RL */
	{1, 1, 2, 1, 0x0000, 2},	/* PATTERN_RL2 */
	{0xf, 0x7, 2, 0x7, 0x0140, 16},	/* PATTERN_STATIC_PBS */
	{0xf, 0x7, 2, 0x7, 0x0190, 16},	/* PATTERN_KILLER_DQ0 */
	{0xf, 0x7, 2, 0x7, 0x01d0, 16},	/* PATTERN_KILLER_DQ1 */
	{0xf, 0x7, 2, 0x7, 0x0210, 16},	/* PATTERN_KILLER_DQ2 */
	{0xf, 0x7, 2, 0x7, 0x0250, 16},	/* PATTERN_KILLER_DQ3 */
	{0xf, 0x7, 2, 0x7, 0x0290, 16},	/* PATTERN_KILLER_DQ4 */
	{0xf, 0x7, 2, 0x7, 0x02d0, 16},	/* PATTERN_KILLER_DQ5 */
	{0xf, 0x7, 2, 0x7, 0x0310, 16},	/* PATTERN_KILLER_DQ6 */
	{0xf, 0x7, 2, 0x7, 0x0350, 16},	/* PATTERN_KILLER_DQ7 */
	{0xf, 0x7, 2, 0x7, 0x04c0, 16},	/* PATTERN_VREF */
	{0xf, 0x7, 2, 0x7, 0x03c0, 16},	/* PATTERN_FULL_SSO_1T */
	{0xf, 0x7, 2, 0x7, 0x0400, 16},	/* PATTERN_FULL_SSO_2T */
	{0xf, 0x7, 2, 0x7, 0x0440, 16},	/* PATTERN_FULL_SSO_3T */
	{0xf, 0x7, 2, 0x7, 0x0480, 16},	/* PATTERN_FULL_SSO_4T */
	{0xf, 7, 2, 7, 0x6280, 16},	/* PATTERN_SSO_FULL_XTALK_DQ0 */
	{0xf, 7, 2, 7, 0x6680, 16},	/* PATTERN_SSO_FULL_XTALK_DQ1 */
	{0xf, 7, 2, 7, 0x6A80, 16},	/* PATTERN_SSO_FULL_XTALK_DQ2 */
	{0xf, 7, 2, 7, 0x6E80, 16},	/* PATTERN_SSO_FULL_XTALK_DQ3 */
	{0xf, 7, 2, 7, 0x7280, 16},	/* PATTERN_SSO_FULL_XTALK_DQ4 */
	{0xf, 7, 2, 7, 0x7680, 16},	/* PATTERN_SSO_FULL_XTALK_DQ5 */
	{0xf, 7, 2, 7, 0x7A80, 16},	/* PATTERN_SSO_FULL_XTALK_DQ6 */
	{0xf, 7, 2, 7, 0x7E80, 16},	/* PATTERN_SSO_FULL_XTALK_DQ7 */
	{0xf, 7, 2, 7, 0x8280, 16},	/* PATTERN_SSO_XTALK_FREE_DQ0 */
	{0xf, 7, 2, 7, 0x8680, 16},	/* PATTERN_SSO_XTALK_FREE_DQ1 */
	{0xf, 7, 2, 7, 0x8A80, 16},	/* PATTERN_SSO_XTALK_FREE_DQ2 */
	{0xf, 7, 2, 7, 0x8E80, 16},	/* PATTERN_SSO_XTALK_FREE_DQ3 */
	{0xf, 7, 2, 7, 0x9280, 16},	/* PATTERN_SSO_XTALK_FREE_DQ4 */
	{0xf, 7, 2, 7, 0x9680, 16},	/* PATTERN_SSO_XTALK_FREE_DQ5 */
	{0xf, 7, 2, 7, 0x9A80, 16},	/* PATTERN_SSO_XTALK_FREE_DQ6 */
	{0xf, 7, 2, 7, 0x9E80, 16},	/* PATTERN_SSO_XTALK_FREE_DQ7 */
	{0xf, 7, 2, 7, 0xA280, 16}	/* PATTERN_ISI_XTALK_FREE */
	/* Note: actual start_address is "<< 3" of defined address */
};

struct pattern_info pattern_table_32[] = {
	/*
	 * num tx phases, tx burst, delay between, rx pattern,
	 * start_address, pattern_len
	 */
	{3, 3, 2, 3, 0x0080, 4},	/* PATTERN_PBS1 */
	{3, 3, 2, 3, 0x00c0, 4},	/* PATTERN_PBS2 */
	{3, 3, 2, 3, 0x0380, 4},	/* PATTERN_PBS3 */
	{3, 3, 2, 3, 0x0040, 4},	/* PATTERN_TEST */
	{3, 3, 2, 3, 0x0100, 4},	/* PATTERN_RL */
	{3, 3, 2, 3, 0x0000, 4},	/* PATTERN_RL2 */
	{0x1f, 0xf, 2, 0xf, 0x0140, 32},	/* PATTERN_STATIC_PBS */
	{0x1f, 0xf, 2, 0xf, 0x0190, 32},	/* PATTERN_KILLER_DQ0 */
	{0x1f, 0xf, 2, 0xf, 0x01d0, 32},	/* PATTERN_KILLER_DQ1 */
	{0x1f, 0xf, 2, 0xf, 0x0210, 32},	/* PATTERN_KILLER_DQ2 */
	{0x1f, 0xf, 2, 0xf, 0x0250, 32},	/* PATTERN_KILLER_DQ3 */
	{0x1f, 0xf, 2, 0xf, 0x0290, 32},	/* PATTERN_KILLER_DQ4 */
	{0x1f, 0xf, 2, 0xf, 0x02d0, 32},	/* PATTERN_KILLER_DQ5 */
	{0x1f, 0xf, 2, 0xf, 0x0310, 32},	/* PATTERN_KILLER_DQ6 */
	{0x1f, 0xf, 2, 0xf, 0x0350, 32},	/* PATTERN_KILLER_DQ7 */
	{0x1f, 0xf, 2, 0xf, 0x04c0, 32},	/* PATTERN_VREF */
	{0x1f, 0xf, 2, 0xf, 0x03c0, 32},	/* PATTERN_FULL_SSO_1T */
	{0x1f, 0xf, 2, 0xf, 0x0400, 32},	/* PATTERN_FULL_SSO_2T */
	{0x1f, 0xf, 2, 0xf, 0x0440, 32},	/* PATTERN_FULL_SSO_3T */
	{0x1f, 0xf, 2, 0xf, 0x0480, 32},	/* PATTERN_FULL_SSO_4T */
	{0x1f, 0xF, 2, 0xf, 0x6280, 32},	/* PATTERN_SSO_FULL_XTALK_DQ0 */
	{0x1f, 0xF, 2, 0xf, 0x6680, 32},	/* PATTERN_SSO_FULL_XTALK_DQ1 */
	{0x1f, 0xF, 2, 0xf, 0x6A80, 32},	/* PATTERN_SSO_FULL_XTALK_DQ2 */
	{0x1f, 0xF, 2, 0xf, 0x6E80, 32},	/* PATTERN_SSO_FULL_XTALK_DQ3 */
	{0x1f, 0xF, 2, 0xf, 0x7280, 32},	/* PATTERN_SSO_FULL_XTALK_DQ4 */
	{0x1f, 0xF, 2, 0xf, 0x7680, 32},	/* PATTERN_SSO_FULL_XTALK_DQ5 */
	{0x1f, 0xF, 2, 0xf, 0x7A80, 32},	/* PATTERN_SSO_FULL_XTALK_DQ6 */
	{0x1f, 0xF, 2, 0xf, 0x7E80, 32},	/* PATTERN_SSO_FULL_XTALK_DQ7 */
	{0x1f, 0xF, 2, 0xf, 0x8280, 32},	/* PATTERN_SSO_XTALK_FREE_DQ0 */
	{0x1f, 0xF, 2, 0xf, 0x8680, 32},	/* PATTERN_SSO_XTALK_FREE_DQ1 */
	{0x1f, 0xF, 2, 0xf, 0x8A80, 32},	/* PATTERN_SSO_XTALK_FREE_DQ2 */
	{0x1f, 0xF, 2, 0xf, 0x8E80, 32},	/* PATTERN_SSO_XTALK_FREE_DQ3 */
	{0x1f, 0xF, 2, 0xf, 0x9280, 32},	/* PATTERN_SSO_XTALK_FREE_DQ4 */
	{0x1f, 0xF, 2, 0xf, 0x9680, 32},	/* PATTERN_SSO_XTALK_FREE_DQ5 */
	{0x1f, 0xF, 2, 0xf, 0x9A80, 32},	/* PATTERN_SSO_XTALK_FREE_DQ6 */
	{0x1f, 0xF, 2, 0xf, 0x9E80, 32},	/* PATTERN_SSO_XTALK_FREE_DQ7 */
	{0x1f, 0xF, 2, 0xf, 0xA280, 32}	/* PATTERN_ISI_XTALK_FREE */
	/* Note: actual start_address is "<< 3" of defined address */
};

u32 train_dev_num;
enum hws_ddr_cs traintrain_cs_type;
u32 train_pup_num;
enum hws_training_result train_result_type;
enum hws_control_element train_control_element;
enum hws_search_dir traine_search_dir;
enum hws_dir train_direction;
u32 train_if_select;
u32 train_init_value;
u32 train_number_iterations;
enum hws_pattern train_pattern;
enum hws_edge_compare train_edge_compare;
u32 train_cs_num;
u32 train_if_acess, train_if_id, train_pup_access;
u32 max_polling_for_done = 1000000;

u32 *ddr3_tip_get_buf_ptr(u32 dev_num, enum hws_search_dir search,
			  enum hws_training_result result_type,
			  u32 interface_num)
{
	u32 *buf_ptr = NULL;

	buf_ptr = &training_res
		[MAX_INTERFACE_NUM * MAX_BUS_NUM * BUS_WIDTH_IN_BITS * search +
		 interface_num * MAX_BUS_NUM * BUS_WIDTH_IN_BITS];

	return buf_ptr;
}

enum {
	PASS,
	FAIL
};
/*
 * IP Training search
 * Note: for one edge search only from fail to pass, else jitter can
 * be entered into solution.
 */
int ddr3_tip_ip_training(u32 dev_num, enum hws_access_type access_type,
			 u32 interface_num,
			 enum hws_access_type pup_access_type,
			 u32 pup_num, enum hws_training_result result_type,
			 enum hws_control_element control_element,
			 enum hws_search_dir search_dir, enum hws_dir direction,
			 u32 interface_mask, u32 init_value, u32 num_iter,
			 enum hws_pattern pattern,
			 enum hws_edge_compare edge_comp,
			 enum hws_ddr_cs cs_type, u32 cs_num,
			 enum hws_training_ip_stat *train_status)
{
	u32 mask_dq_num_of_regs, mask_pup_num_of_regs, index_cnt,
		reg_data, pup_id;
	u32 tx_burst_size;
	u32 delay_between_burst;
	u32 rd_mode;
	u32 data;
	struct pattern_info *pattern_table = ddr3_tip_get_pattern_table();
	u16 *mask_results_pup_reg_map = ddr3_tip_get_mask_results_pup_reg_map();
	u16 *mask_results_dq_reg_map = ddr3_tip_get_mask_results_dq_reg();
	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	if (pup_num >= octets_per_if_num) {
		DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR,
					 ("pup_num %d not valid\n", pup_num));
	}
	if (interface_num >= MAX_INTERFACE_NUM) {
		DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR,
					 ("if_id %d not valid\n",
					  interface_num));
	}
	if (train_status == NULL) {
		DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR,
					 ("error param 4\n"));
		return MV_BAD_PARAM;
	}

	/* load pattern */
	if (cs_type == CS_SINGLE) {
		/* All CSs to CS0 */
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, access_type, interface_num,
			      DUAL_DUNIT_CFG_REG, 1 << 3, 1 << 3));
		/* All CSs to CS0 */
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, access_type, interface_num,
			      ODPG_DATA_CTRL_REG,
			      (0x3 | (effective_cs << 26)), 0xc000003));
	} else {
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, access_type, interface_num,
			      DUAL_DUNIT_CFG_REG, 0, 1 << 3));
		/* CS select */
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, access_type, interface_num,
			      ODPG_DATA_CTRL_REG, 0x3 | cs_num << 26,
			      0x3 | 3 << 26));
	}

	/* load pattern to ODPG */
	ddr3_tip_load_pattern_to_odpg(dev_num, access_type, interface_num,
				      pattern,
				      pattern_table[pattern].start_addr);
	tx_burst_size = (direction == OPER_WRITE) ?
		pattern_table[pattern].tx_burst_size : 0;
	delay_between_burst = (direction == OPER_WRITE) ? 2 : 0;
	rd_mode = (direction == OPER_WRITE) ? 1 : 0;
	CHECK_STATUS(ddr3_tip_configure_odpg
		     (dev_num, access_type, interface_num, direction,
		      pattern_table[pattern].num_of_phases_tx, tx_burst_size,
		      pattern_table[pattern].num_of_phases_rx,
		      delay_between_burst, rd_mode, effective_cs, STRESS_NONE,
		      DURATION_SINGLE));
	reg_data = (direction == OPER_READ) ? 0 : (0x3 << 30);
	reg_data |= (direction == OPER_READ) ? 0x60 : 0xfa;
	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, access_type, interface_num,
		      ODPG_WR_RD_MODE_ENA_REG, reg_data,
		      MASK_ALL_BITS));
	reg_data = (edge_comp == EDGE_PF || edge_comp == EDGE_FP) ? 0 : 1 << 6;
	reg_data |= (edge_comp == EDGE_PF || edge_comp == EDGE_PFP) ?
		(1 << 7) : 0;

	/* change from Pass to Fail will lock the result */
	if (pup_access_type == ACCESS_TYPE_MULTICAST)
		reg_data |= 0xe << 14;
	else
		reg_data |= pup_num << 14;

	if (edge_comp == EDGE_FP) {
		/* don't search for real edge change, only the state */
		reg_data |= (0 << 20);
	} else if (edge_comp == EDGE_FPF) {
		reg_data |= (0 << 20);
	} else {
		reg_data |= (3 << 20);
	}

	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, access_type, interface_num,
		      GENERAL_TRAINING_OPCODE_REG,
		      reg_data | (0x7 << 8) | (0x7 << 11),
		      (0x3 | (0x3 << 2) | (0x3 << 6) | (1 << 5) | (0x7 << 8) |
		       (0x7 << 11) | (0xf << 14) | (0x3 << 18) | (3 << 20))));
	reg_data = (search_dir == HWS_LOW2HIGH) ? 0 : (1 << 8);
	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, access_type, interface_num, OPCODE_REG0_REG(1),
		      1 | reg_data | init_value << 9 | (1 << 25) | (1 << 26),
		      0xff | (1 << 8) | (0xffff << 9) | (1 << 25) | (1 << 26)));

	/*
	 * Write2_dunit(0x10b4, Number_iteration, [15:0])
	 * Max number of iterations
	 */
	CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, interface_num,
				       OPCODE_REG1_REG(1), num_iter,
				       0xffff));
	if (control_element == HWS_CONTROL_ELEMENT_DQ_SKEW &&
	    direction == OPER_READ) {
		/*
		 * Write2_dunit(0x10c0, 0x5f, [7:0])
		 * MC PBS Reg Address at DDR PHY
		 */
		reg_data = PBS_RX_BCAST_PHY_REG(effective_cs);
	} else if (control_element == HWS_CONTROL_ELEMENT_DQ_SKEW &&
		   direction == OPER_WRITE) {
		reg_data = PBS_TX_BCAST_PHY_REG(effective_cs);
	} else if (control_element == HWS_CONTROL_ELEMENT_ADLL &&
		   direction == OPER_WRITE) {
		/*
		 * LOOP 0x00000001 + 4*n:
		 * where n (0-3) represents M_CS number
		 */
		/*
		 * Write2_dunit(0x10c0, 0x1, [7:0])
		 * ADLL WR Reg Address at DDR PHY
		 */
		reg_data = CTX_PHY_REG(effective_cs);
	} else if (control_element == HWS_CONTROL_ELEMENT_ADLL &&
		   direction == OPER_READ) {
		/* ADLL RD Reg Address at DDR PHY */
		reg_data = CRX_PHY_REG(effective_cs);
	} else if (control_element == HWS_CONTROL_ELEMENT_DQS_SKEW &&
		   direction == OPER_WRITE) {
		/* TBD not defined in 0.5.0 requirement */
	} else if (control_element == HWS_CONTROL_ELEMENT_DQS_SKEW &&
		   direction == OPER_READ) {
		/* TBD not defined in 0.5.0 requirement */
	}

	reg_data |= (0x6 << 28);
	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, access_type, interface_num, CAL_PHY_REG(1),
		      reg_data | (init_value << 8),
		      0xff | (0xffff << 8) | (0xf << 24) | (u32) (0xf << 28)));

	mask_dq_num_of_regs = octets_per_if_num * BUS_WIDTH_IN_BITS;
	mask_pup_num_of_regs = octets_per_if_num;

	if (result_type == RESULT_PER_BIT) {
		for (index_cnt = 0; index_cnt < mask_dq_num_of_regs;
		     index_cnt++) {
			CHECK_STATUS(ddr3_tip_if_write
				     (dev_num, access_type, interface_num,
				      mask_results_dq_reg_map[index_cnt], 0,
				      1 << 24));
		}

		/* Mask disabled buses */
		for (pup_id = 0; pup_id < octets_per_if_num;
		     pup_id++) {
			if (IS_BUS_ACTIVE(tm->bus_act_mask, pup_id) == 1)
				continue;

			for (index_cnt = (pup_id * 8); index_cnt < (pup_id + 1) * 8; index_cnt++) {
				CHECK_STATUS(ddr3_tip_if_write
					     (dev_num, access_type,
					      interface_num,
					      mask_results_dq_reg_map
					      [index_cnt], (1 << 24), 1 << 24));
			}
		}

		for (index_cnt = 0; index_cnt < mask_pup_num_of_regs;
		     index_cnt++) {
			CHECK_STATUS(ddr3_tip_if_write
				     (dev_num, access_type, interface_num,
				      mask_results_pup_reg_map[index_cnt],
				      (1 << 24), 1 << 24));
		}
	} else if (result_type == RESULT_PER_BYTE) {
		/* write to adll */
		for (index_cnt = 0; index_cnt < mask_pup_num_of_regs;
		     index_cnt++) {
			CHECK_STATUS(ddr3_tip_if_write
				     (dev_num, access_type, interface_num,
				      mask_results_pup_reg_map[index_cnt], 0,
				      1 << 24));
		}
		for (index_cnt = 0; index_cnt < mask_dq_num_of_regs;
		     index_cnt++) {
			CHECK_STATUS(ddr3_tip_if_write
				     (dev_num, access_type, interface_num,
				      mask_results_dq_reg_map[index_cnt],
				      (1 << 24), (1 << 24)));
		}
	}

	/* trigger training */
	mv_ddr_training_enable();

	/* wa for 16-bit mode: wait for all rfu tests to finish or timeout */
	mdelay(1);

	/* check for training done */
	if (mv_ddr_is_training_done(MAX_POLLING_ITERATIONS, &data) != MV_OK) {
		train_status[0] = HWS_TRAINING_IP_STATUS_TIMEOUT;
	} else { /* training done; check for pass */
		if (data == PASS)
			train_status[0] = HWS_TRAINING_IP_STATUS_SUCCESS;
		else
			train_status[0] = HWS_TRAINING_IP_STATUS_FAIL;
	}

	ddr3_tip_if_write(0, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			  ODPG_DATA_CTRL_REG, 0, MASK_ALL_BITS);

	return MV_OK;
}

/*
 * Load expected Pattern to ODPG
 */
int ddr3_tip_load_pattern_to_odpg(u32 dev_num, enum hws_access_type access_type,
				  u32 if_id, enum hws_pattern pattern,
				  u32 load_addr)
{
	u32 pattern_length_cnt = 0;
	struct pattern_info *pattern_table = ddr3_tip_get_pattern_table();
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	for (pattern_length_cnt = 0;
	     pattern_length_cnt < pattern_table[pattern].pattern_len;
	     pattern_length_cnt++) { /* FIXME: the ecc patch below is only for a7040 A0 */
		if (MV_DDR_IS_64BIT_DRAM_MODE(tm->bus_act_mask)/* || tm->bus_act_mask == MV_DDR_32BIT_ECC_PUP8_BUS_MASK*/) {
			CHECK_STATUS(ddr3_tip_if_write
				     (dev_num, access_type, if_id,
				      ODPG_DATA_WR_DATA_LOW_REG,
				      pattern_table_get_word(dev_num, pattern,
							     (u8) (pattern_length_cnt)),
				      MASK_ALL_BITS));
			CHECK_STATUS(ddr3_tip_if_write
				     (dev_num, access_type, if_id,
				      ODPG_DATA_WR_DATA_HIGH_REG,
				      pattern_table_get_word(dev_num, pattern,
							     (u8) (pattern_length_cnt)),
				      MASK_ALL_BITS));
		} else {
			CHECK_STATUS(ddr3_tip_if_write
				     (dev_num, access_type, if_id,
				      ODPG_DATA_WR_DATA_LOW_REG,
				      pattern_table_get_word(dev_num, pattern,
							     (u8) (pattern_length_cnt * 2)),
				      MASK_ALL_BITS));
			CHECK_STATUS(ddr3_tip_if_write
				     (dev_num, access_type, if_id,
				      ODPG_DATA_WR_DATA_HIGH_REG,
				      pattern_table_get_word(dev_num, pattern,
							     (u8) (pattern_length_cnt * 2 + 1)),
				      MASK_ALL_BITS));
		}
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, access_type, if_id,
			      ODPG_DATA_WR_ADDR_REG, pattern_length_cnt,
			      MASK_ALL_BITS));
	}

	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, access_type, if_id,
		      ODPG_DATA_BUFFER_OFFS_REG, load_addr, MASK_ALL_BITS));

	return MV_OK;
}

/*
 * Configure ODPG
 */
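/*
 * Sketch of the ODPG_DATA_CTRL_REG layout as packed by this function; the
 * field widths are inferred from the adjacent shifts below, not from a
 * datasheet: [2] single_pattern, [10:5] tx_phases, [14:11] tx_burst_size,
 * [20:15] delay_between_burst, [24:21] rx_phases, [25] rd_mode,
 * [28:26] cs_num, [31:29] addr_stress_jump. The write mask 0xaffffffc
 * leaves bits [1:0], [28] and [30] untouched.
 */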
int ddr3_tip_configure_odpg(u32 dev_num, enum hws_access_type access_type,
			    u32 if_id, enum hws_dir direction, u32 tx_phases,
			    u32 tx_burst_size, u32 rx_phases,
			    u32 delay_between_burst, u32 rd_mode, u32 cs_num,
			    u32 addr_stress_jump, u32 single_pattern)
{
	u32 data_value = 0;
	int ret;

	data_value = ((single_pattern << 2) | (tx_phases << 5) |
		      (tx_burst_size << 11) | (delay_between_burst << 15) |
		      (rx_phases << 21) | (rd_mode << 25) | (cs_num << 26) |
		      (addr_stress_jump << 29));
	ret = ddr3_tip_if_write(dev_num, access_type, if_id,
				ODPG_DATA_CTRL_REG, data_value, 0xaffffffc);
	if (ret != MV_OK)
		return ret;

	return MV_OK;
}

int ddr3_tip_process_result(u32 *ar_result, enum hws_edge e_edge,
			    enum hws_edge_search e_edge_search,
			    u32 *edge_result)
{
	u32 i, res;
	int tap_val, max_val = -10000, min_val = 10000;
	int lock_success = 1;

	for (i = 0; i < BUS_WIDTH_IN_BITS; i++) {
		res = GET_LOCK_RESULT(ar_result[i]);
		if (res == 0) {
			lock_success = 0;
			DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR,
						 ("lock failed for bit %d\n", i));
			break;
		}
	}

	if (lock_success == 1) {
		for (i = 0; i < BUS_WIDTH_IN_BITS; i++) {
			tap_val = GET_TAP_RESULT(ar_result[i], e_edge);
			if (tap_val > max_val)
				max_val = tap_val;
			if (tap_val < min_val)
				min_val = tap_val;
			if (e_edge_search == TRAINING_EDGE_MAX)
				*edge_result = (u32) max_val;
			else
				*edge_result = (u32) min_val;

			DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR,
						 ("i %d ar_result[i] 0x%x tap_val %d max_val %d min_val %d Edge_result %d\n",
						  i, ar_result[i], tap_val,
						  max_val, min_val,
						  *edge_result));
		}
	} else {
		return MV_FAIL;
	}

	return MV_OK;
}
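
/*
 * Usage sketch (illustrative only, not called by the driver in this form):
 * given the per-bit results of one subphy, extract the maximum edge-1 tap:
 *
 *	u32 edge;
 *	u32 *res = ddr3_tip_get_buf_ptr(0, HWS_LOW2HIGH, RESULT_PER_BIT, 0);
 *	if (ddr3_tip_process_result(res, EDGE_1, TRAINING_EDGE_MAX, &edge) == MV_OK)
 *		printf("max edge tap %u\n", edge);
 *
 * The call returns MV_FAIL if any of the BUS_WIDTH_IN_BITS results lacks a
 * lock indication.
 */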

/*
 * Read training search result
 */
int ddr3_tip_read_training_result(u32 dev_num, u32 if_id,
				  enum hws_access_type pup_access_type,
				  u32 pup_num, u32 bit_num,
				  enum hws_search_dir search,
				  enum hws_dir direction,
				  enum hws_training_result result_type,
				  enum hws_training_load_op operation,
				  u32 cs_num_type, u32 **load_res,
				  int is_read_from_db, u8 cons_tap,
				  int is_check_result_validity)
{
	u32 reg_offset, pup_cnt, start_pup, end_pup, start_reg, end_reg;
	u32 *interface_train_res = NULL;
	u16 *reg_addr = NULL;
	u32 read_data[MAX_INTERFACE_NUM];
	u16 *mask_results_pup_reg_map = ddr3_tip_get_mask_results_pup_reg_map();
	u16 *mask_results_dq_reg_map = ddr3_tip_get_mask_results_dq_reg();
	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	/*
	 * Agreed assumption: all CS masks contain the same number of bits,
	 * i.e. in multi-CS, the number of CSs per memory is the same for
	 * all pups
	 */
	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, ACCESS_TYPE_UNICAST, if_id, DUAL_DUNIT_CFG_REG,
		      (cs_num_type == 0) ? 1 << 3 : 0, (1 << 3)));
	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, ACCESS_TYPE_UNICAST, if_id,
		      ODPG_DATA_CTRL_REG, (cs_num_type << 26), (3 << 26)));
	DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_TRACE,
				 ("Read_from_d_b %d cs_type %d oper %d result_type %d direction %d search %d pup_num %d if_id %d pup_access_type %d\n",
				  is_read_from_db, cs_num_type, operation,
				  result_type, direction, search, pup_num,
				  if_id, pup_access_type));

	if ((load_res == NULL) && (is_read_from_db == 1)) {
		DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR,
					 ("ddr3_tip_read_training_result load_res = NULL"));
		return MV_FAIL;
	}
	if (pup_num >= octets_per_if_num) {
		DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR,
					 ("pup_num %d not valid\n", pup_num));
	}
	if (if_id >= MAX_INTERFACE_NUM) {
		DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR,
					 ("if_id %d not valid\n", if_id));
	}
	if (result_type == RESULT_PER_BIT)
		reg_addr = mask_results_dq_reg_map;
	else
		reg_addr = mask_results_pup_reg_map;
	if (pup_access_type == ACCESS_TYPE_UNICAST) {
		start_pup = pup_num;
		end_pup = pup_num;
	} else { /* pup_access_type == ACCESS_TYPE_MULTICAST */

		start_pup = 0;
		end_pup = octets_per_if_num - 1;
	}

	for (pup_cnt = start_pup; pup_cnt <= end_pup; pup_cnt++) {
		VALIDATE_BUS_ACTIVE(tm->bus_act_mask, pup_cnt);
		DEBUG_TRAINING_IP_ENGINE(
			DEBUG_LEVEL_TRACE,
			("if_id %d start_pup %d end_pup %d pup_cnt %d\n",
			 if_id, start_pup, end_pup, pup_cnt));
		if (result_type == RESULT_PER_BIT) {
			if (bit_num == ALL_BITS_PER_PUP) {
				start_reg = pup_cnt * BUS_WIDTH_IN_BITS;
				end_reg = (pup_cnt + 1) * BUS_WIDTH_IN_BITS - 1;
			} else {
				start_reg =
					pup_cnt * BUS_WIDTH_IN_BITS + bit_num;
				end_reg = pup_cnt * BUS_WIDTH_IN_BITS + bit_num;
			}
		} else {
			start_reg = pup_cnt;
			end_reg = pup_cnt;
		}

		interface_train_res =
			ddr3_tip_get_buf_ptr(dev_num, search, result_type,
					     if_id);
		DEBUG_TRAINING_IP_ENGINE(
			DEBUG_LEVEL_TRACE,
			("start_reg %d end_reg %d interface %p\n",
			 start_reg, end_reg, interface_train_res));
		if (interface_train_res == NULL) {
			DEBUG_TRAINING_IP_ENGINE(
				DEBUG_LEVEL_ERROR,
				("interface_train_res is NULL\n"));
			return MV_FAIL;
		}

		for (reg_offset = start_reg; reg_offset <= end_reg;
		     reg_offset++) {
			if (operation == TRAINING_LOAD_OPERATION_UNLOAD) {
				if (is_read_from_db == 0) {
					CHECK_STATUS(ddr3_tip_if_read
						     (dev_num,
						      ACCESS_TYPE_UNICAST,
						      if_id,
						      reg_addr[reg_offset],
						      read_data,
						      MASK_ALL_BITS));
					if (is_check_result_validity == 1) {
						if ((read_data[if_id] &
						     TIP_ENG_LOCK) == 0) {
							interface_train_res
								[reg_offset] =
								TIP_ENG_LOCK +
								TIP_TX_DLL_RANGE_MAX;
						} else {
							interface_train_res
								[reg_offset] =
								read_data
								[if_id] +
								cons_tap;
						}
					} else {
						interface_train_res[reg_offset]
							= read_data[if_id] +
							cons_tap;
					}
					DEBUG_TRAINING_IP_ENGINE
						(DEBUG_LEVEL_TRACE,
						 ("reg_offset %d value 0x%x addr %p\n",
						  reg_offset,
						  interface_train_res
						  [reg_offset],
						  &interface_train_res
						  [reg_offset]));
				} else {
					*load_res =
						&interface_train_res[start_reg];
					DEBUG_TRAINING_IP_ENGINE
						(DEBUG_LEVEL_TRACE,
						 ("*load_res %p\n", *load_res));
				}
			} else {
				DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_TRACE,
							 ("not supported\n"));
			}
		}
	}

	return MV_OK;
}

/*
 * Load all pattern to memory using ODPG
 */
int ddr3_tip_load_all_pattern_to_mem(u32 dev_num)
{
	u32 pattern = 0, if_id;
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
		training_result[training_stage][if_id] = TEST_SUCCESS;
	}

	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
		/* enable single cs */
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_UNICAST, if_id,
			      DUAL_DUNIT_CFG_REG, (1 << 3), (1 << 3)));
	}

	for (pattern = 0; pattern < PATTERN_LAST; pattern++) {
		if (pattern == PATTERN_TEST)
			continue;
		ddr3_tip_load_pattern_to_mem(dev_num, pattern);
	}

	return MV_OK;
}

/*
 * Load specific pattern to memory using ODPG
 */
int ddr3_tip_load_pattern_to_mem(u32 dev_num, enum hws_pattern pattern)
{
	u32 reg_data, if_id;
	struct pattern_info *pattern_table = ddr3_tip_get_pattern_table();
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	/* load pattern to memory */
	/*
	 * Write Tx mode, CS0, phases, Tx burst size, delay between burst,
	 * rx pattern phases
	 */
	reg_data =
		0x1 | (pattern_table[pattern].num_of_phases_tx << 5) |
		(pattern_table[pattern].tx_burst_size << 11) |
		(pattern_table[pattern].delay_between_bursts << 15) |
		(pattern_table[pattern].num_of_phases_rx << 21) | (0x1 << 25) |
		(effective_cs << 26);
	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
		      ODPG_DATA_CTRL_REG, reg_data, MASK_ALL_BITS));
	/* ODPG Write enable from BIST */
	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
		      ODPG_DATA_CTRL_REG, (0x1 | (effective_cs << 26)),
		      0xc000003));
	/* disable error injection */
	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
		      ODPG_DATA_WR_DATA_ERR_REG, 0, 0x1));
	/* load pattern to ODPG */
	ddr3_tip_load_pattern_to_odpg(dev_num, ACCESS_TYPE_MULTICAST,
				      PARAM_NOT_CARE, pattern,
				      pattern_table[pattern].start_addr);

	if (ddr3_tip_dev_attr_get(dev_num, MV_ATTR_TIP_REV) >= MV_TIP_REV_3) {
		for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
			VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);

			CHECK_STATUS(ddr3_tip_if_write
				     (dev_num, ACCESS_TYPE_UNICAST, if_id,
				      SDRAM_ODT_CTRL_HIGH_REG,
				      0x3, 0xf));
		}

		mv_ddr_odpg_enable();
	} else {
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			      ODPG_DATA_CTRL_REG, (u32)(0x1 << 31),
			      (u32)(0x1 << 31)));
	}
	mdelay(1);

	if (mv_ddr_is_odpg_done(MAX_POLLING_ITERATIONS) != MV_OK)
		return MV_FAIL;

	/* Disable ODPG and stop write to memory */
	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
		      ODPG_DATA_CTRL_REG, (0x1 << 30), (u32) (0x3 << 30)));

	/* return to default */
	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
		      ODPG_DATA_CTRL_REG, 0, MASK_ALL_BITS));

	if (ddr3_tip_dev_attr_get(dev_num, MV_ATTR_TIP_REV) >= MV_TIP_REV_3) {
		/* Disable odt0 for CS0 training - need to adjust for multi-CS */
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			      SDRAM_ODT_CTRL_HIGH_REG, 0x0, 0xf));
	}
	/* temporary added */
	mdelay(1);

	return MV_OK;
}

/*
 * Training search routine
 */
int ddr3_tip_ip_training_wrapper_int(u32 dev_num,
				     enum hws_access_type access_type,
				     u32 if_id,
				     enum hws_access_type pup_access_type,
				     u32 pup_num, u32 bit_num,
				     enum hws_training_result result_type,
				     enum hws_control_element control_element,
				     enum hws_search_dir search_dir,
				     enum hws_dir direction,
				     u32 interface_mask, u32 init_value_l2h,
				     u32 init_value_h2l, u32 num_iter,
				     enum hws_pattern pattern,
				     enum hws_edge_compare edge_comp,
				     enum hws_ddr_cs train_cs_type, u32 cs_num,
				     enum hws_training_ip_stat *train_status)
{
	u32 interface_num = 0, start_if, end_if, init_value_used;
	enum hws_search_dir search_dir_id, start_search, end_search;
	enum hws_edge_compare edge_comp_used;
	u8 cons_tap = 0;
	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	if (train_status == NULL) {
		DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR,
					 ("train_status is NULL\n"));
		return MV_FAIL;
	}

	if ((train_cs_type > CS_NON_SINGLE) ||
	    (edge_comp >= EDGE_PFP) ||
	    (pattern >= PATTERN_LAST) ||
	    (direction > OPER_WRITE_AND_READ) ||
	    (search_dir > HWS_HIGH2LOW) ||
	    (control_element > HWS_CONTROL_ELEMENT_DQS_SKEW) ||
	    (result_type > RESULT_PER_BYTE) ||
	    (pup_num >= octets_per_if_num) ||
	    (pup_access_type > ACCESS_TYPE_MULTICAST) ||
	    (if_id > 11) || (access_type > ACCESS_TYPE_MULTICAST)) {
		DEBUG_TRAINING_IP_ENGINE(
			DEBUG_LEVEL_ERROR,
			("wrong parameter train_cs_type %d edge_comp %d pattern %d direction %d search_dir %d control_element %d result_type %d pup_num %d pup_access_type %d if_id %d access_type %d\n",
			 train_cs_type, edge_comp, pattern, direction,
			 search_dir, control_element, result_type, pup_num,
			 pup_access_type, if_id, access_type));
		return MV_FAIL;
	}

	if (edge_comp == EDGE_FPF) {
		start_search = HWS_LOW2HIGH;
		end_search = HWS_HIGH2LOW;
		edge_comp_used = EDGE_FP;
	} else {
		start_search = search_dir;
		end_search = search_dir;
		edge_comp_used = edge_comp;
	}

	for (search_dir_id = start_search; search_dir_id <= end_search;
	     search_dir_id++) {
		init_value_used = (search_dir_id == HWS_LOW2HIGH) ?
			init_value_l2h : init_value_h2l;
		DEBUG_TRAINING_IP_ENGINE(
			DEBUG_LEVEL_TRACE,
			("dev_num %d, access_type %d, if_id %d, pup_access_type %d, pup_num %d, result_type %d, control_element %d search_dir_id %d, direction %d, interface_mask %d, init_value_used %d, num_iter %d, pattern %d, edge_comp_used %d, train_cs_type %d, cs_num %d\n",
			 dev_num, access_type, if_id, pup_access_type, pup_num,
			 result_type, control_element, search_dir_id,
			 direction, interface_mask, init_value_used, num_iter,
			 pattern, edge_comp_used, train_cs_type, cs_num));

		ddr3_tip_ip_training(dev_num, access_type, if_id,
				     pup_access_type, pup_num, result_type,
				     control_element, search_dir_id, direction,
				     interface_mask, init_value_used, num_iter,
				     pattern, edge_comp_used, train_cs_type,
				     cs_num, train_status);
		if (access_type == ACCESS_TYPE_MULTICAST) {
			start_if = 0;
			end_if = MAX_INTERFACE_NUM - 1;
		} else {
			start_if = if_id;
			end_if = if_id;
		}

		for (interface_num = start_if; interface_num <= end_if;
		     interface_num++) {
			VALIDATE_IF_ACTIVE(tm->if_act_mask, interface_num);
			cs_num = 0;
			CHECK_STATUS(ddr3_tip_read_training_result
				     (dev_num, interface_num, pup_access_type,
				      pup_num, bit_num, search_dir_id,
				      direction, result_type,
				      TRAINING_LOAD_OPERATION_UNLOAD,
				      train_cs_type, NULL, 0, cons_tap,
				      0));
		}
	}

	return MV_OK;
}
/*
 * Training search & read result routine
 * This function implements the search algorithm.
 * It first calls ddr3_tip_ip_training_wrapper_int, which triggers the search from l2h and h2l.
 * This function handles the rx and tx search cases:
 * in case of rx it only triggers the search (l2h and h2l);
 * in case of tx there are 3 optional algorithm phases:
 * phase 1:
 * it first triggers the search and handles the results as follows (phase 1):
 * each bit, defined by the two edges found by the search (e1 or VW_L and e2 or VW_H), matches one of the cases:
 * 1. BIT_LOW_UI	0 =< VW =< 31	in case of jitter use: VW_L <= 31, VW_H <= 31
 * 2. BIT_HIGH_UI	32 =< VW =< 63	in case of jitter use: VW_L >= 32, VW_H >= 32
 * 3. BIT_SPLIT_IN	VW_L <= 31 & VW_H >= 32
 * 4. BIT_SPLIT_OUT*	VW_H < 32 & VW_L > 32
 * note: the VW units are adll taps
 * phase 2:
 * only the BIT_SPLIT_OUT case requires another search (phase 2), from the middle range in two directions, h2l and l2h,
 * because only this case is not locked by the search engine in the first search trigger (phase 1).
 * phase 3:
 * each subphy is categorized according to its bits' definition.
 * the subphy cases are as follows:
 * 1. BYTE_NOT_DEFINED		the byte has not yet been categorized
 * 2. BYTE_HOMOGENEOUS_LOW	0 =< VW =< 31
 * 3. BYTE_HOMOGENEOUS_HIGH	32 =< VW =< 63
 * 4. BYTE_HOMOGENEOUS_SPLIT_IN	VW_L <= 31 & VW_H >= 32,
 *				or the center of all bits in the byte =< 31
 * 5. BYTE_HOMOGENEOUS_SPLIT_OUT	VW_H < 32 & VW_L > 32
 * 6. BYTE_SPLIT_OUT_MIX	at least one bit is in the split-out state and one bit is in another state,
 *				or the center of all bits in the byte => 32
 * after the two phases above, a center valid window for each subphy is calculated accordingly:
 * center valid window = maximum center of all bits in the subphy - minimum center of all bits in the subphy.
 * now decisions are made in each subphy as follows:
 * all subphys which are homogeneous remain as is;
 * all subphys which are homogeneous low or homogeneous high, and whose center valid window is less than 32,
 * are marked as homogeneous split in.
 * the bits in the bytes which are BYTE_SPLIT_OUT_MIX then need to be reorganized and are handled as follows:
 * all bits which are BIT_LOW_UI have 64 adll taps added;
 * this will hopefully ensure that all the bits in the subphy can be sampled by the dqs
 */
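/*
 * Worked example (illustrative), with e1 = l2h edge and e2 = h2l edge in
 * adll taps: a bit with (e1, e2) = (10, 25) is BIT_LOW_UI, (40, 55) is
 * BIT_HIGH_UI, (20, 45) is BIT_SPLIT_IN, and a write-direction bit with
 * e2 - e1 > 32, e.g. (5, 60), is treated as BIT_SPLIT_OUT and re-searched
 * from the middle of the range in phase 2.
 */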
int ddr3_tip_ip_training_wrapper(u32 dev_num, enum hws_access_type access_type,
				 u32 if_id,
				 enum hws_access_type pup_access_type,
				 u32 pup_num,
				 enum hws_training_result result_type,
				 enum hws_control_element control_element,
				 enum hws_search_dir search_dir,
				 enum hws_dir direction, u32 interface_mask,
				 u32 init_value_l2h, u32 init_value_h2l,
				 u32 num_iter, enum hws_pattern pattern,
				 enum hws_edge_compare edge_comp,
				 enum hws_ddr_cs train_cs_type, u32 cs_num,
				 enum hws_training_ip_stat *train_status)
{
	u8 e1, e2;
	u32 bit_id, start_if, end_if, bit_end = 0;
	u32 *result[HWS_SEARCH_DIR_LIMIT] = { 0 };
	u8 cons_tap = (direction == OPER_WRITE) ? (64) : (0);
	u8 bit_bit_mask[MAX_BUS_NUM] = { 0 }, bit_bit_mask_active = 0;
	u8 bit_state[MAX_BUS_NUM * BUS_WIDTH_IN_BITS] = {0};
	u8 h2l_adll_value[MAX_BUS_NUM][BUS_WIDTH_IN_BITS];
	u8 l2h_adll_value[MAX_BUS_NUM][BUS_WIDTH_IN_BITS];
	u8 center_subphy_adll_window[MAX_BUS_NUM];
	u8 min_center_subphy_adll[MAX_BUS_NUM];
	u8 max_center_subphy_adll[MAX_BUS_NUM];
	u32 *l2h_if_train_res = NULL;
	u32 *h2l_if_train_res = NULL;
	enum hws_search_dir search_dir_id;
	int status;
	u32 bit_lock_result;

	u8 sybphy_id;
	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	if (pup_num >= octets_per_if_num) {
		DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR,
					 ("pup_num %d not valid\n", pup_num));
	}

	if (if_id >= MAX_INTERFACE_NUM) {
		DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR,
					 ("if_id %d not valid\n", if_id));
	}

	status = ddr3_tip_ip_training_wrapper_int
		(dev_num, access_type, if_id, pup_access_type, pup_num,
		 ALL_BITS_PER_PUP, result_type, control_element,
		 search_dir, direction, interface_mask, init_value_l2h,
		 init_value_h2l, num_iter, pattern, edge_comp,
		 train_cs_type, cs_num, train_status);

	if (MV_OK != status)
		return status;

	if (access_type == ACCESS_TYPE_MULTICAST) {
		start_if = 0;
		end_if = MAX_INTERFACE_NUM - 1;
	} else {
		start_if = if_id;
		end_if = if_id;
	}

	for (if_id = start_if; if_id <= end_if; if_id++) {
		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
		/* zero the database */
		bit_bit_mask_active = 0; /* clean the flag for level2 search */
		memset(bit_state, 0, sizeof(bit_state));
		/* phase 1 */
		for (sybphy_id = 0; sybphy_id < octets_per_if_num; sybphy_id++) {
			VALIDATE_BUS_ACTIVE(tm->bus_act_mask, sybphy_id);
			if (result_type == RESULT_PER_BIT)
				bit_end = BUS_WIDTH_IN_BITS;
			else
				bit_end = 0;

			/* zero the data base */
			bit_bit_mask[sybphy_id] = 0;
			byte_status[if_id][sybphy_id] = BYTE_NOT_DEFINED;
			for (bit_id = 0; bit_id < bit_end; bit_id++) {
				h2l_adll_value[sybphy_id][bit_id] = 64;
				l2h_adll_value[sybphy_id][bit_id] = 0;
				for (search_dir_id = HWS_LOW2HIGH; search_dir_id <= HWS_HIGH2LOW;
				     search_dir_id++) {
					status = ddr3_tip_read_training_result
						(dev_num, if_id,
						 ACCESS_TYPE_UNICAST, sybphy_id, bit_id,
						 search_dir_id, direction, result_type,
						 TRAINING_LOAD_OPERATION_UNLOAD, CS_SINGLE,
						 &result[search_dir_id], 1, 0, 0);

					if (MV_OK != status)
						return status;
				}

				e1 = GET_TAP_RESULT(result[HWS_LOW2HIGH][0], EDGE_1);
				e2 = GET_TAP_RESULT(result[HWS_HIGH2LOW][0], EDGE_1);
				DEBUG_TRAINING_IP_ENGINE
					(DEBUG_LEVEL_INFO,
					 ("if_id %d sybphy_id %d bit %d l2h 0x%x (e1 0x%x) h2l 0x%x (e2 0x%x)\n",
					  if_id, sybphy_id, bit_id, result[HWS_LOW2HIGH][0], e1,
					  result[HWS_HIGH2LOW][0], e2));
				bit_lock_result =
					(GET_LOCK_RESULT(result[HWS_LOW2HIGH][0]) &&
					 GET_LOCK_RESULT(result[HWS_HIGH2LOW][0]));

				if (bit_lock_result) {
					/* in case of read operation set the byte status as homogeneous low */
					if (direction == OPER_READ) {
						byte_status[if_id][sybphy_id] |= BYTE_HOMOGENEOUS_LOW;
					} else if ((e2 - e1) > 32) { /* oper_write */
						/* split out */
						bit_state[sybphy_id * BUS_WIDTH_IN_BITS + bit_id] =
							BIT_SPLIT_OUT;
						byte_status[if_id][sybphy_id] |= BYTE_HOMOGENEOUS_SPLIT_OUT;
						/* mark problem bits */
						bit_bit_mask[sybphy_id] |= (1 << bit_id);
						bit_bit_mask_active = 1;
						DEBUG_TRAINING_IP_ENGINE
							(DEBUG_LEVEL_TRACE,
							 ("if_id %d sybphy_id %d bit %d BIT_SPLIT_OUT\n",
							  if_id, sybphy_id, bit_id));
					} else {
						/* low ui */
						if (e1 <= 31 && e2 <= 31) {
							bit_state[sybphy_id * BUS_WIDTH_IN_BITS + bit_id] =
								BIT_LOW_UI;
							byte_status[if_id][sybphy_id] |= BYTE_HOMOGENEOUS_LOW;
							l2h_adll_value[sybphy_id][bit_id] = e1;
							h2l_adll_value[sybphy_id][bit_id] = e2;
							DEBUG_TRAINING_IP_ENGINE
								(DEBUG_LEVEL_TRACE,
								 ("if_id %d sybphy_id %d bit %d BIT_LOW_UI\n",
								  if_id, sybphy_id, bit_id));
						}
						/* high ui */
						if (e1 >= 32 && e2 >= 32) {
							bit_state[sybphy_id * BUS_WIDTH_IN_BITS + bit_id] =
								BIT_HIGH_UI;
							byte_status[if_id][sybphy_id] |= BYTE_HOMOGENEOUS_HIGH;
							l2h_adll_value[sybphy_id][bit_id] = e1;
							h2l_adll_value[sybphy_id][bit_id] = e2;
							DEBUG_TRAINING_IP_ENGINE
								(DEBUG_LEVEL_TRACE,
								 ("if_id %d sybphy_id %d bit %d BIT_HIGH_UI\n",
								  if_id, sybphy_id, bit_id));
						}
						/* split in */
						if (e1 <= 31 && e2 >= 32) {
							bit_state[sybphy_id * BUS_WIDTH_IN_BITS + bit_id] =
								BIT_SPLIT_IN;
							byte_status[if_id][sybphy_id] |=
								BYTE_HOMOGENEOUS_SPLIT_IN;
							l2h_adll_value[sybphy_id][bit_id] = e1;
							h2l_adll_value[sybphy_id][bit_id] = e2;
							DEBUG_TRAINING_IP_ENGINE
								(DEBUG_LEVEL_TRACE,
								 ("if_id %d sybphy_id %d bit %d BIT_SPLIT_IN\n",
								  if_id, sybphy_id, bit_id));
						}
					}
				} else {
					DEBUG_TRAINING_IP_ENGINE
						(DEBUG_LEVEL_INFO,
						 ("if_id %d sybphy_id %d bit %d l2h 0x%x (e1 0x%x) "
						  "h2l 0x%x (e2 0x%x): bit cannot be categorized\n",
						  if_id, sybphy_id, bit_id, result[HWS_LOW2HIGH][0], e1,
						  result[HWS_HIGH2LOW][0], e2));
					/* mark the byte as not defined */
					byte_status[if_id][sybphy_id] = BYTE_NOT_DEFINED;
					break; /* continue to next pup - no reason to analyze this byte */
				}
			} /* for all bits */
		} /* for all PUPs */

		/* phase 2 will occur only in write operation */
		if (bit_bit_mask_active != 0) {
			l2h_if_train_res = ddr3_tip_get_buf_ptr(dev_num, HWS_LOW2HIGH, result_type, if_id);
			h2l_if_train_res = ddr3_tip_get_buf_ptr(dev_num, HWS_HIGH2LOW, result_type, if_id);
			/* search from middle to end */
			ddr3_tip_ip_training
				(dev_num, ACCESS_TYPE_UNICAST,
				 if_id, ACCESS_TYPE_MULTICAST,
				 PARAM_NOT_CARE, result_type,
				 control_element, HWS_LOW2HIGH,
				 direction, interface_mask,
				 num_iter / 2, num_iter / 2,
				 pattern, EDGE_FP, train_cs_type,
				 cs_num, train_status);

			for (sybphy_id = 0; sybphy_id < octets_per_if_num; sybphy_id++) {
				VALIDATE_BUS_ACTIVE(tm->bus_act_mask, sybphy_id);
				if (byte_status[if_id][sybphy_id] != BYTE_NOT_DEFINED) {
					if (bit_bit_mask[sybphy_id] == 0)
						continue; /* this byte's bits have no split-out state */

					for (bit_id = 0; bit_id < bit_end; bit_id++) {
						if ((bit_bit_mask[sybphy_id] & (1 << bit_id)) == 0)
							continue; /* this bit is not split; go to next bit */
1296
1297 /* enter the result to the data base */
1298 status = ddr3_tip_read_training_result
1299 (dev_num, if_id, ACCESS_TYPE_UNICAST, sybphy_id,
1300 bit_id, HWS_LOW2HIGH, direction, result_type,
1301 TRAINING_LOAD_OPERATION_UNLOAD, CS_SINGLE,
1302 &l2h_if_train_res, 0, 0, 1);
1303
1304 if (MV_OK != status)
1305 return status;
1306
1307 l2h_adll_value[sybphy_id][bit_id] =
1308 l2h_if_train_res[sybphy_id *
1309 BUS_WIDTH_IN_BITS + bit_id] & PUP_RESULT_EDGE_1_MASK;
1310 }
1311 }
1312 }
1313 /* Search from middle to start */
1314 ddr3_tip_ip_training
1315 (dev_num, ACCESS_TYPE_UNICAST,
1316 if_id, ACCESS_TYPE_MULTICAST,
1317 PARAM_NOT_CARE, result_type,
1318 control_element, HWS_HIGH2LOW,
1319 direction, interface_mask,
1320 num_iter / 2, num_iter / 2,
1321 pattern, EDGE_FP, train_cs_type,
1322 cs_num, train_status);
1323
1324 for (sybphy_id = 0; sybphy_id < octets_per_if_num; sybphy_id++) {
1325 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, sybphy_id);
1326 if (byte_status[if_id][sybphy_id] != BYTE_NOT_DEFINED) {
1327 if (bit_bit_mask[sybphy_id] == 0)
1328 continue;
1329
1330 for (bit_id = 0; bit_id < bit_end; bit_id++) {
1331 if ((bit_bit_mask[sybphy_id] & (1 << bit_id)) == 0)
1332 continue;
1333
1334 status = ddr3_tip_read_training_result
1335 (dev_num, if_id, ACCESS_TYPE_UNICAST, sybphy_id,
1336 bit_id, HWS_HIGH2LOW, direction, result_type,
1337 TRAINING_LOAD_OPERATION_UNLOAD, CS_SINGLE,
1338 &h2l_if_train_res, 0, cons_tap, 1);
1339
1340 if (MV_OK != status)
1341 return status;
1342
1343 h2l_adll_value[sybphy_id][bit_id] =
1344 h2l_if_train_res[sybphy_id *
1345 BUS_WIDTH_IN_BITS + bit_id] & PUP_RESULT_EDGE_1_MASK;
1346 }
1347 }
1348 }
1349 } /* end if bit_bit_mask_active */
1350 /*
1351 * phase 3 will occur only in write operation
1352 * find the maximum and the minimum center of each subphy
1353 */
		for (sybphy_id = 0; sybphy_id < octets_per_if_num; sybphy_id++) {
			VALIDATE_BUS_ACTIVE(tm->bus_act_mask, sybphy_id);

			if ((byte_status[if_id][sybphy_id] != BYTE_NOT_DEFINED) && (direction == OPER_WRITE)) {
				/* clear the arrays and parameters */
				center_subphy_adll_window[sybphy_id] = 0;
				max_center_subphy_adll[sybphy_id] = 0;
				min_center_subphy_adll[sybphy_id] = 64;
				/* find the max and min center adll values in the current subphy */
				for (bit_id = 0; bit_id < bit_end; bit_id++) {
					/* debug print all the bit edges after alignment */
					DEBUG_TRAINING_IP_ENGINE
						(DEBUG_LEVEL_TRACE,
						 ("if_id %d sybphy_id %d bit %d l2h %d h2l %d\n",
						  if_id, sybphy_id, bit_id, l2h_adll_value[sybphy_id][bit_id],
						  h2l_adll_value[sybphy_id][bit_id]));

					if (((l2h_adll_value[sybphy_id][bit_id] +
					      h2l_adll_value[sybphy_id][bit_id]) / 2) >
					    max_center_subphy_adll[sybphy_id])
						max_center_subphy_adll[sybphy_id] =
							(l2h_adll_value[sybphy_id][bit_id] +
							 h2l_adll_value[sybphy_id][bit_id]) / 2;
					if (((l2h_adll_value[sybphy_id][bit_id] +
					      h2l_adll_value[sybphy_id][bit_id]) / 2) <
					    min_center_subphy_adll[sybphy_id])
						min_center_subphy_adll[sybphy_id] =
							(l2h_adll_value[sybphy_id][bit_id] +
							 h2l_adll_value[sybphy_id][bit_id]) / 2;
				}
				/* calculate the adll window (max - min center) of the current subphy */
				center_subphy_adll_window[sybphy_id] =
					max_center_subphy_adll[sybphy_id] -
					min_center_subphy_adll[sybphy_id];
				DEBUG_TRAINING_IP_ENGINE
					(DEBUG_LEVEL_TRACE,
					 ("if_id %d sybphy_id %d min center %d max center %d center %d\n",
					  if_id, sybphy_id, min_center_subphy_adll[sybphy_id],
					  max_center_subphy_adll[sybphy_id],
					  center_subphy_adll_window[sybphy_id]));
			}
		}
		/*
		 * check the byte state and fix bit states if needed;
		 * if the level 1 and 2 results above show the subphy is
		 * homogeneous, continue to the next subphy
		 */
		for (sybphy_id = 0; sybphy_id < octets_per_if_num; sybphy_id++) {
			VALIDATE_BUS_ACTIVE(tm->bus_act_mask, sybphy_id);
			if ((byte_status[if_id][sybphy_id] == BYTE_HOMOGENEOUS_LOW) ||
			    (byte_status[if_id][sybphy_id] == BYTE_HOMOGENEOUS_HIGH) ||
			    (byte_status[if_id][sybphy_id] == BYTE_HOMOGENEOUS_SPLIT_IN) ||
			    (byte_status[if_id][sybphy_id] == BYTE_HOMOGENEOUS_SPLIT_OUT) ||
			    (byte_status[if_id][sybphy_id] == BYTE_NOT_DEFINED))
				continue;
			/*
			 * if the adll window of all bits in the current subphy
			 * is at most 31 taps, the bits can be aligned within one ui;
			 * mark this subphy as homogeneous split-in
			 */
			if (center_subphy_adll_window[sybphy_id] <= 31)
				byte_status[if_id][sybphy_id] = BYTE_HOMOGENEOUS_SPLIT_IN;

			/*
			 * if the current byte is split-out and the window is bigger
			 * than 31, the byte can still be aligned: add 64 taps to the
			 * low-ui bits, aligning them to the other ui bits
			 */
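			/*
			 * Illustrative numbers (not from the source): a low-ui bit
			 * whose edge sits at tap 10 is shifted to 10 + 64 = 74,
			 * placing it in the same ui as a bit whose edge is at tap 70.
			 */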
			if (center_subphy_adll_window[sybphy_id] >= 32) {
				byte_status[if_id][sybphy_id] = BYTE_SPLIT_OUT_MIX;

				DEBUG_TRAINING_IP_ENGINE
					(DEBUG_LEVEL_TRACE,
					 ("if_id %d sybphy_id %d byte state 0x%x\n",
					  if_id, sybphy_id, byte_status[if_id][sybphy_id]));
				for (bit_id = 0; bit_id < bit_end; bit_id++) {
					if (bit_state[sybphy_id * BUS_WIDTH_IN_BITS + bit_id] == BIT_LOW_UI) {
						l2h_if_train_res[sybphy_id * BUS_WIDTH_IN_BITS + bit_id] += 64;
						h2l_if_train_res[sybphy_id * BUS_WIDTH_IN_BITS + bit_id] += 64;
						DEBUG_TRAINING_IP_ENGINE
							(DEBUG_LEVEL_TRACE,
							 ("if_id %d sybphy_id %d bit_id %d added 64 adlls\n",
							  if_id, sybphy_id, bit_id));
					}
				}
			}
		}
	} /* for all interfaces */

	return MV_OK;
}

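/*
 * Accessors for the per-interface, per-subphy byte training status kept
 * in the byte_status array (one of the BYTE_* states, e.g. BYTE_NOT_DEFINED).
 */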
u8 mv_ddr_tip_sub_phy_byte_status_get(u32 if_id, u32 subphy_id)
{
	return byte_status[if_id][subphy_id];
}

void mv_ddr_tip_sub_phy_byte_status_set(u32 if_id, u32 subphy_id, u8 byte_status_data)
{
	byte_status[if_id][subphy_id] = byte_status_data;
}

/*
 * Back up (b_load == 1) or restore (b_load == 0) the CTX, RL, and CRX
 * phy data registers of the current effective_cs for every active
 * interface/subphy, using the phy_reg_bk array as storage.
 */
int ddr3_tip_load_phy_values(int b_load)
{
	u32 bus_cnt = 0, if_id, dev_num = 0;
	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
		for (bus_cnt = 0; bus_cnt < octets_per_if_num; bus_cnt++) {
			VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_cnt);
			if (b_load == 1) {
				CHECK_STATUS(ddr3_tip_bus_read
					     (dev_num, if_id,
					      ACCESS_TYPE_UNICAST, bus_cnt,
					      DDR_PHY_DATA,
					      CTX_PHY_REG(effective_cs),
					      &phy_reg_bk[if_id][bus_cnt][0]));
				CHECK_STATUS(ddr3_tip_bus_read
					     (dev_num, if_id,
					      ACCESS_TYPE_UNICAST, bus_cnt,
					      DDR_PHY_DATA,
					      RL_PHY_REG(effective_cs),
					      &phy_reg_bk[if_id][bus_cnt][1]));
				CHECK_STATUS(ddr3_tip_bus_read
					     (dev_num, if_id,
					      ACCESS_TYPE_UNICAST, bus_cnt,
					      DDR_PHY_DATA,
					      CRX_PHY_REG(effective_cs),
					      &phy_reg_bk[if_id][bus_cnt][2]));
			} else {
				CHECK_STATUS(ddr3_tip_bus_write
					     (dev_num, ACCESS_TYPE_UNICAST,
					      if_id, ACCESS_TYPE_UNICAST,
					      bus_cnt, DDR_PHY_DATA,
					      CTX_PHY_REG(effective_cs),
					      phy_reg_bk[if_id][bus_cnt][0]));
				CHECK_STATUS(ddr3_tip_bus_write
					     (dev_num, ACCESS_TYPE_UNICAST,
					      if_id, ACCESS_TYPE_UNICAST,
					      bus_cnt, DDR_PHY_DATA,
					      RL_PHY_REG(effective_cs),
					      phy_reg_bk[if_id][bus_cnt][1]));
				CHECK_STATUS(ddr3_tip_bus_write
					     (dev_num, ACCESS_TYPE_UNICAST,
					      if_id, ACCESS_TYPE_UNICAST,
					      bus_cnt, DDR_PHY_DATA,
					      CRX_PHY_REG(effective_cs),
					      phy_reg_bk[if_id][bus_cnt][2]));
			}
		}
	}

	return MV_OK;
}

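/*
 * Exercise the training ip over a range of patterns: back up the phy
 * registers, run the search wrapper in every search direction for each
 * pattern in [start_pattern, end_pattern], dump the per-bit (or per-byte)
 * results, and finally restore the phy registers.
 */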
int ddr3_tip_training_ip_test(u32 dev_num, enum hws_training_result result_type,
			      enum hws_search_dir search_dir,
			      enum hws_dir direction,
			      enum hws_edge_compare edge,
			      u32 init_val1, u32 init_val2,
			      u32 num_of_iterations,
			      u32 start_pattern, u32 end_pattern)
{
	u32 pattern, if_id, pup_id;
	enum hws_training_ip_stat train_status[MAX_INTERFACE_NUM];
	u32 *res = NULL;
	u32 search_state = 0;
	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	ddr3_tip_load_phy_values(1);

	for (pattern = start_pattern; pattern <= end_pattern; pattern++) {
		for (search_state = 0; search_state < HWS_SEARCH_DIR_LIMIT;
		     search_state++) {
			ddr3_tip_ip_training_wrapper(dev_num,
						     ACCESS_TYPE_MULTICAST, 0,
						     ACCESS_TYPE_MULTICAST, 0,
						     result_type,
						     HWS_CONTROL_ELEMENT_ADLL,
						     search_dir, direction,
						     0xfff, init_val1,
						     init_val2,
						     num_of_iterations, pattern,
						     edge, CS_SINGLE,
						     PARAM_NOT_CARE,
						     train_status);

			for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
				VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
				for (pup_id = 0; pup_id < octets_per_if_num; pup_id++) {
					VALIDATE_BUS_ACTIVE(tm->bus_act_mask, pup_id);
					CHECK_STATUS(ddr3_tip_read_training_result
						     (dev_num, if_id,
						      ACCESS_TYPE_UNICAST, pup_id,
						      ALL_BITS_PER_PUP,
						      search_state,
						      direction, result_type,
						      TRAINING_LOAD_OPERATION_UNLOAD,
						      CS_SINGLE, &res, 1, 0, 0));
					if (result_type == RESULT_PER_BYTE) {
						DEBUG_TRAINING_IP_ENGINE
							(DEBUG_LEVEL_INFO,
							 ("search_state %d if_id %d pup_id %d 0x%x\n",
							  search_state, if_id,
							  pup_id, res[0]));
					} else {
						DEBUG_TRAINING_IP_ENGINE
							(DEBUG_LEVEL_INFO,
							 ("search_state %d if_id %d pup_id %d 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x\n",
							  search_state, if_id,
							  pup_id, res[0],
							  res[1], res[2],
							  res[3], res[4],
							  res[5], res[6],
							  res[7]));
					}
				}
			} /* interface */
		} /* search */
	} /* pattern */

	ddr3_tip_load_phy_values(0);

	return MV_OK;
}

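/* override the odpg start address of the given pattern table entry */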
int mv_ddr_pattern_start_addr_set(struct pattern_info *pattern_tbl, enum hws_pattern pattern, u32 addr)
{
	pattern_tbl[pattern].start_addr = addr;

	return MV_OK;
}

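/*
 * Select the pattern table that matches the active bus width:
 * 64-bit dram mode, 32-bit (i.e. not 16-bit) mode, or 16-bit mode.
 */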
struct pattern_info *ddr3_tip_get_pattern_table(void)
{
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	if (MV_DDR_IS_64BIT_DRAM_MODE(tm->bus_act_mask))
		return pattern_table_64;
	else if (DDR3_IS_16BIT_DRAM_MODE(tm->bus_act_mask) == 0)
		return pattern_table_32;
	else
		return pattern_table_16;
}

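/*
 * Return the per-bit result register map; on 5-subphy builds with the
 * ecc pup sitting on pup 3, the dedicated pup3-ecc map is used instead.
 */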
u16 *ddr3_tip_get_mask_results_dq_reg(void)
{
#if MAX_BUS_NUM == 5
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	if (DDR3_IS_ECC_PUP3_MODE(tm->bus_act_mask))
		return mask_results_dq_reg_map_pup3_ecc;
	else
#endif
	return mask_results_dq_reg_map;
}

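/* same selection as above, but for the per-byte (pup) result registers */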
u16 *ddr3_tip_get_mask_results_pup_reg_map(void)
{
#if MAX_BUS_NUM == 5
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	if (DDR3_IS_ECC_PUP3_MODE(tm->bus_act_mask))
		return mask_results_pup_reg_map_pup3_ecc;
	else
#endif
	return mask_results_pup_reg_map;
}

/*
 * Load the expected dm (data mask) pattern into the odpg: for each pattern
 * word, build the per-byte dm nibbles and program them together with the
 * data words; DM_DIR_INVERSE flips the dm bits.
 */
#define LOW_NIBBLE_BYTE_MASK	0xf
#define HIGH_NIBBLE_BYTE_MASK	0xf0
int mv_ddr_load_dm_pattern_to_odpg(enum hws_access_type access_type, enum hws_pattern pattern,
				   enum dm_direction dm_dir)
{
	struct pattern_info *pattern_table = ddr3_tip_get_pattern_table();
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
	u32 pattern_len = 0;
	u32 data_low, data_high;
	u8 dm_data;

	for (pattern_len = 0;
	     pattern_len < pattern_table[pattern].pattern_len;
	     pattern_len++) {
		if (MV_DDR_IS_64BIT_DRAM_MODE(tm->bus_act_mask)) {
			data_low = pattern_table_get_word(0, pattern, (u8)pattern_len);
			data_high = data_low;
		} else {
			data_low = pattern_table_get_word(0, pattern, (u8)(pattern_len * 2));
			data_high = pattern_table_get_word(0, pattern, (u8)(pattern_len * 2 + 1));
		}

		/* the odpg mbus dm definition is inverted relative to the ddr4 protocol */
		if (dm_dir == DM_DIR_INVERSE)
			dm_data = ~((data_low & LOW_NIBBLE_BYTE_MASK) | (data_high & HIGH_NIBBLE_BYTE_MASK));
		else
			dm_data = (data_low & LOW_NIBBLE_BYTE_MASK) | (data_high & HIGH_NIBBLE_BYTE_MASK);

		ddr3_tip_if_write(0, access_type, 0, ODPG_DATA_WR_DATA_LOW_REG, data_low, MASK_ALL_BITS);
		ddr3_tip_if_write(0, access_type, 0, ODPG_DATA_WR_DATA_HIGH_REG, data_high, MASK_ALL_BITS);
		ddr3_tip_if_write(0, access_type, 0, ODPG_DATA_WR_ADDR_REG,
				  pattern_len | ((dm_data & ODPG_DATA_WR_DATA_MASK) << ODPG_DATA_WR_DATA_OFFS),
				  MASK_ALL_BITS);
	}

	return MV_OK;
}

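/*
 * Typical call (illustrative only; the pattern enum value is an assumption,
 * not taken from this file):
 *
 *	mv_ddr_load_dm_pattern_to_odpg(ACCESS_TYPE_MULTICAST, PATTERN_KILLER_DQ0,
 *				       DM_DIR_DIRECT);
 *
 * loads the dm nibbles of the killer pattern into the odpg without inversion.
 */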