1
2
3
4
5
6#include "ddr3_init.h"
7#include "mv_ddr_regs.h"
8#include "ddr_training_ip_db.h"
9
10#define PATTERN_1 0x55555555
11#define PATTERN_2 0xaaaaaaaa
12
13#define VALIDATE_TRAINING_LIMIT(e1, e2) \
14 ((((e2) - (e1) + 1) > 33) && ((e1) < 67))
15
16u32 phy_reg_bk[MAX_INTERFACE_NUM][MAX_BUS_NUM][BUS_WIDTH_IN_BITS];
17
18u32 training_res[MAX_INTERFACE_NUM * MAX_BUS_NUM * BUS_WIDTH_IN_BITS *
19 HWS_SEARCH_DIR_LIMIT];
20u8 byte_status[MAX_INTERFACE_NUM][MAX_BUS_NUM];
21
22u16 mask_results_dq_reg_map[] = {
23 RESULT_CONTROL_PUP_0_BIT_0_REG, RESULT_CONTROL_PUP_0_BIT_1_REG,
24 RESULT_CONTROL_PUP_0_BIT_2_REG, RESULT_CONTROL_PUP_0_BIT_3_REG,
25 RESULT_CONTROL_PUP_0_BIT_4_REG, RESULT_CONTROL_PUP_0_BIT_5_REG,
26 RESULT_CONTROL_PUP_0_BIT_6_REG, RESULT_CONTROL_PUP_0_BIT_7_REG,
27 RESULT_CONTROL_PUP_1_BIT_0_REG, RESULT_CONTROL_PUP_1_BIT_1_REG,
28 RESULT_CONTROL_PUP_1_BIT_2_REG, RESULT_CONTROL_PUP_1_BIT_3_REG,
29 RESULT_CONTROL_PUP_1_BIT_4_REG, RESULT_CONTROL_PUP_1_BIT_5_REG,
30 RESULT_CONTROL_PUP_1_BIT_6_REG, RESULT_CONTROL_PUP_1_BIT_7_REG,
31 RESULT_CONTROL_PUP_2_BIT_0_REG, RESULT_CONTROL_PUP_2_BIT_1_REG,
32 RESULT_CONTROL_PUP_2_BIT_2_REG, RESULT_CONTROL_PUP_2_BIT_3_REG,
33 RESULT_CONTROL_PUP_2_BIT_4_REG, RESULT_CONTROL_PUP_2_BIT_5_REG,
34 RESULT_CONTROL_PUP_2_BIT_6_REG, RESULT_CONTROL_PUP_2_BIT_7_REG,
35 RESULT_CONTROL_PUP_3_BIT_0_REG, RESULT_CONTROL_PUP_3_BIT_1_REG,
36 RESULT_CONTROL_PUP_3_BIT_2_REG, RESULT_CONTROL_PUP_3_BIT_3_REG,
37 RESULT_CONTROL_PUP_3_BIT_4_REG, RESULT_CONTROL_PUP_3_BIT_5_REG,
38 RESULT_CONTROL_PUP_3_BIT_6_REG, RESULT_CONTROL_PUP_3_BIT_7_REG,
39 RESULT_CONTROL_PUP_4_BIT_0_REG, RESULT_CONTROL_PUP_4_BIT_1_REG,
40 RESULT_CONTROL_PUP_4_BIT_2_REG, RESULT_CONTROL_PUP_4_BIT_3_REG,
41 RESULT_CONTROL_PUP_4_BIT_4_REG, RESULT_CONTROL_PUP_4_BIT_5_REG,
42 RESULT_CONTROL_PUP_4_BIT_6_REG, RESULT_CONTROL_PUP_4_BIT_7_REG,
43#if MAX_BUS_NUM == 9
44 RESULT_CONTROL_PUP_5_BIT_0_REG, RESULT_CONTROL_PUP_5_BIT_1_REG,
45 RESULT_CONTROL_PUP_5_BIT_2_REG, RESULT_CONTROL_PUP_5_BIT_3_REG,
46 RESULT_CONTROL_PUP_5_BIT_4_REG, RESULT_CONTROL_PUP_5_BIT_5_REG,
47 RESULT_CONTROL_PUP_5_BIT_6_REG, RESULT_CONTROL_PUP_5_BIT_7_REG,
48 RESULT_CONTROL_PUP_6_BIT_0_REG, RESULT_CONTROL_PUP_6_BIT_1_REG,
49 RESULT_CONTROL_PUP_6_BIT_2_REG, RESULT_CONTROL_PUP_6_BIT_3_REG,
50 RESULT_CONTROL_PUP_6_BIT_4_REG, RESULT_CONTROL_PUP_6_BIT_5_REG,
51 RESULT_CONTROL_PUP_6_BIT_6_REG, RESULT_CONTROL_PUP_6_BIT_7_REG,
52 RESULT_CONTROL_PUP_7_BIT_0_REG, RESULT_CONTROL_PUP_7_BIT_1_REG,
53 RESULT_CONTROL_PUP_7_BIT_2_REG, RESULT_CONTROL_PUP_7_BIT_3_REG,
54 RESULT_CONTROL_PUP_7_BIT_4_REG, RESULT_CONTROL_PUP_7_BIT_5_REG,
55 RESULT_CONTROL_PUP_7_BIT_6_REG, RESULT_CONTROL_PUP_7_BIT_7_REG,
56 RESULT_CONTROL_PUP_8_BIT_0_REG, RESULT_CONTROL_PUP_8_BIT_1_REG,
57 RESULT_CONTROL_PUP_8_BIT_2_REG, RESULT_CONTROL_PUP_8_BIT_3_REG,
58 RESULT_CONTROL_PUP_8_BIT_4_REG, RESULT_CONTROL_PUP_8_BIT_5_REG,
59 RESULT_CONTROL_PUP_8_BIT_6_REG, RESULT_CONTROL_PUP_8_BIT_7_REG,
60#endif
61 0xffff
62};
63
64u16 mask_results_pup_reg_map[] = {
65 RESULT_CONTROL_BYTE_PUP_0_REG, RESULT_CONTROL_BYTE_PUP_1_REG,
66 RESULT_CONTROL_BYTE_PUP_2_REG, RESULT_CONTROL_BYTE_PUP_3_REG,
67 RESULT_CONTROL_BYTE_PUP_4_REG,
68#if MAX_BUS_NUM == 9
69 RESULT_CONTROL_BYTE_PUP_5_REG, RESULT_CONTROL_BYTE_PUP_6_REG,
70 RESULT_CONTROL_BYTE_PUP_7_REG, RESULT_CONTROL_BYTE_PUP_8_REG,
71#endif
72 0xffff
73};
74
75#if MAX_BUS_NUM == 5
76u16 mask_results_dq_reg_map_pup3_ecc[] = {
77 RESULT_CONTROL_PUP_0_BIT_0_REG, RESULT_CONTROL_PUP_0_BIT_1_REG,
78 RESULT_CONTROL_PUP_0_BIT_2_REG, RESULT_CONTROL_PUP_0_BIT_3_REG,
79 RESULT_CONTROL_PUP_0_BIT_4_REG, RESULT_CONTROL_PUP_0_BIT_5_REG,
80 RESULT_CONTROL_PUP_0_BIT_6_REG, RESULT_CONTROL_PUP_0_BIT_7_REG,
81 RESULT_CONTROL_PUP_1_BIT_0_REG, RESULT_CONTROL_PUP_1_BIT_1_REG,
82 RESULT_CONTROL_PUP_1_BIT_2_REG, RESULT_CONTROL_PUP_1_BIT_3_REG,
83 RESULT_CONTROL_PUP_1_BIT_4_REG, RESULT_CONTROL_PUP_1_BIT_5_REG,
84 RESULT_CONTROL_PUP_1_BIT_6_REG, RESULT_CONTROL_PUP_1_BIT_7_REG,
85 RESULT_CONTROL_PUP_2_BIT_0_REG, RESULT_CONTROL_PUP_2_BIT_1_REG,
86 RESULT_CONTROL_PUP_2_BIT_2_REG, RESULT_CONTROL_PUP_2_BIT_3_REG,
87 RESULT_CONTROL_PUP_2_BIT_4_REG, RESULT_CONTROL_PUP_2_BIT_5_REG,
88 RESULT_CONTROL_PUP_2_BIT_6_REG, RESULT_CONTROL_PUP_2_BIT_7_REG,
89 RESULT_CONTROL_PUP_4_BIT_0_REG, RESULT_CONTROL_PUP_4_BIT_1_REG,
90 RESULT_CONTROL_PUP_4_BIT_2_REG, RESULT_CONTROL_PUP_4_BIT_3_REG,
91 RESULT_CONTROL_PUP_4_BIT_4_REG, RESULT_CONTROL_PUP_4_BIT_5_REG,
92 RESULT_CONTROL_PUP_4_BIT_6_REG, RESULT_CONTROL_PUP_4_BIT_7_REG,
93 RESULT_CONTROL_PUP_3_BIT_0_REG, RESULT_CONTROL_PUP_3_BIT_1_REG,
94 RESULT_CONTROL_PUP_3_BIT_2_REG, RESULT_CONTROL_PUP_3_BIT_3_REG,
95 RESULT_CONTROL_PUP_3_BIT_4_REG, RESULT_CONTROL_PUP_3_BIT_5_REG,
96 RESULT_CONTROL_PUP_3_BIT_6_REG, RESULT_CONTROL_PUP_3_BIT_7_REG
97};
98#endif
99
100#if MAX_BUS_NUM == 5
101u16 mask_results_pup_reg_map_pup3_ecc[] = {
102 RESULT_CONTROL_BYTE_PUP_0_REG, RESULT_CONTROL_BYTE_PUP_1_REG,
103 RESULT_CONTROL_BYTE_PUP_2_REG, RESULT_CONTROL_BYTE_PUP_4_REG,
104 RESULT_CONTROL_BYTE_PUP_4_REG
105};
106#endif
107
108struct pattern_info pattern_table_64[] = {
109
110
111
112
113
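	/*
	 * each entry: num_of_phases_tx, tx_burst_size, delay_between_bursts,
	 * num_of_phases_rx, start_addr, pattern_len
	 * (field order assumed from struct pattern_info)
	 */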
114 {0x7, 0x7, 2, 0x7, 0x00000, 8},
115 {0x7, 0x7, 2, 0x7, 0x00080, 8},
116 {0x7, 0x7, 2, 0x7, 0x00100, 8},
117 {0x7, 0x7, 2, 0x7, 0x00030, 8},
118 {0x7, 0x7, 2, 0x7, 0x00100, 8},
119 {0x7, 0x7, 2, 0x7, 0x00100, 8},
120 {0x1f, 0xf, 2, 0xf, 0x00680, 32},
121 {0x1f, 0xf, 2, 0xf, 0x00a80, 32},
122 {0x1f, 0xf, 2, 0xf, 0x01280, 32},
123 {0x1f, 0xf, 2, 0xf, 0x01a80, 32},
124 {0x1f, 0xf, 2, 0xf, 0x02280, 32},
125 {0x1f, 0xf, 2, 0xf, 0x02a80, 32},
126 {0x1f, 0xf, 2, 0xf, 0x03280, 32},
127 {0x1f, 0xf, 2, 0xf, 0x03a80, 32},
128 {0x1f, 0xf, 2, 0xf, 0x04280, 32},
129 {0x1f, 0xf, 2, 0xf, 0x00e80, 32},
130 {0x1f, 0xf, 2, 0xf, 0x01680, 32},
131 {0x1f, 0xf, 2, 0xf, 0x01e80, 32},
132 {0x1f, 0xf, 2, 0xf, 0x02680, 32},
133 {0x1f, 0xf, 2, 0xf, 0x02e80, 32},
134 {0x1f, 0xf, 2, 0xf, 0x03680, 32},
135 {0x1f, 0xf, 2, 0xf, 0x03e80, 32},
136 {0x1f, 0xf, 2, 0xf, 0x04680, 32},
137 {0x1f, 0xf, 2, 0xf, 0x04a80, 32},
138 {0x1f, 0xf, 2, 0xf, 0x05280, 32},
139 {0x1f, 0xf, 2, 0xf, 0x05a80, 32},
140 {0x1f, 0xf, 2, 0xf, 0x06280, 32},
141 {0x1f, 0xf, 2, 0xf, 0x06a80, 32},
142 {0x1f, 0xf, 2, 0xf, 0x07280, 32},
143 {0x1f, 0xf, 2, 0xf, 0x07a80, 32},
144 {0x1f, 0xf, 2, 0xf, 0x08280, 32},
145 {0x1f, 0xf, 2, 0xf, 0x04e80, 32},
146 {0x1f, 0xf, 2, 0xf, 0x05680, 32},
147 {0x1f, 0xf, 2, 0xf, 0x05e80, 32},
148 {0x1f, 0xf, 2, 0xf, 0x06680, 32},
149 {0x1f, 0xf, 2, 0xf, 0x06e80, 32},
150 {0x1f, 0xf, 2, 0xf, 0x07680, 32},
151 {0x1f, 0xf, 2, 0xf, 0x07e80, 32},
152 {0x1f, 0xf, 2, 0xf, 0x08680, 32},
153 {0x1f, 0xf, 2, 0xf, 0x08a80, 32},
154 {0x1f, 0xf, 2, 0xf, 0x09280, 32},
155 {0x1f, 0xf, 2, 0xf, 0x09a80, 32},
156 {0x1f, 0xf, 2, 0xf, 0x0a280, 32},
157 {0x1f, 0xf, 2, 0xf, 0x0aa80, 32},
158 {0x1f, 0xf, 2, 0xf, 0x0b280, 32},
159 {0x1f, 0xf, 2, 0xf, 0x0ba80, 32},
160 {0x1f, 0xf, 2, 0xf, 0x0c280, 32},
161 {0x1f, 0xf, 2, 0xf, 0x08e80, 32},
162 {0x1f, 0xf, 2, 0xf, 0x09680, 32},
163 {0x1f, 0xf, 2, 0xf, 0x09e80, 32},
164 {0x1f, 0xf, 2, 0xf, 0x0a680, 32},
165 {0x1f, 0xf, 2, 0xf, 0x0ae80, 32},
166 {0x1f, 0xf, 2, 0xf, 0x0b680, 32},
167 {0x1f, 0xf, 2, 0xf, 0x0be80, 32},
168 {0x1f, 0xf, 2, 0xf, 0x0c680, 32},
169 {0x1f, 0xf, 2, 0xf, 0x0ca80, 32},
170 {0x1f, 0xf, 2, 0xf, 0x0d280, 32},
171 {0x1f, 0xf, 2, 0xf, 0x0da80, 32},
172 {0x1f, 0xf, 2, 0xf, 0x0e280, 32},
173 {0x1f, 0xf, 2, 0xf, 0x0ea80, 32},
174 {0x1f, 0xf, 2, 0xf, 0x0f280, 32},
175 {0x1f, 0xf, 2, 0xf, 0x0fa80, 32},
176 {0x1f, 0xf, 2, 0xf, 0x10280, 32},
177 {0x1f, 0xf, 2, 0xf, 0x0ce80, 32},
178 {0x1f, 0xf, 2, 0xf, 0x0d680, 32},
179 {0x1f, 0xf, 2, 0xf, 0x0de80, 32},
180 {0x1f, 0xf, 2, 0xf, 0x0e680, 32},
181 {0x1f, 0xf, 2, 0xf, 0x0ee80, 32},
182 {0x1f, 0xf, 2, 0xf, 0x0f680, 32},
183 {0x1f, 0xf, 2, 0xf, 0x0fe80, 32},
184 {0x1f, 0xf, 2, 0xf, 0x10680, 32},
185 {0x1f, 0xf, 2, 0xf, 0x10a80, 32},
186 {0x1f, 0xf, 2, 0xf, 0x10e80, 32},
187 {0x1f, 0xf, 2, 0xf, 0x11280, 32},
188 {0x1f, 0xf, 2, 0xf, 0x11680, 32},
189 {0x1f, 0xf, 2, 0xf, 0x11a80, 32},
190 {0x1f, 0xf, 2, 0xf, 0x11e80, 32},
191 {0x1f, 0xf, 2, 0xf, 0x12280, 32},
192 {0x1f, 0xf, 2, 0xf, 0x12680, 32},
193 {0x1f, 0xf, 2, 0xf, 0x12a80, 32},
194 {0x1f, 0xf, 2, 0xf, 0x12e80, 32},
195 {0x1f, 0xf, 2, 0xf, 0x13280, 32},
196 {0x1f, 0xf, 2, 0xf, 0x13680, 32},
197 {0x1f, 0xf, 2, 0xf, 0x13a80, 32},
198 {0x1f, 0xf, 2, 0xf, 0x13e80, 32},
199 {0x1f, 0xf, 2, 0xf, 0x14280, 32},
200 {0x1f, 0xf, 2, 0xf, 0x14680, 32},
201 {0x1f, 0xf, 2, 0xf, 0x14a80, 32},
202 {0x1f, 0xf, 2, 0xf, 0x14e80, 32},
203 {0x1f, 0xf, 2, 0xf, 0x15280, 32},
204 {0x1f, 0xf, 2, 0xf, 0x15680, 32}
205
206};
207
208struct pattern_info pattern_table_16[] = {
209
210
211
212
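	/* same entry layout as pattern_table_64 above */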
213 {1, 1, 2, 1, 0x0080, 2},
214 {1, 1, 2, 1, 0x00c0, 2},
215 {1, 1, 2, 1, 0x0380, 2},
216 {1, 1, 2, 1, 0x0040, 2},
217 {1, 1, 2, 1, 0x0100, 2},
218 {1, 1, 2, 1, 0x0000, 2},
219 {0xf, 0x7, 2, 0x7, 0x0140, 16},
220 {0xf, 0x7, 2, 0x7, 0x0190, 16},
221 {0xf, 0x7, 2, 0x7, 0x01d0, 16},
222 {0xf, 0x7, 2, 0x7, 0x0210, 16},
223 {0xf, 0x7, 2, 0x7, 0x0250, 16},
224 {0xf, 0x7, 2, 0x7, 0x0290, 16},
225 {0xf, 0x7, 2, 0x7, 0x02d0, 16},
226 {0xf, 0x7, 2, 0x7, 0x0310, 16},
227 {0xf, 0x7, 2, 0x7, 0x0350, 16},
228 {0xf, 0x7, 2, 0x7, 0x04c0, 16},
229 {0xf, 0x7, 2, 0x7, 0x03c0, 16},
230 {0xf, 0x7, 2, 0x7, 0x0400, 16},
231 {0xf, 0x7, 2, 0x7, 0x0440, 16},
232 {0xf, 0x7, 2, 0x7, 0x0480, 16},
	{0xf, 0x7, 2, 0x7, 0x6280, 16},
	{0xf, 0x7, 2, 0x7, 0x6680, 16},
	{0xf, 0x7, 2, 0x7, 0x6a80, 16},
	{0xf, 0x7, 2, 0x7, 0x6e80, 16},
	{0xf, 0x7, 2, 0x7, 0x7280, 16},
	{0xf, 0x7, 2, 0x7, 0x7680, 16},
	{0xf, 0x7, 2, 0x7, 0x7a80, 16},
	{0xf, 0x7, 2, 0x7, 0x7e80, 16},
	{0xf, 0x7, 2, 0x7, 0x8280, 16},
	{0xf, 0x7, 2, 0x7, 0x8680, 16},
	{0xf, 0x7, 2, 0x7, 0x8a80, 16},
	{0xf, 0x7, 2, 0x7, 0x8e80, 16},
	{0xf, 0x7, 2, 0x7, 0x9280, 16},
	{0xf, 0x7, 2, 0x7, 0x9680, 16},
	{0xf, 0x7, 2, 0x7, 0x9a80, 16},
	{0xf, 0x7, 2, 0x7, 0x9e80, 16},
	{0xf, 0x7, 2, 0x7, 0xa280, 16}
250
251};
252
253struct pattern_info pattern_table_32[] = {
254
255
256
257
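	/* same entry layout as pattern_table_64 above */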
258 {3, 3, 2, 3, 0x0080, 4},
259 {3, 3, 2, 3, 0x00c0, 4},
260 {3, 3, 2, 3, 0x0380, 4},
261 {3, 3, 2, 3, 0x0040, 4},
262 {3, 3, 2, 3, 0x0100, 4},
263 {3, 3, 2, 3, 0x0000, 4},
264 {0x1f, 0xf, 2, 0xf, 0x0140, 32},
265 {0x1f, 0xf, 2, 0xf, 0x0190, 32},
266 {0x1f, 0xf, 2, 0xf, 0x01d0, 32},
267 {0x1f, 0xf, 2, 0xf, 0x0210, 32},
268 {0x1f, 0xf, 2, 0xf, 0x0250, 32},
269 {0x1f, 0xf, 2, 0xf, 0x0290, 32},
270 {0x1f, 0xf, 2, 0xf, 0x02d0, 32},
271 {0x1f, 0xf, 2, 0xf, 0x0310, 32},
272 {0x1f, 0xf, 2, 0xf, 0x0350, 32},
273 {0x1f, 0xf, 2, 0xf, 0x04c0, 32},
274 {0x1f, 0xf, 2, 0xf, 0x03c0, 32},
275 {0x1f, 0xf, 2, 0xf, 0x0400, 32},
276 {0x1f, 0xf, 2, 0xf, 0x0440, 32},
277 {0x1f, 0xf, 2, 0xf, 0x0480, 32},
	{0x1f, 0xf, 2, 0xf, 0x6280, 32},
	{0x1f, 0xf, 2, 0xf, 0x6680, 32},
	{0x1f, 0xf, 2, 0xf, 0x6a80, 32},
	{0x1f, 0xf, 2, 0xf, 0x6e80, 32},
	{0x1f, 0xf, 2, 0xf, 0x7280, 32},
	{0x1f, 0xf, 2, 0xf, 0x7680, 32},
	{0x1f, 0xf, 2, 0xf, 0x7a80, 32},
	{0x1f, 0xf, 2, 0xf, 0x7e80, 32},
	{0x1f, 0xf, 2, 0xf, 0x8280, 32},
	{0x1f, 0xf, 2, 0xf, 0x8680, 32},
	{0x1f, 0xf, 2, 0xf, 0x8a80, 32},
	{0x1f, 0xf, 2, 0xf, 0x8e80, 32},
	{0x1f, 0xf, 2, 0xf, 0x9280, 32},
	{0x1f, 0xf, 2, 0xf, 0x9680, 32},
	{0x1f, 0xf, 2, 0xf, 0x9a80, 32},
	{0x1f, 0xf, 2, 0xf, 0x9e80, 32},
	{0x1f, 0xf, 2, 0xf, 0xa280, 32}
295
296};
297
298u32 train_dev_num;
enum hws_ddr_cs train_cs_type;
300u32 train_pup_num;
301enum hws_training_result train_result_type;
302enum hws_control_element train_control_element;
enum hws_search_dir train_search_dir;
304enum hws_dir train_direction;
305u32 train_if_select;
306u32 train_init_value;
307u32 train_number_iterations;
308enum hws_pattern train_pattern;
309enum hws_edge_compare train_edge_compare;
310u32 train_cs_num;
u32 train_if_access, train_if_id, train_pup_access;
312u32 max_polling_for_done = 1000000;
313
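/*
 * get a pointer into the global training result buffer for a given
 * search direction and interface
 */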
314u32 *ddr3_tip_get_buf_ptr(u32 dev_num, enum hws_search_dir search,
315 enum hws_training_result result_type,
316 u32 interface_num)
317{
318 u32 *buf_ptr = NULL;
319
320 buf_ptr = &training_res
321 [MAX_INTERFACE_NUM * MAX_BUS_NUM * BUS_WIDTH_IN_BITS * search +
322 interface_num * MAX_BUS_NUM * BUS_WIDTH_IN_BITS];
323
324 return buf_ptr;
325}
326
327enum {
328 PASS,
329 FAIL
330};
331
332
333
334
335
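/*
 * run a single training IP search on the selected interface/subphys:
 * configure the ODPG and training opcode, trigger the training machine
 * and collect its pass/fail status
 */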
336int ddr3_tip_ip_training(u32 dev_num, enum hws_access_type access_type,
337 u32 interface_num,
338 enum hws_access_type pup_access_type,
339 u32 pup_num, enum hws_training_result result_type,
340 enum hws_control_element control_element,
341 enum hws_search_dir search_dir, enum hws_dir direction,
342 u32 interface_mask, u32 init_value, u32 num_iter,
343 enum hws_pattern pattern,
344 enum hws_edge_compare edge_comp,
345 enum hws_ddr_cs cs_type, u32 cs_num,
346 enum hws_training_ip_stat *train_status)
347{
348 u32 mask_dq_num_of_regs, mask_pup_num_of_regs, index_cnt,
349 reg_data, pup_id;
350 u32 tx_burst_size;
351 u32 delay_between_burst;
352 u32 rd_mode;
353 u32 data;
354 struct pattern_info *pattern_table = ddr3_tip_get_pattern_table();
355 u16 *mask_results_pup_reg_map = ddr3_tip_get_mask_results_pup_reg_map();
356 u16 *mask_results_dq_reg_map = ddr3_tip_get_mask_results_dq_reg();
357 u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
358 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
359
360 if (pup_num >= octets_per_if_num) {
361 DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR,
362 ("pup_num %d not valid\n", pup_num));
363 }
364 if (interface_num >= MAX_INTERFACE_NUM) {
365 DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR,
366 ("if_id %d not valid\n",
367 interface_num));
368 }
369 if (train_status == NULL) {
370 DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR,
371 ("error param 4\n"));
372 return MV_BAD_PARAM;
373 }
374
375
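	/* configure the d-unit and ODPG chip-select access: single CS vs. the requested cs_num */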
376 if (cs_type == CS_SINGLE) {
377
378 CHECK_STATUS(ddr3_tip_if_write
379 (dev_num, access_type, interface_num,
380 DUAL_DUNIT_CFG_REG, 1 << 3, 1 << 3));
381
382 CHECK_STATUS(ddr3_tip_if_write
383 (dev_num, access_type, interface_num,
384 ODPG_DATA_CTRL_REG,
385 (0x3 | (effective_cs << 26)), 0xc000003));
386 } else {
387 CHECK_STATUS(ddr3_tip_if_write
388 (dev_num, access_type, interface_num,
389 DUAL_DUNIT_CFG_REG, 0, 1 << 3));
390
391 CHECK_STATUS(ddr3_tip_if_write
392 (dev_num, access_type, interface_num,
393 ODPG_DATA_CTRL_REG, 0x3 | cs_num << 26,
394 0x3 | 3 << 26));
395 }
396
397
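	/* load the training pattern into the ODPG and set its burst/phase parameters */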
398 ddr3_tip_load_pattern_to_odpg(dev_num, access_type, interface_num,
399 pattern,
400 pattern_table[pattern].start_addr);
401 tx_burst_size = (direction == OPER_WRITE) ?
402 pattern_table[pattern].tx_burst_size : 0;
403 delay_between_burst = (direction == OPER_WRITE) ? 2 : 0;
404 rd_mode = (direction == OPER_WRITE) ? 1 : 0;
405 CHECK_STATUS(ddr3_tip_configure_odpg
406 (dev_num, access_type, interface_num, direction,
407 pattern_table[pattern].num_of_phases_tx, tx_burst_size,
408 pattern_table[pattern].num_of_phases_rx,
409 delay_between_burst, rd_mode, effective_cs, STRESS_NONE,
410 DURATION_SINGLE));
411 reg_data = (direction == OPER_READ) ? 0 : (0x3 << 30);
412 reg_data |= (direction == OPER_READ) ? 0x60 : 0xfa;
413 CHECK_STATUS(ddr3_tip_if_write
414 (dev_num, access_type, interface_num,
415 ODPG_WR_RD_MODE_ENA_REG, reg_data,
416 MASK_ALL_BITS));
417 reg_data = (edge_comp == EDGE_PF || edge_comp == EDGE_FP) ? 0 : 1 << 6;
418 reg_data |= (edge_comp == EDGE_PF || edge_comp == EDGE_PFP) ?
419 (1 << 7) : 0;
420
421
422 if (pup_access_type == ACCESS_TYPE_MULTICAST)
423 reg_data |= 0xe << 14;
424 else
425 reg_data |= pup_num << 14;
426
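	/* training opcode stop condition (bits 20:21) depends on the edge compare mode */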
427 if (edge_comp == EDGE_FP) {
428
429 reg_data |= (0 << 20);
430 } else if (edge_comp == EDGE_FPF) {
431 reg_data |= (0 << 20);
432 } else {
433 reg_data |= (3 << 20);
434 }
435
436 CHECK_STATUS(ddr3_tip_if_write
437 (dev_num, access_type, interface_num,
438 GENERAL_TRAINING_OPCODE_REG,
439 reg_data | (0x7 << 8) | (0x7 << 11),
440 (0x3 | (0x3 << 2) | (0x3 << 6) | (1 << 5) | (0x7 << 8) |
441 (0x7 << 11) | (0xf << 14) | (0x3 << 18) | (3 << 20))));
442 reg_data = (search_dir == HWS_LOW2HIGH) ? 0 : (1 << 8);
443 CHECK_STATUS(ddr3_tip_if_write
444 (dev_num, access_type, interface_num, OPCODE_REG0_REG(1),
445 1 | reg_data | init_value << 9 | (1 << 25) | (1 << 26),
446 0xff | (1 << 8) | (0xffff << 9) | (1 << 25) | (1 << 26)));
447
448
449
450
451
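	/* set the maximum number of iterations for the search */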
452 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, interface_num,
453 OPCODE_REG1_REG(1), num_iter,
454 0xffff));
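	/*
	 * select the PHY register to sweep, based on the trained element
	 * (PBS per-bit skew or centralization ADLL) and the access direction;
	 * the DQS skew cases leave reg_data unchanged here
	 */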
455 if (control_element == HWS_CONTROL_ELEMENT_DQ_SKEW &&
456 direction == OPER_READ) {
457
458
459
460
461 reg_data = PBS_RX_BCAST_PHY_REG(effective_cs);
462 } else if (control_element == HWS_CONTROL_ELEMENT_DQ_SKEW &&
463 direction == OPER_WRITE) {
464 reg_data = PBS_TX_BCAST_PHY_REG(effective_cs);
465 } else if (control_element == HWS_CONTROL_ELEMENT_ADLL &&
466 direction == OPER_WRITE) {
467
468
469
470
471
472
473
474
475 reg_data = CTX_PHY_REG(effective_cs);
476 } else if (control_element == HWS_CONTROL_ELEMENT_ADLL &&
477 direction == OPER_READ) {
478
479 reg_data = CRX_PHY_REG(effective_cs);
480 } else if (control_element == HWS_CONTROL_ELEMENT_DQS_SKEW &&
481 direction == OPER_WRITE) {
482
483 } else if (control_element == HWS_CONTROL_ELEMENT_DQS_SKEW &&
484 direction == OPER_READ) {
485
486 }
487
488 reg_data |= (0x6 << 28);
489 CHECK_STATUS(ddr3_tip_if_write
490 (dev_num, access_type, interface_num, CAL_PHY_REG(1),
491 reg_data | (init_value << 8),
492 0xff | (0xffff << 8) | (0xf << 24) | (u32) (0xf << 28)));
493
494 mask_dq_num_of_regs = octets_per_if_num * BUS_WIDTH_IN_BITS;
495 mask_pup_num_of_regs = octets_per_if_num;
496
497 if (result_type == RESULT_PER_BIT) {
498 for (index_cnt = 0; index_cnt < mask_dq_num_of_regs;
499 index_cnt++) {
500 CHECK_STATUS(ddr3_tip_if_write
501 (dev_num, access_type, interface_num,
502 mask_results_dq_reg_map[index_cnt], 0,
503 1 << 24));
504 }
505
506
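		/* mask the per-bit result registers that belong to inactive subphys */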
507 for (pup_id = 0; pup_id < octets_per_if_num;
508 pup_id++) {
509 if (IS_BUS_ACTIVE(tm->bus_act_mask, pup_id) == 1)
510 continue;
511
512 for (index_cnt = (pup_id * 8); index_cnt < (pup_id + 1) * 8; index_cnt++) {
513 CHECK_STATUS(ddr3_tip_if_write
514 (dev_num, access_type,
515 interface_num,
516 mask_results_dq_reg_map
517 [index_cnt], (1 << 24), 1 << 24));
518 }
519 }
520
521 for (index_cnt = 0; index_cnt < mask_pup_num_of_regs;
522 index_cnt++) {
523 CHECK_STATUS(ddr3_tip_if_write
524 (dev_num, access_type, interface_num,
525 mask_results_pup_reg_map[index_cnt],
526 (1 << 24), 1 << 24));
527 }
528 } else if (result_type == RESULT_PER_BYTE) {
529
530 for (index_cnt = 0; index_cnt < mask_pup_num_of_regs;
531 index_cnt++) {
532 CHECK_STATUS(ddr3_tip_if_write
533 (dev_num, access_type, interface_num,
534 mask_results_pup_reg_map[index_cnt], 0,
535 1 << 24));
536 }
537 for (index_cnt = 0; index_cnt < mask_dq_num_of_regs;
538 index_cnt++) {
539 CHECK_STATUS(ddr3_tip_if_write
540 (dev_num, access_type, interface_num,
541 mask_results_dq_reg_map[index_cnt],
542 (1 << 24), (1 << 24)));
543 }
544 }
545
546
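	/* trigger the training machine */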
547 mv_ddr_training_enable();
548
549
550 mdelay(1);
551
552
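	/* poll for training done and translate the result into a status code */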
553 if (mv_ddr_is_training_done(MAX_POLLING_ITERATIONS, &data) != MV_OK) {
554 train_status[0] = HWS_TRAINING_IP_STATUS_TIMEOUT;
555 } else {
556 if (data == PASS)
557 train_status[0] = HWS_TRAINING_IP_STATUS_SUCCESS;
558 else
559 train_status[0] = HWS_TRAINING_IP_STATUS_FAIL;
560 }
561
562 ddr3_tip_if_write(0, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
563 ODPG_DATA_CTRL_REG, 0, MASK_ALL_BITS);
564
565 return MV_OK;
566}
567
568
569
570
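/* load a training pattern, word by word, into the ODPG buffer at load_addr */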
571int ddr3_tip_load_pattern_to_odpg(u32 dev_num, enum hws_access_type access_type,
572 u32 if_id, enum hws_pattern pattern,
573 u32 load_addr)
574{
575 u32 pattern_length_cnt = 0;
576 struct pattern_info *pattern_table = ddr3_tip_get_pattern_table();
577 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
578
579 for (pattern_length_cnt = 0;
580 pattern_length_cnt < pattern_table[pattern].pattern_len;
581 pattern_length_cnt++) {
582 if (MV_DDR_IS_64BIT_DRAM_MODE(tm->bus_act_mask)) {
583 CHECK_STATUS(ddr3_tip_if_write
584 (dev_num, access_type, if_id,
585 ODPG_DATA_WR_DATA_LOW_REG,
586 pattern_table_get_word(dev_num, pattern,
587 (u8) (pattern_length_cnt)),
588 MASK_ALL_BITS));
589 CHECK_STATUS(ddr3_tip_if_write
590 (dev_num, access_type, if_id,
591 ODPG_DATA_WR_DATA_HIGH_REG,
592 pattern_table_get_word(dev_num, pattern,
593 (u8) (pattern_length_cnt)),
594 MASK_ALL_BITS));
595 } else {
596 CHECK_STATUS(ddr3_tip_if_write
597 (dev_num, access_type, if_id,
598 ODPG_DATA_WR_DATA_LOW_REG,
599 pattern_table_get_word(dev_num, pattern,
600 (u8) (pattern_length_cnt * 2)),
601 MASK_ALL_BITS));
602 CHECK_STATUS(ddr3_tip_if_write
603 (dev_num, access_type, if_id,
604 ODPG_DATA_WR_DATA_HIGH_REG,
605 pattern_table_get_word(dev_num, pattern,
606 (u8) (pattern_length_cnt * 2 + 1)),
607 MASK_ALL_BITS));
608 }
609 CHECK_STATUS(ddr3_tip_if_write
610 (dev_num, access_type, if_id,
611 ODPG_DATA_WR_ADDR_REG, pattern_length_cnt,
612 MASK_ALL_BITS));
613 }
614
615 CHECK_STATUS(ddr3_tip_if_write
616 (dev_num, access_type, if_id,
617 ODPG_DATA_BUFFER_OFFS_REG, load_addr, MASK_ALL_BITS));
618
619 return MV_OK;
620}
621
622
623
624
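/* compose and write the ODPG control word (phases, burst size, delays, CS, stress mode) */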
625int ddr3_tip_configure_odpg(u32 dev_num, enum hws_access_type access_type,
626 u32 if_id, enum hws_dir direction, u32 tx_phases,
627 u32 tx_burst_size, u32 rx_phases,
628 u32 delay_between_burst, u32 rd_mode, u32 cs_num,
629 u32 addr_stress_jump, u32 single_pattern)
630{
631 u32 data_value = 0;
632 int ret;
633
634 data_value = ((single_pattern << 2) | (tx_phases << 5) |
635 (tx_burst_size << 11) | (delay_between_burst << 15) |
636 (rx_phases << 21) | (rd_mode << 25) | (cs_num << 26) |
637 (addr_stress_jump << 29));
638 ret = ddr3_tip_if_write(dev_num, access_type, if_id,
639 ODPG_DATA_CTRL_REG, data_value, 0xaffffffc);
640 if (ret != MV_OK)
641 return ret;
642
643 return MV_OK;
644}
645
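/*
 * reduce the per-bit lock/tap results of one subphy to a single edge value:
 * fails if any bit did not lock, otherwise returns the min or max tap
 */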
646int ddr3_tip_process_result(u32 *ar_result, enum hws_edge e_edge,
647 enum hws_edge_search e_edge_search,
648 u32 *edge_result)
649{
650 u32 i, res;
651 int tap_val, max_val = -10000, min_val = 10000;
652 int lock_success = 1;
653
	for (i = 0; i < BUS_WIDTH_IN_BITS; i++) {
		res = GET_LOCK_RESULT(ar_result[i]);
		if (res == 0) {
			lock_success = 0;
			DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR,
						 ("lock failed for bit %d\n", i));
			break;
		}
	}
663
664 if (lock_success == 1) {
665 for (i = 0; i < BUS_WIDTH_IN_BITS; i++) {
666 tap_val = GET_TAP_RESULT(ar_result[i], e_edge);
667 if (tap_val > max_val)
668 max_val = tap_val;
669 if (tap_val < min_val)
670 min_val = tap_val;
671 if (e_edge_search == TRAINING_EDGE_MAX)
672 *edge_result = (u32) max_val;
673 else
674 *edge_result = (u32) min_val;
675
676 DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR,
677 ("i %d ar_result[i] 0x%x tap_val %d max_val %d min_val %d Edge_result %d\n",
678 i, ar_result[i], tap_val,
679 max_val, min_val,
680 *edge_result));
681 }
682 } else {
683 return MV_FAIL;
684 }
685
686 return MV_OK;
687}
688
689
690
691
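/*
 * read training results from the result registers (or from the result
 * database) for the requested interface/subphy/bit range
 */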
692int ddr3_tip_read_training_result(u32 dev_num, u32 if_id,
693 enum hws_access_type pup_access_type,
694 u32 pup_num, u32 bit_num,
695 enum hws_search_dir search,
696 enum hws_dir direction,
697 enum hws_training_result result_type,
698 enum hws_training_load_op operation,
699 u32 cs_num_type, u32 **load_res,
700 int is_read_from_db, u8 cons_tap,
701 int is_check_result_validity)
702{
703 u32 reg_offset, pup_cnt, start_pup, end_pup, start_reg, end_reg;
704 u32 *interface_train_res = NULL;
705 u16 *reg_addr = NULL;
706 u32 read_data[MAX_INTERFACE_NUM];
707 u16 *mask_results_pup_reg_map = ddr3_tip_get_mask_results_pup_reg_map();
708 u16 *mask_results_dq_reg_map = ddr3_tip_get_mask_results_dq_reg();
709 u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
710 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
711
712
713
714
715
716
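	/* point the d-unit and the ODPG at the requested chip-select before reading results */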
717 CHECK_STATUS(ddr3_tip_if_write
718 (dev_num, ACCESS_TYPE_UNICAST, if_id, DUAL_DUNIT_CFG_REG,
719 (cs_num_type == 0) ? 1 << 3 : 0, (1 << 3)));
720 CHECK_STATUS(ddr3_tip_if_write
721 (dev_num, ACCESS_TYPE_UNICAST, if_id,
722 ODPG_DATA_CTRL_REG, (cs_num_type << 26), (3 << 26)));
723 DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_TRACE,
724 ("Read_from_d_b %d cs_type %d oper %d result_type %d direction %d search %d pup_num %d if_id %d pup_access_type %d\n",
725 is_read_from_db, cs_num_type, operation,
726 result_type, direction, search, pup_num,
727 if_id, pup_access_type));
728
729 if ((load_res == NULL) && (is_read_from_db == 1)) {
730 DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR,
731 ("ddr3_tip_read_training_result load_res = NULL"));
732 return MV_FAIL;
733 }
734 if (pup_num >= octets_per_if_num) {
735 DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR,
736 ("pup_num %d not valid\n", pup_num));
737 }
738 if (if_id >= MAX_INTERFACE_NUM) {
739 DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR,
740 ("if_id %d not valid\n", if_id));
741 }
742 if (result_type == RESULT_PER_BIT)
743 reg_addr = mask_results_dq_reg_map;
744 else
745 reg_addr = mask_results_pup_reg_map;
746 if (pup_access_type == ACCESS_TYPE_UNICAST) {
747 start_pup = pup_num;
748 end_pup = pup_num;
749 } else {
750
751 start_pup = 0;
752 end_pup = octets_per_if_num - 1;
753 }
754
755 for (pup_cnt = start_pup; pup_cnt <= end_pup; pup_cnt++) {
756 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, pup_cnt);
757 DEBUG_TRAINING_IP_ENGINE(
758 DEBUG_LEVEL_TRACE,
759 ("if_id %d start_pup %d end_pup %d pup_cnt %d\n",
760 if_id, start_pup, end_pup, pup_cnt));
761 if (result_type == RESULT_PER_BIT) {
762 if (bit_num == ALL_BITS_PER_PUP) {
763 start_reg = pup_cnt * BUS_WIDTH_IN_BITS;
764 end_reg = (pup_cnt + 1) * BUS_WIDTH_IN_BITS - 1;
765 } else {
766 start_reg =
767 pup_cnt * BUS_WIDTH_IN_BITS + bit_num;
768 end_reg = pup_cnt * BUS_WIDTH_IN_BITS + bit_num;
769 }
770 } else {
771 start_reg = pup_cnt;
772 end_reg = pup_cnt;
773 }
774
775 interface_train_res =
776 ddr3_tip_get_buf_ptr(dev_num, search, result_type,
777 if_id);
778 DEBUG_TRAINING_IP_ENGINE(
779 DEBUG_LEVEL_TRACE,
780 ("start_reg %d end_reg %d interface %p\n",
781 start_reg, end_reg, interface_train_res));
782 if (interface_train_res == NULL) {
783 DEBUG_TRAINING_IP_ENGINE(
784 DEBUG_LEVEL_ERROR,
785 ("interface_train_res is NULL\n"));
786 return MV_FAIL;
787 }
788
789 for (reg_offset = start_reg; reg_offset <= end_reg;
790 reg_offset++) {
791 if (operation == TRAINING_LOAD_OPERATION_UNLOAD) {
792 if (is_read_from_db == 0) {
793 CHECK_STATUS(ddr3_tip_if_read
794 (dev_num,
795 ACCESS_TYPE_UNICAST,
796 if_id,
797 reg_addr[reg_offset],
798 read_data,
799 MASK_ALL_BITS));
800 if (is_check_result_validity == 1) {
801 if ((read_data[if_id] &
802 TIP_ENG_LOCK) == 0) {
803 interface_train_res
804 [reg_offset] =
805 TIP_ENG_LOCK +
806 TIP_TX_DLL_RANGE_MAX;
807 } else {
808 interface_train_res
809 [reg_offset] =
810 read_data
811 [if_id] +
812 cons_tap;
813 }
814 } else {
815 interface_train_res[reg_offset]
816 = read_data[if_id] +
817 cons_tap;
818 }
819 DEBUG_TRAINING_IP_ENGINE
820 (DEBUG_LEVEL_TRACE,
821 ("reg_offset %d value 0x%x addr %p\n",
822 reg_offset,
823 interface_train_res
824 [reg_offset],
825 &interface_train_res
826 [reg_offset]));
827 } else {
828 *load_res =
829 &interface_train_res[start_reg];
830 DEBUG_TRAINING_IP_ENGINE
831 (DEBUG_LEVEL_TRACE,
832 ("*load_res %p\n", *load_res));
833 }
834 } else {
835 DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_TRACE,
836 ("not supported\n"));
837 }
838 }
839 }
840
841 return MV_OK;
842}
843
844
845
846
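/* load all supported training patterns (except PATTERN_TEST) into DRAM */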
847int ddr3_tip_load_all_pattern_to_mem(u32 dev_num)
848{
849 u32 pattern = 0, if_id;
850 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
851
852 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
853 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
854 training_result[training_stage][if_id] = TEST_SUCCESS;
855 }
856
857 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
858 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
859
860 CHECK_STATUS(ddr3_tip_if_write
861 (dev_num, ACCESS_TYPE_UNICAST, if_id,
862 DUAL_DUNIT_CFG_REG, (1 << 3), (1 << 3)));
863 }
864
865 for (pattern = 0; pattern < PATTERN_LAST; pattern++) {
866 if (pattern == PATTERN_TEST)
867 continue;
868 ddr3_tip_load_pattern_to_mem(dev_num, pattern);
869 }
870
871 return MV_OK;
872}
873
874
875
876
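/* write a single training pattern from the ODPG into DRAM */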
877int ddr3_tip_load_pattern_to_mem(u32 dev_num, enum hws_pattern pattern)
878{
879 u32 reg_data, if_id;
880 struct pattern_info *pattern_table = ddr3_tip_get_pattern_table();
881 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
882
883
884
885
886
887
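	/* compose the ODPG control word from the pattern's phase/burst parameters for the effective CS */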
888 reg_data =
889 0x1 | (pattern_table[pattern].num_of_phases_tx << 5) |
890 (pattern_table[pattern].tx_burst_size << 11) |
891 (pattern_table[pattern].delay_between_bursts << 15) |
892 (pattern_table[pattern].num_of_phases_rx << 21) | (0x1 << 25) |
893 (effective_cs << 26);
894 CHECK_STATUS(ddr3_tip_if_write
895 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
896 ODPG_DATA_CTRL_REG, reg_data, MASK_ALL_BITS));
897
898 CHECK_STATUS(ddr3_tip_if_write
899 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
900 ODPG_DATA_CTRL_REG, (0x1 | (effective_cs << 26)),
901 0xc000003));
902
903 CHECK_STATUS(ddr3_tip_if_write
904 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
905 ODPG_DATA_WR_DATA_ERR_REG, 0, 0x1));
906
907 ddr3_tip_load_pattern_to_odpg(dev_num, ACCESS_TYPE_MULTICAST,
908 PARAM_NOT_CARE, pattern,
909 pattern_table[pattern].start_addr);
910
911 if (ddr3_tip_dev_attr_get(dev_num, MV_ATTR_TIP_REV) >= MV_TIP_REV_3) {
912 for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
913 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
914
915 CHECK_STATUS(ddr3_tip_if_write
916 (dev_num, ACCESS_TYPE_UNICAST, if_id,
917 SDRAM_ODT_CTRL_HIGH_REG,
918 0x3, 0xf));
919 }
920
921 mv_ddr_odpg_enable();
922 } else {
923 CHECK_STATUS(ddr3_tip_if_write
924 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
925 ODPG_DATA_CTRL_REG, (u32)(0x1 << 31),
926 (u32)(0x1 << 31)));
927 }
928 mdelay(1);
929
930 if (mv_ddr_is_odpg_done(MAX_POLLING_ITERATIONS) != MV_OK)
931 return MV_FAIL;
932
933
934 CHECK_STATUS(ddr3_tip_if_write
935 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
936 ODPG_DATA_CTRL_REG, (0x1 << 30), (u32) (0x3 << 30)));
937
938
939 CHECK_STATUS(ddr3_tip_if_write
940 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
941 ODPG_DATA_CTRL_REG, 0, MASK_ALL_BITS));
942
943 if (ddr3_tip_dev_attr_get(dev_num, MV_ATTR_TIP_REV) >= MV_TIP_REV_3) {
944
945 CHECK_STATUS(ddr3_tip_if_write
946 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
947 SDRAM_ODT_CTRL_HIGH_REG, 0x0, 0xf));
948 }
949
950 mdelay(1);
951
952 return MV_OK;
953}
954
955
956
957
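/*
 * validate parameters, run the training IP for the requested search
 * direction(s) and unload the per-interface results into the result buffer
 */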
958int ddr3_tip_ip_training_wrapper_int(u32 dev_num,
959 enum hws_access_type access_type,
960 u32 if_id,
961 enum hws_access_type pup_access_type,
962 u32 pup_num, u32 bit_num,
963 enum hws_training_result result_type,
964 enum hws_control_element control_element,
965 enum hws_search_dir search_dir,
966 enum hws_dir direction,
967 u32 interface_mask, u32 init_value_l2h,
968 u32 init_value_h2l, u32 num_iter,
969 enum hws_pattern pattern,
970 enum hws_edge_compare edge_comp,
971 enum hws_ddr_cs train_cs_type, u32 cs_num,
972 enum hws_training_ip_stat *train_status)
973{
974 u32 interface_num = 0, start_if, end_if, init_value_used;
975 enum hws_search_dir search_dir_id, start_search, end_search;
976 enum hws_edge_compare edge_comp_used;
977 u8 cons_tap = 0;
978 u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
979 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
980
981 if (train_status == NULL) {
982 DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR,
983 ("train_status is NULL\n"));
984 return MV_FAIL;
985 }
986
987 if ((train_cs_type > CS_NON_SINGLE) ||
988 (edge_comp >= EDGE_PFP) ||
989 (pattern >= PATTERN_LAST) ||
990 (direction > OPER_WRITE_AND_READ) ||
991 (search_dir > HWS_HIGH2LOW) ||
992 (control_element > HWS_CONTROL_ELEMENT_DQS_SKEW) ||
993 (result_type > RESULT_PER_BYTE) ||
994 (pup_num >= octets_per_if_num) ||
995 (pup_access_type > ACCESS_TYPE_MULTICAST) ||
996 (if_id > 11) || (access_type > ACCESS_TYPE_MULTICAST)) {
997 DEBUG_TRAINING_IP_ENGINE(
998 DEBUG_LEVEL_ERROR,
999 ("wrong parameter train_cs_type %d edge_comp %d pattern %d direction %d search_dir %d control_element %d result_type %d pup_num %d pup_access_type %d if_id %d access_type %d\n",
1000 train_cs_type, edge_comp, pattern, direction,
1001 search_dir, control_element, result_type, pup_num,
1002 pup_access_type, if_id, access_type));
1003 return MV_FAIL;
1004 }
1005
1006 if (edge_comp == EDGE_FPF) {
1007 start_search = HWS_LOW2HIGH;
1008 end_search = HWS_HIGH2LOW;
1009 edge_comp_used = EDGE_FP;
1010 } else {
1011 start_search = search_dir;
1012 end_search = search_dir;
1013 edge_comp_used = edge_comp;
1014 }
1015
1016 for (search_dir_id = start_search; search_dir_id <= end_search;
1017 search_dir_id++) {
1018 init_value_used = (search_dir_id == HWS_LOW2HIGH) ?
1019 init_value_l2h : init_value_h2l;
1020 DEBUG_TRAINING_IP_ENGINE(
1021 DEBUG_LEVEL_TRACE,
1022 ("dev_num %d, access_type %d, if_id %d, pup_access_type %d,pup_num %d, result_type %d, control_element %d search_dir_id %d, direction %d, interface_mask %d,init_value_used %d, num_iter %d, pattern %d, edge_comp_used %d, train_cs_type %d, cs_num %d\n",
1023 dev_num, access_type, if_id, pup_access_type, pup_num,
1024 result_type, control_element, search_dir_id,
1025 direction, interface_mask, init_value_used, num_iter,
1026 pattern, edge_comp_used, train_cs_type, cs_num));
1027
1028 ddr3_tip_ip_training(dev_num, access_type, if_id,
1029 pup_access_type, pup_num, result_type,
1030 control_element, search_dir_id, direction,
1031 interface_mask, init_value_used, num_iter,
1032 pattern, edge_comp_used, train_cs_type,
1033 cs_num, train_status);
1034 if (access_type == ACCESS_TYPE_MULTICAST) {
1035 start_if = 0;
1036 end_if = MAX_INTERFACE_NUM - 1;
1037 } else {
1038 start_if = if_id;
1039 end_if = if_id;
1040 }
1041
1042 for (interface_num = start_if; interface_num <= end_if;
1043 interface_num++) {
1044 VALIDATE_IF_ACTIVE(tm->if_act_mask, interface_num);
1045 cs_num = 0;
1046 CHECK_STATUS(ddr3_tip_read_training_result
1047 (dev_num, interface_num, pup_access_type,
1048 pup_num, bit_num, search_dir_id,
1049 direction, result_type,
1050 TRAINING_LOAD_OPERATION_UNLOAD,
1051 train_cs_type, NULL, 0, cons_tap,
1052 0));
1053 }
1054 }
1055
1056 return MV_OK;
1057}
1058
1059
1060
1061
1062
1063
1064
1065
1066
1067
1068
1069
1070
1071
1072
1073
1074
1075
1076
1077
1078
1079
1080
1081
1082
1083
1084
1085
1086
1087
1088
1089
1090
1091
1092
1093
1094
1095
1096
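/*
 * run low-to-high and high-to-low searches, classify every bit of each
 * subphy (low/high UI, split-in, split-out), re-run the searches for
 * subphys with split-out bits and derive a per-byte status
 */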
1097int ddr3_tip_ip_training_wrapper(u32 dev_num, enum hws_access_type access_type,
1098 u32 if_id,
1099 enum hws_access_type pup_access_type,
1100 u32 pup_num,
1101 enum hws_training_result result_type,
1102 enum hws_control_element control_element,
1103 enum hws_search_dir search_dir,
1104 enum hws_dir direction, u32 interface_mask,
1105 u32 init_value_l2h, u32 init_value_h2l,
1106 u32 num_iter, enum hws_pattern pattern,
1107 enum hws_edge_compare edge_comp,
1108 enum hws_ddr_cs train_cs_type, u32 cs_num,
1109 enum hws_training_ip_stat *train_status)
1110{
1111 u8 e1, e2;
1112 u32 bit_id, start_if, end_if, bit_end = 0;
1113 u32 *result[HWS_SEARCH_DIR_LIMIT] = { 0 };
1114 u8 cons_tap = (direction == OPER_WRITE) ? (64) : (0);
1115 u8 bit_bit_mask[MAX_BUS_NUM] = { 0 }, bit_bit_mask_active = 0;
1116 u8 bit_state[MAX_BUS_NUM * BUS_WIDTH_IN_BITS] = {0};
1117 u8 h2l_adll_value[MAX_BUS_NUM][BUS_WIDTH_IN_BITS];
1118 u8 l2h_adll_value[MAX_BUS_NUM][BUS_WIDTH_IN_BITS];
1119 u8 center_subphy_adll_window[MAX_BUS_NUM];
1120 u8 min_center_subphy_adll[MAX_BUS_NUM];
1121 u8 max_center_subphy_adll[MAX_BUS_NUM];
1122 u32 *l2h_if_train_res = NULL;
1123 u32 *h2l_if_train_res = NULL;
1124 enum hws_search_dir search_dir_id;
1125 int status;
1126 u32 bit_lock_result;
1127
1128 u8 sybphy_id;
1129 u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
1130 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
1131
1132 if (pup_num >= octets_per_if_num) {
1133 DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR,
1134 ("pup_num %d not valid\n", pup_num));
1135 }
1136
1137 if (if_id >= MAX_INTERFACE_NUM) {
1138 DEBUG_TRAINING_IP_ENGINE(DEBUG_LEVEL_ERROR,
1139 ("if_id %d not valid\n", if_id));
1140 }
1141
1142 status = ddr3_tip_ip_training_wrapper_int
1143 (dev_num, access_type, if_id, pup_access_type, pup_num,
1144 ALL_BITS_PER_PUP, result_type, control_element,
1145 search_dir, direction, interface_mask, init_value_l2h,
1146 init_value_h2l, num_iter, pattern, edge_comp,
1147 train_cs_type, cs_num, train_status);
1148
1149 if (MV_OK != status)
1150 return status;
1151
1152 if (access_type == ACCESS_TYPE_MULTICAST) {
1153 start_if = 0;
1154 end_if = MAX_INTERFACE_NUM - 1;
1155 } else {
1156 start_if = if_id;
1157 end_if = if_id;
1158 }
1159
1160 for (if_id = start_if; if_id <= end_if; if_id++) {
1161 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
1162
1163 bit_bit_mask_active = 0;
1164 memset(bit_state, 0, sizeof(bit_state));
1165
1166 for (sybphy_id = 0; sybphy_id < octets_per_if_num; sybphy_id++) {
1167 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, sybphy_id);
1168 if (result_type == RESULT_PER_BIT)
1169 bit_end = BUS_WIDTH_IN_BITS;
1170 else
1171 bit_end = 0;
1172
1173
1174 bit_bit_mask[sybphy_id] = 0;
1175 byte_status[if_id][sybphy_id] = BYTE_NOT_DEFINED;
1176 for (bit_id = 0; bit_id < bit_end; bit_id++) {
1177 h2l_adll_value[sybphy_id][bit_id] = 64;
1178 l2h_adll_value[sybphy_id][bit_id] = 0;
1179 for (search_dir_id = HWS_LOW2HIGH; search_dir_id <= HWS_HIGH2LOW;
1180 search_dir_id++) {
1181 status = ddr3_tip_read_training_result
1182 (dev_num, if_id,
1183 ACCESS_TYPE_UNICAST, sybphy_id, bit_id,
1184 search_dir_id, direction, result_type,
1185 TRAINING_LOAD_OPERATION_UNLOAD, CS_SINGLE,
1186 &result[search_dir_id], 1, 0, 0);
1187
1188 if (MV_OK != status)
1189 return status;
1190 }
1191
1192 e1 = GET_TAP_RESULT(result[HWS_LOW2HIGH][0], EDGE_1);
1193 e2 = GET_TAP_RESULT(result[HWS_HIGH2LOW][0], EDGE_1);
1194 DEBUG_TRAINING_IP_ENGINE
1195 (DEBUG_LEVEL_INFO,
1196 ("if_id %d sybphy_id %d bit %d l2h 0x%x (e1 0x%x) h2l 0x%x (e2 0x%x)\n",
1197 if_id, sybphy_id, bit_id, result[HWS_LOW2HIGH][0], e1,
1198 result[HWS_HIGH2LOW][0], e2));
1199 bit_lock_result =
1200 (GET_LOCK_RESULT(result[HWS_LOW2HIGH][0]) &&
1201 GET_LOCK_RESULT(result[HWS_HIGH2LOW][0]));
1202
1203 if (bit_lock_result) {
1204
1205 if (direction == OPER_READ) {
1206 byte_status[if_id][sybphy_id] |= BYTE_HOMOGENEOUS_LOW;
1207 } else if ((e2 - e1) > 32) {
1208
1209 bit_state[sybphy_id * BUS_WIDTH_IN_BITS + bit_id] =
1210 BIT_SPLIT_OUT;
1211 byte_status[if_id][sybphy_id] |= BYTE_HOMOGENEOUS_SPLIT_OUT;
1212
1213 bit_bit_mask[sybphy_id] |= (1 << bit_id);
1214 bit_bit_mask_active = 1;
1215 DEBUG_TRAINING_IP_ENGINE
1216 (DEBUG_LEVEL_TRACE,
1217 ("if_id %d sybphy_id %d bit %d BIT_SPLIT_OUT\n",
1218 if_id, sybphy_id, bit_id));
1219 } else {
1220
1221 if (e1 <= 31 && e2 <= 31) {
1222 bit_state[sybphy_id * BUS_WIDTH_IN_BITS + bit_id] =
1223 BIT_LOW_UI;
1224 byte_status[if_id][sybphy_id] |= BYTE_HOMOGENEOUS_LOW;
1225 l2h_adll_value[sybphy_id][bit_id] = e1;
1226 h2l_adll_value[sybphy_id][bit_id] = e2;
1227 DEBUG_TRAINING_IP_ENGINE
1228 (DEBUG_LEVEL_TRACE,
1229 ("if_id %d sybphy_id %d bit %d BIT_LOW_UI\n",
1230 if_id, sybphy_id, bit_id));
1231 }
1232
1233 if (e1 >= 32 && e2 >= 32) {
1234 bit_state[sybphy_id * BUS_WIDTH_IN_BITS + bit_id] =
1235 BIT_HIGH_UI;
1236 byte_status[if_id][sybphy_id] |= BYTE_HOMOGENEOUS_HIGH;
1237 l2h_adll_value[sybphy_id][bit_id] = e1;
1238 h2l_adll_value[sybphy_id][bit_id] = e2;
1239 DEBUG_TRAINING_IP_ENGINE
1240 (DEBUG_LEVEL_TRACE,
1241 ("if_id %d sybphy_id %d bit %d BIT_HIGH_UI\n",
1242 if_id, sybphy_id, bit_id));
1243 }
1244
1245 if (e1 <= 31 && e2 >= 32) {
1246 bit_state[sybphy_id * BUS_WIDTH_IN_BITS + bit_id] =
1247 BIT_SPLIT_IN;
1248 byte_status[if_id][sybphy_id] |=
1249 BYTE_HOMOGENEOUS_SPLIT_IN;
1250 l2h_adll_value[sybphy_id][bit_id] = e1;
1251 h2l_adll_value[sybphy_id][bit_id] = e2;
1252 DEBUG_TRAINING_IP_ENGINE
1253 (DEBUG_LEVEL_TRACE,
1254 ("if_id %d sybphy_id %d bit %d BIT_SPLIT_IN\n",
1255 if_id, sybphy_id, bit_id));
1256 }
1257 }
1258 } else {
1259 DEBUG_TRAINING_IP_ENGINE
1260 (DEBUG_LEVEL_INFO,
1261 ("if_id %d sybphy_id %d bit %d l2h 0x%x (e1 0x%x)"
1262 "h2l 0x%x (e2 0x%x): bit cannot be categorized\n",
1263 if_id, sybphy_id, bit_id, result[HWS_LOW2HIGH][0], e1,
1264 result[HWS_HIGH2LOW][0], e2));
1265
1266 byte_status[if_id][sybphy_id] = BYTE_NOT_DEFINED;
1267 break;
1268 }
1269 }
1270 }
1271
1272
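		/* re-run both searches for subphys that have split-out bits and reload their edge values */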
1273 if (bit_bit_mask_active != 0) {
1274 l2h_if_train_res = ddr3_tip_get_buf_ptr(dev_num, HWS_LOW2HIGH, result_type, if_id);
1275 h2l_if_train_res = ddr3_tip_get_buf_ptr(dev_num, HWS_HIGH2LOW, result_type, if_id);
1276
1277 ddr3_tip_ip_training
1278 (dev_num, ACCESS_TYPE_UNICAST,
1279 if_id, ACCESS_TYPE_MULTICAST,
1280 PARAM_NOT_CARE, result_type,
1281 control_element, HWS_LOW2HIGH,
1282 direction, interface_mask,
1283 num_iter / 2, num_iter / 2,
1284 pattern, EDGE_FP, train_cs_type,
1285 cs_num, train_status);
1286
1287 for (sybphy_id = 0; sybphy_id < octets_per_if_num; sybphy_id++) {
1288 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, sybphy_id);
1289 if (byte_status[if_id][sybphy_id] != BYTE_NOT_DEFINED) {
1290 if (bit_bit_mask[sybphy_id] == 0)
1291 continue;
1292
1293 for (bit_id = 0; bit_id < bit_end; bit_id++) {
1294 if ((bit_bit_mask[sybphy_id] & (1 << bit_id)) == 0)
1295 continue;
1296
1297
1298 status = ddr3_tip_read_training_result
1299 (dev_num, if_id, ACCESS_TYPE_UNICAST, sybphy_id,
1300 bit_id, HWS_LOW2HIGH, direction, result_type,
1301 TRAINING_LOAD_OPERATION_UNLOAD, CS_SINGLE,
1302 &l2h_if_train_res, 0, 0, 1);
1303
1304 if (MV_OK != status)
1305 return status;
1306
1307 l2h_adll_value[sybphy_id][bit_id] =
1308 l2h_if_train_res[sybphy_id *
1309 BUS_WIDTH_IN_BITS + bit_id] & PUP_RESULT_EDGE_1_MASK;
1310 }
1311 }
1312 }
1313
1314 ddr3_tip_ip_training
1315 (dev_num, ACCESS_TYPE_UNICAST,
1316 if_id, ACCESS_TYPE_MULTICAST,
1317 PARAM_NOT_CARE, result_type,
1318 control_element, HWS_HIGH2LOW,
1319 direction, interface_mask,
1320 num_iter / 2, num_iter / 2,
1321 pattern, EDGE_FP, train_cs_type,
1322 cs_num, train_status);
1323
1324 for (sybphy_id = 0; sybphy_id < octets_per_if_num; sybphy_id++) {
1325 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, sybphy_id);
1326 if (byte_status[if_id][sybphy_id] != BYTE_NOT_DEFINED) {
1327 if (bit_bit_mask[sybphy_id] == 0)
1328 continue;
1329
1330 for (bit_id = 0; bit_id < bit_end; bit_id++) {
1331 if ((bit_bit_mask[sybphy_id] & (1 << bit_id)) == 0)
1332 continue;
1333
1334 status = ddr3_tip_read_training_result
1335 (dev_num, if_id, ACCESS_TYPE_UNICAST, sybphy_id,
1336 bit_id, HWS_HIGH2LOW, direction, result_type,
1337 TRAINING_LOAD_OPERATION_UNLOAD, CS_SINGLE,
1338 &h2l_if_train_res, 0, cons_tap, 1);
1339
1340 if (MV_OK != status)
1341 return status;
1342
1343 h2l_adll_value[sybphy_id][bit_id] =
1344 h2l_if_train_res[sybphy_id *
1345 BUS_WIDTH_IN_BITS + bit_id] & PUP_RESULT_EDGE_1_MASK;
1346 }
1347 }
1348 }
1349 }
1350
1351
1352
1353
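		/* compute, per subphy, the window between the min and max per-bit center values (write direction only) */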
1354 for (sybphy_id = 0; sybphy_id < octets_per_if_num; sybphy_id++) {
1355 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, sybphy_id);
1356
1357 if ((byte_status[if_id][sybphy_id] != BYTE_NOT_DEFINED) && (direction == OPER_WRITE)) {
1358
1359 center_subphy_adll_window[sybphy_id] = 0;
1360 max_center_subphy_adll[sybphy_id] = 0;
1361 min_center_subphy_adll[sybphy_id] = 64;
1362
1363 for (bit_id = 0; bit_id < bit_end; bit_id++) {
1364
1365 DEBUG_TRAINING_IP_ENGINE
1366 (DEBUG_LEVEL_TRACE,
1367 ("if_id %d sybphy_id %d bit %d l2h %d h2l %d\n",
1368 if_id, sybphy_id, bit_id, l2h_adll_value[sybphy_id][bit_id],
1369 h2l_adll_value[sybphy_id][bit_id]));
1370
1371 if (((l2h_adll_value[sybphy_id][bit_id] +
1372 h2l_adll_value[sybphy_id][bit_id]) / 2) >
1373 max_center_subphy_adll[sybphy_id])
1374 max_center_subphy_adll[sybphy_id] =
1375 (l2h_adll_value[sybphy_id][bit_id] +
1376 h2l_adll_value[sybphy_id][bit_id]) / 2;
1377 if (((l2h_adll_value[sybphy_id][bit_id] +
1378 h2l_adll_value[sybphy_id][bit_id]) / 2) <
1379 min_center_subphy_adll[sybphy_id])
1380 min_center_subphy_adll[sybphy_id] =
1381 (l2h_adll_value[sybphy_id][bit_id] +
1382 h2l_adll_value[sybphy_id][bit_id]) / 2;
1383 }
1384
1385
1386 center_subphy_adll_window[sybphy_id] =
1387 max_center_subphy_adll[sybphy_id] -
1388 min_center_subphy_adll[sybphy_id];
1389 DEBUG_TRAINING_IP_ENGINE
1390 (DEBUG_LEVEL_TRACE,
1391 ("if_id %d sybphy_id %d min center %d max center %d center %d\n",
1392 if_id, sybphy_id, min_center_subphy_adll[sybphy_id],
1393 max_center_subphy_adll[sybphy_id],
1394 center_subphy_adll_window[sybphy_id]));
1395 }
1396 }
1397
1398
1399
1400
1401
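		/* for subphys whose bits are not homogeneous, decide between split-in and split-out-mix from the center window size */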
1402 for (sybphy_id = 0; sybphy_id < octets_per_if_num; sybphy_id++) {
1403 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, sybphy_id);
1404 if ((byte_status[if_id][sybphy_id] == BYTE_HOMOGENEOUS_LOW) ||
1405 (byte_status[if_id][sybphy_id] == BYTE_HOMOGENEOUS_HIGH) ||
1406 (byte_status[if_id][sybphy_id] == BYTE_HOMOGENEOUS_SPLIT_IN) ||
1407 (byte_status[if_id][sybphy_id] == BYTE_HOMOGENEOUS_SPLIT_OUT) ||
1408 (byte_status[if_id][sybphy_id] == BYTE_NOT_DEFINED))
1409 continue;
1410
1411
1412
1413
1414
1415
1416 if (center_subphy_adll_window[sybphy_id] <= 31)
1417 byte_status[if_id][sybphy_id] = BYTE_HOMOGENEOUS_SPLIT_IN;
1418
1419
1420
1421
1422
1423
1424 if (center_subphy_adll_window[sybphy_id] >= 32) {
1425 byte_status[if_id][sybphy_id] = BYTE_SPLIT_OUT_MIX;
1426
1427 DEBUG_TRAINING_IP_ENGINE
1428 (DEBUG_LEVEL_TRACE,
1429 ("if_id %d sybphy_id %d byte state 0x%x\n",
1430 if_id, sybphy_id, byte_status[if_id][sybphy_id]));
1431 for (bit_id = 0; bit_id < bit_end; bit_id++) {
1432 if (bit_state[sybphy_id * BUS_WIDTH_IN_BITS + bit_id] == BIT_LOW_UI) {
1433 l2h_if_train_res[sybphy_id * BUS_WIDTH_IN_BITS + bit_id] += 64;
1434 h2l_if_train_res[sybphy_id * BUS_WIDTH_IN_BITS + bit_id] += 64;
1435 }
1436 DEBUG_TRAINING_IP_ENGINE
1437 (DEBUG_LEVEL_TRACE,
1438 ("if_id %d sybphy_id %d bit_id %d added 64 adlls\n",
1439 if_id, sybphy_id, bit_id));
1440 }
1441 }
1442 }
1443 }
1444
1445 return MV_OK;
1446}
1447
1448u8 mv_ddr_tip_sub_phy_byte_status_get(u32 if_id, u32 subphy_id)
1449{
1450 return byte_status[if_id][subphy_id];
1451}
1452
1453void mv_ddr_tip_sub_phy_byte_status_set(u32 if_id, u32 subphy_id, u8 byte_status_data)
1454{
1455 byte_status[if_id][subphy_id] = byte_status_data;
1456}
1457
1458
1459
1460
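/* back up (b_load == 1) or restore the CTX, RL and CRX PHY registers of every active subphy */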
1461int ddr3_tip_load_phy_values(int b_load)
1462{
1463 u32 bus_cnt = 0, if_id, dev_num = 0;
1464 u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
1465 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
1466
1467 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
1468 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
1469 for (bus_cnt = 0; bus_cnt < octets_per_if_num; bus_cnt++) {
1470 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_cnt);
1471 if (b_load == 1) {
1472 CHECK_STATUS(ddr3_tip_bus_read
1473 (dev_num, if_id,
1474 ACCESS_TYPE_UNICAST, bus_cnt,
1475 DDR_PHY_DATA,
1476 CTX_PHY_REG(effective_cs),
1477 &phy_reg_bk[if_id][bus_cnt]
1478 [0]));
1479 CHECK_STATUS(ddr3_tip_bus_read
1480 (dev_num, if_id,
1481 ACCESS_TYPE_UNICAST, bus_cnt,
1482 DDR_PHY_DATA,
1483 RL_PHY_REG(effective_cs),
1484 &phy_reg_bk[if_id][bus_cnt]
1485 [1]));
1486 CHECK_STATUS(ddr3_tip_bus_read
1487 (dev_num, if_id,
1488 ACCESS_TYPE_UNICAST, bus_cnt,
1489 DDR_PHY_DATA,
1490 CRX_PHY_REG(effective_cs),
1491 &phy_reg_bk[if_id][bus_cnt]
1492 [2]));
1493 } else {
1494 CHECK_STATUS(ddr3_tip_bus_write
1495 (dev_num, ACCESS_TYPE_UNICAST,
1496 if_id, ACCESS_TYPE_UNICAST,
1497 bus_cnt, DDR_PHY_DATA,
1498 CTX_PHY_REG(effective_cs),
1499 phy_reg_bk[if_id][bus_cnt]
1500 [0]));
1501 CHECK_STATUS(ddr3_tip_bus_write
1502 (dev_num, ACCESS_TYPE_UNICAST,
1503 if_id, ACCESS_TYPE_UNICAST,
1504 bus_cnt, DDR_PHY_DATA,
1505 RL_PHY_REG(effective_cs),
1506 phy_reg_bk[if_id][bus_cnt]
1507 [1]));
1508 CHECK_STATUS(ddr3_tip_bus_write
1509 (dev_num, ACCESS_TYPE_UNICAST,
1510 if_id, ACCESS_TYPE_UNICAST,
1511 bus_cnt, DDR_PHY_DATA,
1512 CRX_PHY_REG(effective_cs),
1513 phy_reg_bk[if_id][bus_cnt]
1514 [2]));
1515 }
1516 }
1517 }
1518
1519 return MV_OK;
1520}
1521
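/*
 * debug/test helper: back up the PHY values, sweep the given patterns in
 * both search directions, dump the results and restore the PHY values
 */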
1522int ddr3_tip_training_ip_test(u32 dev_num, enum hws_training_result result_type,
1523 enum hws_search_dir search_dir,
1524 enum hws_dir direction,
1525 enum hws_edge_compare edge,
1526 u32 init_val1, u32 init_val2,
1527 u32 num_of_iterations,
1528 u32 start_pattern, u32 end_pattern)
1529{
1530 u32 pattern, if_id, pup_id;
1531 enum hws_training_ip_stat train_status[MAX_INTERFACE_NUM];
1532 u32 *res = NULL;
1533 u32 search_state = 0;
1534 u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
1535 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
1536
1537 ddr3_tip_load_phy_values(1);
1538
1539 for (pattern = start_pattern; pattern <= end_pattern; pattern++) {
1540 for (search_state = 0; search_state < HWS_SEARCH_DIR_LIMIT;
1541 search_state++) {
1542 ddr3_tip_ip_training_wrapper(dev_num,
1543 ACCESS_TYPE_MULTICAST, 0,
1544 ACCESS_TYPE_MULTICAST, 0,
1545 result_type,
1546 HWS_CONTROL_ELEMENT_ADLL,
1547 search_dir, direction,
1548 0xfff, init_val1,
1549 init_val2,
1550 num_of_iterations, pattern,
1551 edge, CS_SINGLE,
1552 PARAM_NOT_CARE,
1553 train_status);
1554
1555 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1;
1556 if_id++) {
1557 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
1558 for (pup_id = 0; pup_id <
1559 octets_per_if_num;
1560 pup_id++) {
1561 VALIDATE_BUS_ACTIVE(tm->bus_act_mask,
1562 pup_id);
1563 CHECK_STATUS
1564 (ddr3_tip_read_training_result
1565 (dev_num, if_id,
1566 ACCESS_TYPE_UNICAST, pup_id,
1567 ALL_BITS_PER_PUP,
1568 search_state,
1569 direction, result_type,
1570 TRAINING_LOAD_OPERATION_UNLOAD,
1571 CS_SINGLE, &res, 1, 0,
1572 0));
1573 if (result_type == RESULT_PER_BYTE) {
1574 DEBUG_TRAINING_IP_ENGINE
1575 (DEBUG_LEVEL_INFO,
1576 ("search_state %d if_id %d pup_id %d 0x%x\n",
1577 search_state, if_id,
1578 pup_id, res[0]));
1579 } else {
1580 DEBUG_TRAINING_IP_ENGINE
1581 (DEBUG_LEVEL_INFO,
1582 ("search_state %d if_id %d pup_id %d 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x\n",
1583 search_state, if_id,
1584 pup_id, res[0],
1585 res[1], res[2],
1586 res[3], res[4],
1587 res[5], res[6],
1588 res[7]));
1589 }
1590 }
1591 }
1592 }
1593 }
1594
1595 ddr3_tip_load_phy_values(0);
1596
1597 return MV_OK;
1598}
1599
1600int mv_ddr_pattern_start_addr_set(struct pattern_info *pattern_tbl, enum hws_pattern pattern, u32 addr)
1601{
1602 pattern_tbl[pattern].start_addr = addr;
1603
1604 return 0;
1605}
1606
struct pattern_info *ddr3_tip_get_pattern_table(void)
1608{
1609 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
1610
1611 if (MV_DDR_IS_64BIT_DRAM_MODE(tm->bus_act_mask))
1612 return pattern_table_64;
1613 else if (DDR3_IS_16BIT_DRAM_MODE(tm->bus_act_mask) == 0)
1614 return pattern_table_32;
1615 else
1616 return pattern_table_16;
1617}
1618
u16 *ddr3_tip_get_mask_results_dq_reg(void)
1620{
1621#if MAX_BUS_NUM == 5
1622 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
1623
1624 if (DDR3_IS_ECC_PUP3_MODE(tm->bus_act_mask))
1625 return mask_results_dq_reg_map_pup3_ecc;
1626 else
1627#endif
1628 return mask_results_dq_reg_map;
1629}
1630
u16 *ddr3_tip_get_mask_results_pup_reg_map(void)
1632{
1633#if MAX_BUS_NUM == 5
1634 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
1635
1636 if (DDR3_IS_ECC_PUP3_MODE(tm->bus_act_mask))
1637 return mask_results_pup_reg_map_pup3_ecc;
1638 else
1639#endif
1640 return mask_results_pup_reg_map;
1641}
1642
1643
1644#define LOW_NIBBLE_BYTE_MASK 0xf
1645#define HIGH_NIBBLE_BYTE_MASK 0xf0
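/* load a data-mask pattern into the ODPG, optionally inverted, alongside the data words */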
1646int mv_ddr_load_dm_pattern_to_odpg(enum hws_access_type access_type, enum hws_pattern pattern,
1647 enum dm_direction dm_dir)
1648{
1649 struct pattern_info *pattern_table = ddr3_tip_get_pattern_table();
1650 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
1651 u32 pattern_len = 0;
1652 u32 data_low, data_high;
1653 u8 dm_data;
1654
1655 for (pattern_len = 0;
1656 pattern_len < pattern_table[pattern].pattern_len;
1657 pattern_len++) {
1658 if (MV_DDR_IS_64BIT_DRAM_MODE(tm->bus_act_mask)) {
1659 data_low = pattern_table_get_word(0, pattern, (u8)pattern_len);
1660 data_high = data_low;
1661 } else {
1662 data_low = pattern_table_get_word(0, pattern, (u8)(pattern_len * 2));
1663 data_high = pattern_table_get_word(0, pattern, (u8)(pattern_len * 2 + 1));
1664 }
1665
1666
1667 if (dm_dir == DM_DIR_INVERSE)
1668 dm_data = ~((data_low & LOW_NIBBLE_BYTE_MASK) | (data_high & HIGH_NIBBLE_BYTE_MASK));
1669 else
1670 dm_data = (data_low & LOW_NIBBLE_BYTE_MASK) | (data_high & HIGH_NIBBLE_BYTE_MASK);
1671
1672 ddr3_tip_if_write(0, access_type, 0, ODPG_DATA_WR_DATA_LOW_REG, data_low, MASK_ALL_BITS);
1673 ddr3_tip_if_write(0, access_type, 0, ODPG_DATA_WR_DATA_HIGH_REG, data_high, MASK_ALL_BITS);
1674 ddr3_tip_if_write(0, access_type, 0, ODPG_DATA_WR_ADDR_REG,
1675 pattern_len | ((dm_data & ODPG_DATA_WR_DATA_MASK) << ODPG_DATA_WR_DATA_OFFS),
1676 MASK_ALL_BITS);
1677 }
1678
1679 return MV_OK;
1680}
1681