1
2
3
4
5
6#include "ddr3_init.h"
7#include "mv_ddr_common.h"
8#include "xor_regs.h"
9
10
11#ifdef MV_DEBUG
12#define DB(x) x
13#else
14#define DB(x)
15#endif
16
17static u32 ui_xor_regs_ctrl_backup;
18static u32 ui_xor_regs_base_backup[MAX_CS_NUM + 1];
19static u32 ui_xor_regs_mask_backup[MAX_CS_NUM + 1];
20
21void mv_sys_xor_init(u32 num_of_cs, u32 cs_ena, uint64_t cs_size, u32 base_delta)
22{
23 u32 reg, ui, cs_count;
24 uint64_t base, size_mask;
25
26 ui_xor_regs_ctrl_backup = reg_read(XOR_WINDOW_CTRL_REG(0, 0));
27 for (ui = 0; ui < MAX_CS_NUM + 1; ui++)
28 ui_xor_regs_base_backup[ui] =
29 reg_read(XOR_BASE_ADDR_REG(0, ui));
30 for (ui = 0; ui < MAX_CS_NUM + 1; ui++)
31 ui_xor_regs_mask_backup[ui] =
32 reg_read(XOR_SIZE_MASK_REG(0, ui));
33
34 reg = 0;
35 for (ui = 0, cs_count = 0;
36 (cs_count < num_of_cs) && (ui < 8);
37 ui++, cs_count++) {
38 if (cs_ena & (1 << ui)) {
39
40 reg |= (0x1 << (ui));
41
42 reg |= (0x3 << ((ui * 2) + 16));
43 }
44 }
45
46 reg_write(XOR_WINDOW_CTRL_REG(0, 0), reg);
47
48 cs_count = 0;
49 for (ui = 0, cs_count = 0;
50 (cs_count < num_of_cs) && (ui < 8);
51 ui++, cs_count++) {
52 if (cs_ena & (1 << ui)) {
53
54
55
56
57 base = cs_size * ui + base_delta;
58
59 size_mask = 0x7FFF0000;
60 switch (ui) {
61 case 0:
62 base |= 0xe00;
63 break;
64 case 1:
65 base |= 0xd00;
66 break;
67 case 2:
68 base |= 0xb00;
69 break;
70 case 3:
71 base |= 0x700;
72 break;
73 case 4:
74 base = 0x40000000;
75
76 base |= 0x1F00;
77 size_mask = 0xF0000;
78 break;
79 }
80
81 reg_write(XOR_BASE_ADDR_REG(0, ui), (u32)base);
82 size_mask = (cs_size / _64K) - 1;
83 size_mask = (size_mask << XESMRX_SIZE_MASK_OFFS) & XESMRX_SIZE_MASK_MASK;
84
85 reg_write(XOR_SIZE_MASK_REG(0, ui), (u32)size_mask);
86 }
87 }
88
89 mv_xor_hal_init(1);
90
91 return;
92}
93
94void mv_sys_xor_finish(void)
95{
96 u32 ui;
97
98 reg_write(XOR_WINDOW_CTRL_REG(0, 0), ui_xor_regs_ctrl_backup);
99 for (ui = 0; ui < MAX_CS_NUM + 1; ui++)
100 reg_write(XOR_BASE_ADDR_REG(0, ui),
101 ui_xor_regs_base_backup[ui]);
102 for (ui = 0; ui < MAX_CS_NUM + 1; ui++)
103 reg_write(XOR_SIZE_MASK_REG(0, ui),
104 ui_xor_regs_mask_backup[ui]);
105
106 reg_write(XOR_ADDR_OVRD_REG(0, 0), 0);
107}
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123void mv_xor_hal_init(u32 xor_chan_num)
124{
125 u32 i;
126
127
128 for (i = 0; i < xor_chan_num; i++) {
129 mv_xor_command_set(i, MV_STOP);
130 mv_xor_ctrl_set(i, (1 << XEXCR_REG_ACC_PROTECT_OFFS) |
131 (4 << XEXCR_DST_BURST_LIMIT_OFFS) |
132 (4 << XEXCR_SRC_BURST_LIMIT_OFFS));
133 }
134}
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151int mv_xor_ctrl_set(u32 chan, u32 xor_ctrl)
152{
153 u32 old_value;
154
155
156 old_value = reg_read(XOR_CONFIG_REG(XOR_UNIT(chan), XOR_CHAN(chan))) &
157 XEXCR_OPERATION_MODE_MASK;
158 xor_ctrl &= ~XEXCR_OPERATION_MODE_MASK;
159 xor_ctrl |= old_value;
160 reg_write(XOR_CONFIG_REG(XOR_UNIT(chan), XOR_CHAN(chan)), xor_ctrl);
161
162 return MV_OK;
163}
164
165int mv_xor_mem_init(u32 chan, u32 start_ptr, unsigned long long block_size,
166 u32 init_val_high, u32 init_val_low)
167{
168 u32 temp;
169
170 if (block_size == _4G)
171 block_size -= 1;
172
173
174 if (chan >= MV_XOR_MAX_CHAN)
175 return MV_BAD_PARAM;
176
177 if (MV_ACTIVE == mv_xor_state_get(chan))
178 return MV_BUSY;
179
180 if ((block_size < XEXBSR_BLOCK_SIZE_MIN_VALUE) ||
181 (block_size > XEXBSR_BLOCK_SIZE_MAX_VALUE))
182 return MV_BAD_PARAM;
183
184
185 temp = reg_read(XOR_CONFIG_REG(XOR_UNIT(chan), XOR_CHAN(chan)));
186 temp &= ~XEXCR_OPERATION_MODE_MASK;
187 temp |= XEXCR_OPERATION_MODE_MEM_INIT;
188 reg_write(XOR_CONFIG_REG(XOR_UNIT(chan), XOR_CHAN(chan)), temp);
189
190
191
192
193
194 reg_write(XOR_DST_PTR_REG(XOR_UNIT(chan), XOR_CHAN(chan)), start_ptr);
195
196
197
198
199
200 reg_write(XOR_BLOCK_SIZE_REG(XOR_UNIT(chan), XOR_CHAN(chan)),
201 block_size);
202
203
204
205
206
207 reg_write(XOR_INIT_VAL_LOW_REG(XOR_UNIT(chan)), init_val_low);
208
209
210
211
212
213 reg_write(XOR_INIT_VAL_HIGH_REG(XOR_UNIT(chan)), init_val_high);
214
215
216 reg_bit_set(XOR_ACTIVATION_REG(XOR_UNIT(chan), XOR_CHAN(chan)),
217 XEXACTR_XESTART_MASK);
218
219 return MV_OK;
220}
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242enum mv_state mv_xor_state_get(u32 chan)
243{
244 u32 state;
245
246
247 if (chan >= MV_XOR_MAX_CHAN) {
248 DB(printf("%s: ERR. Invalid chan num %d\n", __func__, chan));
249 return MV_UNDEFINED_STATE;
250 }
251
252
253 state = reg_read(XOR_ACTIVATION_REG(XOR_UNIT(chan), XOR_CHAN(chan)));
254 state &= XEXACTR_XESTATUS_MASK;
255
256
257 switch (state) {
258 case XEXACTR_XESTATUS_IDLE:
259 return MV_IDLE;
260 case XEXACTR_XESTATUS_ACTIVE:
261 return MV_ACTIVE;
262 case XEXACTR_XESTATUS_PAUSED:
263 return MV_PAUSED;
264 }
265
266 return MV_UNDEFINED_STATE;
267}
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290int mv_xor_command_set(u32 chan, enum mv_command command)
291{
292 enum mv_state state;
293
294
295 if (chan >= MV_XOR_MAX_CHAN) {
296 DB(printf("%s: ERR. Invalid chan num %d\n", __func__, chan));
297 return MV_BAD_PARAM;
298 }
299
300
301 state = mv_xor_state_get(chan);
302
303 if ((command == MV_START) && (state == MV_IDLE)) {
304
305 reg_bit_set(XOR_ACTIVATION_REG
306 (XOR_UNIT(chan), XOR_CHAN(chan)),
307 XEXACTR_XESTART_MASK);
308 return MV_OK;
309 } else if ((command == MV_STOP) && (state == MV_ACTIVE)) {
310
311 reg_bit_set(XOR_ACTIVATION_REG
312 (XOR_UNIT(chan), XOR_CHAN(chan)),
313 XEXACTR_XESTOP_MASK);
314 return MV_OK;
315 } else if (((enum mv_state)command == MV_PAUSED) &&
316 (state == MV_ACTIVE)) {
317
318 reg_bit_set(XOR_ACTIVATION_REG
319 (XOR_UNIT(chan), XOR_CHAN(chan)),
320 XEXACTR_XEPAUSE_MASK);
321 return MV_OK;
322 } else if ((command == MV_RESTART) && (state == MV_PAUSED)) {
323
324 reg_bit_set(XOR_ACTIVATION_REG
325 (XOR_UNIT(chan), XOR_CHAN(chan)),
326 XEXACTR_XERESTART_MASK);
327 return MV_OK;
328 } else if ((command == MV_STOP) && (state == MV_IDLE)) {
329
330 return MV_OK;
331 }
332
333
334 DB(printf("%s: ERR. Illegal command\n", __func__));
335
336 return MV_BAD_PARAM;
337}
338
339void ddr3_new_tip_ecc_scrub(void)
340{
341 u32 cs_c, max_cs;
342 u32 cs_ena = 0;
343 uint64_t total_mem_size, cs_mem_size_mb = 0, cs_mem_size = 0;
344
345 printf("DDR Training Sequence - Start scrubbing\n");
346 max_cs = mv_ddr_cs_num_get();
347 for (cs_c = 0; cs_c < max_cs; cs_c++)
348 cs_ena |= 1 << cs_c;
349
350
351 ddr3_calc_mem_cs_size(0, &cs_mem_size_mb);
352 cs_mem_size = cs_mem_size_mb * _1M;
353 mv_sys_xor_init(max_cs, cs_ena, cs_mem_size, 0);
354 total_mem_size = max_cs * cs_mem_size;
355 mv_xor_mem_init(0, 0, total_mem_size, 0xdeadbeef, 0xdeadbeef);
356
357 while (mv_xor_state_get(0) != MV_IDLE)
358 ;
359
360 mv_sys_xor_finish();
361
362 printf("DDR3 Training Sequence - End scrubbing\n");
363}
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
/*
 * Start a descriptor-chain transfer (XOR / DMA / CRC32) on the given
 * XOR engine channel.
 *
 * chan          - engine channel number (0 .. MV_XOR_MAX_CHAN - 1)
 * type          - operation type: MV_XOR, MV_DMA or MV_CRC32
 * xor_chain_ptr - address of the first descriptor in the chain; the
 *                 required alignment depends on the operation type
 *
 * Returns MV_OK when the channel was started, MV_BAD_PARAM on invalid
 * arguments, MV_BUSY if the channel is already active. Does not wait
 * for completion - the caller polls mv_xor_state_get().
 */
int mv_xor_transfer(u32 chan, enum xor_type type, u32 xor_chain_ptr)
{
	u32 temp;

	/* Parameter sanity checks */
	if (chan >= MV_XOR_MAX_CHAN) {
		DB(printf("%s: ERR. Invalid chan num %d\n", __func__, chan));
		return MV_BAD_PARAM;
	}
	if (mv_xor_state_get(chan) == MV_ACTIVE) {
		DB(printf("%s: ERR. Channel is already active\n", __func__));
		return MV_BUSY;
	}
	if (xor_chain_ptr == 0x0) {
		DB(printf("%s: ERR. xor_chain_ptr is NULL pointer\n", __func__));
		return MV_BAD_PARAM;
	}

	/* Read the channel configuration and clear the operation-mode field */
	temp = reg_read(XOR_CONFIG_REG(XOR_UNIT(chan), XOR_CHAN(chan)));
	temp &= ~XEXCR_OPERATION_MODE_MASK;

	/*
	 * Select the operation mode; each mode imposes its own alignment
	 * requirement on the descriptor chain pointer.
	 */
	switch (type) {
	case MV_XOR:
		if ((xor_chain_ptr & XEXDPR_DST_PTR_XOR_MASK) != 0) {
			DB(printf("%s: ERR. Invalid chain pointer (bits [5:0] must be cleared)\n",
				  __func__));
			return MV_BAD_PARAM;
		}
		/* XOR calculation mode */
		temp |= XEXCR_OPERATION_MODE_XOR;
		break;
	case MV_DMA:
		if ((xor_chain_ptr & XEXDPR_DST_PTR_DMA_MASK) != 0) {
			DB(printf("%s: ERR. Invalid chain pointer (bits [4:0] must be cleared)\n",
				  __func__));
			return MV_BAD_PARAM;
		}
		/* DMA copy mode */
		temp |= XEXCR_OPERATION_MODE_DMA;
		break;
	case MV_CRC32:
		if ((xor_chain_ptr & XEXDPR_DST_PTR_CRC_MASK) != 0) {
			DB(printf("%s: ERR. Invalid chain pointer (bits [4:0] must be cleared)\n",
				  __func__));
			return MV_BAD_PARAM;
		}
		/* CRC-32 calculation mode */
		temp |= XEXCR_OPERATION_MODE_CRC;
		break;
	default:
		return MV_BAD_PARAM;
	}

	/* Commit the new configuration */
	reg_write(XOR_CONFIG_REG(XOR_UNIT(chan), XOR_CHAN(chan)), temp);

	/* Program the address of the first descriptor in the chain */
	reg_write(XOR_NEXT_DESC_PTR_REG(XOR_UNIT(chan), XOR_CHAN(chan)),
		  xor_chain_ptr);

	/* Start the channel */
	reg_bit_set(XOR_ACTIVATION_REG(XOR_UNIT(chan), XOR_CHAN(chan)),
		    XEXACTR_XESTART_MASK);

	return MV_OK;
}
465