1
2
3
4
5
6
7#include <common.h>
8#include <i2c.h>
9#include <spl.h>
10#include <asm/io.h>
11#include <asm/arch/cpu.h>
12#include <asm/arch/soc.h>
13
14#include "xor.h"
15#include "xor_regs.h"
16
17static u32 xor_regs_ctrl_backup;
18static u32 xor_regs_base_backup[MAX_CS];
19static u32 xor_regs_mask_backup[MAX_CS];
20
21static int mv_xor_cmd_set(u32 chan, int command);
22static int mv_xor_ctrl_set(u32 chan, u32 xor_ctrl);
23
/*
 * Set up XOR engine 0 address-decode windows for DDR training.
 *
 * Backs up the current window control/base/mask registers (restored
 * later by mv_sys_xor_finish()), enables one window per DRAM
 * chip-select plus one extra window pointing at the on-chip SRAM,
 * then initializes XOR channel 0 via mv_xor_hal_init(1).
 *
 * @dram_info: DRAM configuration; num_cs and cs_ena select which
 *             chip-select windows are programmed.
 */
void mv_sys_xor_init(MV_DRAM_INFO *dram_info)
{
	u32 reg, ui, base, cs_count;

	/* Save window registers so mv_sys_xor_finish() can restore them */
	xor_regs_ctrl_backup = reg_read(XOR_WINDOW_CTRL_REG(0, 0));
	for (ui = 0; ui < MAX_CS; ui++)
		xor_regs_base_backup[ui] = reg_read(XOR_BASE_ADDR_REG(0, ui));
	for (ui = 0; ui < MAX_CS; ui++)
		xor_regs_mask_backup[ui] = reg_read(XOR_SIZE_MASK_REG(0, ui));

	/*
	 * Build the window control value: windows 0..num_cs are the
	 * chip-select windows plus one SRAM window.
	 */
	reg = 0;
	for (ui = 0; ui < (dram_info->num_cs + 1); ui++) {
		/* Window enable bit for window ui */
		reg |= (0x1 << (ui));
		/*
		 * Two bits per window starting at bit 16 — presumably the
		 * window access-protect field (0x3 = full access); confirm
		 * against the XOR unit register spec.
		 */
		reg |= (0x3 << ((ui * 2) + 16));
	}

	reg_write(XOR_WINDOW_CTRL_REG(0, 0), reg);

	/* Last window (index num_cs) targets the SRAM used for training */
	base = (SRAM_BASE & 0xFFFF0000) | 0x1E00;
	reg_write(XOR_BASE_ADDR_REG(0, dram_info->num_cs), base);

	reg_write(XOR_SIZE_MASK_REG(0, dram_info->num_cs), 0x03FF0000);

	/* One window per enabled chip-select, packed from window 0 up */
	cs_count = 0;
	for (ui = 0; ui < MAX_CS; ui++) {
		if (dram_info->cs_ena & (1 << ui)) {
			/*
			 * Low bits of the base register select the window
			 * target attribute; the value differs per CS
			 * (NOTE(review): magic attribute codes — verify
			 * against the SoC address-decode documentation).
			 */
			base = 0;
			switch (ui) {
			case 0:
				base |= 0xE00;
				break;
			case 1:
				base |= 0xD00;
				break;
			case 2:
				base |= 0xB00;
				break;
			case 3:
				base |= 0x700;
				break;
			}

			reg_write(XOR_BASE_ADDR_REG(0, cs_count), base);

			/* Window size mask in bits [31:16] */
			reg_write(XOR_SIZE_MASK_REG(0, cs_count), 0x0FFF0000);
			cs_count++;
		}
	}

	/* Stop and configure XOR channel 0 */
	mv_xor_hal_init(1);

	return;
}
84
/*
 * Undo mv_sys_xor_init(): restore the XOR engine 0 window control,
 * base and size-mask registers from the saved backups, and clear the
 * address override register.
 */
void mv_sys_xor_finish(void)
{
	u32 ui;

	reg_write(XOR_WINDOW_CTRL_REG(0, 0), xor_regs_ctrl_backup);
	for (ui = 0; ui < MAX_CS; ui++)
		reg_write(XOR_BASE_ADDR_REG(0, ui), xor_regs_base_backup[ui]);
	for (ui = 0; ui < MAX_CS; ui++)
		reg_write(XOR_SIZE_MASK_REG(0, ui), xor_regs_mask_backup[ui]);

	/* Disable any address override left active */
	reg_write(XOR_ADDR_OVRD_REG(0, 0), 0);
}
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112void mv_xor_hal_init(u32 chan_num)
113{
114 u32 i;
115
116
117 for (i = 0; i < chan_num; i++) {
118 mv_xor_cmd_set(i, MV_STOP);
119 mv_xor_ctrl_set(i, (1 << XEXCR_REG_ACC_PROTECT_OFFS) |
120 (4 << XEXCR_DST_BURST_LIMIT_OFFS) |
121 (4 << XEXCR_SRC_BURST_LIMIT_OFFS));
122 }
123}
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141static int mv_xor_ctrl_set(u32 chan, u32 xor_ctrl)
142{
143 u32 val;
144
145
146 val = reg_read(XOR_CONFIG_REG(XOR_UNIT(chan), XOR_CHAN(chan)))
147 & XEXCR_OPERATION_MODE_MASK;
148 xor_ctrl &= ~XEXCR_OPERATION_MODE_MASK;
149 xor_ctrl |= val;
150 reg_write(XOR_CONFIG_REG(XOR_UNIT(chan), XOR_CHAN(chan)), xor_ctrl);
151
152 return MV_OK;
153}
154
155int mv_xor_mem_init(u32 chan, u32 start_ptr, u32 block_size, u32 init_val_high,
156 u32 init_val_low)
157{
158 u32 tmp;
159
160
161 if (chan >= MV_XOR_MAX_CHAN)
162 return MV_BAD_PARAM;
163
164 if (MV_ACTIVE == mv_xor_state_get(chan))
165 return MV_BUSY;
166
167 if ((block_size < XEXBSR_BLOCK_SIZE_MIN_VALUE) ||
168 (block_size > XEXBSR_BLOCK_SIZE_MAX_VALUE))
169 return MV_BAD_PARAM;
170
171
172 tmp = reg_read(XOR_CONFIG_REG(XOR_UNIT(chan), XOR_CHAN(chan)));
173 tmp &= ~XEXCR_OPERATION_MODE_MASK;
174 tmp |= XEXCR_OPERATION_MODE_MEM_INIT;
175 reg_write(XOR_CONFIG_REG(XOR_UNIT(chan), XOR_CHAN(chan)), tmp);
176
177
178
179
180
181 reg_write(XOR_DST_PTR_REG(XOR_UNIT(chan), XOR_CHAN(chan)), start_ptr);
182
183
184
185
186
187 reg_write(XOR_BLOCK_SIZE_REG(XOR_UNIT(chan), XOR_CHAN(chan)),
188 block_size);
189
190
191
192
193
194 reg_write(XOR_INIT_VAL_LOW_REG(XOR_UNIT(chan)), init_val_low);
195
196
197
198
199
200 reg_write(XOR_INIT_VAL_HIGH_REG(XOR_UNIT(chan)), init_val_high);
201
202
203 reg_bit_set(XOR_ACTIVATION_REG(XOR_UNIT(chan), XOR_CHAN(chan)),
204 XEXACTR_XESTART_MASK);
205
206 return MV_OK;
207}
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241int mv_xor_transfer(u32 chan, int xor_type, u32 xor_chain_ptr)
242{
243 u32 tmp;
244
245
246 if (chan >= MV_XOR_MAX_CHAN) {
247 debug("%s: ERR. Invalid chan num %d\n", __func__, chan);
248 return MV_BAD_PARAM;
249 }
250
251 if (MV_ACTIVE == mv_xor_state_get(chan)) {
252 debug("%s: ERR. Channel is already active\n", __func__);
253 return MV_BUSY;
254 }
255
256 if (0x0 == xor_chain_ptr) {
257 debug("%s: ERR. xor_chain_ptr is NULL pointer\n", __func__);
258 return MV_BAD_PARAM;
259 }
260
261
262 tmp = reg_read(XOR_CONFIG_REG(XOR_UNIT(chan), XOR_CHAN(chan)));
263 tmp &= ~XEXCR_OPERATION_MODE_MASK;
264
265 switch (xor_type) {
266 case MV_XOR:
267 if (0 != (xor_chain_ptr & XEXDPR_DST_PTR_XOR_MASK)) {
268 debug("%s: ERR. Invalid chain pointer (bits [5:0] must be cleared)\n",
269 __func__);
270 return MV_BAD_PARAM;
271 }
272
273
274 tmp |= XEXCR_OPERATION_MODE_XOR;
275 break;
276
277 case MV_DMA:
278 if (0 != (xor_chain_ptr & XEXDPR_DST_PTR_DMA_MASK)) {
279 debug("%s: ERR. Invalid chain pointer (bits [4:0] must be cleared)\n",
280 __func__);
281 return MV_BAD_PARAM;
282 }
283
284
285 tmp |= XEXCR_OPERATION_MODE_DMA;
286 break;
287
288 case MV_CRC32:
289 if (0 != (xor_chain_ptr & XEXDPR_DST_PTR_CRC_MASK)) {
290 debug("%s: ERR. Invalid chain pointer (bits [4:0] must be cleared)\n",
291 __func__);
292 return MV_BAD_PARAM;
293 }
294
295
296 tmp |= XEXCR_OPERATION_MODE_CRC;
297 break;
298
299 default:
300 return MV_BAD_PARAM;
301 }
302
303
304 reg_write(XOR_CONFIG_REG(XOR_UNIT(chan), XOR_CHAN(chan)), tmp);
305
306
307
308
309
310 reg_write(XOR_NEXT_DESC_PTR_REG(XOR_UNIT(chan), XOR_CHAN(chan)),
311 xor_chain_ptr);
312
313
314 reg_bit_set(XOR_ACTIVATION_REG(XOR_UNIT(chan), XOR_CHAN(chan)),
315 XEXACTR_XESTART_MASK);
316
317 return MV_OK;
318}
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341int mv_xor_state_get(u32 chan)
342{
343 u32 state;
344
345
346 if (chan >= MV_XOR_MAX_CHAN) {
347 debug("%s: ERR. Invalid chan num %d\n", __func__, chan);
348 return MV_UNDEFINED_STATE;
349 }
350
351
352 state = reg_read(XOR_ACTIVATION_REG(XOR_UNIT(chan), XOR_CHAN(chan)));
353 state &= XEXACTR_XESTATUS_MASK;
354
355
356 switch (state) {
357 case XEXACTR_XESTATUS_IDLE:
358 return MV_IDLE;
359 case XEXACTR_XESTATUS_ACTIVE:
360 return MV_ACTIVE;
361 case XEXACTR_XESTATUS_PAUSED:
362 return MV_PAUSED;
363 }
364
365 return MV_UNDEFINED_STATE;
366}
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390static int mv_xor_cmd_set(u32 chan, int command)
391{
392 int state;
393
394
395 if (chan >= MV_XOR_MAX_CHAN) {
396 debug("%s: ERR. Invalid chan num %d\n", __func__, chan);
397 return MV_BAD_PARAM;
398 }
399
400
401 state = mv_xor_state_get(chan);
402
403
404 if ((command == MV_START) && (state == MV_IDLE)) {
405 reg_bit_set(XOR_ACTIVATION_REG(XOR_UNIT(chan), XOR_CHAN(chan)),
406 XEXACTR_XESTART_MASK);
407 return MV_OK;
408 }
409
410 else if ((command == MV_STOP) && (state == MV_ACTIVE)) {
411 reg_bit_set(XOR_ACTIVATION_REG(XOR_UNIT(chan), XOR_CHAN(chan)),
412 XEXACTR_XESTOP_MASK);
413 return MV_OK;
414 }
415
416 else if ((command == MV_PAUSED) && (state == MV_ACTIVE)) {
417 reg_bit_set(XOR_ACTIVATION_REG(XOR_UNIT(chan), XOR_CHAN(chan)),
418 XEXACTR_XEPAUSE_MASK);
419 return MV_OK;
420 }
421
422 else if ((command == MV_RESTART) && (state == MV_PAUSED)) {
423 reg_bit_set(XOR_ACTIVATION_REG(XOR_UNIT(chan), XOR_CHAN(chan)),
424 XEXACTR_XERESTART_MASK);
425 return MV_OK;
426 }
427
428 else if ((command == MV_STOP) && (state == MV_IDLE))
429 return MV_OK;
430
431
432 debug("%s: ERR. Illegal command\n", __func__);
433
434 return MV_BAD_PARAM;
435}
436