1
2
3
4
5
6
7
8
9
10
11#include <linux/errno.h>
12#include <linux/kernel.h>
13#include <linux/types.h>
14
15#include "cryp_p.h"
16#include "cryp.h"
17
18
19
20
/**
 * cryp_wait_until_done - Busy-wait until the CRYP logic is idle.
 * @device_data: pointer to the device data struct for base address
 *
 * Polls the busy flag via cryp_is_logic_busy(), relaxing the CPU
 * between reads. Returns once the hardware reports it is idle.
 */
void cryp_wait_until_done(struct cryp_device_data *device_data)
{
	for (;;) {
		if (!cryp_is_logic_busy(device_data))
			break;
		cpu_relax();
	}
}
26
27
28
29
30
31int cryp_check(struct cryp_device_data *device_data)
32{
33 int peripheralid2 = 0;
34
35 if (NULL == device_data)
36 return -EINVAL;
37
38 peripheralid2 = readl_relaxed(&device_data->base->periphId2);
39
40 if (peripheralid2 != CRYP_PERIPHERAL_ID2_DB8500)
41 return -EPERM;
42
43
44 if ((CRYP_PERIPHERAL_ID0 ==
45 readl_relaxed(&device_data->base->periphId0))
46 && (CRYP_PERIPHERAL_ID1 ==
47 readl_relaxed(&device_data->base->periphId1))
48 && (CRYP_PERIPHERAL_ID3 ==
49 readl_relaxed(&device_data->base->periphId3))
50 && (CRYP_PCELL_ID0 ==
51 readl_relaxed(&device_data->base->pcellId0))
52 && (CRYP_PCELL_ID1 ==
53 readl_relaxed(&device_data->base->pcellId1))
54 && (CRYP_PCELL_ID2 ==
55 readl_relaxed(&device_data->base->pcellId2))
56 && (CRYP_PCELL_ID3 ==
57 readl_relaxed(&device_data->base->pcellId3))) {
58 return 0;
59 }
60
61 return -EPERM;
62}
63
64
65
66
67
68
69void cryp_activity(struct cryp_device_data *device_data,
70 enum cryp_crypen cryp_crypen)
71{
72 CRYP_PUT_BITS(&device_data->base->cr,
73 cryp_crypen,
74 CRYP_CR_CRYPEN_POS,
75 CRYP_CR_CRYPEN_MASK);
76}
77
78
79
80
81
/**
 * cryp_flush_inoutfifo - Flush the CRYP input and output FIFOs.
 * @device_data: pointer to the device data struct for base address
 *
 * Disables the block and waits for it to go idle before setting the
 * flush bit, then spins until the status register indicates the input
 * FIFO is ready again.
 */
void cryp_flush_inoutfifo(struct cryp_device_data *device_data)
{
	/*
	 * Disable the hardware and wait for any ongoing calculation to
	 * finish before requesting the flush.
	 */
	cryp_activity(device_data, CRYP_CRYPEN_DISABLE);
	cryp_wait_until_done(device_data);

	/* Request the FIFO flush. */
	CRYP_SET_BITS(&device_data->base->cr, CRYP_CR_FFLUSH_MASK);

	/*
	 * NOTE(review): this compares the *whole* status register for
	 * equality with CRYP_SR_INFIFO_READY_MASK rather than testing
	 * individual bits, i.e. it waits until no other status bits are
	 * set -- presumably intentional; confirm against the hardware
	 * specification.
	 */
	while (readl_relaxed(&device_data->base->sr) !=
	       CRYP_SR_INFIFO_READY_MASK)
		cpu_relax();
}
103
104
105
106
107
108
109
/**
 * cryp_set_configuration - Fold a cryp_config into a control register image.
 * @device_data: pointer to the device data struct for base address
 * @cryp_config: requested key size, algorithm mode and direction
 * @control_register: in/out control register image; the key size,
 *                    algorithm mode and direction bit fields are OR-ed
 *                    into it
 *
 * For AES ECB/CBC decryption, a one-off run with KSE enabled is kicked
 * off in ECB encrypt mode first (writing a temporary control register
 * value and waiting for completion) before the caller's requested mode
 * and direction are folded into @control_register.
 *
 * Returns 0 on success or -EINVAL if a required pointer is NULL.
 */
int cryp_set_configuration(struct cryp_device_data *device_data,
			   struct cryp_config *cryp_config,
			   u32 *control_register)
{
	u32 cr_for_kse;

	if (NULL == device_data || NULL == cryp_config)
		return -EINVAL;

	*control_register |= (cryp_config->keysize << CRYP_CR_KEYSIZE_POS);

	/* Prepare the key for decryption in AES_ECB and AES_CBC mode. */
	if ((CRYP_ALGORITHM_DECRYPT == cryp_config->algodir) &&
	    ((CRYP_ALGO_AES_ECB == cryp_config->algomode) ||
	     (CRYP_ALGO_AES_CBC == cryp_config->algomode))) {
		cr_for_kse = *control_register;
		/*
		 * Run the key schedule generation: the hardware is
		 * enabled in ECB *encrypt* mode with the KSE bit set,
		 * and we wait for that run to complete before the real
		 * decrypt configuration is assembled below.
		 */
		cr_for_kse |= ((CRYP_ALGORITHM_ENCRYPT << CRYP_CR_ALGODIR_POS) |
			       (CRYP_ALGO_AES_ECB << CRYP_CR_ALGOMODE_POS) |
			       (CRYP_CRYPEN_ENABLE << CRYP_CR_CRYPEN_POS) |
			       (KSE_ENABLED << CRYP_CR_KSE_POS));

		writel_relaxed(cr_for_kse, &device_data->base->cr);
		cryp_wait_until_done(device_data);
	}

	/* Fold in the caller's requested algorithm mode and direction. */
	*control_register |=
		((cryp_config->algomode << CRYP_CR_ALGOMODE_POS) |
		 (cryp_config->algodir << CRYP_CR_ALGODIR_POS));

	return 0;
}
157
158
159
160
161
162
163
164int cryp_configure_protection(struct cryp_device_data *device_data,
165 struct cryp_protection_config *p_protect_config)
166{
167 if (NULL == p_protect_config)
168 return -EINVAL;
169
170 CRYP_WRITE_BIT(&device_data->base->cr,
171 (u32) p_protect_config->secure_access,
172 CRYP_CR_SECURE_MASK);
173 CRYP_PUT_BITS(&device_data->base->cr,
174 p_protect_config->privilege_access,
175 CRYP_CR_PRLG_POS,
176 CRYP_CR_PRLG_MASK);
177
178 return 0;
179}
180
181
182
183
184
185int cryp_is_logic_busy(struct cryp_device_data *device_data)
186{
187 return CRYP_TEST_BITS(&device_data->base->sr,
188 CRYP_SR_BUSY_MASK);
189}
190
191
192
193
194
195
196void cryp_configure_for_dma(struct cryp_device_data *device_data,
197 enum cryp_dma_req_type dma_req)
198{
199 CRYP_SET_BITS(&device_data->base->dmacr,
200 (u32) dma_req);
201}
202
203
204
205
206
207
208
209int cryp_configure_key_values(struct cryp_device_data *device_data,
210 enum cryp_key_reg_index key_reg_index,
211 struct cryp_key_value key_value)
212{
213 while (cryp_is_logic_busy(device_data))
214 cpu_relax();
215
216 switch (key_reg_index) {
217 case CRYP_KEY_REG_1:
218 writel_relaxed(key_value.key_value_left,
219 &device_data->base->key_1_l);
220 writel_relaxed(key_value.key_value_right,
221 &device_data->base->key_1_r);
222 break;
223 case CRYP_KEY_REG_2:
224 writel_relaxed(key_value.key_value_left,
225 &device_data->base->key_2_l);
226 writel_relaxed(key_value.key_value_right,
227 &device_data->base->key_2_r);
228 break;
229 case CRYP_KEY_REG_3:
230 writel_relaxed(key_value.key_value_left,
231 &device_data->base->key_3_l);
232 writel_relaxed(key_value.key_value_right,
233 &device_data->base->key_3_r);
234 break;
235 case CRYP_KEY_REG_4:
236 writel_relaxed(key_value.key_value_left,
237 &device_data->base->key_4_l);
238 writel_relaxed(key_value.key_value_right,
239 &device_data->base->key_4_r);
240 break;
241 default:
242 return -EINVAL;
243 }
244
245 return 0;
246}
247
248
249
250
251
252
253
254int cryp_configure_init_vector(struct cryp_device_data *device_data,
255 enum cryp_init_vector_index
256 init_vector_index,
257 struct cryp_init_vector_value
258 init_vector_value)
259{
260 while (cryp_is_logic_busy(device_data))
261 cpu_relax();
262
263 switch (init_vector_index) {
264 case CRYP_INIT_VECTOR_INDEX_0:
265 writel_relaxed(init_vector_value.init_value_left,
266 &device_data->base->init_vect_0_l);
267 writel_relaxed(init_vector_value.init_value_right,
268 &device_data->base->init_vect_0_r);
269 break;
270 case CRYP_INIT_VECTOR_INDEX_1:
271 writel_relaxed(init_vector_value.init_value_left,
272 &device_data->base->init_vect_1_l);
273 writel_relaxed(init_vector_value.init_value_right,
274 &device_data->base->init_vect_1_r);
275 break;
276 default:
277 return -EINVAL;
278 }
279
280 return 0;
281}
282
283
284
285
286
287
288
289
/**
 * cryp_save_device_context - Save the CRYP register state into @ctx.
 * @device_data: pointer to the device data struct for base address
 * @ctx: context storage the registers are saved into
 * @cryp_mode: current transfer mode; DMA requests are disabled when
 *             it equals CRYP_MODE_DMA
 *
 * Disables the hardware and waits for it to go idle, then captures the
 * pending input word (if any), the control register, the key registers
 * required by the configured key size, and -- for CBC modes -- the IV
 * registers. Counterpart of cryp_restore_device_context().
 */
void cryp_save_device_context(struct cryp_device_data *device_data,
			      struct cryp_device_context *ctx,
			      int cryp_mode)
{
	enum cryp_algo_mode algomode;
	struct cryp_register __iomem *src_reg = device_data->base;
	struct cryp_config *config =
		(struct cryp_config *)device_data->current_ctx;

	/*
	 * Always start by disabling the hardware and waiting for it to
	 * finish any ongoing calculation before reading the registers.
	 */
	cryp_activity(device_data, CRYP_CRYPEN_DISABLE);
	cryp_wait_until_done(device_data);

	if (cryp_mode == CRYP_MODE_DMA)
		cryp_configure_for_dma(device_data, CRYP_DMA_DISABLE_BOTH);

	/*
	 * Capture the pending input word when the input FIFO is not
	 * empty (IFEM flag clear).
	 */
	if (CRYP_TEST_BITS(&src_reg->sr, CRYP_SR_IFEM_MASK) == 0)
		ctx->din = readl_relaxed(&src_reg->din);

	/* Only the bits covered by the save mask are kept. */
	ctx->cr = readl_relaxed(&src_reg->cr) & CRYP_CR_CONTEXT_SAVE_MASK;

	/*
	 * Deliberate fall-through: larger key sizes also need the lower
	 * key registers saved.
	 */
	switch (config->keysize) {
	case CRYP_KEY_SIZE_256:
		ctx->key_4_l = readl_relaxed(&src_reg->key_4_l);
		ctx->key_4_r = readl_relaxed(&src_reg->key_4_r);
		fallthrough;

	case CRYP_KEY_SIZE_192:
		ctx->key_3_l = readl_relaxed(&src_reg->key_3_l);
		ctx->key_3_r = readl_relaxed(&src_reg->key_3_r);
		fallthrough;

	case CRYP_KEY_SIZE_128:
		ctx->key_2_l = readl_relaxed(&src_reg->key_2_l);
		ctx->key_2_r = readl_relaxed(&src_reg->key_2_r);
		fallthrough;

	default:
		ctx->key_1_l = readl_relaxed(&src_reg->key_1_l);
		ctx->key_1_r = readl_relaxed(&src_reg->key_1_r);
	}

	/* Save the IV registers for the CBC modes (AES, DES, TDES). */
	algomode = ((ctx->cr & CRYP_CR_ALGOMODE_MASK) >> CRYP_CR_ALGOMODE_POS);
	if (algomode == CRYP_ALGO_TDES_CBC ||
	    algomode == CRYP_ALGO_DES_CBC ||
	    algomode == CRYP_ALGO_AES_CBC) {
		ctx->init_vect_0_l = readl_relaxed(&src_reg->init_vect_0_l);
		ctx->init_vect_0_r = readl_relaxed(&src_reg->init_vect_0_r);
		ctx->init_vect_1_l = readl_relaxed(&src_reg->init_vect_1_l);
		ctx->init_vect_1_r = readl_relaxed(&src_reg->init_vect_1_r);
	}
}
346
347
348
349
350
351
352
353void cryp_restore_device_context(struct cryp_device_data *device_data,
354 struct cryp_device_context *ctx)
355{
356 struct cryp_register __iomem *reg = device_data->base;
357 struct cryp_config *config =
358 (struct cryp_config *)device_data->current_ctx;
359
360
361
362
363
364 switch (config->keysize) {
365 case CRYP_KEY_SIZE_256:
366 writel_relaxed(ctx->key_4_l, ®->key_4_l);
367 writel_relaxed(ctx->key_4_r, ®->key_4_r);
368 fallthrough;
369
370 case CRYP_KEY_SIZE_192:
371 writel_relaxed(ctx->key_3_l, ®->key_3_l);
372 writel_relaxed(ctx->key_3_r, ®->key_3_r);
373 fallthrough;
374
375 case CRYP_KEY_SIZE_128:
376 writel_relaxed(ctx->key_2_l, ®->key_2_l);
377 writel_relaxed(ctx->key_2_r, ®->key_2_r);
378 fallthrough;
379
380 default:
381 writel_relaxed(ctx->key_1_l, ®->key_1_l);
382 writel_relaxed(ctx->key_1_r, ®->key_1_r);
383 }
384
385
386 if (config->algomode == CRYP_ALGO_TDES_CBC ||
387 config->algomode == CRYP_ALGO_DES_CBC ||
388 config->algomode == CRYP_ALGO_AES_CBC) {
389 writel_relaxed(ctx->init_vect_0_l, ®->init_vect_0_l);
390 writel_relaxed(ctx->init_vect_0_r, ®->init_vect_0_r);
391 writel_relaxed(ctx->init_vect_1_l, ®->init_vect_1_l);
392 writel_relaxed(ctx->init_vect_1_r, ®->init_vect_1_r);
393 }
394}
395