/*
 * Key Wrapping: RFC3394 / NIST SP800-38F
 *
 * Copyright (C) 2015, Stephan Mueller <smueller@chronox.de>
 *
 * License: Dual BSD/GPL (see MODULE_LICENSE at the end of this file).
 */
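
/*
 * Usage note: this template transports the SP800-38F integrity value A
 * through the IV. Encryption ignores any IV input and writes the final
 * A into the IV buffer; decryption reads A from the IV buffer and
 * returns -EBADMSG when the integrity check fails. Below is a minimal
 * sketch of driving "kw(aes)" through the legacy blkcipher API that
 * this file is written against -- buffer names and the 16-byte key
 * length are illustrative only, and error handling is omitted:
 *
 *	struct crypto_blkcipher *tfm;
 *	struct blkcipher_desc desc;
 *	struct scatterlist sg;
 *	u8 iv[8];
 *
 *	tfm = crypto_alloc_blkcipher("kw(aes)", 0, 0);
 *	crypto_blkcipher_setkey(tfm, key, 16);
 *	desc.tfm = tfm;
 *	desc.info = iv;
 *	sg_init_one(&sg, data, datalen);   (multiple of 8 bytes, >= 16)
 *	crypto_blkcipher_encrypt_iv(&desc, &sg, &sg, datalen);
 *	(data now holds the wrapped semiblocks, iv holds the final A)
 */
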
#include <linux/module.h>
#include <linux/crypto.h>
#include <linux/scatterlist.h>
#include <crypto/scatterwalk.h>
#include <crypto/internal/skcipher.h>

struct crypto_kw_ctx {
	struct crypto_cipher *child;	/* underlying block cipher */
};

/*
 * One input/output block of the underlying cipher: the 8-byte integrity
 * value A followed by one 8-byte semiblock R. Its size (16 bytes) must
 * match the block size of the child cipher, which crypto_kw_alloc()
 * enforces.
 */
struct crypto_kw_block {
#define SEMIBSIZE 8
	u8 A[SEMIBSIZE];
	u8 R[SEMIBSIZE];
};

/* convert a 64 bit integer into its big-endian string representation */
static inline void crypto_kw_cpu_to_be64(u64 val, u8 *buf)
{
	__be64 *a = (__be64 *)buf;

	*a = cpu_to_be64(val);
}
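
/*
 * Example: a step counter of t = 2 is serialized to the byte string
 * 00 00 00 00 00 00 00 02, which is then XORed into the first
 * semiblock A during each wrapping step.
 */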

/*
 * Fast forward the SGL to position "end" minus SEMIBSIZE, i.e. to the
 * start of the last full semiblock within the first "end" bytes. For
 * example, with SGL entries of 16 and 8 bytes and end = 24, the walk is
 * started at offset 0 of the second entry. The caller must ensure that
 * the SGL covers at least "end" bytes.
 */
static void crypto_kw_scatterlist_ff(struct scatter_walk *walk,
				     struct scatterlist *sg,
				     unsigned int end)
{
	unsigned int skip = 0;

	/* callers must only operate on full semiblocks */
	BUG_ON(end < SEMIBSIZE);

	skip = end - SEMIBSIZE;
	while (sg) {
		if (sg->length > skip) {
			scatterwalk_start(walk, sg);
			scatterwalk_advance(walk, skip);
			break;
		}

		skip -= sg->length;
		sg = sg_next(sg);
	}
}

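/*
 * Key unwrapping (function W^-1 of SP800-38F): the counter t starts at
 * 6 * n for n input semiblocks and is decremented down to 1, which
 * matches the spec's t = n * j + i for j = 5..0 and i = n..1. Each step
 * XORs t into A, decrypts A || R_i in one cipher call and stores the
 * recovered semiblock. Because t counts backwards, every round walks
 * the semiblocks from the last to the first, which is why the
 * scatterlists are fast-forwarded for each semiblock below.
 */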
static int crypto_kw_decrypt(struct blkcipher_desc *desc,
			     struct scatterlist *dst, struct scatterlist *src,
			     unsigned int nbytes)
{
	struct crypto_blkcipher *tfm = desc->tfm;
	struct crypto_kw_ctx *ctx = crypto_blkcipher_ctx(tfm);
	struct crypto_cipher *child = ctx->child;

	unsigned long alignmask = max_t(unsigned long, SEMIBSIZE,
					crypto_cipher_alignmask(child));
	unsigned int i;

	u8 blockbuf[sizeof(struct crypto_kw_block) + alignmask];
	struct crypto_kw_block *block = (struct crypto_kw_block *)
					PTR_ALIGN(blockbuf + 0, alignmask + 1);

	u64 t = 6 * (nbytes >> 3);
	struct scatterlist *lsrc, *ldst;
	int ret = 0;

	/*
	 * Require at least 2 semiblocks (note, the 3rd semiblock that is
	 * required by SP800-38F is transported as the IV) and a length
	 * aligned to the semiblock size.
	 */
	if (nbytes < (2 * SEMIBSIZE) || nbytes % SEMIBSIZE)
		return -EINVAL;

	/* Place the IV (the A value from the wrapping step) into block A. */
	memcpy(block->A, desc->info, SEMIBSIZE);

	/*
	 * src scatterlist is read-only, dst scatterlist is r/w. During the
	 * first loop, lsrc points to src and ldst to dst. In any subsequent
	 * round, the code operates on dst only.
	 */
	lsrc = src;
	ldst = dst;

	for (i = 0; i < 6; i++) {
		u8 tbe_buffer[SEMIBSIZE + alignmask];
		/* alignment for the crypto_xor and the _to_be64 operation */
		u8 *tbe = PTR_ALIGN(tbe_buffer + 0, alignmask + 1);
		unsigned int tmp_nbytes = nbytes;
		struct scatter_walk src_walk, dst_walk;

		while (tmp_nbytes) {
			/* move the walk to the current last semiblock */
			crypto_kw_scatterlist_ff(&src_walk, lsrc, tmp_nbytes);
			/* get the source semiblock */
			scatterwalk_copychunks(block->R, &src_walk, SEMIBSIZE,
					       false);

			/* perform KW operation: get counter as byte string */
			crypto_kw_cpu_to_be64(t, tbe);
			/* perform KW operation: modify A with counter */
			crypto_xor(block->A, tbe, SEMIBSIZE);
			t--;
			/* perform KW operation: decrypt block */
			crypto_cipher_decrypt_one(child, (u8 *)block,
						  (u8 *)block);

			/* move the walk to the matching dst semiblock */
			crypto_kw_scatterlist_ff(&dst_walk, ldst, tmp_nbytes);
			/* copy the recovered block->R into place */
			scatterwalk_copychunks(block->R, &dst_walk, SEMIBSIZE,
					       true);

			tmp_nbytes -= SEMIBSIZE;
		}

		/* we now start to operate on the dst SGL only */
		lsrc = dst;
		ldst = dst;
	}

	/* Perform the authentication check against the default IV. */
	if (crypto_memneq("\xA6\xA6\xA6\xA6\xA6\xA6\xA6\xA6", block->A,
			  SEMIBSIZE))
		ret = -EBADMSG;

	memzero_explicit(block, sizeof(struct crypto_kw_block));

	return ret;
}

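/*
 * Key wrapping (function W of SP800-38F): A starts out as the default
 * IV 0xA6A6A6A6A6A6A6A6, and the counter t runs from 1 up to 6 * n
 * (t = n * j + i for j = 0..5 and i = 1..n). Each step encrypts
 * A || R_i in one cipher call and XORs t into the new A. Since t counts
 * forward, the scatterlists can be walked front to back.
 */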
static int crypto_kw_encrypt(struct blkcipher_desc *desc,
			     struct scatterlist *dst, struct scatterlist *src,
			     unsigned int nbytes)
{
	struct crypto_blkcipher *tfm = desc->tfm;
	struct crypto_kw_ctx *ctx = crypto_blkcipher_ctx(tfm);
	struct crypto_cipher *child = ctx->child;

	unsigned long alignmask = max_t(unsigned long, SEMIBSIZE,
					crypto_cipher_alignmask(child));
	unsigned int i;

	u8 blockbuf[sizeof(struct crypto_kw_block) + alignmask];
	struct crypto_kw_block *block = (struct crypto_kw_block *)
					PTR_ALIGN(blockbuf + 0, alignmask + 1);

	u64 t = 1;
	struct scatterlist *lsrc, *ldst;

	/*
	 * Require at least 2 semiblocks (note, the 3rd semiblock that is
	 * required by SP800-38F is transported as the IV) and a length
	 * aligned to the semiblock size.
	 */
	if (nbytes < (2 * SEMIBSIZE) || nbytes % SEMIBSIZE)
		return -EINVAL;

	/*
	 * Place the predefined IV into block A -- for encrypt, the caller
	 * does not need to provide an IV, but must fetch the final A value
	 * from the IV buffer after the operation.
	 */
	memcpy(block->A, "\xA6\xA6\xA6\xA6\xA6\xA6\xA6\xA6", SEMIBSIZE);

	/*
	 * src scatterlist is read-only, dst scatterlist is r/w. During the
	 * first loop, lsrc points to src and ldst to dst. In any subsequent
	 * round, the code operates on dst only.
	 */
	lsrc = src;
	ldst = dst;

	for (i = 0; i < 6; i++) {
		u8 tbe_buffer[SEMIBSIZE + alignmask];
		/* alignment for the crypto_xor and the _to_be64 operation */
		u8 *tbe = PTR_ALIGN(tbe_buffer + 0, alignmask + 1);
		unsigned int tmp_nbytes = nbytes;
		struct scatter_walk src_walk, dst_walk;

		scatterwalk_start(&src_walk, lsrc);
		scatterwalk_start(&dst_walk, ldst);

		while (tmp_nbytes) {
			/* get the source semiblock */
			scatterwalk_copychunks(block->R, &src_walk, SEMIBSIZE,
					       false);

			/* perform KW operation: encrypt block */
			crypto_cipher_encrypt_one(child, (u8 *)block,
						  (u8 *)block);
			/* perform KW operation: get counter as byte string */
			crypto_kw_cpu_to_be64(t, tbe);
			/* perform KW operation: modify A with counter */
			crypto_xor(block->A, tbe, SEMIBSIZE);
			t++;

			/* copy the updated block->R into place */
			scatterwalk_copychunks(block->R, &dst_walk, SEMIBSIZE,
					       true);

			tmp_nbytes -= SEMIBSIZE;
		}

		/* we now start to operate on the dst SGL only */
		lsrc = dst;
		ldst = dst;
	}

	/* establish the IV (the final A value) for the caller to pick up */
	memcpy(desc->info, block->A, SEMIBSIZE);

	memzero_explicit(block, sizeof(struct crypto_kw_block));

	return 0;
}

static int crypto_kw_setkey(struct crypto_tfm *parent, const u8 *key,
			    unsigned int keylen)
{
	struct crypto_kw_ctx *ctx = crypto_tfm_ctx(parent);
	struct crypto_cipher *child = ctx->child;
	int err;

	/* forward the key to the child cipher, propagating the flags */
	crypto_cipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_cipher_set_flags(child, crypto_tfm_get_flags(parent) &
				       CRYPTO_TFM_REQ_MASK);
	err = crypto_cipher_setkey(child, key, keylen);
	crypto_tfm_set_flags(parent, crypto_cipher_get_flags(child) &
				     CRYPTO_TFM_RES_MASK);
	return err;
}

static int crypto_kw_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
	struct crypto_spawn *spawn = crypto_instance_ctx(inst);
	struct crypto_kw_ctx *ctx = crypto_tfm_ctx(tfm);
	struct crypto_cipher *cipher;

	cipher = crypto_spawn_cipher(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = cipher;
	return 0;
}

static void crypto_kw_exit_tfm(struct crypto_tfm *tfm)
{
	struct crypto_kw_ctx *ctx = crypto_tfm_ctx(tfm);

	crypto_free_cipher(ctx->child);
}

static struct crypto_instance *crypto_kw_alloc(struct rtattr **tb)
{
	struct crypto_instance *inst = NULL;
	struct crypto_alg *alg = NULL;
	int err;

	err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_BLKCIPHER);
	if (err)
		return ERR_PTR(err);

	alg = crypto_get_attr_alg(tb, CRYPTO_ALG_TYPE_CIPHER,
				  CRYPTO_ALG_TYPE_MASK);
	if (IS_ERR(alg))
		return ERR_CAST(alg);

	inst = ERR_PTR(-EINVAL);
	/* SP800-38F requires the underlying cipher to have 128-bit blocks */
	if (alg->cra_blocksize != sizeof(struct crypto_kw_block))
		goto err;

	inst = crypto_alloc_instance("kw", alg);
	if (IS_ERR(inst))
		goto err;

	inst->alg.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER;
	inst->alg.cra_priority = alg->cra_priority;
	inst->alg.cra_blocksize = SEMIBSIZE;
	inst->alg.cra_alignmask = 0;
	inst->alg.cra_type = &crypto_blkcipher_type;
	inst->alg.cra_blkcipher.ivsize = SEMIBSIZE;
	inst->alg.cra_blkcipher.min_keysize = alg->cra_cipher.cia_min_keysize;
	inst->alg.cra_blkcipher.max_keysize = alg->cra_cipher.cia_max_keysize;

	inst->alg.cra_ctxsize = sizeof(struct crypto_kw_ctx);

	inst->alg.cra_init = crypto_kw_init_tfm;
	inst->alg.cra_exit = crypto_kw_exit_tfm;

	inst->alg.cra_blkcipher.setkey = crypto_kw_setkey;
	inst->alg.cra_blkcipher.encrypt = crypto_kw_encrypt;
	inst->alg.cra_blkcipher.decrypt = crypto_kw_decrypt;

err:
	crypto_mod_put(alg);
	return inst;
}

static void crypto_kw_free(struct crypto_instance *inst)
{
	crypto_drop_spawn(crypto_instance_ctx(inst));
	kfree(inst);
}

static struct crypto_template crypto_kw_tmpl = {
	.name = "kw",
	.alloc = crypto_kw_alloc,
	.free = crypto_kw_free,
	.module = THIS_MODULE,
};

static int __init crypto_kw_init(void)
{
	return crypto_register_template(&crypto_kw_tmpl);
}

static void __exit crypto_kw_exit(void)
{
	crypto_unregister_template(&crypto_kw_tmpl);
}

module_init(crypto_kw_init);
module_exit(crypto_kw_exit);

MODULE_LICENSE("Dual BSD/GPL");
MODULE_AUTHOR("Stephan Mueller <smueller@chronox.de>");
MODULE_DESCRIPTION("Key Wrapping (RFC3394 / NIST SP800-38F)");
MODULE_ALIAS_CRYPTO("kw");