crypto/shash.c
/*
 * Synchronous Cryptographic Hash operations.
 *
 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <crypto/scatterwalk.h>
#include <crypto/internal/hash.h>
#include <linux/err.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>
#include <linux/seq_file.h>
#include <linux/cryptouser.h>
#include <net/netlink.h>
#include <linux/compiler.h>

#include "internal.h"

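/*
 * Minimal usage sketch for the shash API (illustrative only; "sha256" is an
 * example algorithm name and error handling is omitted):
 *
 *	struct crypto_shash *tfm = crypto_alloc_shash("sha256", 0, 0);
 *	SHASH_DESC_ON_STACK(desc, tfm);
 *
 *	desc->tfm = tfm;
 *	desc->flags = 0;
 *	crypto_shash_digest(desc, data, len, out);
 *	crypto_free_shash(tfm);
 */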
static const struct crypto_type crypto_shash_type;

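/*
 * Default ->setkey for keyless algorithms: any attempt to set a key fails
 * with -ENOSYS.
 */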
static int shash_no_setkey(struct crypto_shash *tfm, const u8 *key,
                           unsigned int keylen)
{
        return -ENOSYS;
}

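/*
 * The caller's key buffer does not satisfy the algorithm's alignment mask:
 * copy the key into a freshly allocated, properly aligned buffer, run
 * ->setkey() on that copy, then zero and free it.
 */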
static int shash_setkey_unaligned(struct crypto_shash *tfm, const u8 *key,
                                  unsigned int keylen)
{
        struct shash_alg *shash = crypto_shash_alg(tfm);
        unsigned long alignmask = crypto_shash_alignmask(tfm);
        unsigned long absize;
        u8 *buffer, *alignbuffer;
        int err;

        absize = keylen + (alignmask & ~(crypto_tfm_ctx_alignment() - 1));
        buffer = kmalloc(absize, GFP_ATOMIC);
        if (!buffer)
                return -ENOMEM;

        alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
        memcpy(alignbuffer, key, keylen);
        err = shash->setkey(tfm, alignbuffer, keylen);
        kzfree(buffer);
        return err;
}

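/*
 * Set the key for a synchronous hash transform.  Keys that do not satisfy the
 * algorithm's alignment mask are bounced through an aligned temporary copy.
 */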
int crypto_shash_setkey(struct crypto_shash *tfm, const u8 *key,
                        unsigned int keylen)
{
        struct shash_alg *shash = crypto_shash_alg(tfm);
        unsigned long alignmask = crypto_shash_alignmask(tfm);

        if ((unsigned long)key & alignmask)
                return shash_setkey_unaligned(tfm, key, keylen);

        return shash->setkey(tfm, key, keylen);
}
EXPORT_SYMBOL_GPL(crypto_shash_setkey);

static inline unsigned int shash_align_buffer_size(unsigned len,
                                                   unsigned long mask)
{
        typedef u8 __aligned_largest u8_aligned;
        return len + (mask & ~(__alignof__(u8_aligned) - 1));
}

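/*
 * Hash the leading misaligned portion of the data through an aligned
 * on-stack bounce buffer, then feed the remaining, now aligned, bytes
 * straight to ->update().
 */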
static int shash_update_unaligned(struct shash_desc *desc, const u8 *data,
                                  unsigned int len)
{
        struct crypto_shash *tfm = desc->tfm;
        struct shash_alg *shash = crypto_shash_alg(tfm);
        unsigned long alignmask = crypto_shash_alignmask(tfm);
        unsigned int unaligned_len = alignmask + 1 -
                                     ((unsigned long)data & alignmask);
        u8 ubuf[shash_align_buffer_size(unaligned_len, alignmask)]
                __aligned_largest;
        u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1);
        int err;

        if (unaligned_len > len)
                unaligned_len = len;

        memcpy(buf, data, unaligned_len);
        err = shash->update(desc, buf, unaligned_len);
        memset(buf, 0, unaligned_len);

        return err ?:
               shash->update(desc, data + unaligned_len, len - unaligned_len);
}

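/*
 * Feed data into the hash state, falling back to the unaligned helper when
 * the data pointer violates the algorithm's alignment mask.
 */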
int crypto_shash_update(struct shash_desc *desc, const u8 *data,
                        unsigned int len)
{
        struct crypto_shash *tfm = desc->tfm;
        struct shash_alg *shash = crypto_shash_alg(tfm);
        unsigned long alignmask = crypto_shash_alignmask(tfm);

        if ((unsigned long)data & alignmask)
                return shash_update_unaligned(desc, data, len);

        return shash->update(desc, data, len);
}
EXPORT_SYMBOL_GPL(crypto_shash_update);

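/*
 * Produce the digest into an aligned on-stack buffer, copy it to the caller's
 * (unaligned) destination and zero the temporary copy afterwards.
 */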
static int shash_final_unaligned(struct shash_desc *desc, u8 *out)
{
        struct crypto_shash *tfm = desc->tfm;
        unsigned long alignmask = crypto_shash_alignmask(tfm);
        struct shash_alg *shash = crypto_shash_alg(tfm);
        unsigned int ds = crypto_shash_digestsize(tfm);
        u8 ubuf[shash_align_buffer_size(ds, alignmask)]
                __aligned_largest;
        u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1);
        int err;

        err = shash->final(desc, buf);
        if (err)
                goto out;

        memcpy(out, buf, ds);

out:
        memset(buf, 0, ds);
        return err;
}

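/*
 * Finalize the hash and write the digest to the output buffer, using the
 * bounce-buffer path when the output pointer is not suitably aligned.
 */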
int crypto_shash_final(struct shash_desc *desc, u8 *out)
{
        struct crypto_shash *tfm = desc->tfm;
        struct shash_alg *shash = crypto_shash_alg(tfm);
        unsigned long alignmask = crypto_shash_alignmask(tfm);

        if ((unsigned long)out & alignmask)
                return shash_final_unaligned(desc, out);

        return shash->final(desc, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_final);

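/*
 * Generic finup, also installed as the default ->finup when an algorithm does
 * not provide one (see shash_prepare_alg()): simply chain update and final.
 */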
static int shash_finup_unaligned(struct shash_desc *desc, const u8 *data,
                                 unsigned int len, u8 *out)
{
        return crypto_shash_update(desc, data, len) ?:
               crypto_shash_final(desc, out);
}

int crypto_shash_finup(struct shash_desc *desc, const u8 *data,
                       unsigned int len, u8 *out)
{
        struct crypto_shash *tfm = desc->tfm;
        struct shash_alg *shash = crypto_shash_alg(tfm);
        unsigned long alignmask = crypto_shash_alignmask(tfm);

        if (((unsigned long)data | (unsigned long)out) & alignmask)
                return shash_finup_unaligned(desc, data, len, out);

        return shash->finup(desc, data, len, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_finup);

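/*
 * Generic digest, also installed as the default ->digest when an algorithm
 * does not provide one: run init followed by finup over the whole buffer.
 */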
static int shash_digest_unaligned(struct shash_desc *desc, const u8 *data,
                                  unsigned int len, u8 *out)
{
        return crypto_shash_init(desc) ?:
               crypto_shash_finup(desc, data, len, out);
}

int crypto_shash_digest(struct shash_desc *desc, const u8 *data,
                        unsigned int len, u8 *out)
{
        struct crypto_shash *tfm = desc->tfm;
        struct shash_alg *shash = crypto_shash_alg(tfm);
        unsigned long alignmask = crypto_shash_alignmask(tfm);

        if (((unsigned long)data | (unsigned long)out) & alignmask)
                return shash_digest_unaligned(desc, data, len, out);

        return shash->digest(desc, data, len, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_digest);

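/*
 * Default export/import: the partial hash state is simply the raw descriptor
 * context, so both operations are plain memcpy()s of descsize bytes.
 */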
static int shash_default_export(struct shash_desc *desc, void *out)
{
        memcpy(out, shash_desc_ctx(desc), crypto_shash_descsize(desc->tfm));
        return 0;
}

static int shash_default_import(struct shash_desc *desc, const void *in)
{
        memcpy(shash_desc_ctx(desc), in, crypto_shash_descsize(desc->tfm));
        return 0;
}

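/*
 * The functions below wrap an shash algorithm so it can also be used through
 * the asynchronous ahash interface.  The ahash transform context holds a
 * pointer to the underlying shash transform, and the ahash request context
 * holds the shash descriptor.
 */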
static int shash_async_setkey(struct crypto_ahash *tfm, const u8 *key,
                              unsigned int keylen)
{
        struct crypto_shash **ctx = crypto_ahash_ctx(tfm);

        return crypto_shash_setkey(*ctx, key, keylen);
}

static int shash_async_init(struct ahash_request *req)
{
        struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
        struct shash_desc *desc = ahash_request_ctx(req);

        desc->tfm = *ctx;
        desc->flags = req->base.flags;

        return crypto_shash_init(desc);
}

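/*
 * Walk the request's scatterlist and feed each mapped chunk to
 * crypto_shash_update().  Returns 0 on success or the first error seen.
 */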
int shash_ahash_update(struct ahash_request *req, struct shash_desc *desc)
{
        struct crypto_hash_walk walk;
        int nbytes;

        for (nbytes = crypto_hash_walk_first(req, &walk); nbytes > 0;
             nbytes = crypto_hash_walk_done(&walk, nbytes))
                nbytes = crypto_shash_update(desc, walk.data, nbytes);

        return nbytes;
}
EXPORT_SYMBOL_GPL(shash_ahash_update);

static int shash_async_update(struct ahash_request *req)
{
        return shash_ahash_update(req, ahash_request_ctx(req));
}

static int shash_async_final(struct ahash_request *req)
{
        return crypto_shash_final(ahash_request_ctx(req), req->result);
}

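/*
 * Like shash_ahash_update(), but the last chunk of the walk is handed to
 * crypto_shash_finup() so the digest lands in req->result.  An empty request
 * degenerates to a plain final.
 */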
int shash_ahash_finup(struct ahash_request *req, struct shash_desc *desc)
{
        struct crypto_hash_walk walk;
        int nbytes;

        nbytes = crypto_hash_walk_first(req, &walk);
        if (!nbytes)
                return crypto_shash_final(desc, req->result);

        do {
                nbytes = crypto_hash_walk_last(&walk) ?
                         crypto_shash_finup(desc, walk.data, nbytes,
                                            req->result) :
                         crypto_shash_update(desc, walk.data, nbytes);
                nbytes = crypto_hash_walk_done(&walk, nbytes);
        } while (nbytes > 0);

        return nbytes;
}
EXPORT_SYMBOL_GPL(shash_ahash_finup);

static int shash_async_finup(struct ahash_request *req)
{
        struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
        struct shash_desc *desc = ahash_request_ctx(req);

        desc->tfm = *ctx;
        desc->flags = req->base.flags;

        return shash_ahash_finup(req, desc);
}

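/*
 * Fast path: if the data sits entirely within the first scatterlist entry and
 * does not cross a page boundary, map it and hash it with a single
 * crypto_shash_digest() call.  Otherwise fall back to init plus a finup walk.
 */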
int shash_ahash_digest(struct ahash_request *req, struct shash_desc *desc)
{
        unsigned int nbytes = req->nbytes;
        struct scatterlist *sg;
        unsigned int offset;
        int err;

        if (nbytes &&
            (sg = req->src, offset = sg->offset,
             nbytes < min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset))) {
                void *data;

                data = kmap_atomic(sg_page(sg));
                err = crypto_shash_digest(desc, data + offset, nbytes,
                                          req->result);
                kunmap_atomic(data);
                crypto_yield(desc->flags);
        } else
                err = crypto_shash_init(desc) ?:
                      shash_ahash_finup(req, desc);

        return err;
}
EXPORT_SYMBOL_GPL(shash_ahash_digest);

static int shash_async_digest(struct ahash_request *req)
{
        struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
        struct shash_desc *desc = ahash_request_ctx(req);

        desc->tfm = *ctx;
        desc->flags = req->base.flags;

        return shash_ahash_digest(req, desc);
}

static int shash_async_export(struct ahash_request *req, void *out)
{
        return crypto_shash_export(ahash_request_ctx(req), out);
}

static int shash_async_import(struct ahash_request *req, const void *in)
{
        struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
        struct shash_desc *desc = ahash_request_ctx(req);

        desc->tfm = *ctx;
        desc->flags = req->base.flags;

        return crypto_shash_import(desc, in);
}

static void crypto_exit_shash_ops_async(struct crypto_tfm *tfm)
{
        struct crypto_shash **ctx = crypto_tfm_ctx(tfm);

        crypto_free_shash(*ctx);
}

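/*
 * Instantiate an ahash transform on top of an shash algorithm: allocate the
 * underlying shash tfm, stash it in the ahash context and point the ahash
 * entry points at the shash_async_* wrappers above.
 */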
int crypto_init_shash_ops_async(struct crypto_tfm *tfm)
{
        struct crypto_alg *calg = tfm->__crt_alg;
        struct shash_alg *alg = __crypto_shash_alg(calg);
        struct crypto_ahash *crt = __crypto_ahash_cast(tfm);
        struct crypto_shash **ctx = crypto_tfm_ctx(tfm);
        struct crypto_shash *shash;

        if (!crypto_mod_get(calg))
                return -EAGAIN;

        shash = crypto_create_tfm(calg, &crypto_shash_type);
        if (IS_ERR(shash)) {
                crypto_mod_put(calg);
                return PTR_ERR(shash);
        }

        *ctx = shash;
        tfm->exit = crypto_exit_shash_ops_async;

        crt->init = shash_async_init;
        crt->update = shash_async_update;
        crt->final = shash_async_final;
        crt->finup = shash_async_finup;
        crt->digest = shash_async_digest;
        crt->setkey = shash_async_setkey;

        crt->has_setkey = alg->setkey != shash_no_setkey;

        if (alg->export)
                crt->export = shash_async_export;
        if (alg->import)
                crt->import = shash_async_import;

        crt->reqsize = sizeof(struct shash_desc) + crypto_shash_descsize(shash);

        return 0;
}

static int crypto_shash_init_tfm(struct crypto_tfm *tfm)
{
        struct crypto_shash *hash = __crypto_shash_cast(tfm);

        hash->descsize = crypto_shash_alg(hash)->descsize;
        return 0;
}

#ifdef CONFIG_NET
static int crypto_shash_report(struct sk_buff *skb, struct crypto_alg *alg)
{
        struct crypto_report_hash rhash;
        struct shash_alg *salg = __crypto_shash_alg(alg);

        strncpy(rhash.type, "shash", sizeof(rhash.type));

        rhash.blocksize = alg->cra_blocksize;
        rhash.digestsize = salg->digestsize;

        if (nla_put(skb, CRYPTOCFGA_REPORT_HASH,
                    sizeof(struct crypto_report_hash), &rhash))
                goto nla_put_failure;
        return 0;

nla_put_failure:
        return -EMSGSIZE;
}
#else
static int crypto_shash_report(struct sk_buff *skb, struct crypto_alg *alg)
{
        return -ENOSYS;
}
#endif

static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
        __maybe_unused;
static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
{
        struct shash_alg *salg = __crypto_shash_alg(alg);

        seq_printf(m, "type         : shash\n");
        seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
        seq_printf(m, "digestsize   : %u\n", salg->digestsize);
}

static const struct crypto_type crypto_shash_type = {
        .extsize = crypto_alg_extsize,
        .init_tfm = crypto_shash_init_tfm,
#ifdef CONFIG_PROC_FS
        .show = crypto_shash_show,
#endif
        .report = crypto_shash_report,
        .maskclear = ~CRYPTO_ALG_TYPE_MASK,
        .maskset = CRYPTO_ALG_TYPE_MASK,
        .type = CRYPTO_ALG_TYPE_SHASH,
        .tfmsize = offsetof(struct crypto_shash, base),
};

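/*
 * Allocate a synchronous hash transform by algorithm name, for example
 * crypto_alloc_shash("sha256", 0, 0).  Returns an ERR_PTR() on failure.
 */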
struct crypto_shash *crypto_alloc_shash(const char *alg_name, u32 type,
                                        u32 mask)
{
        return crypto_alloc_tfm(alg_name, &crypto_shash_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_alloc_shash);

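/*
 * Sanity-check an shash algorithm and fill in default callbacks (finup,
 * digest, export/import, setkey) before it is registered.
 */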
static int shash_prepare_alg(struct shash_alg *alg)
{
        struct crypto_alg *base = &alg->base;

        if (alg->digestsize > PAGE_SIZE / 8 ||
            alg->descsize > PAGE_SIZE / 8 ||
            alg->statesize > PAGE_SIZE / 8)
                return -EINVAL;

        base->cra_type = &crypto_shash_type;
        base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK;
        base->cra_flags |= CRYPTO_ALG_TYPE_SHASH;

        if (!alg->finup)
                alg->finup = shash_finup_unaligned;
        if (!alg->digest)
                alg->digest = shash_digest_unaligned;
        if (!alg->export) {
                alg->export = shash_default_export;
                alg->import = shash_default_import;
                alg->statesize = alg->descsize;
        }
        if (!alg->setkey)
                alg->setkey = shash_no_setkey;

        return 0;
}

int crypto_register_shash(struct shash_alg *alg)
{
        struct crypto_alg *base = &alg->base;
        int err;

        err = shash_prepare_alg(alg);
        if (err)
                return err;

        return crypto_register_alg(base);
}
EXPORT_SYMBOL_GPL(crypto_register_shash);

int crypto_unregister_shash(struct shash_alg *alg)
{
        return crypto_unregister_alg(&alg->base);
}
EXPORT_SYMBOL_GPL(crypto_unregister_shash);

int crypto_register_shashes(struct shash_alg *algs, int count)
{
        int i, ret;

        for (i = 0; i < count; i++) {
                ret = crypto_register_shash(&algs[i]);
                if (ret)
                        goto err;
        }

        return 0;

err:
        for (--i; i >= 0; --i)
                crypto_unregister_shash(&algs[i]);

        return ret;
}
EXPORT_SYMBOL_GPL(crypto_register_shashes);

int crypto_unregister_shashes(struct shash_alg *algs, int count)
{
        int i, ret;

        for (i = count - 1; i >= 0; --i) {
                ret = crypto_unregister_shash(&algs[i]);
                if (ret)
                        pr_err("Failed to unregister %s %s: %d\n",
                               algs[i].base.cra_driver_name,
                               algs[i].base.cra_name, ret);
        }

        return 0;
}
EXPORT_SYMBOL_GPL(crypto_unregister_shashes);

int shash_register_instance(struct crypto_template *tmpl,
                            struct shash_instance *inst)
{
        int err;

        err = shash_prepare_alg(&inst->alg);
        if (err)
                return err;

        return crypto_register_instance(tmpl, shash_crypto_instance(inst));
}
EXPORT_SYMBOL_GPL(shash_register_instance);

void shash_free_instance(struct crypto_instance *inst)
{
        crypto_drop_spawn(crypto_instance_ctx(inst));
        kfree(shash_instance(inst));
}
EXPORT_SYMBOL_GPL(shash_free_instance);

int crypto_init_shash_spawn(struct crypto_shash_spawn *spawn,
                            struct shash_alg *alg,
                            struct crypto_instance *inst)
{
        return crypto_init_spawn2(&spawn->base, &alg->base, inst,
                                  &crypto_shash_type);
}
EXPORT_SYMBOL_GPL(crypto_init_shash_spawn);

struct shash_alg *shash_attr_alg(struct rtattr *rta, u32 type, u32 mask)
{
        struct crypto_alg *alg;

        alg = crypto_attr_alg2(rta, &crypto_shash_type, type, mask);
        return IS_ERR(alg) ? ERR_CAST(alg) :
               container_of(alg, struct shash_alg, base);
}
EXPORT_SYMBOL_GPL(shash_attr_alg);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Synchronous cryptographic hash type");