crypto/shash.c
/*
 * Synchronous Cryptographic Hash operations.
 *
 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <crypto/scatterwalk.h>
#include <crypto/internal/hash.h>
#include <linux/err.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>
#include <linux/seq_file.h>
#include <linux/cryptouser.h>
#include <net/netlink.h>
#include <linux/compiler.h>

#include "internal.h"

static const struct crypto_type crypto_shash_type;

static int shash_no_setkey(struct crypto_shash *tfm, const u8 *key,
                           unsigned int keylen)
{
        return -ENOSYS;
}

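/*
 * The caller's key buffer does not satisfy the algorithm's alignment mask:
 * copy the key into a properly aligned heap buffer before calling ->setkey(),
 * and wipe the copy afterwards so no key material is left behind.
 */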
static int shash_setkey_unaligned(struct crypto_shash *tfm, const u8 *key,
                                  unsigned int keylen)
{
        struct shash_alg *shash = crypto_shash_alg(tfm);
        unsigned long alignmask = crypto_shash_alignmask(tfm);
        unsigned long absize;
        u8 *buffer, *alignbuffer;
        int err;

        absize = keylen + (alignmask & ~(crypto_tfm_ctx_alignment() - 1));
        buffer = kmalloc(absize, GFP_KERNEL);
        if (!buffer)
                return -ENOMEM;

        alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
        memcpy(alignbuffer, key, keylen);
        err = shash->setkey(tfm, alignbuffer, keylen);
        kzfree(buffer);
        return err;
}

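/*
 * crypto_shash_setkey - set the key for a synchronous hash transform.
 * Takes the fast path when the key buffer already satisfies the algorithm's
 * alignment mask, otherwise bounces through an aligned copy.
 */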
int crypto_shash_setkey(struct crypto_shash *tfm, const u8 *key,
                        unsigned int keylen)
{
        struct shash_alg *shash = crypto_shash_alg(tfm);
        unsigned long alignmask = crypto_shash_alignmask(tfm);

        if ((unsigned long)key & alignmask)
                return shash_setkey_unaligned(tfm, key, keylen);

        return shash->setkey(tfm, key, keylen);
}
EXPORT_SYMBOL_GPL(crypto_shash_setkey);

static inline unsigned int shash_align_buffer_size(unsigned len,
                                                   unsigned long mask)
{
        typedef u8 __aligned_largest u8_aligned;
        return len + (mask & ~(__alignof__(u8_aligned) - 1));
}

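/*
 * The data pointer is not aligned: hash just enough leading bytes out of an
 * aligned on-stack bounce buffer to realign the pointer, then hash the
 * remainder directly from the caller's buffer.
 */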
static int shash_update_unaligned(struct shash_desc *desc, const u8 *data,
                                  unsigned int len)
{
        struct crypto_shash *tfm = desc->tfm;
        struct shash_alg *shash = crypto_shash_alg(tfm);
        unsigned long alignmask = crypto_shash_alignmask(tfm);
        unsigned int unaligned_len = alignmask + 1 -
                                     ((unsigned long)data & alignmask);
        u8 ubuf[shash_align_buffer_size(unaligned_len, alignmask)]
                __aligned_largest;
        u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1);
        int err;

        if (unaligned_len > len)
                unaligned_len = len;

        memcpy(buf, data, unaligned_len);
        err = shash->update(desc, buf, unaligned_len);
        memset(buf, 0, unaligned_len);

        return err ?:
               shash->update(desc, data + unaligned_len, len - unaligned_len);
}

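/*
 * crypto_shash_update - add data to the message digest being computed.
 * Dispatches to the unaligned helper only when the data pointer violates
 * the algorithm's alignment mask.
 */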
int crypto_shash_update(struct shash_desc *desc, const u8 *data,
                        unsigned int len)
{
        struct crypto_shash *tfm = desc->tfm;
        struct shash_alg *shash = crypto_shash_alg(tfm);
        unsigned long alignmask = crypto_shash_alignmask(tfm);

        if ((unsigned long)data & alignmask)
                return shash_update_unaligned(desc, data, len);

        return shash->update(desc, data, len);
}
EXPORT_SYMBOL_GPL(crypto_shash_update);

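/*
 * The output buffer is not aligned: let ->final() write the digest into an
 * aligned on-stack buffer, copy it out, then wipe the temporary copy.
 */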
static int shash_final_unaligned(struct shash_desc *desc, u8 *out)
{
        struct crypto_shash *tfm = desc->tfm;
        unsigned long alignmask = crypto_shash_alignmask(tfm);
        struct shash_alg *shash = crypto_shash_alg(tfm);
        unsigned int ds = crypto_shash_digestsize(tfm);
        u8 ubuf[shash_align_buffer_size(ds, alignmask)]
                __aligned_largest;
        u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1);
        int err;

        err = shash->final(desc, buf);
        if (err)
                goto out;

        memcpy(out, buf, ds);

out:
        memset(buf, 0, ds);
        return err;
}

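/*
 * crypto_shash_final - finish the hash and write the digest to @out,
 * bouncing through an aligned buffer if @out violates the alignment mask.
 */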
int crypto_shash_final(struct shash_desc *desc, u8 *out)
{
        struct crypto_shash *tfm = desc->tfm;
        struct shash_alg *shash = crypto_shash_alg(tfm);
        unsigned long alignmask = crypto_shash_alignmask(tfm);

        if ((unsigned long)out & alignmask)
                return shash_final_unaligned(desc, out);

        return shash->final(desc, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_final);

static int shash_finup_unaligned(struct shash_desc *desc, const u8 *data,
                                 unsigned int len, u8 *out)
{
        return crypto_shash_update(desc, data, len) ?:
               crypto_shash_final(desc, out);
}

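/*
 * crypto_shash_finup - hash the final chunk of data and produce the digest
 * in one call.  Falls back to a separate update + final when either pointer
 * is unaligned, since those paths already handle realignment.
 */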
int crypto_shash_finup(struct shash_desc *desc, const u8 *data,
                       unsigned int len, u8 *out)
{
        struct crypto_shash *tfm = desc->tfm;
        struct shash_alg *shash = crypto_shash_alg(tfm);
        unsigned long alignmask = crypto_shash_alignmask(tfm);

        if (((unsigned long)data | (unsigned long)out) & alignmask)
                return shash_finup_unaligned(desc, data, len, out);

        return shash->finup(desc, data, len, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_finup);

static int shash_digest_unaligned(struct shash_desc *desc, const u8 *data,
                                  unsigned int len, u8 *out)
{
        return crypto_shash_init(desc) ?:
               crypto_shash_finup(desc, data, len, out);
}

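/*
 * crypto_shash_digest - compute the digest of a single buffer (init +
 * update + final).  Unaligned input or output falls back to the generic
 * init/finup sequence, which realigns as needed.
 */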
int crypto_shash_digest(struct shash_desc *desc, const u8 *data,
                        unsigned int len, u8 *out)
{
        struct crypto_shash *tfm = desc->tfm;
        struct shash_alg *shash = crypto_shash_alg(tfm);
        unsigned long alignmask = crypto_shash_alignmask(tfm);

        if (((unsigned long)data | (unsigned long)out) & alignmask)
                return shash_digest_unaligned(desc, data, len, out);

        return shash->digest(desc, data, len, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_digest);

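/*
 * Default export/import: the partial hash state is simply the descriptor
 * context, so a plain memcpy of descsize bytes is sufficient.
 */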
static int shash_default_export(struct shash_desc *desc, void *out)
{
        memcpy(out, shash_desc_ctx(desc), crypto_shash_descsize(desc->tfm));
        return 0;
}

static int shash_default_import(struct shash_desc *desc, const void *in)
{
        memcpy(shash_desc_ctx(desc), in, crypto_shash_descsize(desc->tfm));
        return 0;
}

static int shash_async_setkey(struct crypto_ahash *tfm, const u8 *key,
                              unsigned int keylen)
{
        struct crypto_shash **ctx = crypto_ahash_ctx(tfm);

        return crypto_shash_setkey(*ctx, key, keylen);
}

static int shash_async_init(struct ahash_request *req)
{
        struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
        struct shash_desc *desc = ahash_request_ctx(req);

        desc->tfm = *ctx;
        desc->flags = req->base.flags;

        return crypto_shash_init(desc);
}

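/*
 * shash_ahash_update - perform an ahash-style update on top of a shash:
 * walk the request's scatterlist and feed each mapped chunk to
 * crypto_shash_update().
 */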
int shash_ahash_update(struct ahash_request *req, struct shash_desc *desc)
{
        struct crypto_hash_walk walk;
        int nbytes;

        for (nbytes = crypto_hash_walk_first(req, &walk); nbytes > 0;
             nbytes = crypto_hash_walk_done(&walk, nbytes))
                nbytes = crypto_shash_update(desc, walk.data, nbytes);

        return nbytes;
}
EXPORT_SYMBOL_GPL(shash_ahash_update);

static int shash_async_update(struct ahash_request *req)
{
        return shash_ahash_update(req, ahash_request_ctx(req));
}

static int shash_async_final(struct ahash_request *req)
{
        return crypto_shash_final(ahash_request_ctx(req), req->result);
}

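/*
 * shash_ahash_finup - walk the scatterlist and hash each chunk, using
 * crypto_shash_finup() on the last one so the digest lands in req->result.
 * An empty request degenerates to a plain final().
 */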
int shash_ahash_finup(struct ahash_request *req, struct shash_desc *desc)
{
        struct crypto_hash_walk walk;
        int nbytes;

        nbytes = crypto_hash_walk_first(req, &walk);
        if (!nbytes)
                return crypto_shash_final(desc, req->result);

        do {
                nbytes = crypto_hash_walk_last(&walk) ?
                         crypto_shash_finup(desc, walk.data, nbytes,
                                            req->result) :
                         crypto_shash_update(desc, walk.data, nbytes);
                nbytes = crypto_hash_walk_done(&walk, nbytes);
        } while (nbytes > 0);

        return nbytes;
}
EXPORT_SYMBOL_GPL(shash_ahash_finup);

static int shash_async_finup(struct ahash_request *req)
{
        struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
        struct shash_desc *desc = ahash_request_ctx(req);

        desc->tfm = *ctx;
        desc->flags = req->base.flags;

        return shash_ahash_finup(req, desc);
}

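/*
 * shash_ahash_digest - digest an ahash request with a shash descriptor.
 * Requests that fit inside the first scatterlist entry without crossing a
 * page boundary are hashed directly through a kmap_atomic() mapping;
 * anything larger goes through init + finup over the scatterlist walk.
 */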
int shash_ahash_digest(struct ahash_request *req, struct shash_desc *desc)
{
        struct scatterlist *sg = req->src;
        unsigned int offset = sg->offset;
        unsigned int nbytes = req->nbytes;
        int err;

        if (nbytes < min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset)) {
                void *data;

                data = kmap_atomic(sg_page(sg));
                err = crypto_shash_digest(desc, data + offset, nbytes,
                                          req->result);
                kunmap_atomic(data);
                crypto_yield(desc->flags);
        } else
                err = crypto_shash_init(desc) ?:
                      shash_ahash_finup(req, desc);

        return err;
}
EXPORT_SYMBOL_GPL(shash_ahash_digest);

static int shash_async_digest(struct ahash_request *req)
{
        struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
        struct shash_desc *desc = ahash_request_ctx(req);

        desc->tfm = *ctx;
        desc->flags = req->base.flags;

        return shash_ahash_digest(req, desc);
}

static int shash_async_export(struct ahash_request *req, void *out)
{
        return crypto_shash_export(ahash_request_ctx(req), out);
}

static int shash_async_import(struct ahash_request *req, const void *in)
{
        struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
        struct shash_desc *desc = ahash_request_ctx(req);

        desc->tfm = *ctx;
        desc->flags = req->base.flags;

        return crypto_shash_import(desc, in);
}

static void crypto_exit_shash_ops_async(struct crypto_tfm *tfm)
{
        struct crypto_shash **ctx = crypto_tfm_ctx(tfm);

        crypto_free_shash(*ctx);
}

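/*
 * crypto_init_shash_ops_async - expose a synchronous shash algorithm through
 * the asynchronous ahash interface.  A shash transform is allocated and
 * stashed in the ahash context, and the ahash entry points are wired up to
 * the shash_async_* wrappers above.
 */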
int crypto_init_shash_ops_async(struct crypto_tfm *tfm)
{
        struct crypto_alg *calg = tfm->__crt_alg;
        struct shash_alg *alg = __crypto_shash_alg(calg);
        struct crypto_ahash *crt = __crypto_ahash_cast(tfm);
        struct crypto_shash **ctx = crypto_tfm_ctx(tfm);
        struct crypto_shash *shash;

        if (!crypto_mod_get(calg))
                return -EAGAIN;

        shash = crypto_create_tfm(calg, &crypto_shash_type);
        if (IS_ERR(shash)) {
                crypto_mod_put(calg);
                return PTR_ERR(shash);
        }

        *ctx = shash;
        tfm->exit = crypto_exit_shash_ops_async;

        crt->init = shash_async_init;
        crt->update = shash_async_update;
        crt->final = shash_async_final;
        crt->finup = shash_async_finup;
        crt->digest = shash_async_digest;
        crt->setkey = shash_async_setkey;

        crt->has_setkey = alg->setkey != shash_no_setkey;

        if (alg->export)
                crt->export = shash_async_export;
        if (alg->import)
                crt->import = shash_async_import;

        crt->reqsize = sizeof(struct shash_desc) + crypto_shash_descsize(shash);

        return 0;
}

static int crypto_shash_init_tfm(struct crypto_tfm *tfm)
{
        struct crypto_shash *hash = __crypto_shash_cast(tfm);

        hash->descsize = crypto_shash_alg(hash)->descsize;
        return 0;
}

#ifdef CONFIG_NET
static int crypto_shash_report(struct sk_buff *skb, struct crypto_alg *alg)
{
        struct crypto_report_hash rhash;
        struct shash_alg *salg = __crypto_shash_alg(alg);

        strncpy(rhash.type, "shash", sizeof(rhash.type));

        rhash.blocksize = alg->cra_blocksize;
        rhash.digestsize = salg->digestsize;

        if (nla_put(skb, CRYPTOCFGA_REPORT_HASH,
                    sizeof(struct crypto_report_hash), &rhash))
                goto nla_put_failure;
        return 0;

nla_put_failure:
        return -EMSGSIZE;
}
#else
static int crypto_shash_report(struct sk_buff *skb, struct crypto_alg *alg)
{
        return -ENOSYS;
}
#endif

static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
        __maybe_unused;
static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
{
        struct shash_alg *salg = __crypto_shash_alg(alg);

        seq_printf(m, "type         : shash\n");
        seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
        seq_printf(m, "digestsize   : %u\n", salg->digestsize);
}

static const struct crypto_type crypto_shash_type = {
        .extsize = crypto_alg_extsize,
        .init_tfm = crypto_shash_init_tfm,
#ifdef CONFIG_PROC_FS
        .show = crypto_shash_show,
#endif
        .report = crypto_shash_report,
        .maskclear = ~CRYPTO_ALG_TYPE_MASK,
        .maskset = CRYPTO_ALG_TYPE_MASK,
        .type = CRYPTO_ALG_TYPE_SHASH,
        .tfmsize = offsetof(struct crypto_shash, base),
};

struct crypto_shash *crypto_alloc_shash(const char *alg_name, u32 type,
                                        u32 mask)
{
        return crypto_alloc_tfm(alg_name, &crypto_shash_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_alloc_shash);
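
/*
 * Minimal usage sketch for the exported shash API (illustrative only, not
 * part of this file): allocate a transform by name, hash one buffer with
 * crypto_shash_digest(), and free the transform.  The "sha256" algorithm
 * name and the data/len variables are assumptions of the example, and
 * error handling is abbreviated.
 *
 *	struct crypto_shash *tfm;
 *	u8 out[SHA256_DIGEST_SIZE];
 *	int err;
 *
 *	tfm = crypto_alloc_shash("sha256", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *
 *	{
 *		SHASH_DESC_ON_STACK(desc, tfm);
 *
 *		desc->tfm = tfm;
 *		desc->flags = 0;
 *		err = crypto_shash_digest(desc, data, len, out);
 *	}
 *
 *	crypto_free_shash(tfm);
 */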
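/*
 * Validate the algorithm's size fields and fill in defaults for any optional
 * operations (finup, digest, export/import, setkey) that the implementation
 * did not provide.
 */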
static int shash_prepare_alg(struct shash_alg *alg)
{
        struct crypto_alg *base = &alg->base;

        if (alg->digestsize > PAGE_SIZE / 8 ||
            alg->descsize > PAGE_SIZE / 8 ||
            alg->statesize > PAGE_SIZE / 8)
                return -EINVAL;

        base->cra_type = &crypto_shash_type;
        base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK;
        base->cra_flags |= CRYPTO_ALG_TYPE_SHASH;

        if (!alg->finup)
                alg->finup = shash_finup_unaligned;
        if (!alg->digest)
                alg->digest = shash_digest_unaligned;
        if (!alg->export) {
                alg->export = shash_default_export;
                alg->import = shash_default_import;
                alg->statesize = alg->descsize;
        }
        if (!alg->setkey)
                alg->setkey = shash_no_setkey;

        return 0;
}

int crypto_register_shash(struct shash_alg *alg)
{
        struct crypto_alg *base = &alg->base;
        int err;

        err = shash_prepare_alg(alg);
        if (err)
                return err;

        return crypto_register_alg(base);
}
EXPORT_SYMBOL_GPL(crypto_register_shash);

int crypto_unregister_shash(struct shash_alg *alg)
{
        return crypto_unregister_alg(&alg->base);
}
EXPORT_SYMBOL_GPL(crypto_unregister_shash);

int crypto_register_shashes(struct shash_alg *algs, int count)
{
        int i, ret;

        for (i = 0; i < count; i++) {
                ret = crypto_register_shash(&algs[i]);
                if (ret)
                        goto err;
        }

        return 0;

err:
        for (--i; i >= 0; --i)
                crypto_unregister_shash(&algs[i]);

        return ret;
}
EXPORT_SYMBOL_GPL(crypto_register_shashes);

int crypto_unregister_shashes(struct shash_alg *algs, int count)
{
        int i, ret;

        for (i = count - 1; i >= 0; --i) {
                ret = crypto_unregister_shash(&algs[i]);
                if (ret)
                        pr_err("Failed to unregister %s %s: %d\n",
                               algs[i].base.cra_driver_name,
                               algs[i].base.cra_name, ret);
        }

        return 0;
}
EXPORT_SYMBOL_GPL(crypto_unregister_shashes);

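/*
 * Register a shash instance created by a template, preparing its algorithm
 * (defaults and type flags) the same way crypto_register_shash() does.
 */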
int shash_register_instance(struct crypto_template *tmpl,
                            struct shash_instance *inst)
{
        int err;

        err = shash_prepare_alg(&inst->alg);
        if (err)
                return err;

        return crypto_register_instance(tmpl, shash_crypto_instance(inst));
}
EXPORT_SYMBOL_GPL(shash_register_instance);

void shash_free_instance(struct crypto_instance *inst)
{
        crypto_drop_spawn(crypto_instance_ctx(inst));
        kfree(shash_instance(inst));
}
EXPORT_SYMBOL_GPL(shash_free_instance);

int crypto_init_shash_spawn(struct crypto_shash_spawn *spawn,
                            struct shash_alg *alg,
                            struct crypto_instance *inst)
{
        return crypto_init_spawn2(&spawn->base, &alg->base, inst,
                                  &crypto_shash_type);
}
EXPORT_SYMBOL_GPL(crypto_init_shash_spawn);

struct shash_alg *shash_attr_alg(struct rtattr *rta, u32 type, u32 mask)
{
        struct crypto_alg *alg;

        alg = crypto_attr_alg2(rta, &crypto_shash_type, type, mask);
        return IS_ERR(alg) ? ERR_CAST(alg) :
               container_of(alg, struct shash_alg, base);
}
EXPORT_SYMBOL_GPL(shash_attr_alg);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Synchronous cryptographic hash type");