// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Cryptographic API for algorithms (i.e., low-level API).
 *
 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 */

#include <crypto/algapi.h>
#include <crypto/internal/simd.h>
#include <linux/err.h>
#include <linux/errno.h>
#include <linux/fips.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/list.h>
#include <linux/module.h>
#include <linux/rtnetlink.h>
#include <linux/slab.h>
#include <linux/string.h>
#include <linux/workqueue.h>

#include "internal.h"

static LIST_HEAD(crypto_template_list);

#ifdef CONFIG_CRYPTO_MANAGER_EXTRA_TESTS
DEFINE_PER_CPU(bool, crypto_simd_disabled_for_test);
EXPORT_PER_CPU_SYMBOL_GPL(crypto_simd_disabled_for_test);
#endif

static inline void crypto_check_module_sig(struct module *mod)
{
	if (fips_enabled && mod && !module_sig_ok(mod))
		panic("Module %s signature verification failed in FIPS mode\n",
		      module_name(mod));
}

static int crypto_check_alg(struct crypto_alg *alg)
{
	crypto_check_module_sig(alg->cra_module);

	if (!alg->cra_name[0] || !alg->cra_driver_name[0])
		return -EINVAL;

	if (alg->cra_alignmask & (alg->cra_alignmask + 1))
		return -EINVAL;

	/* General maximums for all algs. */
	if (alg->cra_alignmask > MAX_ALGAPI_ALIGNMASK)
		return -EINVAL;
	if (alg->cra_blocksize > MAX_ALGAPI_BLOCKSIZE)
		return -EINVAL;

	/* Lower maximums for specific alg types. */
	if (!alg->cra_type && (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) ==
			       CRYPTO_ALG_TYPE_CIPHER) {
		if (alg->cra_alignmask > MAX_CIPHER_ALIGNMASK)
			return -EINVAL;
		if (alg->cra_blocksize > MAX_CIPHER_BLOCKSIZE)
			return -EINVAL;
	}

	if (alg->cra_priority < 0)
		return -EINVAL;

	refcount_set(&alg->cra_refcnt, 1);

	return 0;
}

static void crypto_free_instance(struct crypto_instance *inst)
{
	inst->alg.cra_type->free(inst);
}

static void crypto_destroy_instance_workfn(struct work_struct *w)
{
	struct crypto_instance *inst = container_of(w, struct crypto_instance,
						    free_work);
	struct crypto_template *tmpl = inst->tmpl;

	crypto_free_instance(inst);
	crypto_tmpl_put(tmpl);
}

static void crypto_destroy_instance(struct crypto_alg *alg)
{
	struct crypto_instance *inst = container_of(alg,
						    struct crypto_instance,
						    alg);

	INIT_WORK(&inst->free_work, crypto_destroy_instance_workfn);
	schedule_work(&inst->free_work);
}

/*
 * This function adds a spawn to the list secondary_spawns which
 * will be used at the end of crypto_remove_spawns to unregister
 * instances, unless the spawn happens to be one that is depended
 * on by the new algorithm (nalg in crypto_remove_spawns).
 *
 * This function is also responsible for resurrecting any algorithms
 * in the dependency chain of nalg by unsetting n->dead.
 */
static struct list_head *crypto_more_spawns(struct crypto_alg *alg,
					    struct list_head *stack,
					    struct list_head *top,
					    struct list_head *secondary_spawns)
{
	struct crypto_spawn *spawn, *n;

	spawn = list_first_entry_or_null(stack, struct crypto_spawn, list);
	if (!spawn)
		return NULL;

	n = list_prev_entry(spawn, list);
	list_move(&spawn->list, secondary_spawns);

	if (list_is_last(&n->list, stack))
		return top;

	n = list_next_entry(n, list);
	if (!spawn->dead)
		n->dead = false;

	return &n->inst->alg.cra_users;
}

static void crypto_remove_instance(struct crypto_instance *inst,
				   struct list_head *list)
{
	struct crypto_template *tmpl = inst->tmpl;

	if (crypto_is_dead(&inst->alg))
		return;

	inst->alg.cra_flags |= CRYPTO_ALG_DEAD;

	if (!tmpl || !crypto_tmpl_get(tmpl))
		return;

	list_move(&inst->alg.cra_list, list);
	hlist_del(&inst->list);
	inst->alg.cra_destroy = crypto_destroy_instance;

	BUG_ON(!list_empty(&inst->alg.cra_users));
}

/*
 * Given an algorithm alg, remove all algorithms that depend on it
 * through spawns.  If nalg is not null, then exempt any algorithms
 * that are depended on by nalg.  This is useful when nalg itself
 * depends on alg.
 */
void crypto_remove_spawns(struct crypto_alg *alg, struct list_head *list,
			  struct crypto_alg *nalg)
{
	u32 new_type = (nalg ?: alg)->cra_flags;
	struct crypto_spawn *spawn, *n;
	LIST_HEAD(secondary_spawns);
	struct list_head *spawns;
	LIST_HEAD(stack);
	LIST_HEAD(top);

	spawns = &alg->cra_users;
	list_for_each_entry_safe(spawn, n, spawns, list) {
		if ((spawn->alg->cra_flags ^ new_type) & spawn->mask)
			continue;

		list_move(&spawn->list, &top);
	}

	/*
	 * Perform a depth-first walk starting from alg through
	 * the cra_users tree.  The list stack records the path
	 * from alg to the current spawn.
	 */
	spawns = &top;
	do {
		while (!list_empty(spawns)) {
			struct crypto_instance *inst;

			spawn = list_first_entry(spawns, struct crypto_spawn,
						 list);
			inst = spawn->inst;

			list_move(&spawn->list, &stack);
			spawn->dead = !spawn->registered || &inst->alg != nalg;

			if (!spawn->registered)
				break;

			BUG_ON(&inst->alg == alg);

			if (&inst->alg == nalg)
				break;

			spawns = &inst->alg.cra_users;

			/*
			 * Even if spawn->registered is true, the
			 * instance itself may still be unregistered.
			 * This is because it may have failed during
			 * registration.  Therefore we still need to
			 * make the following test.
			 *
			 * We may encounter an unregistered instance here, since
			 * an instance's spawns are set up prior to the instance
			 * being registered.  An unregistered instance will have
			 * NULL ->cra_users.next, since ->cra_users isn't
			 * properly initialized until registration.  But an
			 * unregistered instance cannot have any users, so treat
			 * it the same as ->cra_users being empty.
			 */
			if (spawns->next == NULL)
				break;
		}
	} while ((spawns = crypto_more_spawns(alg, &stack, &top,
					      &secondary_spawns)));

	/*
	 * Remove all instances that are marked as dead.  Also
	 * complete the resurrection of the others by moving them
	 * back to the cra_users list.
	 */
	list_for_each_entry_safe(spawn, n, &secondary_spawns, list) {
		if (!spawn->dead)
			list_move(&spawn->list, &spawn->alg->cra_users);
		else if (spawn->registered)
			crypto_remove_instance(spawn->inst, list);
	}
}
EXPORT_SYMBOL_GPL(crypto_remove_spawns);

static struct crypto_larval *crypto_alloc_test_larval(struct crypto_alg *alg)
{
	struct crypto_larval *larval;

	if (!IS_ENABLED(CONFIG_CRYPTO_MANAGER))
		return NULL;

	larval = crypto_larval_alloc(alg->cra_name,
				     alg->cra_flags | CRYPTO_ALG_TESTED, 0);
	if (IS_ERR(larval))
		return larval;

	larval->adult = crypto_mod_get(alg);
	if (!larval->adult) {
		kfree(larval);
		return ERR_PTR(-ENOENT);
	}

	refcount_set(&larval->alg.cra_refcnt, 1);
	memcpy(larval->alg.cra_driver_name, alg->cra_driver_name,
	       CRYPTO_MAX_ALG_NAME);
	larval->alg.cra_priority = alg->cra_priority;

	return larval;
}

static struct crypto_larval *__crypto_register_alg(struct crypto_alg *alg)
{
	struct crypto_alg *q;
	struct crypto_larval *larval;
	int ret = -EAGAIN;

	if (crypto_is_dead(alg))
		goto err;

	INIT_LIST_HEAD(&alg->cra_users);

	/* No cheating! */
	alg->cra_flags &= ~CRYPTO_ALG_TESTED;

	ret = -EEXIST;

	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		if (q == alg)
			goto err;

		if (crypto_is_moribund(q))
			continue;

		if (crypto_is_larval(q)) {
			if (!strcmp(alg->cra_driver_name, q->cra_driver_name))
				goto err;
			continue;
		}

		if (!strcmp(q->cra_driver_name, alg->cra_name) ||
		    !strcmp(q->cra_name, alg->cra_driver_name))
			goto err;
	}

	larval = crypto_alloc_test_larval(alg);
	if (IS_ERR(larval))
		goto out;

	list_add(&alg->cra_list, &crypto_alg_list);

	if (larval)
		list_add(&larval->alg.cra_list, &crypto_alg_list);
	else
		alg->cra_flags |= CRYPTO_ALG_TESTED;

	crypto_stats_init(alg);

out:
	return larval;

err:
	larval = ERR_PTR(ret);
	goto out;
}

void crypto_alg_tested(const char *name, int err)
{
	struct crypto_larval *test;
	struct crypto_alg *alg;
	struct crypto_alg *q;
	LIST_HEAD(list);
	bool best;

	down_write(&crypto_alg_sem);
	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		if (crypto_is_moribund(q) || !crypto_is_larval(q))
			continue;

		test = (struct crypto_larval *)q;

		if (!strcmp(q->cra_driver_name, name))
			goto found;
	}

	pr_err("alg: Unexpected test result for %s: %d\n", name, err);
	goto unlock;

found:
	q->cra_flags |= CRYPTO_ALG_DEAD;
	alg = test->adult;

	if (list_empty(&alg->cra_list))
		goto complete;

	if (err == -ECANCELED)
		alg->cra_flags |= CRYPTO_ALG_FIPS_INTERNAL;
	else if (err)
		goto complete;
	else
		alg->cra_flags &= ~CRYPTO_ALG_FIPS_INTERNAL;

	alg->cra_flags |= CRYPTO_ALG_TESTED;

	/* Only satisfy larval waiters if we are the best. */
	best = true;
	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		if (crypto_is_moribund(q) || !crypto_is_larval(q))
			continue;

		if (strcmp(alg->cra_name, q->cra_name))
			continue;

		if (q->cra_priority > alg->cra_priority) {
			best = false;
			break;
		}
	}

	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		if (q == alg)
			continue;

		if (crypto_is_moribund(q))
			continue;

		if (crypto_is_larval(q)) {
			struct crypto_larval *larval = (void *)q;

			/*
			 * Check to see if either our generic name or
			 * specific name can satisfy the name requested
			 * by the larval entry q.
			 */
			if (strcmp(alg->cra_name, q->cra_name) &&
			    strcmp(alg->cra_driver_name, q->cra_name))
				continue;

			if (larval->adult)
				continue;
			if ((q->cra_flags ^ alg->cra_flags) & larval->mask)
				continue;

			if (best && crypto_mod_get(alg))
				larval->adult = alg;
			else
				larval->adult = ERR_PTR(-EAGAIN);

			continue;
		}

		if (strcmp(alg->cra_name, q->cra_name))
			continue;

		if (strcmp(alg->cra_driver_name, q->cra_driver_name) &&
		    q->cra_priority > alg->cra_priority)
			continue;

		crypto_remove_spawns(q, &list, alg);
	}

complete:
	complete_all(&test->completion);

unlock:
	up_write(&crypto_alg_sem);

	crypto_remove_final(&list);
}
EXPORT_SYMBOL_GPL(crypto_alg_tested);

void crypto_remove_final(struct list_head *list)
{
	struct crypto_alg *alg;
	struct crypto_alg *n;

	list_for_each_entry_safe(alg, n, list, cra_list) {
		list_del_init(&alg->cra_list);
		crypto_alg_put(alg);
	}
}
EXPORT_SYMBOL_GPL(crypto_remove_final);

int crypto_register_alg(struct crypto_alg *alg)
{
	struct crypto_larval *larval;
	bool test_started;
	int err;

	alg->cra_flags &= ~CRYPTO_ALG_DEAD;
	err = crypto_check_alg(alg);
	if (err)
		return err;

	down_write(&crypto_alg_sem);
	larval = __crypto_register_alg(alg);
	test_started = static_key_enabled(&crypto_boot_test_finished);
	if (!IS_ERR_OR_NULL(larval))
		larval->test_started = test_started;
	up_write(&crypto_alg_sem);

	if (IS_ERR_OR_NULL(larval))
		return PTR_ERR(larval);

	if (test_started)
		crypto_wait_for_test(larval);
	return 0;
}
EXPORT_SYMBOL_GPL(crypto_register_alg);
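
/*
 * Usage sketch (illustrative only; "example_alg" and its values are
 * hypothetical and type-specific fields such as cra_ctxsize and the
 * cipher callbacks are omitted): drivers normally register a statically
 * defined algorithm from module init and unregister it on exit:
 *
 *	static struct crypto_alg example_alg = {
 *		.cra_name		= "example",
 *		.cra_driver_name	= "example-generic",
 *		.cra_priority		= 100,
 *		.cra_flags		= CRYPTO_ALG_TYPE_CIPHER,
 *		.cra_blocksize		= 16,
 *		.cra_module		= THIS_MODULE,
 *	};
 *
 *	static int __init example_mod_init(void)
 *	{
 *		return crypto_register_alg(&example_alg);
 *	}
 *
 *	static void __exit example_mod_exit(void)
 *	{
 *		crypto_unregister_alg(&example_alg);
 *	}
 */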

static int crypto_remove_alg(struct crypto_alg *alg, struct list_head *list)
{
	if (unlikely(list_empty(&alg->cra_list)))
		return -ENOENT;

	alg->cra_flags |= CRYPTO_ALG_DEAD;

	list_del_init(&alg->cra_list);
	crypto_remove_spawns(alg, list, NULL);

	return 0;
}

void crypto_unregister_alg(struct crypto_alg *alg)
{
	int ret;
	LIST_HEAD(list);

	down_write(&crypto_alg_sem);
	ret = crypto_remove_alg(alg, &list);
	up_write(&crypto_alg_sem);

	if (WARN(ret, "Algorithm %s is not registered", alg->cra_driver_name))
		return;

	if (WARN_ON(refcount_read(&alg->cra_refcnt) != 1))
		return;

	if (alg->cra_destroy)
		alg->cra_destroy(alg);

	crypto_remove_final(&list);
}
EXPORT_SYMBOL_GPL(crypto_unregister_alg);

int crypto_register_algs(struct crypto_alg *algs, int count)
{
	int i, ret;

	for (i = 0; i < count; i++) {
		ret = crypto_register_alg(&algs[i]);
		if (ret)
			goto err;
	}

	return 0;

err:
	for (--i; i >= 0; --i)
		crypto_unregister_alg(&algs[i]);

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_register_algs);

void crypto_unregister_algs(struct crypto_alg *algs, int count)
{
	int i;

	for (i = 0; i < count; i++)
		crypto_unregister_alg(&algs[i]);
}
EXPORT_SYMBOL_GPL(crypto_unregister_algs);

int crypto_register_template(struct crypto_template *tmpl)
{
	struct crypto_template *q;
	int err = -EEXIST;

	down_write(&crypto_alg_sem);

	crypto_check_module_sig(tmpl->module);

	list_for_each_entry(q, &crypto_template_list, list) {
		if (q == tmpl)
			goto out;
	}

	list_add(&tmpl->list, &crypto_template_list);
	err = 0;
out:
	up_write(&crypto_alg_sem);
	return err;
}
EXPORT_SYMBOL_GPL(crypto_register_template);
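
/*
 * Usage sketch (illustrative; "example_tmpl" and example_create() are
 * hypothetical): a template module registers a struct crypto_template
 * whose ->create() callback builds instances such as "example(aes)":
 *
 *	static struct crypto_template example_tmpl = {
 *		.name	= "example",
 *		.create	= example_create,
 *		.module	= THIS_MODULE,
 *	};
 *
 *	static int __init example_tmpl_init(void)
 *	{
 *		return crypto_register_template(&example_tmpl);
 *	}
 */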

int crypto_register_templates(struct crypto_template *tmpls, int count)
{
	int i, err;

	for (i = 0; i < count; i++) {
		err = crypto_register_template(&tmpls[i]);
		if (err)
			goto out;
	}
	return 0;

out:
	for (--i; i >= 0; --i)
		crypto_unregister_template(&tmpls[i]);
	return err;
}
EXPORT_SYMBOL_GPL(crypto_register_templates);

void crypto_unregister_template(struct crypto_template *tmpl)
{
	struct crypto_instance *inst;
	struct hlist_node *n;
	struct hlist_head *list;
	LIST_HEAD(users);

	down_write(&crypto_alg_sem);

	BUG_ON(list_empty(&tmpl->list));
	list_del_init(&tmpl->list);

	list = &tmpl->instances;
	hlist_for_each_entry(inst, list, list) {
		int err = crypto_remove_alg(&inst->alg, &users);

		BUG_ON(err);
	}

	up_write(&crypto_alg_sem);

	hlist_for_each_entry_safe(inst, n, list, list) {
		BUG_ON(refcount_read(&inst->alg.cra_refcnt) != 1);
		crypto_free_instance(inst);
	}
	crypto_remove_final(&users);
}
EXPORT_SYMBOL_GPL(crypto_unregister_template);

void crypto_unregister_templates(struct crypto_template *tmpls, int count)
{
	int i;

	for (i = count - 1; i >= 0; --i)
		crypto_unregister_template(&tmpls[i]);
}
EXPORT_SYMBOL_GPL(crypto_unregister_templates);

static struct crypto_template *__crypto_lookup_template(const char *name)
{
	struct crypto_template *q, *tmpl = NULL;

	down_read(&crypto_alg_sem);
	list_for_each_entry(q, &crypto_template_list, list) {
		if (strcmp(q->name, name))
			continue;
		if (unlikely(!crypto_tmpl_get(q)))
			continue;

		tmpl = q;
		break;
	}
	up_read(&crypto_alg_sem);

	return tmpl;
}

struct crypto_template *crypto_lookup_template(const char *name)
{
	return try_then_request_module(__crypto_lookup_template(name),
				       "crypto-%s", name);
}
EXPORT_SYMBOL_GPL(crypto_lookup_template);

int crypto_register_instance(struct crypto_template *tmpl,
			     struct crypto_instance *inst)
{
	struct crypto_larval *larval;
	struct crypto_spawn *spawn;
	u32 fips_internal = 0;
	int err;

	err = crypto_check_alg(&inst->alg);
	if (err)
		return err;

	inst->alg.cra_module = tmpl->module;
	inst->alg.cra_flags |= CRYPTO_ALG_INSTANCE;

	down_write(&crypto_alg_sem);

	larval = ERR_PTR(-EAGAIN);
	for (spawn = inst->spawns; spawn;) {
		struct crypto_spawn *next;

		if (spawn->dead)
			goto unlock;

		next = spawn->next;
		spawn->inst = inst;
		spawn->registered = true;

		fips_internal |= spawn->alg->cra_flags;

		crypto_mod_put(spawn->alg);

		spawn = next;
	}

	inst->alg.cra_flags |= (fips_internal & CRYPTO_ALG_FIPS_INTERNAL);

	larval = __crypto_register_alg(&inst->alg);
	if (IS_ERR(larval))
		goto unlock;
	else if (larval)
		larval->test_started = true;

	hlist_add_head(&inst->list, &tmpl->instances);
	inst->tmpl = tmpl;

unlock:
	up_write(&crypto_alg_sem);

	err = PTR_ERR(larval);
	if (IS_ERR_OR_NULL(larval))
		goto err;

	crypto_wait_for_test(larval);
	err = 0;

err:
	return err;
}
EXPORT_SYMBOL_GPL(crypto_register_instance);

void crypto_unregister_instance(struct crypto_instance *inst)
{
	LIST_HEAD(list);

	down_write(&crypto_alg_sem);

	crypto_remove_spawns(&inst->alg, &list, NULL);
	crypto_remove_instance(inst, &list);

	up_write(&crypto_alg_sem);

	crypto_remove_final(&list);
}
EXPORT_SYMBOL_GPL(crypto_unregister_instance);

int crypto_grab_spawn(struct crypto_spawn *spawn, struct crypto_instance *inst,
		      const char *name, u32 type, u32 mask)
{
	struct crypto_alg *alg;
	int err = -EAGAIN;

	if (WARN_ON_ONCE(inst == NULL))
		return -EINVAL;

	/* Allow the result of crypto_attr_alg_name() to be passed directly */
	if (IS_ERR(name))
		return PTR_ERR(name);

	alg = crypto_find_alg(name, spawn->frontend,
			      type | CRYPTO_ALG_FIPS_INTERNAL, mask);
	if (IS_ERR(alg))
		return PTR_ERR(alg);

	down_write(&crypto_alg_sem);
	if (!crypto_is_moribund(alg)) {
		list_add(&spawn->list, &alg->cra_users);
		spawn->alg = alg;
		spawn->mask = mask;
		spawn->next = inst->spawns;
		inst->spawns = spawn;
		inst->alg.cra_flags |=
			(alg->cra_flags & CRYPTO_ALG_INHERITED_FLAGS);
		err = 0;
	}
	up_write(&crypto_alg_sem);

	if (err)
		crypto_mod_put(alg);

	return err;
}
EXPORT_SYMBOL_GPL(crypto_grab_spawn);
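
/*
 * Callers rarely use crypto_grab_spawn() directly; the type-specific
 * wrappers (e.g. crypto_grab_skcipher()) set ->frontend first and then
 * defer to this function.  Illustrative sketch of such a wrapper,
 * assuming a frontend like the skcipher code's crypto_skcipher_type:
 *
 *	spawn->base.frontend = &crypto_skcipher_type;
 *	return crypto_grab_spawn(&spawn->base, inst, name, type, mask);
 *
 * The spawn is later released with crypto_drop_spawn(), typically from
 * the instance's ->free() callback.
 */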

void crypto_drop_spawn(struct crypto_spawn *spawn)
{
	if (!spawn->alg) /* not yet initialized? */
		return;

	down_write(&crypto_alg_sem);
	if (!spawn->dead)
		list_del(&spawn->list);
	up_write(&crypto_alg_sem);

	if (!spawn->registered)
		crypto_mod_put(spawn->alg);
}
EXPORT_SYMBOL_GPL(crypto_drop_spawn);

static struct crypto_alg *crypto_spawn_alg(struct crypto_spawn *spawn)
{
	struct crypto_alg *alg = ERR_PTR(-EAGAIN);
	struct crypto_alg *target;
	bool shoot = false;

	down_read(&crypto_alg_sem);
	if (!spawn->dead) {
		alg = spawn->alg;
		if (!crypto_mod_get(alg)) {
			target = crypto_alg_get(alg);
			shoot = true;
			alg = ERR_PTR(-EAGAIN);
		}
	}
	up_read(&crypto_alg_sem);

	if (shoot) {
		crypto_shoot_alg(target);
		crypto_alg_put(target);
	}

	return alg;
}

struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn, u32 type,
				    u32 mask)
{
	struct crypto_alg *alg;
	struct crypto_tfm *tfm;

	alg = crypto_spawn_alg(spawn);
	if (IS_ERR(alg))
		return ERR_CAST(alg);

	tfm = ERR_PTR(-EINVAL);
	if (unlikely((alg->cra_flags ^ type) & mask))
		goto out_put_alg;

	tfm = __crypto_alloc_tfm(alg, type, mask);
	if (IS_ERR(tfm))
		goto out_put_alg;

	return tfm;

out_put_alg:
	crypto_mod_put(alg);
	return tfm;
}
EXPORT_SYMBOL_GPL(crypto_spawn_tfm);

void *crypto_spawn_tfm2(struct crypto_spawn *spawn)
{
	struct crypto_alg *alg;
	struct crypto_tfm *tfm;

	alg = crypto_spawn_alg(spawn);
	if (IS_ERR(alg))
		return ERR_CAST(alg);

	tfm = crypto_create_tfm(alg, spawn->frontend);
	if (IS_ERR(tfm))
		goto out_put_alg;

	return crypto_tfm_ctx(tfm);

out_put_alg:
	crypto_mod_put(alg);
	return tfm;
}
EXPORT_SYMBOL_GPL(crypto_spawn_tfm2);

int crypto_register_notifier(struct notifier_block *nb)
{
	return blocking_notifier_chain_register(&crypto_chain, nb);
}
EXPORT_SYMBOL_GPL(crypto_register_notifier);

int crypto_unregister_notifier(struct notifier_block *nb)
{
	return blocking_notifier_chain_unregister(&crypto_chain, nb);
}
EXPORT_SYMBOL_GPL(crypto_unregister_notifier);

struct crypto_attr_type *crypto_get_attr_type(struct rtattr **tb)
{
	struct rtattr *rta = tb[0];
	struct crypto_attr_type *algt;

	if (!rta)
		return ERR_PTR(-ENOENT);
	if (RTA_PAYLOAD(rta) < sizeof(*algt))
		return ERR_PTR(-EINVAL);
	if (rta->rta_type != CRYPTOA_TYPE)
		return ERR_PTR(-EINVAL);

	algt = RTA_DATA(rta);

	return algt;
}
EXPORT_SYMBOL_GPL(crypto_get_attr_type);

/**
 * crypto_check_attr_type() - check algorithm type and compute inherited mask
 * @tb: the template parameters
 * @type: the algorithm type the template would be instantiated as
 * @mask_ret: (output) the mask that should be passed to crypto_grab_*()
 *	      to restrict the flags of any inner algorithms
 *
 * Validate that the algorithm type the user requested is compatible with the
 * one the template would actually be instantiated as.  E.g., if the user is
 * doing crypto_alloc_shash("cbc(aes)", ...), this would return an error because
 * the "cbc" template creates an "skcipher" algorithm, not an "shash" algorithm.
 *
 * Also compute the mask to use to restrict the flags of any inner algorithms.
 *
 * Return: 0 on success; -errno on failure
 */
int crypto_check_attr_type(struct rtattr **tb, u32 type, u32 *mask_ret)
{
	struct crypto_attr_type *algt;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	if ((algt->type ^ type) & algt->mask)
		return -EINVAL;

	*mask_ret = crypto_algt_inherited_mask(algt);
	return 0;
}
EXPORT_SYMBOL_GPL(crypto_check_attr_type);
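
/*
 * Usage sketch (illustrative; shows the start of a hypothetical skcipher
 * template's ->create() callback):
 *
 *	u32 mask;
 *	int err;
 *
 *	err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_SKCIPHER, &mask);
 *	if (err)
 *		return err;
 *
 * The computed mask is then passed to crypto_grab_spawn() (or one of its
 * typed wrappers) so the inner algorithm's inherited flags are restricted
 * as the user requested.
 */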

const char *crypto_attr_alg_name(struct rtattr *rta)
{
	struct crypto_attr_alg *alga;

	if (!rta)
		return ERR_PTR(-ENOENT);
	if (RTA_PAYLOAD(rta) < sizeof(*alga))
		return ERR_PTR(-EINVAL);
	if (rta->rta_type != CRYPTOA_ALG)
		return ERR_PTR(-EINVAL);

	alga = RTA_DATA(rta);
	alga->name[CRYPTO_MAX_ALG_NAME - 1] = 0;

	return alga->name;
}
EXPORT_SYMBOL_GPL(crypto_attr_alg_name);

int crypto_inst_setname(struct crypto_instance *inst, const char *name,
			struct crypto_alg *alg)
{
	if (snprintf(inst->alg.cra_name, CRYPTO_MAX_ALG_NAME, "%s(%s)", name,
		     alg->cra_name) >= CRYPTO_MAX_ALG_NAME)
		return -ENAMETOOLONG;

	if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s(%s)",
		     name, alg->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
		return -ENAMETOOLONG;

	return 0;
}
EXPORT_SYMBOL_GPL(crypto_inst_setname);
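
/*
 * For example, given name "hmac" and an underlying algorithm whose cra_name
 * is "sha256" and cra_driver_name is "sha256-generic", this sets the
 * instance's cra_name to "hmac(sha256)" and its cra_driver_name to
 * "hmac(sha256-generic)".
 */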

void crypto_init_queue(struct crypto_queue *queue, unsigned int max_qlen)
{
	INIT_LIST_HEAD(&queue->list);
	queue->backlog = &queue->list;
	queue->qlen = 0;
	queue->max_qlen = max_qlen;
}
EXPORT_SYMBOL_GPL(crypto_init_queue);

int crypto_enqueue_request(struct crypto_queue *queue,
			   struct crypto_async_request *request)
{
	int err = -EINPROGRESS;

	if (unlikely(queue->qlen >= queue->max_qlen)) {
		if (!(request->flags & CRYPTO_TFM_REQ_MAY_BACKLOG)) {
			err = -ENOSPC;
			goto out;
		}
		err = -EBUSY;
		if (queue->backlog == &queue->list)
			queue->backlog = &request->list;
	}

	queue->qlen++;
	list_add_tail(&request->list, &queue->list);

out:
	return err;
}
EXPORT_SYMBOL_GPL(crypto_enqueue_request);

void crypto_enqueue_request_head(struct crypto_queue *queue,
				 struct crypto_async_request *request)
{
	if (unlikely(queue->qlen >= queue->max_qlen))
		queue->backlog = queue->backlog->prev;

	queue->qlen++;
	list_add(&request->list, &queue->list);
}
EXPORT_SYMBOL_GPL(crypto_enqueue_request_head);

struct crypto_async_request *crypto_dequeue_request(struct crypto_queue *queue)
{
	struct list_head *request;

	if (unlikely(!queue->qlen))
		return NULL;

	queue->qlen--;

	if (queue->backlog != &queue->list)
		queue->backlog = queue->backlog->next;

	request = queue->list.next;
	list_del(request);

	return list_entry(request, struct crypto_async_request, list);
}
EXPORT_SYMBOL_GPL(crypto_dequeue_request);
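
/*
 * Usage sketch (illustrative; "dev" and its lock/queue members are
 * hypothetical): a driver typically pulls one request at a time off the
 * queue under its own lock and signals any backlogged request so that its
 * submitter may continue:
 *
 *	struct crypto_async_request *async_req, *backlog;
 *
 *	spin_lock_bh(&dev->lock);
 *	backlog = crypto_get_backlog(&dev->queue);
 *	async_req = crypto_dequeue_request(&dev->queue);
 *	spin_unlock_bh(&dev->lock);
 *
 *	if (backlog)
 *		backlog->complete(backlog, -EINPROGRESS);
 */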

static inline void crypto_inc_byte(u8 *a, unsigned int size)
{
	u8 *b = (a + size);
	u8 c;

	for (; size; size--) {
		c = *--b + 1;
		*b = c;
		if (c)
			break;
	}
}

void crypto_inc(u8 *a, unsigned int size)
{
	__be32 *b = (__be32 *)(a + size);
	u32 c;

	if (IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) ||
	    IS_ALIGNED((unsigned long)b, __alignof__(*b)))
		for (; size >= 4; size -= 4) {
			c = be32_to_cpu(*--b) + 1;
			*b = cpu_to_be32(c);
			if (likely(c))
				return;
		}

	crypto_inc_byte(a, size);
}
EXPORT_SYMBOL_GPL(crypto_inc);
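
/*
 * Worked example: crypto_inc() treats the buffer as one big-endian
 * integer, so for a 16-byte CTR-mode counter block whose last byte is
 * 0xff:
 *
 *	00 ... 00 ff  ->  00 ... 01 00
 *
 * i.e. the increment of the final byte carries into the preceding one.
 */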

unsigned int crypto_alg_extsize(struct crypto_alg *alg)
{
	return alg->cra_ctxsize +
	       (alg->cra_alignmask & ~(crypto_tfm_ctx_alignment() - 1));
}
EXPORT_SYMBOL_GPL(crypto_alg_extsize);

int crypto_type_has_alg(const char *name, const struct crypto_type *frontend,
			u32 type, u32 mask)
{
	int ret = 0;
	struct crypto_alg *alg = crypto_find_alg(name, frontend, type, mask);

	if (!IS_ERR(alg)) {
		crypto_mod_put(alg);
		ret = 1;
	}

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_type_has_alg);

#ifdef CONFIG_CRYPTO_STATS
void crypto_stats_init(struct crypto_alg *alg)
{
	memset(&alg->stats, 0, sizeof(alg->stats));
}
EXPORT_SYMBOL_GPL(crypto_stats_init);

void crypto_stats_get(struct crypto_alg *alg)
{
	crypto_alg_get(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_get);

void crypto_stats_aead_encrypt(unsigned int cryptlen, struct crypto_alg *alg,
			       int ret)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.aead.err_cnt);
	} else {
		atomic64_inc(&alg->stats.aead.encrypt_cnt);
		atomic64_add(cryptlen, &alg->stats.aead.encrypt_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_aead_encrypt);

void crypto_stats_aead_decrypt(unsigned int cryptlen, struct crypto_alg *alg,
			       int ret)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.aead.err_cnt);
	} else {
		atomic64_inc(&alg->stats.aead.decrypt_cnt);
		atomic64_add(cryptlen, &alg->stats.aead.decrypt_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_aead_decrypt);

void crypto_stats_akcipher_encrypt(unsigned int src_len, int ret,
				   struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.akcipher.err_cnt);
	} else {
		atomic64_inc(&alg->stats.akcipher.encrypt_cnt);
		atomic64_add(src_len, &alg->stats.akcipher.encrypt_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_akcipher_encrypt);

void crypto_stats_akcipher_decrypt(unsigned int src_len, int ret,
				   struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.akcipher.err_cnt);
	} else {
		atomic64_inc(&alg->stats.akcipher.decrypt_cnt);
		atomic64_add(src_len, &alg->stats.akcipher.decrypt_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_akcipher_decrypt);

void crypto_stats_akcipher_sign(int ret, struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY)
		atomic64_inc(&alg->stats.akcipher.err_cnt);
	else
		atomic64_inc(&alg->stats.akcipher.sign_cnt);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_akcipher_sign);

void crypto_stats_akcipher_verify(int ret, struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY)
		atomic64_inc(&alg->stats.akcipher.err_cnt);
	else
		atomic64_inc(&alg->stats.akcipher.verify_cnt);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_akcipher_verify);

void crypto_stats_compress(unsigned int slen, int ret, struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.compress.err_cnt);
	} else {
		atomic64_inc(&alg->stats.compress.compress_cnt);
		atomic64_add(slen, &alg->stats.compress.compress_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_compress);

void crypto_stats_decompress(unsigned int slen, int ret, struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.compress.err_cnt);
	} else {
		atomic64_inc(&alg->stats.compress.decompress_cnt);
		atomic64_add(slen, &alg->stats.compress.decompress_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_decompress);

void crypto_stats_ahash_update(unsigned int nbytes, int ret,
			       struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY)
		atomic64_inc(&alg->stats.hash.err_cnt);
	else
		atomic64_add(nbytes, &alg->stats.hash.hash_tlen);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_ahash_update);

void crypto_stats_ahash_final(unsigned int nbytes, int ret,
			      struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.hash.err_cnt);
	} else {
		atomic64_inc(&alg->stats.hash.hash_cnt);
		atomic64_add(nbytes, &alg->stats.hash.hash_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_ahash_final);

void crypto_stats_kpp_set_secret(struct crypto_alg *alg, int ret)
{
	if (ret)
		atomic64_inc(&alg->stats.kpp.err_cnt);
	else
		atomic64_inc(&alg->stats.kpp.setsecret_cnt);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_kpp_set_secret);

void crypto_stats_kpp_generate_public_key(struct crypto_alg *alg, int ret)
{
	if (ret)
		atomic64_inc(&alg->stats.kpp.err_cnt);
	else
		atomic64_inc(&alg->stats.kpp.generate_public_key_cnt);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_kpp_generate_public_key);

void crypto_stats_kpp_compute_shared_secret(struct crypto_alg *alg, int ret)
{
	if (ret)
		atomic64_inc(&alg->stats.kpp.err_cnt);
	else
		atomic64_inc(&alg->stats.kpp.compute_shared_secret_cnt);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_kpp_compute_shared_secret);

void crypto_stats_rng_seed(struct crypto_alg *alg, int ret)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY)
		atomic64_inc(&alg->stats.rng.err_cnt);
	else
		atomic64_inc(&alg->stats.rng.seed_cnt);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_rng_seed);

void crypto_stats_rng_generate(struct crypto_alg *alg, unsigned int dlen,
			       int ret)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.rng.err_cnt);
	} else {
		atomic64_inc(&alg->stats.rng.generate_cnt);
		atomic64_add(dlen, &alg->stats.rng.generate_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_rng_generate);

void crypto_stats_skcipher_encrypt(unsigned int cryptlen, int ret,
				   struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.cipher.err_cnt);
	} else {
		atomic64_inc(&alg->stats.cipher.encrypt_cnt);
		atomic64_add(cryptlen, &alg->stats.cipher.encrypt_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_skcipher_encrypt);

void crypto_stats_skcipher_decrypt(unsigned int cryptlen, int ret,
				   struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.cipher.err_cnt);
	} else {
		atomic64_inc(&alg->stats.cipher.decrypt_cnt);
		atomic64_add(cryptlen, &alg->stats.cipher.decrypt_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_skcipher_decrypt);
#endif

static void __init crypto_start_tests(void)
{
	for (;;) {
		struct crypto_larval *larval = NULL;
		struct crypto_alg *q;

		down_write(&crypto_alg_sem);

		list_for_each_entry(q, &crypto_alg_list, cra_list) {
			struct crypto_larval *l;

			if (!crypto_is_larval(q))
				continue;

			l = (void *)q;

			if (!crypto_is_test_larval(l))
				continue;

			if (l->test_started)
				continue;

			l->test_started = true;
			larval = l;
			break;
		}

		up_write(&crypto_alg_sem);

		if (!larval)
			break;

		crypto_wait_for_test(larval);
	}

	static_branch_enable(&crypto_boot_test_finished);
}

static int __init crypto_algapi_init(void)
{
	crypto_init_proc();
	crypto_start_tests();
	return 0;
}

static void __exit crypto_algapi_exit(void)
{
	crypto_exit_proc();
}

/*
 * We run this at late_initcall so that all the built-in algorithms
 * have had a chance to register themselves first.
 */
late_initcall(crypto_algapi_init);
module_exit(crypto_algapi_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Cryptographic algorithms API");
MODULE_SOFTDEP("pre: cryptomgr");