// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Cryptographic API for algorithms (i.e., low-level API).
 *
 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 */
8 #include <crypto/algapi.h>
9 #include <crypto/internal/simd.h>
10 #include <linux/err.h>
11 #include <linux/errno.h>
12 #include <linux/fips.h>
13 #include <linux/init.h>
14 #include <linux/kernel.h>
15 #include <linux/list.h>
16 #include <linux/module.h>
17 #include <linux/rtnetlink.h>
18 #include <linux/slab.h>
19 #include <linux/string.h>
23 static LIST_HEAD(crypto_template_list);
#ifdef CONFIG_CRYPTO_MANAGER_EXTRA_TESTS
DEFINE_PER_CPU(bool, crypto_simd_disabled_for_test);
EXPORT_PER_CPU_SYMBOL_GPL(crypto_simd_disabled_for_test);
#endif
static inline void crypto_check_module_sig(struct module *mod)
{
	if (fips_enabled && mod && !module_sig_ok(mod))
		panic("Module %s signature verification failed in FIPS mode\n",
		      module_name(mod));
}
37 static int crypto_check_alg(struct crypto_alg *alg)
39 crypto_check_module_sig(alg->cra_module);
41 if (!alg->cra_name[0] || !alg->cra_driver_name[0])
44 if (alg->cra_alignmask & (alg->cra_alignmask + 1))
47 /* General maximums for all algs. */
48 if (alg->cra_alignmask > MAX_ALGAPI_ALIGNMASK)
51 if (alg->cra_blocksize > MAX_ALGAPI_BLOCKSIZE)
54 /* Lower maximums for specific alg types. */
55 if (!alg->cra_type && (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) ==
56 CRYPTO_ALG_TYPE_CIPHER) {
57 if (alg->cra_alignmask > MAX_CIPHER_ALIGNMASK)
60 if (alg->cra_blocksize > MAX_CIPHER_BLOCKSIZE)
64 if (alg->cra_priority < 0)
67 refcount_set(&alg->cra_refcnt, 1);
72 static void crypto_free_instance(struct crypto_instance *inst)
74 inst->alg.cra_type->free(inst);
77 static void crypto_destroy_instance(struct crypto_alg *alg)
79 struct crypto_instance *inst = (void *)alg;
80 struct crypto_template *tmpl = inst->tmpl;
82 crypto_free_instance(inst);
83 crypto_tmpl_put(tmpl);
/*
 * This function adds a spawn to the list secondary_spawns which
 * will be used at the end of crypto_remove_spawns to unregister
 * instances, unless the spawn happens to be one that is depended
 * on by the new algorithm (nalg in crypto_remove_spawns).
 *
 * This function is also responsible for resurrecting any algorithms
 * in the dependency chain of nalg by unsetting n->dead.
 */
95 static struct list_head *crypto_more_spawns(struct crypto_alg *alg,
96 struct list_head *stack,
97 struct list_head *top,
98 struct list_head *secondary_spawns)
100 struct crypto_spawn *spawn, *n;
102 spawn = list_first_entry_or_null(stack, struct crypto_spawn, list);
106 n = list_prev_entry(spawn, list);
107 list_move(&spawn->list, secondary_spawns);
109 if (list_is_last(&n->list, stack))
112 n = list_next_entry(n, list);
116 return &n->inst->alg.cra_users;
119 static void crypto_remove_instance(struct crypto_instance *inst,
120 struct list_head *list)
122 struct crypto_template *tmpl = inst->tmpl;
124 if (crypto_is_dead(&inst->alg))
127 inst->alg.cra_flags |= CRYPTO_ALG_DEAD;
129 if (!tmpl || !crypto_tmpl_get(tmpl))
132 list_move(&inst->alg.cra_list, list);
133 hlist_del(&inst->list);
134 inst->alg.cra_destroy = crypto_destroy_instance;
136 BUG_ON(!list_empty(&inst->alg.cra_users));
/*
 * Given an algorithm alg, remove all algorithms that depend on it
 * through spawns.  If nalg is not null, then exempt any algorithms
 * that are depended on by nalg.  This is useful when nalg itself
 * depends on alg.
 */
145 void crypto_remove_spawns(struct crypto_alg *alg, struct list_head *list,
146 struct crypto_alg *nalg)
148 u32 new_type = (nalg ?: alg)->cra_flags;
149 struct crypto_spawn *spawn, *n;
150 LIST_HEAD(secondary_spawns);
151 struct list_head *spawns;
155 spawns = &alg->cra_users;
156 list_for_each_entry_safe(spawn, n, spawns, list) {
157 if ((spawn->alg->cra_flags ^ new_type) & spawn->mask)
160 list_move(&spawn->list, &top);
	/*
	 * Perform a depth-first walk starting from alg through
	 * the cra_users tree.  The list stack records the path
	 * from alg to the current spawn.
	 */
170 while (!list_empty(spawns)) {
171 struct crypto_instance *inst;
173 spawn = list_first_entry(spawns, struct crypto_spawn,
177 list_move(&spawn->list, &stack);
178 spawn->dead = !spawn->registered || &inst->alg != nalg;
180 if (!spawn->registered)
183 BUG_ON(&inst->alg == alg);
185 if (&inst->alg == nalg)
188 spawns = &inst->alg.cra_users;
			/*
			 * We may encounter an unregistered instance here, since
			 * an instance's spawns are set up prior to the instance
			 * being registered; an instance may also remain
			 * unregistered because its registration failed.  An
			 * unregistered instance will have NULL
			 * ->cra_users.next, since ->cra_users isn't properly
			 * initialized until registration.  But an unregistered
			 * instance cannot have any users, so treat it the same
			 * as ->cra_users being empty.
			 */
205 if (spawns->next == NULL)
208 } while ((spawns = crypto_more_spawns(alg, &stack, &top,
209 &secondary_spawns)));
	/*
	 * Remove all instances that are marked as dead.  Also
	 * complete the resurrection of the others by moving them
	 * back to the cra_users list.
	 */
216 list_for_each_entry_safe(spawn, n, &secondary_spawns, list) {
218 list_move(&spawn->list, &spawn->alg->cra_users);
219 else if (spawn->registered)
220 crypto_remove_instance(spawn->inst, list);
223 EXPORT_SYMBOL_GPL(crypto_remove_spawns);
225 static void crypto_alg_finish_registration(struct crypto_alg *alg,
226 bool fulfill_requests,
227 struct list_head *algs_to_put)
229 struct crypto_alg *q;
231 list_for_each_entry(q, &crypto_alg_list, cra_list) {
235 if (crypto_is_moribund(q))
238 if (crypto_is_larval(q)) {
239 struct crypto_larval *larval = (void *)q;
			/*
			 * Check to see if either our generic name or
			 * specific name can satisfy the name requested
			 * by the larval entry q.
			 */
246 if (strcmp(alg->cra_name, q->cra_name) &&
247 strcmp(alg->cra_driver_name, q->cra_name))
252 if ((q->cra_flags ^ alg->cra_flags) & larval->mask)
255 if (fulfill_requests && crypto_mod_get(alg))
258 larval->adult = ERR_PTR(-EAGAIN);
263 if (strcmp(alg->cra_name, q->cra_name))
266 if (strcmp(alg->cra_driver_name, q->cra_driver_name) &&
267 q->cra_priority > alg->cra_priority)
270 crypto_remove_spawns(q, algs_to_put, alg);
273 crypto_notify(CRYPTO_MSG_ALG_LOADED, alg);
276 static struct crypto_larval *crypto_alloc_test_larval(struct crypto_alg *alg)
278 struct crypto_larval *larval;
280 if (!IS_ENABLED(CONFIG_CRYPTO_MANAGER) ||
281 IS_ENABLED(CONFIG_CRYPTO_MANAGER_DISABLE_TESTS) ||
282 (alg->cra_flags & CRYPTO_ALG_INTERNAL))
283 return NULL; /* No self-test needed */
285 larval = crypto_larval_alloc(alg->cra_name,
286 alg->cra_flags | CRYPTO_ALG_TESTED, 0);
290 larval->adult = crypto_mod_get(alg);
291 if (!larval->adult) {
293 return ERR_PTR(-ENOENT);
296 refcount_set(&larval->alg.cra_refcnt, 1);
297 memcpy(larval->alg.cra_driver_name, alg->cra_driver_name,
298 CRYPTO_MAX_ALG_NAME);
299 larval->alg.cra_priority = alg->cra_priority;
304 static struct crypto_larval *
305 __crypto_register_alg(struct crypto_alg *alg, struct list_head *algs_to_put)
307 struct crypto_alg *q;
308 struct crypto_larval *larval;
311 if (crypto_is_dead(alg))
314 INIT_LIST_HEAD(&alg->cra_users);
318 list_for_each_entry(q, &crypto_alg_list, cra_list) {
322 if (crypto_is_moribund(q))
325 if (crypto_is_larval(q)) {
326 if (!strcmp(alg->cra_driver_name, q->cra_driver_name))
331 if (!strcmp(q->cra_driver_name, alg->cra_name) ||
332 !strcmp(q->cra_name, alg->cra_driver_name))
336 larval = crypto_alloc_test_larval(alg);
340 list_add(&alg->cra_list, &crypto_alg_list);
342 crypto_stats_init(alg);
346 alg->cra_flags &= ~CRYPTO_ALG_TESTED;
348 list_add(&larval->alg.cra_list, &crypto_alg_list);
350 alg->cra_flags |= CRYPTO_ALG_TESTED;
351 crypto_alg_finish_registration(alg, true, algs_to_put);
358 larval = ERR_PTR(ret);
362 void crypto_alg_tested(const char *name, int err)
364 struct crypto_larval *test;
365 struct crypto_alg *alg;
366 struct crypto_alg *q;
370 down_write(&crypto_alg_sem);
371 list_for_each_entry(q, &crypto_alg_list, cra_list) {
372 if (crypto_is_moribund(q) || !crypto_is_larval(q))
375 test = (struct crypto_larval *)q;
377 if (!strcmp(q->cra_driver_name, name))
381 pr_err("alg: Unexpected test result for %s: %d\n", name, err);
385 q->cra_flags |= CRYPTO_ALG_DEAD;
388 if (list_empty(&alg->cra_list))
391 if (err == -ECANCELED)
392 alg->cra_flags |= CRYPTO_ALG_FIPS_INTERNAL;
396 alg->cra_flags &= ~CRYPTO_ALG_FIPS_INTERNAL;
398 alg->cra_flags |= CRYPTO_ALG_TESTED;
	/*
	 * If a higher-priority implementation of the same algorithm is
	 * currently being tested, then don't fulfill request larvals.
	 */
405 list_for_each_entry(q, &crypto_alg_list, cra_list) {
406 if (crypto_is_moribund(q) || !crypto_is_larval(q))
409 if (strcmp(alg->cra_name, q->cra_name))
412 if (q->cra_priority > alg->cra_priority) {
418 crypto_alg_finish_registration(alg, best, &list);
421 complete_all(&test->completion);
424 up_write(&crypto_alg_sem);
426 crypto_remove_final(&list);
428 EXPORT_SYMBOL_GPL(crypto_alg_tested);
430 void crypto_remove_final(struct list_head *list)
432 struct crypto_alg *alg;
433 struct crypto_alg *n;
435 list_for_each_entry_safe(alg, n, list, cra_list) {
436 list_del_init(&alg->cra_list);
440 EXPORT_SYMBOL_GPL(crypto_remove_final);
442 int crypto_register_alg(struct crypto_alg *alg)
444 struct crypto_larval *larval;
445 LIST_HEAD(algs_to_put);
446 bool test_started = false;
449 alg->cra_flags &= ~CRYPTO_ALG_DEAD;
450 err = crypto_check_alg(alg);
454 down_write(&crypto_alg_sem);
455 larval = __crypto_register_alg(alg, &algs_to_put);
456 if (!IS_ERR_OR_NULL(larval)) {
457 test_started = crypto_boot_test_finished();
458 larval->test_started = test_started;
460 up_write(&crypto_alg_sem);
463 return PTR_ERR(larval);
465 crypto_wait_for_test(larval);
466 crypto_remove_final(&algs_to_put);
469 EXPORT_SYMBOL_GPL(crypto_register_alg);
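/*
 * Usage sketch (not part of this file): a module registering a single
 * algorithm.  Names such as "sample", my_alg and struct my_ctx are
 * hypothetical, and the type-specific callbacks (e.g. .cra_u.cipher) are
 * omitted; a real driver must fill those in for its algorithm type.
 *
 *	static struct crypto_alg my_alg = {
 *		.cra_name		= "sample",
 *		.cra_driver_name	= "sample-generic",
 *		.cra_priority		= 100,
 *		.cra_flags		= CRYPTO_ALG_TYPE_CIPHER,
 *		.cra_blocksize		= 16,
 *		.cra_ctxsize		= sizeof(struct my_ctx),
 *		.cra_module		= THIS_MODULE,
 *	};
 *
 *	static int __init my_mod_init(void)
 *	{
 *		return crypto_register_alg(&my_alg);
 *	}
 *
 *	static void __exit my_mod_exit(void)
 *	{
 *		crypto_unregister_alg(&my_alg);
 *	}
 */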
471 static int crypto_remove_alg(struct crypto_alg *alg, struct list_head *list)
473 if (unlikely(list_empty(&alg->cra_list)))
476 alg->cra_flags |= CRYPTO_ALG_DEAD;
478 list_del_init(&alg->cra_list);
479 crypto_remove_spawns(alg, list, NULL);
484 void crypto_unregister_alg(struct crypto_alg *alg)
489 down_write(&crypto_alg_sem);
490 ret = crypto_remove_alg(alg, &list);
491 up_write(&crypto_alg_sem);
493 if (WARN(ret, "Algorithm %s is not registered", alg->cra_driver_name))
496 BUG_ON(refcount_read(&alg->cra_refcnt) != 1);
497 if (alg->cra_destroy)
498 alg->cra_destroy(alg);
500 crypto_remove_final(&list);
502 EXPORT_SYMBOL_GPL(crypto_unregister_alg);
504 int crypto_register_algs(struct crypto_alg *algs, int count)
508 for (i = 0; i < count; i++) {
509 ret = crypto_register_alg(&algs[i]);
517 for (--i; i >= 0; --i)
518 crypto_unregister_alg(&algs[i]);
522 EXPORT_SYMBOL_GPL(crypto_register_algs);
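/*
 * Usage sketch (hypothetical my_algs[] array): registering several
 * algorithms in one call.  If any entry fails to register, the entries
 * registered so far are unregistered again before the error is returned.
 *
 *	static int __init my_mod_init(void)
 *	{
 *		return crypto_register_algs(my_algs, ARRAY_SIZE(my_algs));
 *	}
 *
 *	static void __exit my_mod_exit(void)
 *	{
 *		crypto_unregister_algs(my_algs, ARRAY_SIZE(my_algs));
 *	}
 */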
524 void crypto_unregister_algs(struct crypto_alg *algs, int count)
528 for (i = 0; i < count; i++)
529 crypto_unregister_alg(&algs[i]);
531 EXPORT_SYMBOL_GPL(crypto_unregister_algs);
533 int crypto_register_template(struct crypto_template *tmpl)
535 struct crypto_template *q;
538 down_write(&crypto_alg_sem);
540 crypto_check_module_sig(tmpl->module);
542 list_for_each_entry(q, &crypto_template_list, list) {
547 list_add(&tmpl->list, &crypto_template_list);
550 up_write(&crypto_alg_sem);
553 EXPORT_SYMBOL_GPL(crypto_register_template);
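/*
 * Usage sketch (hypothetical "mywrap" template): a template supplies a
 * ->create() callback that parses the parameters in tb, allocates an
 * instance, grabs its spawns and registers it (typically through a
 * type-specific helper such as skcipher_register_instance()).
 *
 *	static int mywrap_create(struct crypto_template *tmpl,
 *				 struct rtattr **tb)
 *	{
 *		return -ENOSYS;
 *	}
 *
 *	static struct crypto_template mywrap_tmpl = {
 *		.name	= "mywrap",
 *		.create	= mywrap_create,
 *		.module	= THIS_MODULE,
 *	};
 *
 *	static int __init mywrap_init(void)
 *	{
 *		return crypto_register_template(&mywrap_tmpl);
 *	}
 *
 *	static void __exit mywrap_exit(void)
 *	{
 *		crypto_unregister_template(&mywrap_tmpl);
 *	}
 */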
555 int crypto_register_templates(struct crypto_template *tmpls, int count)
559 for (i = 0; i < count; i++) {
560 err = crypto_register_template(&tmpls[i]);
567 for (--i; i >= 0; --i)
568 crypto_unregister_template(&tmpls[i]);
571 EXPORT_SYMBOL_GPL(crypto_register_templates);
573 void crypto_unregister_template(struct crypto_template *tmpl)
575 struct crypto_instance *inst;
576 struct hlist_node *n;
577 struct hlist_head *list;
580 down_write(&crypto_alg_sem);
582 BUG_ON(list_empty(&tmpl->list));
583 list_del_init(&tmpl->list);
585 list = &tmpl->instances;
586 hlist_for_each_entry(inst, list, list) {
587 int err = crypto_remove_alg(&inst->alg, &users);
592 up_write(&crypto_alg_sem);
594 hlist_for_each_entry_safe(inst, n, list, list) {
595 BUG_ON(refcount_read(&inst->alg.cra_refcnt) != 1);
596 crypto_free_instance(inst);
598 crypto_remove_final(&users);
600 EXPORT_SYMBOL_GPL(crypto_unregister_template);
602 void crypto_unregister_templates(struct crypto_template *tmpls, int count)
606 for (i = count - 1; i >= 0; --i)
607 crypto_unregister_template(&tmpls[i]);
609 EXPORT_SYMBOL_GPL(crypto_unregister_templates);
611 static struct crypto_template *__crypto_lookup_template(const char *name)
613 struct crypto_template *q, *tmpl = NULL;
615 down_read(&crypto_alg_sem);
616 list_for_each_entry(q, &crypto_template_list, list) {
617 if (strcmp(q->name, name))
619 if (unlikely(!crypto_tmpl_get(q)))
625 up_read(&crypto_alg_sem);
630 struct crypto_template *crypto_lookup_template(const char *name)
632 return try_then_request_module(__crypto_lookup_template(name),
635 EXPORT_SYMBOL_GPL(crypto_lookup_template);
637 int crypto_register_instance(struct crypto_template *tmpl,
638 struct crypto_instance *inst)
640 struct crypto_larval *larval;
641 struct crypto_spawn *spawn;
642 u32 fips_internal = 0;
643 LIST_HEAD(algs_to_put);
646 err = crypto_check_alg(&inst->alg);
650 inst->alg.cra_module = tmpl->module;
651 inst->alg.cra_flags |= CRYPTO_ALG_INSTANCE;
653 down_write(&crypto_alg_sem);
655 larval = ERR_PTR(-EAGAIN);
656 for (spawn = inst->spawns; spawn;) {
657 struct crypto_spawn *next;
664 spawn->registered = true;
666 fips_internal |= spawn->alg->cra_flags;
668 crypto_mod_put(spawn->alg);
673 inst->alg.cra_flags |= (fips_internal & CRYPTO_ALG_FIPS_INTERNAL);
675 larval = __crypto_register_alg(&inst->alg, &algs_to_put);
679 larval->test_started = true;
681 hlist_add_head(&inst->list, &tmpl->instances);
685 up_write(&crypto_alg_sem);
688 return PTR_ERR(larval);
690 crypto_wait_for_test(larval);
691 crypto_remove_final(&algs_to_put);
694 EXPORT_SYMBOL_GPL(crypto_register_instance);
696 void crypto_unregister_instance(struct crypto_instance *inst)
700 down_write(&crypto_alg_sem);
702 crypto_remove_spawns(&inst->alg, &list, NULL);
703 crypto_remove_instance(inst, &list);
705 up_write(&crypto_alg_sem);
707 crypto_remove_final(&list);
709 EXPORT_SYMBOL_GPL(crypto_unregister_instance);
711 int crypto_grab_spawn(struct crypto_spawn *spawn, struct crypto_instance *inst,
712 const char *name, u32 type, u32 mask)
714 struct crypto_alg *alg;
717 if (WARN_ON_ONCE(inst == NULL))
720 /* Allow the result of crypto_attr_alg_name() to be passed directly */
722 return PTR_ERR(name);
724 alg = crypto_find_alg(name, spawn->frontend,
725 type | CRYPTO_ALG_FIPS_INTERNAL, mask);
729 down_write(&crypto_alg_sem);
730 if (!crypto_is_moribund(alg)) {
731 list_add(&spawn->list, &alg->cra_users);
734 spawn->next = inst->spawns;
735 inst->spawns = spawn;
736 inst->alg.cra_flags |=
737 (alg->cra_flags & CRYPTO_ALG_INHERITED_FLAGS);
740 up_write(&crypto_alg_sem);
745 EXPORT_SYMBOL_GPL(crypto_grab_spawn);
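/*
 * Usage sketch (from a hypothetical template ->create(); ictx and the
 * err_free_inst label are assumed to exist): templates normally call a
 * type-specific wrapper such as crypto_grab_skcipher() or
 * crypto_grab_ahash(), all of which end up here.
 *
 *	err = crypto_grab_spawn(&ictx->spawn, inst,
 *				crypto_attr_alg_name(tb[1]), 0, mask);
 *	if (err)
 *		goto err_free_inst;
 *
 * The spawn is released with crypto_drop_spawn(), usually from the
 * instance's ->free() callback.
 */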
747 void crypto_drop_spawn(struct crypto_spawn *spawn)
749 if (!spawn->alg) /* not yet initialized? */
752 down_write(&crypto_alg_sem);
754 list_del(&spawn->list);
755 up_write(&crypto_alg_sem);
757 if (!spawn->registered)
758 crypto_mod_put(spawn->alg);
760 EXPORT_SYMBOL_GPL(crypto_drop_spawn);
762 static struct crypto_alg *crypto_spawn_alg(struct crypto_spawn *spawn)
764 struct crypto_alg *alg = ERR_PTR(-EAGAIN);
765 struct crypto_alg *target;
768 down_read(&crypto_alg_sem);
771 if (!crypto_mod_get(alg)) {
772 target = crypto_alg_get(alg);
774 alg = ERR_PTR(-EAGAIN);
777 up_read(&crypto_alg_sem);
780 crypto_shoot_alg(target);
781 crypto_alg_put(target);
787 struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn, u32 type,
790 struct crypto_alg *alg;
791 struct crypto_tfm *tfm;
793 alg = crypto_spawn_alg(spawn);
795 return ERR_CAST(alg);
797 tfm = ERR_PTR(-EINVAL);
798 if (unlikely((alg->cra_flags ^ type) & mask))
801 tfm = __crypto_alloc_tfm(alg, type, mask);
811 EXPORT_SYMBOL_GPL(crypto_spawn_tfm);
813 void *crypto_spawn_tfm2(struct crypto_spawn *spawn)
815 struct crypto_alg *alg;
816 struct crypto_tfm *tfm;
818 alg = crypto_spawn_alg(spawn);
820 return ERR_CAST(alg);
822 tfm = crypto_create_tfm(alg, spawn->frontend);
832 EXPORT_SYMBOL_GPL(crypto_spawn_tfm2);
834 int crypto_register_notifier(struct notifier_block *nb)
836 return blocking_notifier_chain_register(&crypto_chain, nb);
838 EXPORT_SYMBOL_GPL(crypto_register_notifier);
840 int crypto_unregister_notifier(struct notifier_block *nb)
842 return blocking_notifier_chain_unregister(&crypto_chain, nb);
844 EXPORT_SYMBOL_GPL(crypto_unregister_notifier);
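/*
 * Usage sketch (hypothetical my_crypto_event): listeners on crypto_chain
 * receive messages such as CRYPTO_MSG_ALG_REQUEST and CRYPTO_MSG_ALG_LOADED.
 *
 *	static int my_crypto_event(struct notifier_block *nb,
 *				   unsigned long msg, void *data)
 *	{
 *		if (msg == CRYPTO_MSG_ALG_LOADED)
 *			pr_debug("an algorithm finished registration\n");
 *		return NOTIFY_OK;
 *	}
 *
 *	static struct notifier_block my_crypto_nb = {
 *		.notifier_call	= my_crypto_event,
 *	};
 *
 *	crypto_register_notifier(&my_crypto_nb);
 */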
846 struct crypto_attr_type *crypto_get_attr_type(struct rtattr **tb)
848 struct rtattr *rta = tb[0];
849 struct crypto_attr_type *algt;
852 return ERR_PTR(-ENOENT);
853 if (RTA_PAYLOAD(rta) < sizeof(*algt))
854 return ERR_PTR(-EINVAL);
855 if (rta->rta_type != CRYPTOA_TYPE)
856 return ERR_PTR(-EINVAL);
858 algt = RTA_DATA(rta);
862 EXPORT_SYMBOL_GPL(crypto_get_attr_type);
/**
 * crypto_check_attr_type() - check algorithm type and compute inherited mask
 * @tb: the template parameters
 * @type: the algorithm type the template would be instantiated as
 * @mask_ret: (output) the mask that should be passed to crypto_grab_*()
 *	      to restrict the flags of any inner algorithms
 *
 * Validate that the algorithm type the user requested is compatible with the
 * one the template would actually be instantiated as.  E.g., if the user is
 * doing crypto_alloc_shash("cbc(aes)", ...), this would return an error because
 * the "cbc" template creates an "skcipher" algorithm, not an "shash" algorithm.
 *
 * Also compute the mask to use to restrict the flags of any inner algorithms.
 *
 * Return: 0 on success; -errno on failure
 */
880 int crypto_check_attr_type(struct rtattr **tb, u32 type, u32 *mask_ret)
882 struct crypto_attr_type *algt;
884 algt = crypto_get_attr_type(tb);
886 return PTR_ERR(algt);
888 if ((algt->type ^ type) & algt->mask)
891 *mask_ret = crypto_algt_inherited_mask(algt);
894 EXPORT_SYMBOL_GPL(crypto_check_attr_type);
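/*
 * Usage sketch (start of a hypothetical skcipher template ->create()):
 *
 *	u32 mask;
 *	int err;
 *
 *	err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_SKCIPHER, &mask);
 *	if (err)
 *		return err;
 *
 * The returned mask is then passed to crypto_grab_*() so that inner
 * algorithms inherit flag restrictions from the request.
 */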
896 const char *crypto_attr_alg_name(struct rtattr *rta)
898 struct crypto_attr_alg *alga;
901 return ERR_PTR(-ENOENT);
902 if (RTA_PAYLOAD(rta) < sizeof(*alga))
903 return ERR_PTR(-EINVAL);
904 if (rta->rta_type != CRYPTOA_ALG)
905 return ERR_PTR(-EINVAL);
907 alga = RTA_DATA(rta);
908 alga->name[CRYPTO_MAX_ALG_NAME - 1] = 0;
912 EXPORT_SYMBOL_GPL(crypto_attr_alg_name);
914 int crypto_inst_setname(struct crypto_instance *inst, const char *name,
915 struct crypto_alg *alg)
917 if (snprintf(inst->alg.cra_name, CRYPTO_MAX_ALG_NAME, "%s(%s)", name,
918 alg->cra_name) >= CRYPTO_MAX_ALG_NAME)
919 return -ENAMETOOLONG;
921 if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s(%s)",
922 name, alg->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
923 return -ENAMETOOLONG;
927 EXPORT_SYMBOL_GPL(crypto_inst_setname);
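/*
 * Usage sketch (hypothetical "mywrap" template wrapping alg): this builds
 * "mywrap(<cra_name>)" and "mywrap(<cra_driver_name>)" for the instance.
 *
 *	err = crypto_inst_setname(inst, "mywrap", alg);
 *	if (err)
 *		goto err_free_inst;
 *
 * -ENAMETOOLONG is returned if either constructed name would exceed
 * CRYPTO_MAX_ALG_NAME.
 */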
929 void crypto_init_queue(struct crypto_queue *queue, unsigned int max_qlen)
931 INIT_LIST_HEAD(&queue->list);
932 queue->backlog = &queue->list;
934 queue->max_qlen = max_qlen;
936 EXPORT_SYMBOL_GPL(crypto_init_queue);
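/*
 * Usage sketch (hypothetical struct my_dev *dev): a queue is typically
 * paired with a lock owned by the driver.
 *
 *	struct my_dev {
 *		struct crypto_queue	queue;
 *		spinlock_t		lock;
 *	};
 *
 *	crypto_init_queue(&dev->queue, 50);
 *	spin_lock_init(&dev->lock);
 *
 * Requests beyond max_qlen (50 here) are refused with -ENOSPC unless they
 * set CRYPTO_TFM_REQ_MAY_BACKLOG, in which case they are backlogged.
 */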
938 int crypto_enqueue_request(struct crypto_queue *queue,
939 struct crypto_async_request *request)
941 int err = -EINPROGRESS;
943 if (unlikely(queue->qlen >= queue->max_qlen)) {
944 if (!(request->flags & CRYPTO_TFM_REQ_MAY_BACKLOG)) {
949 if (queue->backlog == &queue->list)
950 queue->backlog = &request->list;
954 list_add_tail(&request->list, &queue->list);
959 EXPORT_SYMBOL_GPL(crypto_enqueue_request);
961 void crypto_enqueue_request_head(struct crypto_queue *queue,
962 struct crypto_async_request *request)
965 list_add(&request->list, &queue->list);
967 EXPORT_SYMBOL_GPL(crypto_enqueue_request_head);
969 struct crypto_async_request *crypto_dequeue_request(struct crypto_queue *queue)
971 struct list_head *request;
973 if (unlikely(!queue->qlen))
978 if (queue->backlog != &queue->list)
979 queue->backlog = queue->backlog->next;
981 request = queue->list.next;
984 return list_entry(request, struct crypto_async_request, list);
986 EXPORT_SYMBOL_GPL(crypto_dequeue_request);
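/*
 * Usage sketch (hypothetical dev/my_handle_request): the common driver
 * pattern is to enqueue under the lock, pull the next request, and signal
 * any request that just left the backlog.
 *
 *	struct crypto_async_request *async_req, *backlog;
 *	int ret;
 *
 *	spin_lock_bh(&dev->lock);
 *	ret = crypto_enqueue_request(&dev->queue, req);
 *	backlog = crypto_get_backlog(&dev->queue);
 *	async_req = crypto_dequeue_request(&dev->queue);
 *	spin_unlock_bh(&dev->lock);
 *
 *	if (backlog)
 *		backlog->complete(backlog, -EINPROGRESS);
 *	if (async_req)
 *		my_handle_request(dev, async_req);
 *	return ret;
 */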
988 static inline void crypto_inc_byte(u8 *a, unsigned int size)
993 for (; size; size--) {
1001 void crypto_inc(u8 *a, unsigned int size)
1003 __be32 *b = (__be32 *)(a + size);
1006 if (IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) ||
1007 IS_ALIGNED((unsigned long)b, __alignof__(*b)))
1008 for (; size >= 4; size -= 4) {
1009 c = be32_to_cpu(*--b) + 1;
1010 *b = cpu_to_be32(c);
1015 crypto_inc_byte(a, size);
1017 EXPORT_SYMBOL_GPL(crypto_inc);
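/*
 * Usage sketch: crypto_inc() treats the buffer as a big-endian integer and
 * adds one with carry, e.g. for stepping a CTR-mode counter block.
 *
 *	u8 ctrblk[16] = { 0 };
 *
 *	crypto_inc(ctrblk, sizeof(ctrblk));	value is now ...00 01
 *	crypto_inc(ctrblk, sizeof(ctrblk));	value is now ...00 02
 */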
1019 unsigned int crypto_alg_extsize(struct crypto_alg *alg)
1021 return alg->cra_ctxsize +
1022 (alg->cra_alignmask & ~(crypto_tfm_ctx_alignment() - 1));
1024 EXPORT_SYMBOL_GPL(crypto_alg_extsize);
1026 int crypto_type_has_alg(const char *name, const struct crypto_type *frontend,
1030 struct crypto_alg *alg = crypto_find_alg(name, frontend, type, mask);
1033 crypto_mod_put(alg);
1039 EXPORT_SYMBOL_GPL(crypto_type_has_alg);
1041 #ifdef CONFIG_CRYPTO_STATS
1042 void crypto_stats_init(struct crypto_alg *alg)
1044 memset(&alg->stats, 0, sizeof(alg->stats));
1046 EXPORT_SYMBOL_GPL(crypto_stats_init);
1048 void crypto_stats_get(struct crypto_alg *alg)
1050 crypto_alg_get(alg);
1052 EXPORT_SYMBOL_GPL(crypto_stats_get);
1054 void crypto_stats_aead_encrypt(unsigned int cryptlen, struct crypto_alg *alg,
1057 if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
1058 atomic64_inc(&alg->stats.aead.err_cnt);
1060 atomic64_inc(&alg->stats.aead.encrypt_cnt);
1061 atomic64_add(cryptlen, &alg->stats.aead.encrypt_tlen);
1063 crypto_alg_put(alg);
1065 EXPORT_SYMBOL_GPL(crypto_stats_aead_encrypt);
1067 void crypto_stats_aead_decrypt(unsigned int cryptlen, struct crypto_alg *alg,
1070 if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
1071 atomic64_inc(&alg->stats.aead.err_cnt);
1073 atomic64_inc(&alg->stats.aead.decrypt_cnt);
1074 atomic64_add(cryptlen, &alg->stats.aead.decrypt_tlen);
1076 crypto_alg_put(alg);
1078 EXPORT_SYMBOL_GPL(crypto_stats_aead_decrypt);
1080 void crypto_stats_akcipher_encrypt(unsigned int src_len, int ret,
1081 struct crypto_alg *alg)
1083 if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
1084 atomic64_inc(&alg->stats.akcipher.err_cnt);
1086 atomic64_inc(&alg->stats.akcipher.encrypt_cnt);
1087 atomic64_add(src_len, &alg->stats.akcipher.encrypt_tlen);
1089 crypto_alg_put(alg);
1091 EXPORT_SYMBOL_GPL(crypto_stats_akcipher_encrypt);
1093 void crypto_stats_akcipher_decrypt(unsigned int src_len, int ret,
1094 struct crypto_alg *alg)
1096 if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
1097 atomic64_inc(&alg->stats.akcipher.err_cnt);
1099 atomic64_inc(&alg->stats.akcipher.decrypt_cnt);
1100 atomic64_add(src_len, &alg->stats.akcipher.decrypt_tlen);
1102 crypto_alg_put(alg);
1104 EXPORT_SYMBOL_GPL(crypto_stats_akcipher_decrypt);
1106 void crypto_stats_akcipher_sign(int ret, struct crypto_alg *alg)
1108 if (ret && ret != -EINPROGRESS && ret != -EBUSY)
1109 atomic64_inc(&alg->stats.akcipher.err_cnt);
1111 atomic64_inc(&alg->stats.akcipher.sign_cnt);
1112 crypto_alg_put(alg);
1114 EXPORT_SYMBOL_GPL(crypto_stats_akcipher_sign);
1116 void crypto_stats_akcipher_verify(int ret, struct crypto_alg *alg)
1118 if (ret && ret != -EINPROGRESS && ret != -EBUSY)
1119 atomic64_inc(&alg->stats.akcipher.err_cnt);
1121 atomic64_inc(&alg->stats.akcipher.verify_cnt);
1122 crypto_alg_put(alg);
1124 EXPORT_SYMBOL_GPL(crypto_stats_akcipher_verify);
1126 void crypto_stats_compress(unsigned int slen, int ret, struct crypto_alg *alg)
1128 if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
1129 atomic64_inc(&alg->stats.compress.err_cnt);
1131 atomic64_inc(&alg->stats.compress.compress_cnt);
1132 atomic64_add(slen, &alg->stats.compress.compress_tlen);
1134 crypto_alg_put(alg);
1136 EXPORT_SYMBOL_GPL(crypto_stats_compress);
1138 void crypto_stats_decompress(unsigned int slen, int ret, struct crypto_alg *alg)
1140 if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
1141 atomic64_inc(&alg->stats.compress.err_cnt);
1143 atomic64_inc(&alg->stats.compress.decompress_cnt);
1144 atomic64_add(slen, &alg->stats.compress.decompress_tlen);
1146 crypto_alg_put(alg);
1148 EXPORT_SYMBOL_GPL(crypto_stats_decompress);
1150 void crypto_stats_ahash_update(unsigned int nbytes, int ret,
1151 struct crypto_alg *alg)
1153 if (ret && ret != -EINPROGRESS && ret != -EBUSY)
1154 atomic64_inc(&alg->stats.hash.err_cnt);
1156 atomic64_add(nbytes, &alg->stats.hash.hash_tlen);
1157 crypto_alg_put(alg);
1159 EXPORT_SYMBOL_GPL(crypto_stats_ahash_update);
1161 void crypto_stats_ahash_final(unsigned int nbytes, int ret,
1162 struct crypto_alg *alg)
1164 if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
1165 atomic64_inc(&alg->stats.hash.err_cnt);
1167 atomic64_inc(&alg->stats.hash.hash_cnt);
1168 atomic64_add(nbytes, &alg->stats.hash.hash_tlen);
1170 crypto_alg_put(alg);
1172 EXPORT_SYMBOL_GPL(crypto_stats_ahash_final);
1174 void crypto_stats_kpp_set_secret(struct crypto_alg *alg, int ret)
1177 atomic64_inc(&alg->stats.kpp.err_cnt);
1179 atomic64_inc(&alg->stats.kpp.setsecret_cnt);
1180 crypto_alg_put(alg);
1182 EXPORT_SYMBOL_GPL(crypto_stats_kpp_set_secret);
1184 void crypto_stats_kpp_generate_public_key(struct crypto_alg *alg, int ret)
1187 atomic64_inc(&alg->stats.kpp.err_cnt);
1189 atomic64_inc(&alg->stats.kpp.generate_public_key_cnt);
1190 crypto_alg_put(alg);
1192 EXPORT_SYMBOL_GPL(crypto_stats_kpp_generate_public_key);
1194 void crypto_stats_kpp_compute_shared_secret(struct crypto_alg *alg, int ret)
1197 atomic64_inc(&alg->stats.kpp.err_cnt);
1199 atomic64_inc(&alg->stats.kpp.compute_shared_secret_cnt);
1200 crypto_alg_put(alg);
1202 EXPORT_SYMBOL_GPL(crypto_stats_kpp_compute_shared_secret);
1204 void crypto_stats_rng_seed(struct crypto_alg *alg, int ret)
1206 if (ret && ret != -EINPROGRESS && ret != -EBUSY)
1207 atomic64_inc(&alg->stats.rng.err_cnt);
1209 atomic64_inc(&alg->stats.rng.seed_cnt);
1210 crypto_alg_put(alg);
1212 EXPORT_SYMBOL_GPL(crypto_stats_rng_seed);
1214 void crypto_stats_rng_generate(struct crypto_alg *alg, unsigned int dlen,
1217 if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
1218 atomic64_inc(&alg->stats.rng.err_cnt);
1220 atomic64_inc(&alg->stats.rng.generate_cnt);
1221 atomic64_add(dlen, &alg->stats.rng.generate_tlen);
1223 crypto_alg_put(alg);
1225 EXPORT_SYMBOL_GPL(crypto_stats_rng_generate);
1227 void crypto_stats_skcipher_encrypt(unsigned int cryptlen, int ret,
1228 struct crypto_alg *alg)
1230 if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
1231 atomic64_inc(&alg->stats.cipher.err_cnt);
1233 atomic64_inc(&alg->stats.cipher.encrypt_cnt);
1234 atomic64_add(cryptlen, &alg->stats.cipher.encrypt_tlen);
1236 crypto_alg_put(alg);
1238 EXPORT_SYMBOL_GPL(crypto_stats_skcipher_encrypt);
1240 void crypto_stats_skcipher_decrypt(unsigned int cryptlen, int ret,
1241 struct crypto_alg *alg)
1243 if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
1244 atomic64_inc(&alg->stats.cipher.err_cnt);
1246 atomic64_inc(&alg->stats.cipher.decrypt_cnt);
1247 atomic64_add(cryptlen, &alg->stats.cipher.decrypt_tlen);
1249 crypto_alg_put(alg);
1251 EXPORT_SYMBOL_GPL(crypto_stats_skcipher_decrypt);
1254 static void __init crypto_start_tests(void)
1256 if (IS_ENABLED(CONFIG_CRYPTO_MANAGER_DISABLE_TESTS))
1260 struct crypto_larval *larval = NULL;
1261 struct crypto_alg *q;
1263 down_write(&crypto_alg_sem);
1265 list_for_each_entry(q, &crypto_alg_list, cra_list) {
1266 struct crypto_larval *l;
1268 if (!crypto_is_larval(q))
1273 if (!crypto_is_test_larval(l))
1276 if (l->test_started)
1279 l->test_started = true;
1284 up_write(&crypto_alg_sem);
1289 crypto_wait_for_test(larval);
1292 set_crypto_boot_test_finished();
1295 static int __init crypto_algapi_init(void)
1298 crypto_start_tests();
1302 static void __exit crypto_algapi_exit(void)
/*
 * We run this at late_initcall so that all the built-in algorithms
 * have had a chance to register themselves first.
 */
1311 late_initcall(crypto_algapi_init);
1312 module_exit(crypto_algapi_exit);
1314 MODULE_LICENSE("GPL");
1315 MODULE_DESCRIPTION("Cryptographic algorithms API");
1316 MODULE_SOFTDEP("pre: cryptomgr");