// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Cryptographic API for algorithms (i.e., low-level API).
 *
 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 */
#include <crypto/algapi.h>
#include <crypto/internal/simd.h>
#include <linux/err.h>
#include <linux/errno.h>
#include <linux/fips.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/list.h>
#include <linux/module.h>
#include <linux/rtnetlink.h>
#include <linux/slab.h>
#include <linux/string.h>
#include <linux/workqueue.h>

#include "internal.h"

static LIST_HEAD(crypto_template_list);
#ifdef CONFIG_CRYPTO_MANAGER_EXTRA_TESTS
DEFINE_PER_CPU(bool, crypto_simd_disabled_for_test);
EXPORT_PER_CPU_SYMBOL_GPL(crypto_simd_disabled_for_test);
#endif
static inline void crypto_check_module_sig(struct module *mod)
{
	if (fips_enabled && mod && !module_sig_ok(mod))
		panic("Module %s signature verification failed in FIPS mode\n",
		      module_name(mod));
}
static int crypto_check_alg(struct crypto_alg *alg)
{
	crypto_check_module_sig(alg->cra_module);

	if (!alg->cra_name[0] || !alg->cra_driver_name[0])
		return -EINVAL;

	if (alg->cra_alignmask & (alg->cra_alignmask + 1))
		return -EINVAL;

	/* General maximums for all algs. */
	if (alg->cra_alignmask > MAX_ALGAPI_ALIGNMASK)
		return -EINVAL;

	if (alg->cra_blocksize > MAX_ALGAPI_BLOCKSIZE)
		return -EINVAL;

	/* Lower maximums for specific alg types. */
	if (!alg->cra_type && (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) ==
			       CRYPTO_ALG_TYPE_CIPHER) {
		if (alg->cra_alignmask > MAX_CIPHER_ALIGNMASK)
			return -EINVAL;

		if (alg->cra_blocksize > MAX_CIPHER_BLOCKSIZE)
			return -EINVAL;
	}

	if (alg->cra_priority < 0)
		return -EINVAL;

	refcount_set(&alg->cra_refcnt, 1);

	return 0;
}
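/*
 * Illustrative sketch only (not part of this file): a minimal, hypothetical
 * crypto_alg whose fields satisfy the checks above.  cra_name and
 * cra_driver_name are non-empty, cra_alignmask is of the form 2^n - 1 and
 * within MAX_ALGAPI_ALIGNMASK, cra_blocksize is within MAX_ALGAPI_BLOCKSIZE,
 * and cra_priority is non-negative:
 *
 *	static struct crypto_alg example_alg = {
 *		.cra_name		= "example",
 *		.cra_driver_name	= "example-generic",
 *		.cra_priority		= 100,
 *		.cra_blocksize		= 16,
 *		.cra_alignmask		= 3,
 *		.cra_module		= THIS_MODULE,
 *	};
 */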
static void crypto_free_instance(struct crypto_instance *inst)
{
	inst->alg.cra_type->free(inst);
}
static void crypto_destroy_instance_workfn(struct work_struct *w)
{
	struct crypto_instance *inst = container_of(w, struct crypto_instance,
						    free_work);
	struct crypto_template *tmpl = inst->tmpl;

	crypto_free_instance(inst);
	crypto_tmpl_put(tmpl);
}
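/*
 * Note: destruction is deferred to a workqueue so that it always runs in
 * process context; the final reference to an instance may be dropped from
 * a context where freeing it directly would not be safe.
 */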
static void crypto_destroy_instance(struct crypto_alg *alg)
{
	struct crypto_instance *inst = container_of(alg,
						    struct crypto_instance,
						    alg);

	INIT_WORK(&inst->free_work, crypto_destroy_instance_workfn);
	schedule_work(&inst->free_work);
}
/*
 * This function adds a spawn to the list secondary_spawns which
 * will be used at the end of crypto_remove_spawns to unregister
 * instances, unless the spawn happens to be one that is depended
 * on by the new algorithm (nalg in crypto_remove_spawns).
 *
 * This function is also responsible for resurrecting any algorithms
 * in the dependency chain of nalg by unsetting n->dead.
 */
static struct list_head *crypto_more_spawns(struct crypto_alg *alg,
					    struct list_head *stack,
					    struct list_head *top,
					    struct list_head *secondary_spawns)
{
	struct crypto_spawn *spawn, *n;

	spawn = list_first_entry_or_null(stack, struct crypto_spawn, list);
	if (!spawn)
		return NULL;

	n = list_prev_entry(spawn, list);
	list_move(&spawn->list, secondary_spawns);

	if (list_is_last(&n->list, stack))
		return top;

	n = list_next_entry(n, list);
	if (!spawn->dead)
		n->dead = false;

	return &n->inst->alg.cra_users;
}
static void crypto_remove_instance(struct crypto_instance *inst,
				   struct list_head *list)
{
	struct crypto_template *tmpl = inst->tmpl;

	if (crypto_is_dead(&inst->alg))
		return;

	inst->alg.cra_flags |= CRYPTO_ALG_DEAD;

	if (!tmpl || !crypto_tmpl_get(tmpl))
		return;

	list_move(&inst->alg.cra_list, list);
	hlist_del(&inst->list);
	inst->alg.cra_destroy = crypto_destroy_instance;

	BUG_ON(!list_empty(&inst->alg.cra_users));
}
/*
 * Given an algorithm alg, remove all algorithms that depend on it
 * through spawns.  If nalg is not null, then exempt any algorithms
 * that are depended on by nalg.  This is useful when nalg itself
 * requires a dependency on alg.
 */
void crypto_remove_spawns(struct crypto_alg *alg, struct list_head *list,
			  struct crypto_alg *nalg)
{
	u32 new_type = (nalg ?: alg)->cra_flags;
	struct crypto_spawn *spawn, *n;
	LIST_HEAD(secondary_spawns);
	struct list_head *spawns;
	LIST_HEAD(stack);
	LIST_HEAD(top);

	spawns = &alg->cra_users;
	list_for_each_entry_safe(spawn, n, spawns, list) {
		if ((spawn->alg->cra_flags ^ new_type) & spawn->mask)
			continue;

		list_move(&spawn->list, &top);
	}

	/*
	 * Perform a depth-first walk starting from alg through
	 * the cra_users tree.  The list stack records the path
	 * from alg to the current spawn.
	 */
	spawns = &top;
	do {
		while (!list_empty(spawns)) {
			struct crypto_instance *inst;

			spawn = list_first_entry(spawns, struct crypto_spawn,
						 list);
			inst = spawn->inst;

			list_move(&spawn->list, &stack);
			spawn->dead = !spawn->registered || &inst->alg != nalg;

			if (!spawn->registered)
				break;

			BUG_ON(&inst->alg == alg);

			if (&inst->alg == nalg)
				break;

			spawns = &inst->alg.cra_users;

			/*
			 * Even if spawn->registered is true, the
			 * instance itself may still be unregistered.
			 * This is because it may have failed during
			 * registration.  Therefore we still need to
			 * make the following test.
			 *
			 * We may encounter an unregistered instance here, since
			 * an instance's spawns are set up prior to the instance
			 * being registered.  An unregistered instance will have
			 * NULL ->cra_users.next, since ->cra_users isn't
			 * properly initialized until registration.  But an
			 * unregistered instance cannot have any users, so treat
			 * it the same as ->cra_users being empty.
			 */
			if (spawns->next == NULL)
				break;
		}
	} while ((spawns = crypto_more_spawns(alg, &stack, &top,
					      &secondary_spawns)));

	/*
	 * Remove all instances that are marked as dead.  Also
	 * complete the resurrection of the others by moving them
	 * back to the cra_users list.
	 */
	list_for_each_entry_safe(spawn, n, &secondary_spawns, list) {
		if (!spawn->dead)
			list_move(&spawn->list, &spawn->alg->cra_users);
		else if (spawn->registered)
			crypto_remove_instance(spawn->inst, list);
	}
}
EXPORT_SYMBOL_GPL(crypto_remove_spawns);
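/*
 * Example scenario (illustrative): when "aes-generic" is unregistered
 * (nalg == NULL), every registered instance built on top of it, e.g.
 * "cbc(aes-generic)", is marked dead and moved onto @list so that
 * crypto_remove_final() can drop it once crypto_alg_sem is released.
 */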
static void crypto_alg_finish_registration(struct crypto_alg *alg,
					   bool fulfill_requests,
					   struct list_head *algs_to_put)
{
	struct crypto_alg *q;

	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		if (q == alg)
			continue;

		if (crypto_is_moribund(q))
			continue;

		if (crypto_is_larval(q)) {
			struct crypto_larval *larval = (void *)q;

			/*
			 * Check to see if either our generic name or
			 * specific name can satisfy the name requested
			 * by the larval entry q.
			 */
			if (strcmp(alg->cra_name, q->cra_name) &&
			    strcmp(alg->cra_driver_name, q->cra_name))
				continue;

			if (larval->adult)
				continue;
			if ((q->cra_flags ^ alg->cra_flags) & larval->mask)
				continue;

			if (fulfill_requests && crypto_mod_get(alg))
				larval->adult = alg;
			else
				larval->adult = ERR_PTR(-EAGAIN);

			continue;
		}

		if (strcmp(alg->cra_name, q->cra_name))
			continue;

		if (strcmp(alg->cra_driver_name, q->cra_driver_name) &&
		    q->cra_priority > alg->cra_priority)
			continue;

		crypto_remove_spawns(q, algs_to_put, alg);
	}

	crypto_notify(CRYPTO_MSG_ALG_LOADED, alg);
}
static struct crypto_larval *crypto_alloc_test_larval(struct crypto_alg *alg)
{
	struct crypto_larval *larval;

	if (!IS_ENABLED(CONFIG_CRYPTO_MANAGER) ||
	    IS_ENABLED(CONFIG_CRYPTO_MANAGER_DISABLE_TESTS) ||
	    (alg->cra_flags & CRYPTO_ALG_INTERNAL))
		return NULL; /* No self-test needed */

	larval = crypto_larval_alloc(alg->cra_name,
				     alg->cra_flags | CRYPTO_ALG_TESTED, 0);
	if (IS_ERR(larval))
		return larval;

	larval->adult = crypto_mod_get(alg);
	if (!larval->adult) {
		kfree(larval);
		return ERR_PTR(-ENOENT);
	}

	refcount_set(&larval->alg.cra_refcnt, 1);
	memcpy(larval->alg.cra_driver_name, alg->cra_driver_name,
	       CRYPTO_MAX_ALG_NAME);
	larval->alg.cra_priority = alg->cra_priority;

	return larval;
}
static struct crypto_larval *
__crypto_register_alg(struct crypto_alg *alg, struct list_head *algs_to_put)
{
	struct crypto_alg *q;
	struct crypto_larval *larval;
	int ret = -EAGAIN;

	if (crypto_is_dead(alg))
		goto err;

	INIT_LIST_HEAD(&alg->cra_users);

	ret = -EEXIST;

	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		if (q == alg)
			goto err;

		if (crypto_is_moribund(q))
			continue;

		if (crypto_is_larval(q)) {
			if (!strcmp(alg->cra_driver_name, q->cra_driver_name))
				goto err;
			continue;
		}

		if (!strcmp(q->cra_driver_name, alg->cra_name) ||
		    !strcmp(q->cra_driver_name, alg->cra_driver_name) ||
		    !strcmp(q->cra_name, alg->cra_driver_name))
			goto err;
	}

	larval = crypto_alloc_test_larval(alg);
	if (IS_ERR(larval))
		goto out;

	list_add(&alg->cra_list, &crypto_alg_list);

	if (larval) {
		/* No cheating! */
		alg->cra_flags &= ~CRYPTO_ALG_TESTED;

		list_add(&larval->alg.cra_list, &crypto_alg_list);
	} else {
		alg->cra_flags |= CRYPTO_ALG_TESTED;
		crypto_alg_finish_registration(alg, true, algs_to_put);
	}

out:
	return larval;

err:
	larval = ERR_PTR(ret);
	goto out;
}
void crypto_alg_tested(const char *name, int err)
{
	struct crypto_larval *test;
	struct crypto_alg *alg;
	struct crypto_alg *q;
	LIST_HEAD(list);
	bool best;

	down_write(&crypto_alg_sem);
	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		if (crypto_is_moribund(q) || !crypto_is_larval(q))
			continue;

		test = (struct crypto_larval *)q;

		if (!strcmp(q->cra_driver_name, name))
			goto found;
	}

	pr_err("alg: Unexpected test result for %s: %d\n", name, err);
	goto unlock;

found:
	q->cra_flags |= CRYPTO_ALG_DEAD;
	alg = test->adult;

	if (list_empty(&alg->cra_list))
		goto complete;

	if (err == -ECANCELED)
		alg->cra_flags |= CRYPTO_ALG_FIPS_INTERNAL;
	else if (err)
		goto complete;
	else
		alg->cra_flags &= ~CRYPTO_ALG_FIPS_INTERNAL;

	alg->cra_flags |= CRYPTO_ALG_TESTED;

	/*
	 * If a higher-priority implementation of the same algorithm is
	 * currently being tested, then don't fulfill request larvals.
	 */
	best = true;
	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		if (crypto_is_moribund(q) || !crypto_is_larval(q))
			continue;

		if (strcmp(alg->cra_name, q->cra_name))
			continue;

		if (q->cra_priority > alg->cra_priority) {
			best = false;
			break;
		}
	}

	crypto_alg_finish_registration(alg, best, &list);

complete:
	complete_all(&test->completion);

unlock:
	up_write(&crypto_alg_sem);

	crypto_remove_final(&list);
}
EXPORT_SYMBOL_GPL(crypto_alg_tested);
void crypto_remove_final(struct list_head *list)
{
	struct crypto_alg *alg;
	struct crypto_alg *n;

	list_for_each_entry_safe(alg, n, list, cra_list) {
		list_del_init(&alg->cra_list);
		crypto_alg_put(alg);
	}
}
EXPORT_SYMBOL_GPL(crypto_remove_final);
int crypto_register_alg(struct crypto_alg *alg)
{
	struct crypto_larval *larval;
	LIST_HEAD(algs_to_put);
	bool test_started = false;
	int err;

	alg->cra_flags &= ~CRYPTO_ALG_DEAD;
	err = crypto_check_alg(alg);
	if (err)
		return err;

	down_write(&crypto_alg_sem);
	larval = __crypto_register_alg(alg, &algs_to_put);
	if (!IS_ERR_OR_NULL(larval)) {
		test_started = crypto_boot_test_finished();
		larval->test_started = test_started;
	}
	up_write(&crypto_alg_sem);

	if (IS_ERR(larval))
		return PTR_ERR(larval);
	if (test_started)
		crypto_wait_for_test(larval);
	crypto_remove_final(&algs_to_put);
	return 0;
}
EXPORT_SYMBOL_GPL(crypto_register_alg);
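/*
 * Typical usage (illustrative sketch, hypothetical names):
 *
 *	static int __init example_mod_init(void)
 *	{
 *		return crypto_register_alg(&example_alg);
 *	}
 *
 *	static void __exit example_mod_exit(void)
 *	{
 *		crypto_unregister_alg(&example_alg);
 *	}
 *
 * Note that registration may sleep, as it can wait in
 * crypto_wait_for_test() for the new algorithm's self-test to complete.
 */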
static int crypto_remove_alg(struct crypto_alg *alg, struct list_head *list)
{
	if (unlikely(list_empty(&alg->cra_list)))
		return -ENOENT;

	alg->cra_flags |= CRYPTO_ALG_DEAD;

	list_del_init(&alg->cra_list);
	crypto_remove_spawns(alg, list, NULL);

	return 0;
}
void crypto_unregister_alg(struct crypto_alg *alg)
{
	int ret;
	LIST_HEAD(list);

	down_write(&crypto_alg_sem);
	ret = crypto_remove_alg(alg, &list);
	up_write(&crypto_alg_sem);

	if (WARN(ret, "Algorithm %s is not registered", alg->cra_driver_name))
		return;

	if (WARN_ON(refcount_read(&alg->cra_refcnt) != 1))
		return;

	if (alg->cra_destroy)
		alg->cra_destroy(alg);

	crypto_remove_final(&list);
}
EXPORT_SYMBOL_GPL(crypto_unregister_alg);
int crypto_register_algs(struct crypto_alg *algs, int count)
{
	int i, ret;

	for (i = 0; i < count; i++) {
		ret = crypto_register_alg(&algs[i]);
		if (ret)
			goto err;
	}

	return 0;

err:
	for (--i; i >= 0; --i)
		crypto_unregister_alg(&algs[i]);

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_register_algs);
void crypto_unregister_algs(struct crypto_alg *algs, int count)
{
	int i;

	for (i = 0; i < count; i++)
		crypto_unregister_alg(&algs[i]);
}
EXPORT_SYMBOL_GPL(crypto_unregister_algs);
int crypto_register_template(struct crypto_template *tmpl)
{
	struct crypto_template *q;
	int err = -EEXIST;

	down_write(&crypto_alg_sem);

	crypto_check_module_sig(tmpl->module);

	list_for_each_entry(q, &crypto_template_list, list) {
		if (q == tmpl)
			goto out;
	}

	list_add(&tmpl->list, &crypto_template_list);
	err = 0;
out:
	up_write(&crypto_alg_sem);
	return err;
}
EXPORT_SYMBOL_GPL(crypto_register_template);
int crypto_register_templates(struct crypto_template *tmpls, int count)
{
	int i, err;

	for (i = 0; i < count; i++) {
		err = crypto_register_template(&tmpls[i]);
		if (err)
			goto out;
	}
	return 0;

out:
	for (--i; i >= 0; --i)
		crypto_unregister_template(&tmpls[i]);
	return err;
}
EXPORT_SYMBOL_GPL(crypto_register_templates);
void crypto_unregister_template(struct crypto_template *tmpl)
{
	struct crypto_instance *inst;
	struct hlist_node *n;
	struct hlist_head *list;
	LIST_HEAD(users);

	down_write(&crypto_alg_sem);

	BUG_ON(list_empty(&tmpl->list));
	list_del_init(&tmpl->list);

	list = &tmpl->instances;
	hlist_for_each_entry(inst, list, list) {
		int err = crypto_remove_alg(&inst->alg, &users);

		BUG_ON(err);
	}

	up_write(&crypto_alg_sem);

	hlist_for_each_entry_safe(inst, n, list, list) {
		BUG_ON(refcount_read(&inst->alg.cra_refcnt) != 1);
		crypto_free_instance(inst);
	}
	crypto_remove_final(&users);
}
EXPORT_SYMBOL_GPL(crypto_unregister_template);
void crypto_unregister_templates(struct crypto_template *tmpls, int count)
{
	int i;

	for (i = count - 1; i >= 0; --i)
		crypto_unregister_template(&tmpls[i]);
}
EXPORT_SYMBOL_GPL(crypto_unregister_templates);
static struct crypto_template *__crypto_lookup_template(const char *name)
{
	struct crypto_template *q, *tmpl = NULL;

	down_read(&crypto_alg_sem);
	list_for_each_entry(q, &crypto_template_list, list) {
		if (strcmp(q->name, name))
			continue;
		if (unlikely(!crypto_tmpl_get(q)))
			continue;

		tmpl = q;
		break;
	}
	up_read(&crypto_alg_sem);

	return tmpl;
}

struct crypto_template *crypto_lookup_template(const char *name)
{
	return try_then_request_module(__crypto_lookup_template(name),
				       "crypto-%s", name);
}
EXPORT_SYMBOL_GPL(crypto_lookup_template);
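/*
 * A failed lookup falls back to request_module() with the "crypto-%s"
 * alias, so a template module that declares e.g. MODULE_ALIAS_CRYPTO("cbc")
 * is loaded on demand the first time a "cbc(...)" instance is requested.
 */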
int crypto_register_instance(struct crypto_template *tmpl,
			     struct crypto_instance *inst)
{
	struct crypto_larval *larval;
	struct crypto_spawn *spawn;
	u32 fips_internal = 0;
	LIST_HEAD(algs_to_put);
	int err;

	err = crypto_check_alg(&inst->alg);
	if (err)
		return err;

	inst->alg.cra_module = tmpl->module;
	inst->alg.cra_flags |= CRYPTO_ALG_INSTANCE;

	down_write(&crypto_alg_sem);

	larval = ERR_PTR(-EAGAIN);
	for (spawn = inst->spawns; spawn;) {
		struct crypto_spawn *next;

		if (spawn->dead)
			goto unlock;

		next = spawn->next;
		spawn->inst = inst;
		spawn->registered = true;

		fips_internal |= spawn->alg->cra_flags;

		crypto_mod_put(spawn->alg);

		spawn = next;
	}

	inst->alg.cra_flags |= (fips_internal & CRYPTO_ALG_FIPS_INTERNAL);

	larval = __crypto_register_alg(&inst->alg, &algs_to_put);
	if (IS_ERR(larval))
		goto unlock;
	else if (larval)
		larval->test_started = true;

	hlist_add_head(&inst->list, &tmpl->instances);
	inst->tmpl = tmpl;

unlock:
	up_write(&crypto_alg_sem);

	if (IS_ERR(larval))
		return PTR_ERR(larval);
	if (larval)
		crypto_wait_for_test(larval);
	crypto_remove_final(&algs_to_put);
	return 0;
}
EXPORT_SYMBOL_GPL(crypto_register_instance);
void crypto_unregister_instance(struct crypto_instance *inst)
{
	LIST_HEAD(list);

	down_write(&crypto_alg_sem);

	crypto_remove_spawns(&inst->alg, &list, NULL);
	crypto_remove_instance(inst, &list);

	up_write(&crypto_alg_sem);

	crypto_remove_final(&list);
}
EXPORT_SYMBOL_GPL(crypto_unregister_instance);
int crypto_grab_spawn(struct crypto_spawn *spawn, struct crypto_instance *inst,
		      const char *name, u32 type, u32 mask)
{
	struct crypto_alg *alg;
	int err = -EAGAIN;

	if (WARN_ON_ONCE(inst == NULL))
		return -EINVAL;

	/* Allow the result of crypto_attr_alg_name() to be passed directly */
	if (IS_ERR(name))
		return PTR_ERR(name);

	alg = crypto_find_alg(name, spawn->frontend,
			      type | CRYPTO_ALG_FIPS_INTERNAL, mask);
	if (IS_ERR(alg))
		return PTR_ERR(alg);

	down_write(&crypto_alg_sem);
	if (!crypto_is_moribund(alg)) {
		list_add(&spawn->list, &alg->cra_users);
		spawn->alg = alg;
		spawn->mask = mask;
		spawn->next = inst->spawns;
		inst->spawns = spawn;
		inst->alg.cra_flags |=
			(alg->cra_flags & CRYPTO_ALG_INHERITED_FLAGS);
		err = 0;
	}
	up_write(&crypto_alg_sem);
	if (err)
		crypto_mod_put(alg);
	return err;
}
EXPORT_SYMBOL_GPL(crypto_grab_spawn);
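/*
 * Illustrative sketch (hypothetical, allocation and error handling elided):
 * a template's ->create() typically computes the inherited mask and then
 * grabs the inner algorithm named by the template parameters:
 *
 *	u32 mask;
 *
 *	err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_SKCIPHER, &mask);
 *	...
 *	err = crypto_grab_spawn(spawn, inst, crypto_attr_alg_name(tb[1]),
 *				0, mask);
 *
 * A successfully grabbed spawn must eventually be released with
 * crypto_drop_spawn().
 */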
void crypto_drop_spawn(struct crypto_spawn *spawn)
{
	if (!spawn->alg) /* not yet initialized? */
		return;

	down_write(&crypto_alg_sem);
	if (!spawn->dead)
		list_del(&spawn->list);
	up_write(&crypto_alg_sem);

	if (!spawn->registered)
		crypto_mod_put(spawn->alg);
}
EXPORT_SYMBOL_GPL(crypto_drop_spawn);
static struct crypto_alg *crypto_spawn_alg(struct crypto_spawn *spawn)
{
	struct crypto_alg *alg = ERR_PTR(-EAGAIN);
	struct crypto_alg *target;
	bool shoot = false;

	down_read(&crypto_alg_sem);
	if (!spawn->dead) {
		alg = spawn->alg;
		if (!crypto_mod_get(alg)) {
			target = crypto_alg_get(alg);
			shoot = true;
			alg = ERR_PTR(-EAGAIN);
		}
	}
	up_read(&crypto_alg_sem);

	if (shoot) {
		crypto_shoot_alg(target);
		crypto_alg_put(target);
	}

	return alg;
}
struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn, u32 type,
				    u32 mask)
{
	struct crypto_alg *alg;
	struct crypto_tfm *tfm;

	alg = crypto_spawn_alg(spawn);
	if (IS_ERR(alg))
		return ERR_CAST(alg);

	tfm = ERR_PTR(-EINVAL);
	if (unlikely((alg->cra_flags ^ type) & mask))
		goto out_put_alg;

	tfm = __crypto_alloc_tfm(alg, type, mask);
	if (IS_ERR(tfm))
		goto out_put_alg;

	return tfm;

out_put_alg:
	crypto_mod_put(alg);
	return tfm;
}
EXPORT_SYMBOL_GPL(crypto_spawn_tfm);
void *crypto_spawn_tfm2(struct crypto_spawn *spawn)
{
	struct crypto_alg *alg;
	struct crypto_tfm *tfm;

	alg = crypto_spawn_alg(spawn);
	if (IS_ERR(alg))
		return ERR_CAST(alg);

	tfm = crypto_create_tfm(alg, spawn->frontend);
	if (IS_ERR(tfm))
		goto out_put_alg;

	return tfm;

out_put_alg:
	crypto_mod_put(alg);
	return tfm;
}
EXPORT_SYMBOL_GPL(crypto_spawn_tfm2);
int crypto_register_notifier(struct notifier_block *nb)
{
	return blocking_notifier_chain_register(&crypto_chain, nb);
}
EXPORT_SYMBOL_GPL(crypto_register_notifier);

int crypto_unregister_notifier(struct notifier_block *nb)
{
	return blocking_notifier_chain_unregister(&crypto_chain, nb);
}
EXPORT_SYMBOL_GPL(crypto_unregister_notifier);
struct crypto_attr_type *crypto_get_attr_type(struct rtattr **tb)
{
	struct rtattr *rta = tb[0];
	struct crypto_attr_type *algt;

	if (!rta)
		return ERR_PTR(-ENOENT);
	if (RTA_PAYLOAD(rta) < sizeof(*algt))
		return ERR_PTR(-EINVAL);
	if (rta->rta_type != CRYPTOA_TYPE)
		return ERR_PTR(-EINVAL);

	algt = RTA_DATA(rta);

	return algt;
}
EXPORT_SYMBOL_GPL(crypto_get_attr_type);
/**
 * crypto_check_attr_type() - check algorithm type and compute inherited mask
 * @tb: the template parameters
 * @type: the algorithm type the template would be instantiated as
 * @mask_ret: (output) the mask that should be passed to crypto_grab_*()
 *	      to restrict the flags of any inner algorithms
 *
 * Validate that the algorithm type the user requested is compatible with the
 * one the template would actually be instantiated as.  E.g., if the user is
 * doing crypto_alloc_shash("cbc(aes)", ...), this would return an error because
 * the "cbc" template creates an "skcipher" algorithm, not an "shash" algorithm.
 *
 * Also compute the mask to use to restrict the flags of any inner algorithms.
 *
 * Return: 0 on success; -errno on failure
 */
int crypto_check_attr_type(struct rtattr **tb, u32 type, u32 *mask_ret)
{
	struct crypto_attr_type *algt;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	if ((algt->type ^ type) & algt->mask)
		return -EINVAL;

	*mask_ret = crypto_algt_inherited_mask(algt);
	return 0;
}
EXPORT_SYMBOL_GPL(crypto_check_attr_type);
const char *crypto_attr_alg_name(struct rtattr *rta)
{
	struct crypto_attr_alg *alga;

	if (!rta)
		return ERR_PTR(-ENOENT);
	if (RTA_PAYLOAD(rta) < sizeof(*alga))
		return ERR_PTR(-EINVAL);
	if (rta->rta_type != CRYPTOA_ALG)
		return ERR_PTR(-EINVAL);

	alga = RTA_DATA(rta);
	alga->name[CRYPTO_MAX_ALG_NAME - 1] = 0;

	return alga->name;
}
EXPORT_SYMBOL_GPL(crypto_attr_alg_name);
int crypto_inst_setname(struct crypto_instance *inst, const char *name,
			struct crypto_alg *alg)
{
	if (snprintf(inst->alg.cra_name, CRYPTO_MAX_ALG_NAME, "%s(%s)", name,
		     alg->cra_name) >= CRYPTO_MAX_ALG_NAME)
		return -ENAMETOOLONG;

	if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s(%s)",
		     name, alg->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
		return -ENAMETOOLONG;

	return 0;
}
EXPORT_SYMBOL_GPL(crypto_inst_setname);
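/*
 * For example, with name "hmac" and an algorithm whose cra_name is
 * "sha256" and cra_driver_name is "sha256-generic", this sets the
 * instance's cra_name to "hmac(sha256)" and its cra_driver_name to
 * "hmac(sha256-generic)".
 */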
void crypto_init_queue(struct crypto_queue *queue, unsigned int max_qlen)
{
	INIT_LIST_HEAD(&queue->list);
	queue->backlog = &queue->list;
	queue->qlen = 0;
	queue->max_qlen = max_qlen;
}
EXPORT_SYMBOL_GPL(crypto_init_queue);
int crypto_enqueue_request(struct crypto_queue *queue,
			   struct crypto_async_request *request)
{
	int err = -EINPROGRESS;

	if (unlikely(queue->qlen >= queue->max_qlen)) {
		if (!(request->flags & CRYPTO_TFM_REQ_MAY_BACKLOG)) {
			err = -ENOSPC;
			goto out;
		}
		err = -EBUSY;
		if (queue->backlog == &queue->list)
			queue->backlog = &request->list;
	}

	queue->qlen++;
	list_add_tail(&request->list, &queue->list);

out:
	return err;
}
EXPORT_SYMBOL_GPL(crypto_enqueue_request);
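/*
 * Return-value contract, as implemented above: -EINPROGRESS means the
 * request was queued normally; -EBUSY means the queue was full but the
 * request was accepted onto the backlog (CRYPTO_TFM_REQ_MAY_BACKLOG was
 * set); -ENOSPC means the queue was full and the request was rejected.
 */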
void crypto_enqueue_request_head(struct crypto_queue *queue,
				 struct crypto_async_request *request)
{
	if (unlikely(queue->qlen >= queue->max_qlen))
		queue->backlog = queue->backlog->prev;

	queue->qlen++;
	list_add(&request->list, &queue->list);
}
EXPORT_SYMBOL_GPL(crypto_enqueue_request_head);
struct crypto_async_request *crypto_dequeue_request(struct crypto_queue *queue)
{
	struct list_head *request;

	if (unlikely(!queue->qlen))
		return NULL;

	queue->qlen--;

	if (queue->backlog != &queue->list)
		queue->backlog = queue->backlog->next;

	request = queue->list.next;
	list_del(request);

	return list_entry(request, struct crypto_async_request, list);
}
EXPORT_SYMBOL_GPL(crypto_dequeue_request);
static inline void crypto_inc_byte(u8 *a, unsigned int size)
{
	u8 *b = (a + size);
	u8 c;

	for (; size; size--) {
		c = *--b + 1;
		*b = c;
		if (c)
			break;
	}
}

void crypto_inc(u8 *a, unsigned int size)
{
	__be32 *b = (__be32 *)(a + size);
	u32 c;

	if (IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) ||
	    IS_ALIGNED((unsigned long)b, __alignof__(*b)))
		for (; size >= 4; size -= 4) {
			c = be32_to_cpu(*--b) + 1;
			*b = cpu_to_be32(c);
			if (likely(c))
				return;
		}

	crypto_inc_byte(a, size);
}
EXPORT_SYMBOL_GPL(crypto_inc);
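/*
 * Worked example: with size == 4 and a == {0x00, 0x00, 0x00, 0xff}, the
 * buffer is treated as a big-endian counter and becomes
 * {0x00, 0x00, 0x01, 0x00}.  This is the increment used for e.g. CTR-mode
 * IVs; the word loop above handles 32 bits at a time and only falls back
 * to crypto_inc_byte() for a misaligned buffer or a leftover prefix.
 */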
unsigned int crypto_alg_extsize(struct crypto_alg *alg)
{
	return alg->cra_ctxsize +
	       (alg->cra_alignmask & ~(crypto_tfm_ctx_alignment() - 1));
}
EXPORT_SYMBOL_GPL(crypto_alg_extsize);
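/*
 * Worked example (values illustrative, since crypto_tfm_ctx_alignment()
 * is architecture dependent): with cra_ctxsize == 64, cra_alignmask == 15
 * and a context alignment of 8, the extra slack reserved is 15 & ~7 == 8,
 * giving an external size of 72 bytes.
 */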
int crypto_type_has_alg(const char *name, const struct crypto_type *frontend,
			u32 type, u32 mask)
{
	int ret = 0;
	struct crypto_alg *alg = crypto_find_alg(name, frontend, type, mask);

	if (!IS_ERR(alg)) {
		crypto_mod_put(alg);
		ret = 1;
	}

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_type_has_alg);
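/*
 * Run the self-tests that were deferred during boot.  One untested larval
 * is claimed per scan, and the list is rescanned under crypto_alg_sem each
 * iteration, since algorithms may be registered or removed while the
 * semaphore is dropped for crypto_wait_for_test().
 */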
static void __init crypto_start_tests(void)
{
	if (IS_ENABLED(CONFIG_CRYPTO_MANAGER_DISABLE_TESTS))
		return;

	for (;;) {
		struct crypto_larval *larval = NULL;
		struct crypto_alg *q;

		down_write(&crypto_alg_sem);

		list_for_each_entry(q, &crypto_alg_list, cra_list) {
			struct crypto_larval *l;

			if (!crypto_is_larval(q))
				continue;

			l = (void *)q;

			if (!crypto_is_test_larval(l))
				continue;

			if (l->test_started)
				continue;

			l->test_started = true;
			larval = l;
			break;
		}

		up_write(&crypto_alg_sem);

		if (!larval)
			break;

		crypto_wait_for_test(larval);
	}

	set_crypto_boot_test_finished();
}
static int __init crypto_algapi_init(void)
{
	crypto_init_proc();
	crypto_start_tests();
	return 0;
}

static void __exit crypto_algapi_exit(void)
{
	crypto_exit_proc();
}

/*
 * We run this at late_initcall so that all the built-in algorithms
 * have had a chance to register themselves first.
 */
late_initcall(crypto_algapi_init);
module_exit(crypto_algapi_exit);
MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Cryptographic algorithms API");
MODULE_SOFTDEP("pre: cryptomgr");