// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Cryptographic API for algorithms (i.e., low-level API).
 *
 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 */

#include <crypto/algapi.h>
#include <crypto/internal/simd.h>
#include <linux/err.h>
#include <linux/errno.h>
#include <linux/fips.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/list.h>
#include <linux/module.h>
#include <linux/rtnetlink.h>
#include <linux/slab.h>
#include <linux/string.h>

#include "internal.h"

static LIST_HEAD(crypto_template_list);

#ifdef CONFIG_CRYPTO_MANAGER_EXTRA_TESTS
DEFINE_PER_CPU(bool, crypto_simd_disabled_for_test);
EXPORT_PER_CPU_SYMBOL_GPL(crypto_simd_disabled_for_test);
#endif

static inline void crypto_check_module_sig(struct module *mod)
{
	if (fips_enabled && mod && !module_sig_ok(mod))
		panic("Module %s signature verification failed in FIPS mode\n",
		      module_name(mod));
}

static int crypto_check_alg(struct crypto_alg *alg)
{
	crypto_check_module_sig(alg->cra_module);

	if (!alg->cra_name[0] || !alg->cra_driver_name[0])
		return -EINVAL;

	if (alg->cra_alignmask & (alg->cra_alignmask + 1))
		return -EINVAL;

	/* General maximums for all algs. */
	if (alg->cra_alignmask > MAX_ALGAPI_ALIGNMASK)
		return -EINVAL;

	if (alg->cra_blocksize > MAX_ALGAPI_BLOCKSIZE)
		return -EINVAL;

	/* Lower maximums for specific alg types. */
	if (!alg->cra_type && (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) ==
			       CRYPTO_ALG_TYPE_CIPHER) {
		if (alg->cra_alignmask > MAX_CIPHER_ALIGNMASK)
			return -EINVAL;

		if (alg->cra_blocksize > MAX_CIPHER_BLOCKSIZE)
			return -EINVAL;
	}

	if (alg->cra_priority < 0)
		return -EINVAL;

	refcount_set(&alg->cra_refcnt, 1);

	return 0;
}

static void crypto_free_instance(struct crypto_instance *inst)
{
	inst->alg.cra_type->free(inst);
}

static void crypto_destroy_instance(struct crypto_alg *alg)
{
	struct crypto_instance *inst = (void *)alg;
	struct crypto_template *tmpl = inst->tmpl;

	crypto_free_instance(inst);
	crypto_tmpl_put(tmpl);
}

/*
 * This function adds a spawn to the list secondary_spawns which
 * will be used at the end of crypto_remove_spawns to unregister
 * instances, unless the spawn happens to be one that is depended
 * on by the new algorithm (nalg in crypto_remove_spawns).
 *
 * This function is also responsible for resurrecting any algorithms
 * in the dependency chain of nalg by unsetting n->dead.
 */
static struct list_head *crypto_more_spawns(struct crypto_alg *alg,
					    struct list_head *stack,
					    struct list_head *top,
					    struct list_head *secondary_spawns)
{
	struct crypto_spawn *spawn, *n;

	spawn = list_first_entry_or_null(stack, struct crypto_spawn, list);
	if (!spawn)
		return NULL;

	n = list_prev_entry(spawn, list);
	list_move(&spawn->list, secondary_spawns);

	if (list_is_last(&n->list, stack))
		return top;

	n = list_next_entry(n, list);
	if (!spawn->dead)
		n->dead = false;

	return &n->inst->alg.cra_users;
}

static void crypto_remove_instance(struct crypto_instance *inst,
				   struct list_head *list)
{
	struct crypto_template *tmpl = inst->tmpl;

	if (crypto_is_dead(&inst->alg))
		return;

	inst->alg.cra_flags |= CRYPTO_ALG_DEAD;

	if (!tmpl || !crypto_tmpl_get(tmpl))
		return;

	list_move(&inst->alg.cra_list, list);
	hlist_del(&inst->list);
	inst->alg.cra_destroy = crypto_destroy_instance;

	BUG_ON(!list_empty(&inst->alg.cra_users));
}

/*
 * Given an algorithm alg, remove all algorithms that depend on it
 * through spawns.  If nalg is not null, then exempt any algorithms
 * that are depended on by nalg.  This is useful when nalg itself
 * depends on alg.
 */
void crypto_remove_spawns(struct crypto_alg *alg, struct list_head *list,
			  struct crypto_alg *nalg)
{
	u32 new_type = (nalg ?: alg)->cra_flags;
	struct crypto_spawn *spawn, *n;
	LIST_HEAD(secondary_spawns);
	struct list_head *spawns;
	LIST_HEAD(stack);
	LIST_HEAD(top);

	spawns = &alg->cra_users;
	list_for_each_entry_safe(spawn, n, spawns, list) {
		if ((spawn->alg->cra_flags ^ new_type) & spawn->mask)
			continue;

		list_move(&spawn->list, &top);
	}

	/*
	 * Perform a depth-first walk starting from alg through
	 * the cra_users tree.  The list stack records the path
	 * from alg to the current spawn.
	 */
	spawns = &top;
	do {
		while (!list_empty(spawns)) {
			struct crypto_instance *inst;

			spawn = list_first_entry(spawns, struct crypto_spawn,
						 list);
			inst = spawn->inst;

			list_move(&spawn->list, &stack);
			spawn->dead = !spawn->registered || &inst->alg != nalg;

			if (!spawn->registered)
				break;

			BUG_ON(&inst->alg == alg);

			if (&inst->alg == nalg)
				break;

			spawns = &inst->alg.cra_users;

			/*
			 * Even if spawn->registered is true, the
			 * instance itself may still be unregistered.
			 * This is because it may have failed during
			 * registration.  Therefore we still need to
			 * make the following test.
			 *
			 * We may encounter an unregistered instance here, since
			 * an instance's spawns are set up prior to the instance
			 * being registered.  An unregistered instance will have
			 * NULL ->cra_users.next, since ->cra_users isn't
			 * properly initialized until registration.  But an
			 * unregistered instance cannot have any users, so treat
			 * it the same as ->cra_users being empty.
			 */
			if (spawns->next == NULL)
				break;
		}
	} while ((spawns = crypto_more_spawns(alg, &stack, &top,
					      &secondary_spawns)));

	/*
	 * Remove all instances that are marked as dead.  Also
	 * complete the resurrection of the others by moving them
	 * back to the cra_users list.
	 */
	list_for_each_entry_safe(spawn, n, &secondary_spawns, list) {
		if (!spawn->dead)
			list_move(&spawn->list, &spawn->alg->cra_users);
		else if (spawn->registered)
			crypto_remove_instance(spawn->inst, list);
	}
}
EXPORT_SYMBOL_GPL(crypto_remove_spawns);

static void crypto_alg_finish_registration(struct crypto_alg *alg,
					   bool fulfill_requests,
					   struct list_head *algs_to_put)
{
	struct crypto_alg *q;

	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		if (q == alg)
			continue;

		if (crypto_is_moribund(q))
			continue;

		if (crypto_is_larval(q)) {
			struct crypto_larval *larval = (void *)q;

			/*
			 * Check to see if either our generic name or
			 * specific name can satisfy the name requested
			 * by the larval entry q.
			 */
			if (strcmp(alg->cra_name, q->cra_name) &&
			    strcmp(alg->cra_driver_name, q->cra_name))
				continue;

			if (larval->adult)
				continue;
			if ((q->cra_flags ^ alg->cra_flags) & larval->mask)
				continue;

			if (fulfill_requests && crypto_mod_get(alg))
				larval->adult = alg;
			else
				larval->adult = ERR_PTR(-EAGAIN);

			continue;
		}

		if (strcmp(alg->cra_name, q->cra_name))
			continue;

		if (strcmp(alg->cra_driver_name, q->cra_driver_name) &&
		    q->cra_priority > alg->cra_priority)
			continue;

		crypto_remove_spawns(q, algs_to_put, alg);
	}

	crypto_notify(CRYPTO_MSG_ALG_LOADED, alg);
}

static struct crypto_larval *crypto_alloc_test_larval(struct crypto_alg *alg)
{
	struct crypto_larval *larval;

	if (!IS_ENABLED(CONFIG_CRYPTO_MANAGER) ||
	    IS_ENABLED(CONFIG_CRYPTO_MANAGER_DISABLE_TESTS))
		return NULL; /* No self-test needed */

	larval = crypto_larval_alloc(alg->cra_name,
				     alg->cra_flags | CRYPTO_ALG_TESTED, 0);
	if (IS_ERR(larval))
		return larval;

	larval->adult = crypto_mod_get(alg);
	if (!larval->adult) {
		kfree(larval);
		return ERR_PTR(-ENOENT);
	}

	refcount_set(&larval->alg.cra_refcnt, 1);
	memcpy(larval->alg.cra_driver_name, alg->cra_driver_name,
	       CRYPTO_MAX_ALG_NAME);
	larval->alg.cra_priority = alg->cra_priority;

	return larval;
}

static struct crypto_larval *
__crypto_register_alg(struct crypto_alg *alg, struct list_head *algs_to_put)
{
	struct crypto_alg *q;
	struct crypto_larval *larval;
	int ret = -EAGAIN;

	if (crypto_is_dead(alg))
		goto err;

	INIT_LIST_HEAD(&alg->cra_users);

	ret = -EEXIST;

	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		if (q == alg)
			goto err;

		if (crypto_is_moribund(q))
			continue;

		if (crypto_is_larval(q)) {
			if (!strcmp(alg->cra_driver_name, q->cra_driver_name))
				goto err;
			continue;
		}

		if (!strcmp(q->cra_driver_name, alg->cra_name) ||
		    !strcmp(q->cra_name, alg->cra_driver_name))
			goto err;
	}

	larval = crypto_alloc_test_larval(alg);
	if (IS_ERR(larval))
		goto out;

	list_add(&alg->cra_list, &crypto_alg_list);

	crypto_stats_init(alg);

	if (larval) {
		/* No cheating! */
		alg->cra_flags &= ~CRYPTO_ALG_TESTED;

		list_add(&larval->alg.cra_list, &crypto_alg_list);
	} else {
		alg->cra_flags |= CRYPTO_ALG_TESTED;
		crypto_alg_finish_registration(alg, true, algs_to_put);
	}

out:
	return larval;

err:
	larval = ERR_PTR(ret);
	goto out;
}

void crypto_alg_tested(const char *name, int err)
{
	struct crypto_larval *test;
	struct crypto_alg *alg;
	struct crypto_alg *q;
	LIST_HEAD(list);
	bool best;

	down_write(&crypto_alg_sem);
	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		if (crypto_is_moribund(q) || !crypto_is_larval(q))
			continue;

		test = (struct crypto_larval *)q;

		if (!strcmp(q->cra_driver_name, name))
			goto found;
	}

	pr_err("alg: Unexpected test result for %s: %d\n", name, err);
	goto unlock;

found:
	q->cra_flags |= CRYPTO_ALG_DEAD;
	alg = test->adult;

	if (list_empty(&alg->cra_list))
		goto complete;

	if (err == -ECANCELED)
		alg->cra_flags |= CRYPTO_ALG_FIPS_INTERNAL;
	else if (err)
		goto complete;
	else
		alg->cra_flags &= ~CRYPTO_ALG_FIPS_INTERNAL;

	alg->cra_flags |= CRYPTO_ALG_TESTED;

	/*
	 * If a higher-priority implementation of the same algorithm is
	 * currently being tested, then don't fulfill request larvals.
	 */
	best = true;
	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		if (crypto_is_moribund(q) || !crypto_is_larval(q))
			continue;

		if (strcmp(alg->cra_name, q->cra_name))
			continue;

		if (q->cra_priority > alg->cra_priority) {
			best = false;
			break;
		}
	}

	crypto_alg_finish_registration(alg, best, &list);

complete:
	complete_all(&test->completion);

unlock:
	up_write(&crypto_alg_sem);

	crypto_remove_final(&list);
}
EXPORT_SYMBOL_GPL(crypto_alg_tested);

void crypto_remove_final(struct list_head *list)
{
	struct crypto_alg *alg;
	struct crypto_alg *n;

	list_for_each_entry_safe(alg, n, list, cra_list) {
		list_del_init(&alg->cra_list);
		crypto_alg_put(alg);
	}
}
EXPORT_SYMBOL_GPL(crypto_remove_final);

int crypto_register_alg(struct crypto_alg *alg)
{
	struct crypto_larval *larval;
	LIST_HEAD(algs_to_put);
	bool test_started = false;
	int err;

	alg->cra_flags &= ~CRYPTO_ALG_DEAD;
	err = crypto_check_alg(alg);
	if (err)
		return err;

	down_write(&crypto_alg_sem);
	larval = __crypto_register_alg(alg, &algs_to_put);
	if (!IS_ERR_OR_NULL(larval)) {
		test_started = static_key_enabled(&crypto_boot_test_finished);
		larval->test_started = test_started;
	}
	up_write(&crypto_alg_sem);

	if (IS_ERR(larval))
		return PTR_ERR(larval);
	if (test_started)
		crypto_wait_for_test(larval);
	crypto_remove_final(&algs_to_put);
	return 0;
}
EXPORT_SYMBOL_GPL(crypto_register_alg);
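
/*
 * Illustrative sketch (not part of this file): a driver normally fills in a
 * struct crypto_alg (or a type-specific wrapper such as struct skcipher_alg)
 * and registers it from its module init hook.  The names and values below
 * are hypothetical; a real algorithm also sets its type flags, context size
 * and operation callbacks.
 *
 *	static struct crypto_alg example_alg = {
 *		.cra_name		= "example",
 *		.cra_driver_name	= "example-generic",
 *		.cra_priority		= 100,
 *		.cra_blocksize		= 16,
 *		.cra_module		= THIS_MODULE,
 *	};
 *
 *	static int __init example_mod_init(void)
 *	{
 *		return crypto_register_alg(&example_alg);
 *	}
 *
 *	static void __exit example_mod_exit(void)
 *	{
 *		crypto_unregister_alg(&example_alg);
 *	}
 */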

static int crypto_remove_alg(struct crypto_alg *alg, struct list_head *list)
{
	if (unlikely(list_empty(&alg->cra_list)))
		return -ENOENT;

	alg->cra_flags |= CRYPTO_ALG_DEAD;

	list_del_init(&alg->cra_list);
	crypto_remove_spawns(alg, list, NULL);

	return 0;
}

void crypto_unregister_alg(struct crypto_alg *alg)
{
	int ret;
	LIST_HEAD(list);

	down_write(&crypto_alg_sem);
	ret = crypto_remove_alg(alg, &list);
	up_write(&crypto_alg_sem);

	if (WARN(ret, "Algorithm %s is not registered", alg->cra_driver_name))
		return;

	BUG_ON(refcount_read(&alg->cra_refcnt) != 1);
	if (alg->cra_destroy)
		alg->cra_destroy(alg);

	crypto_remove_final(&list);
}
EXPORT_SYMBOL_GPL(crypto_unregister_alg);

int crypto_register_algs(struct crypto_alg *algs, int count)
{
	int i, ret;

	for (i = 0; i < count; i++) {
		ret = crypto_register_alg(&algs[i]);
		if (ret)
			goto err;
	}

	return 0;

err:
	for (--i; i >= 0; --i)
		crypto_unregister_alg(&algs[i]);

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_register_algs);

void crypto_unregister_algs(struct crypto_alg *algs, int count)
{
	int i;

	for (i = 0; i < count; i++)
		crypto_unregister_alg(&algs[i]);
}
EXPORT_SYMBOL_GPL(crypto_unregister_algs);

int crypto_register_template(struct crypto_template *tmpl)
{
	struct crypto_template *q;
	int err = -EEXIST;

	down_write(&crypto_alg_sem);

	crypto_check_module_sig(tmpl->module);

	list_for_each_entry(q, &crypto_template_list, list) {
		if (q == tmpl)
			goto out;
	}

	list_add(&tmpl->list, &crypto_template_list);
	err = 0;
out:
	up_write(&crypto_alg_sem);
	return err;
}
EXPORT_SYMBOL_GPL(crypto_register_template);

int crypto_register_templates(struct crypto_template *tmpls, int count)
{
	int i, err;

	for (i = 0; i < count; i++) {
		err = crypto_register_template(&tmpls[i]);
		if (err)
			goto out;
	}
	return 0;

out:
	for (--i; i >= 0; --i)
		crypto_unregister_template(&tmpls[i]);
	return err;
}
EXPORT_SYMBOL_GPL(crypto_register_templates);

void crypto_unregister_template(struct crypto_template *tmpl)
{
	struct crypto_instance *inst;
	struct hlist_node *n;
	struct hlist_head *list;
	LIST_HEAD(users);

	down_write(&crypto_alg_sem);

	BUG_ON(list_empty(&tmpl->list));
	list_del_init(&tmpl->list);

	list = &tmpl->instances;
	hlist_for_each_entry(inst, list, list) {
		int err = crypto_remove_alg(&inst->alg, &users);

		BUG_ON(err);
	}

	up_write(&crypto_alg_sem);

	hlist_for_each_entry_safe(inst, n, list, list) {
		BUG_ON(refcount_read(&inst->alg.cra_refcnt) != 1);
		crypto_free_instance(inst);
	}
	crypto_remove_final(&users);
}
EXPORT_SYMBOL_GPL(crypto_unregister_template);

void crypto_unregister_templates(struct crypto_template *tmpls, int count)
{
	int i;

	for (i = count - 1; i >= 0; --i)
		crypto_unregister_template(&tmpls[i]);
}
EXPORT_SYMBOL_GPL(crypto_unregister_templates);

static struct crypto_template *__crypto_lookup_template(const char *name)
{
	struct crypto_template *q, *tmpl = NULL;

	down_read(&crypto_alg_sem);
	list_for_each_entry(q, &crypto_template_list, list) {
		if (strcmp(q->name, name))
			continue;
		if (unlikely(!crypto_tmpl_get(q)))
			continue;

		tmpl = q;
		break;
	}
	up_read(&crypto_alg_sem);

	return tmpl;
}

struct crypto_template *crypto_lookup_template(const char *name)
{
	return try_then_request_module(__crypto_lookup_template(name),
				       "crypto-%s", name);
}
EXPORT_SYMBOL_GPL(crypto_lookup_template);

int crypto_register_instance(struct crypto_template *tmpl,
			     struct crypto_instance *inst)
{
	struct crypto_larval *larval;
	struct crypto_spawn *spawn;
	u32 fips_internal = 0;
	LIST_HEAD(algs_to_put);
	int err;

	err = crypto_check_alg(&inst->alg);
	if (err)
		return err;

	inst->alg.cra_module = tmpl->module;
	inst->alg.cra_flags |= CRYPTO_ALG_INSTANCE;

	down_write(&crypto_alg_sem);

	larval = ERR_PTR(-EAGAIN);
	for (spawn = inst->spawns; spawn;) {
		struct crypto_spawn *next;

		if (spawn->dead)
			goto unlock;

		next = spawn->next;
		spawn->inst = inst;
		spawn->registered = true;

		fips_internal |= spawn->alg->cra_flags;

		crypto_mod_put(spawn->alg);

		spawn = next;
	}

	inst->alg.cra_flags |= (fips_internal & CRYPTO_ALG_FIPS_INTERNAL);

	larval = __crypto_register_alg(&inst->alg, &algs_to_put);
	if (IS_ERR(larval))
		goto unlock;
	else if (larval)
		larval->test_started = true;

	hlist_add_head(&inst->list, &tmpl->instances);
	inst->tmpl = tmpl;

unlock:
	up_write(&crypto_alg_sem);

	if (IS_ERR(larval))
		return PTR_ERR(larval);
	if (larval)
		crypto_wait_for_test(larval);
	crypto_remove_final(&algs_to_put);
	return 0;
}
EXPORT_SYMBOL_GPL(crypto_register_instance);

void crypto_unregister_instance(struct crypto_instance *inst)
{
	LIST_HEAD(list);

	down_write(&crypto_alg_sem);

	crypto_remove_spawns(&inst->alg, &list, NULL);
	crypto_remove_instance(inst, &list);

	up_write(&crypto_alg_sem);

	crypto_remove_final(&list);
}
EXPORT_SYMBOL_GPL(crypto_unregister_instance);

int crypto_grab_spawn(struct crypto_spawn *spawn, struct crypto_instance *inst,
		      const char *name, u32 type, u32 mask)
{
	struct crypto_alg *alg;
	int err = -EAGAIN;

	if (WARN_ON_ONCE(inst == NULL))
		return -EINVAL;

	/* Allow the result of crypto_attr_alg_name() to be passed directly */
	if (IS_ERR(name))
		return PTR_ERR(name);

	alg = crypto_find_alg(name, spawn->frontend,
			      type | CRYPTO_ALG_FIPS_INTERNAL, mask);
	if (IS_ERR(alg))
		return PTR_ERR(alg);

	down_write(&crypto_alg_sem);
	if (!crypto_is_moribund(alg)) {
		list_add(&spawn->list, &alg->cra_users);
		spawn->alg = alg;
		spawn->mask = mask;
		spawn->next = inst->spawns;
		inst->spawns = spawn;
		inst->alg.cra_flags |=
			(alg->cra_flags & CRYPTO_ALG_INHERITED_FLAGS);
		err = 0;
	}
	up_write(&crypto_alg_sem);
	if (err)
		crypto_mod_put(alg);
	return err;
}
EXPORT_SYMBOL_GPL(crypto_grab_spawn);
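
/*
 * Illustrative sketch (not part of this file): templates do not usually call
 * crypto_grab_spawn() directly; they go through a typed wrapper such as
 * crypto_grab_skcipher() or crypto_grab_ahash(), which supplies the right
 * frontend.  A hypothetical template ->create() might grab its inner
 * algorithm like this:
 *
 *	err = crypto_grab_skcipher(&ctx->spawn,
 *				   skcipher_crypto_instance(inst),
 *				   crypto_attr_alg_name(tb[1]), 0, mask);
 *	if (err)
 *		goto err_free_inst;
 *
 * The matching crypto_drop_skcipher()/crypto_drop_spawn() call is made from
 * the instance's ->free() callback.
 */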

void crypto_drop_spawn(struct crypto_spawn *spawn)
{
	if (!spawn->alg) /* not yet initialized? */
		return;

	down_write(&crypto_alg_sem);
	if (!spawn->dead)
		list_del(&spawn->list);
	up_write(&crypto_alg_sem);

	if (!spawn->registered)
		crypto_mod_put(spawn->alg);
}
EXPORT_SYMBOL_GPL(crypto_drop_spawn);

static struct crypto_alg *crypto_spawn_alg(struct crypto_spawn *spawn)
{
	struct crypto_alg *alg = ERR_PTR(-EAGAIN);
	struct crypto_alg *target;
	bool shoot = false;

	down_read(&crypto_alg_sem);
	if (!spawn->dead) {
		alg = spawn->alg;
		if (!crypto_mod_get(alg)) {
			target = crypto_alg_get(alg);
			shoot = true;
			alg = ERR_PTR(-EAGAIN);
		}
	}
	up_read(&crypto_alg_sem);

	if (shoot) {
		crypto_shoot_alg(target);
		crypto_alg_put(target);
	}

	return alg;
}

struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn, u32 type,
				    u32 mask)
{
	struct crypto_alg *alg;
	struct crypto_tfm *tfm;

	alg = crypto_spawn_alg(spawn);
	if (IS_ERR(alg))
		return ERR_CAST(alg);

	tfm = ERR_PTR(-EINVAL);
	if (unlikely((alg->cra_flags ^ type) & mask))
		goto out_put_alg;

	tfm = __crypto_alloc_tfm(alg, type, mask);
	if (IS_ERR(tfm))
		goto out_put_alg;

	return tfm;

out_put_alg:
	crypto_mod_put(alg);
	return tfm;
}
EXPORT_SYMBOL_GPL(crypto_spawn_tfm);

void *crypto_spawn_tfm2(struct crypto_spawn *spawn)
{
	struct crypto_alg *alg;
	struct crypto_tfm *tfm;

	alg = crypto_spawn_alg(spawn);
	if (IS_ERR(alg))
		return ERR_CAST(alg);

	tfm = crypto_create_tfm(alg, spawn->frontend);
	if (IS_ERR(tfm))
		goto out_put_alg;

	return tfm;

out_put_alg:
	crypto_mod_put(alg);
	return tfm;
}
EXPORT_SYMBOL_GPL(crypto_spawn_tfm2);

int crypto_register_notifier(struct notifier_block *nb)
{
	return blocking_notifier_chain_register(&crypto_chain, nb);
}
EXPORT_SYMBOL_GPL(crypto_register_notifier);

int crypto_unregister_notifier(struct notifier_block *nb)
{
	return blocking_notifier_chain_unregister(&crypto_chain, nb);
}
EXPORT_SYMBOL_GPL(crypto_unregister_notifier);
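
/*
 * Illustrative sketch (not part of this file): the in-tree user of this
 * chain is cryptomgr, which registers a notifier_block and reacts to events
 * such as CRYPTO_MSG_ALG_REQUEST and CRYPTO_MSG_ALG_LOADED.  A hypothetical
 * listener would look like:
 *
 *	static int example_notify(struct notifier_block *this,
 *				  unsigned long msg, void *data)
 *	{
 *		if (msg == CRYPTO_MSG_ALG_LOADED)
 *			pr_debug("algorithm finished loading\n");
 *		return NOTIFY_DONE;
 *	}
 *
 *	static struct notifier_block example_notifier = {
 *		.notifier_call = example_notify,
 *	};
 *
 * followed by crypto_register_notifier(&example_notifier).
 */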

struct crypto_attr_type *crypto_get_attr_type(struct rtattr **tb)
{
	struct rtattr *rta = tb[0];
	struct crypto_attr_type *algt;

	if (!rta)
		return ERR_PTR(-ENOENT);
	if (RTA_PAYLOAD(rta) < sizeof(*algt))
		return ERR_PTR(-EINVAL);
	if (rta->rta_type != CRYPTOA_TYPE)
		return ERR_PTR(-EINVAL);

	algt = RTA_DATA(rta);

	return algt;
}
EXPORT_SYMBOL_GPL(crypto_get_attr_type);

/**
 * crypto_check_attr_type() - check algorithm type and compute inherited mask
 * @tb: the template parameters
 * @type: the algorithm type the template would be instantiated as
 * @mask_ret: (output) the mask that should be passed to crypto_grab_*()
 *	      to restrict the flags of any inner algorithms
 *
 * Validate that the algorithm type the user requested is compatible with the
 * one the template would actually be instantiated as.  E.g., if the user is
 * doing crypto_alloc_shash("cbc(aes)", ...), this would return an error
 * because the "cbc" template creates an "skcipher" algorithm, not an "shash"
 * algorithm.
 *
 * Also compute the mask to use to restrict the flags of any inner algorithms.
 *
 * Return: 0 on success; -errno on failure
 */
int crypto_check_attr_type(struct rtattr **tb, u32 type, u32 *mask_ret)
{
	struct crypto_attr_type *algt;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	if ((algt->type ^ type) & algt->mask)
		return -EINVAL;

	*mask_ret = crypto_algt_inherited_mask(algt);
	return 0;
}
EXPORT_SYMBOL_GPL(crypto_check_attr_type);
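
/*
 * Illustrative sketch (not part of this file): a template's ->create()
 * callback typically starts with this check and then uses the returned mask
 * when grabbing its inner algorithm(s).  The function name below is
 * hypothetical:
 *
 *	static int example_create(struct crypto_template *tmpl,
 *				  struct rtattr **tb)
 *	{
 *		u32 mask;
 *		int err;
 *
 *		err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_SKCIPHER,
 *					     &mask);
 *		if (err)
 *			return err;
 *
 *		// ... allocate the instance and grab spawns using "mask" ...
 *	}
 */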

const char *crypto_attr_alg_name(struct rtattr *rta)
{
	struct crypto_attr_alg *alga;

	if (!rta)
		return ERR_PTR(-ENOENT);
	if (RTA_PAYLOAD(rta) < sizeof(*alga))
		return ERR_PTR(-EINVAL);
	if (rta->rta_type != CRYPTOA_ALG)
		return ERR_PTR(-EINVAL);

	alga = RTA_DATA(rta);
	alga->name[CRYPTO_MAX_ALG_NAME - 1] = 0;

	return alga->name;
}
EXPORT_SYMBOL_GPL(crypto_attr_alg_name);

int crypto_inst_setname(struct crypto_instance *inst, const char *name,
			struct crypto_alg *alg)
{
	if (snprintf(inst->alg.cra_name, CRYPTO_MAX_ALG_NAME, "%s(%s)", name,
		     alg->cra_name) >= CRYPTO_MAX_ALG_NAME)
		return -ENAMETOOLONG;

	if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s(%s)",
		     name, alg->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
		return -ENAMETOOLONG;

	return 0;
}
EXPORT_SYMBOL_GPL(crypto_inst_setname);
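
/*
 * Illustrative sketch (not part of this file): for a template named "ecb"
 * wrapping the cipher "aes" whose driver name is "aes-generic", a call such
 * as
 *
 *	err = crypto_inst_setname(inst, tmpl->name, alg);
 *
 * produces cra_name "ecb(aes)" and cra_driver_name "ecb(aes-generic)", or
 * -ENAMETOOLONG if either composed name would not fit in CRYPTO_MAX_ALG_NAME.
 */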

void crypto_init_queue(struct crypto_queue *queue, unsigned int max_qlen)
{
	INIT_LIST_HEAD(&queue->list);
	queue->backlog = &queue->list;
	queue->qlen = 0;
	queue->max_qlen = max_qlen;
}
EXPORT_SYMBOL_GPL(crypto_init_queue);

int crypto_enqueue_request(struct crypto_queue *queue,
			   struct crypto_async_request *request)
{
	int err = -EINPROGRESS;

	if (unlikely(queue->qlen >= queue->max_qlen)) {
		if (!(request->flags & CRYPTO_TFM_REQ_MAY_BACKLOG)) {
			err = -ENOSPC;
			goto out;
		}
		err = -EBUSY;
		if (queue->backlog == &queue->list)
			queue->backlog = &request->list;
	}

	queue->qlen++;
	list_add_tail(&request->list, &queue->list);

out:
	return err;
}
EXPORT_SYMBOL_GPL(crypto_enqueue_request);

void crypto_enqueue_request_head(struct crypto_queue *queue,
				 struct crypto_async_request *request)
{
	queue->qlen++;
	list_add(&request->list, &queue->list);
}
EXPORT_SYMBOL_GPL(crypto_enqueue_request_head);

struct crypto_async_request *crypto_dequeue_request(struct crypto_queue *queue)
{
	struct list_head *request;

	if (unlikely(!queue->qlen))
		return NULL;

	queue->qlen--;

	if (queue->backlog != &queue->list)
		queue->backlog = queue->backlog->next;

	request = queue->list.next;
	list_del(request);

	return list_entry(request, struct crypto_async_request, list);
}
EXPORT_SYMBOL_GPL(crypto_dequeue_request);
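
/*
 * Illustrative sketch (not part of this file): hardware drivers typically
 * feed their engine from a crypto_queue under their own lock.  A hypothetical
 * pattern (priv and req are driver-defined):
 *
 *	crypto_init_queue(&priv->queue, 50);
 *
 *	// submission path:
 *	ret = crypto_enqueue_request(&priv->queue, &req->base);
 *
 *	// worker/completion path:
 *	backlog = crypto_get_backlog(&priv->queue);
 *	async_req = crypto_dequeue_request(&priv->queue);
 *	if (backlog)
 *		backlog->complete(backlog, -EINPROGRESS);
 *
 * crypto_enqueue_request() returns -EINPROGRESS on success, -EBUSY when the
 * request was backlogged, and -ENOSPC when the queue is full and the request
 * did not set CRYPTO_TFM_REQ_MAY_BACKLOG.
 */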

static inline void crypto_inc_byte(u8 *a, unsigned int size)
{
	u8 *b = (a + size);
	u8 c;

	for (; size; size--) {
		c = *--b + 1;
		*b = c;
		if (c)
			break;
	}
}

void crypto_inc(u8 *a, unsigned int size)
{
	__be32 *b = (__be32 *)(a + size);
	u32 c;

	if (IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) ||
	    IS_ALIGNED((unsigned long)b, __alignof__(*b)))
		for (; size >= 4; size -= 4) {
			c = be32_to_cpu(*--b) + 1;
			*b = cpu_to_be32(c);
			if (likely(c))
				return;
		}

	crypto_inc_byte(a, size);
}
EXPORT_SYMBOL_GPL(crypto_inc);
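
/*
 * Illustrative sketch (not part of this file): crypto_inc() treats the buffer
 * as one big-endian counter, as used for CTR-style IVs.  With a 16-byte
 * counter block:
 *
 *	u8 ctrblk[16];
 *
 *	// ... encrypt the current counter block ...
 *	crypto_inc(ctrblk, sizeof(ctrblk));
 *
 * Incrementing ...00 ff yields ...01 00: the carry propagates from the last
 * byte toward the first, word-at-a-time when the buffer is suitably aligned.
 */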

unsigned int crypto_alg_extsize(struct crypto_alg *alg)
{
	return alg->cra_ctxsize +
	       (alg->cra_alignmask & ~(crypto_tfm_ctx_alignment() - 1));
}
EXPORT_SYMBOL_GPL(crypto_alg_extsize);

int crypto_type_has_alg(const char *name, const struct crypto_type *frontend,
			u32 type, u32 mask)
{
	int ret = 0;
	struct crypto_alg *alg = crypto_find_alg(name, frontend, type, mask);

	if (!IS_ERR(alg)) {
		crypto_mod_put(alg);
		ret = 1;
	}

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_type_has_alg);

#ifdef CONFIG_CRYPTO_STATS
void crypto_stats_init(struct crypto_alg *alg)
{
	memset(&alg->stats, 0, sizeof(alg->stats));
}
EXPORT_SYMBOL_GPL(crypto_stats_init);

void crypto_stats_get(struct crypto_alg *alg)
{
	crypto_alg_get(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_get);

void crypto_stats_aead_encrypt(unsigned int cryptlen, struct crypto_alg *alg,
			       int ret)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.aead.err_cnt);
	} else {
		atomic64_inc(&alg->stats.aead.encrypt_cnt);
		atomic64_add(cryptlen, &alg->stats.aead.encrypt_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_aead_encrypt);

void crypto_stats_aead_decrypt(unsigned int cryptlen, struct crypto_alg *alg,
			       int ret)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.aead.err_cnt);
	} else {
		atomic64_inc(&alg->stats.aead.decrypt_cnt);
		atomic64_add(cryptlen, &alg->stats.aead.decrypt_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_aead_decrypt);

void crypto_stats_akcipher_encrypt(unsigned int src_len, int ret,
				   struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.akcipher.err_cnt);
	} else {
		atomic64_inc(&alg->stats.akcipher.encrypt_cnt);
		atomic64_add(src_len, &alg->stats.akcipher.encrypt_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_akcipher_encrypt);

void crypto_stats_akcipher_decrypt(unsigned int src_len, int ret,
				   struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.akcipher.err_cnt);
	} else {
		atomic64_inc(&alg->stats.akcipher.decrypt_cnt);
		atomic64_add(src_len, &alg->stats.akcipher.decrypt_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_akcipher_decrypt);

void crypto_stats_akcipher_sign(int ret, struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY)
		atomic64_inc(&alg->stats.akcipher.err_cnt);
	else
		atomic64_inc(&alg->stats.akcipher.sign_cnt);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_akcipher_sign);

void crypto_stats_akcipher_verify(int ret, struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY)
		atomic64_inc(&alg->stats.akcipher.err_cnt);
	else
		atomic64_inc(&alg->stats.akcipher.verify_cnt);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_akcipher_verify);

void crypto_stats_compress(unsigned int slen, int ret, struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.compress.err_cnt);
	} else {
		atomic64_inc(&alg->stats.compress.compress_cnt);
		atomic64_add(slen, &alg->stats.compress.compress_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_compress);

void crypto_stats_decompress(unsigned int slen, int ret, struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.compress.err_cnt);
	} else {
		atomic64_inc(&alg->stats.compress.decompress_cnt);
		atomic64_add(slen, &alg->stats.compress.decompress_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_decompress);

void crypto_stats_ahash_update(unsigned int nbytes, int ret,
			       struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY)
		atomic64_inc(&alg->stats.hash.err_cnt);
	else
		atomic64_add(nbytes, &alg->stats.hash.hash_tlen);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_ahash_update);

void crypto_stats_ahash_final(unsigned int nbytes, int ret,
			      struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.hash.err_cnt);
	} else {
		atomic64_inc(&alg->stats.hash.hash_cnt);
		atomic64_add(nbytes, &alg->stats.hash.hash_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_ahash_final);

void crypto_stats_kpp_set_secret(struct crypto_alg *alg, int ret)
{
	if (ret)
		atomic64_inc(&alg->stats.kpp.err_cnt);
	else
		atomic64_inc(&alg->stats.kpp.setsecret_cnt);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_kpp_set_secret);

void crypto_stats_kpp_generate_public_key(struct crypto_alg *alg, int ret)
{
	if (ret)
		atomic64_inc(&alg->stats.kpp.err_cnt);
	else
		atomic64_inc(&alg->stats.kpp.generate_public_key_cnt);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_kpp_generate_public_key);

void crypto_stats_kpp_compute_shared_secret(struct crypto_alg *alg, int ret)
{
	if (ret)
		atomic64_inc(&alg->stats.kpp.err_cnt);
	else
		atomic64_inc(&alg->stats.kpp.compute_shared_secret_cnt);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_kpp_compute_shared_secret);

void crypto_stats_rng_seed(struct crypto_alg *alg, int ret)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY)
		atomic64_inc(&alg->stats.rng.err_cnt);
	else
		atomic64_inc(&alg->stats.rng.seed_cnt);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_rng_seed);

void crypto_stats_rng_generate(struct crypto_alg *alg, unsigned int dlen,
			       int ret)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.rng.err_cnt);
	} else {
		atomic64_inc(&alg->stats.rng.generate_cnt);
		atomic64_add(dlen, &alg->stats.rng.generate_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_rng_generate);

void crypto_stats_skcipher_encrypt(unsigned int cryptlen, int ret,
				   struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.cipher.err_cnt);
	} else {
		atomic64_inc(&alg->stats.cipher.encrypt_cnt);
		atomic64_add(cryptlen, &alg->stats.cipher.encrypt_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_skcipher_encrypt);

void crypto_stats_skcipher_decrypt(unsigned int cryptlen, int ret,
				   struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.cipher.err_cnt);
	} else {
		atomic64_inc(&alg->stats.cipher.decrypt_cnt);
		atomic64_add(cryptlen, &alg->stats.cipher.decrypt_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_skcipher_decrypt);
#endif

static void __init crypto_start_tests(void)
{
	for (;;) {
		struct crypto_larval *larval = NULL;
		struct crypto_alg *q;

		down_write(&crypto_alg_sem);

		list_for_each_entry(q, &crypto_alg_list, cra_list) {
			struct crypto_larval *l;

			if (!crypto_is_larval(q))
				continue;

			l = (void *)q;

			if (!crypto_is_test_larval(l))
				continue;

			if (l->test_started)
				continue;

			l->test_started = true;
			larval = l;
			break;
		}

		up_write(&crypto_alg_sem);

		if (!larval)
			break;

		crypto_wait_for_test(larval);
	}

	static_branch_enable(&crypto_boot_test_finished);
}

static int __init crypto_algapi_init(void)
{
	crypto_init_proc();
	crypto_start_tests();
	return 0;
}

static void __exit crypto_algapi_exit(void)
{
	crypto_exit_proc();
}

/*
 * We run this at late_initcall so that all the built-in algorithms
 * have had a chance to register themselves first.
 */
late_initcall(crypto_algapi_init);
module_exit(crypto_algapi_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Cryptographic algorithms API");
MODULE_SOFTDEP("pre: cryptomgr");