// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Scatterlist Cryptographic API.
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2002 David S. Miller (davem@redhat.com)
 * Copyright (c) 2005 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * Portions derived from Cryptoapi, by Alexander Kjeldaas <astor@fast.no>
 * and Nettle, by Niels Möller.
 */

#include <linux/err.h>
#include <linux/errno.h>
#include <linux/jump_label.h>
#include <linux/kernel.h>
#include <linux/kmod.h>
#include <linux/module.h>
#include <linux/param.h>
#include <linux/sched/signal.h>
#include <linux/slab.h>
#include <linux/string.h>
#include <linux/completion.h>
#include "internal.h"

LIST_HEAD(crypto_alg_list);
EXPORT_SYMBOL_GPL(crypto_alg_list);
DECLARE_RWSEM(crypto_alg_sem);
EXPORT_SYMBOL_GPL(crypto_alg_sem);

BLOCKING_NOTIFIER_HEAD(crypto_chain);
EXPORT_SYMBOL_GPL(crypto_chain);

DEFINE_STATIC_KEY_FALSE(crypto_boot_test_finished);
EXPORT_SYMBOL_GPL(crypto_boot_test_finished);

static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg);

struct crypto_alg *crypto_mod_get(struct crypto_alg *alg)
{
    return try_module_get(alg->cra_module) ? crypto_alg_get(alg) : NULL;
}
EXPORT_SYMBOL_GPL(crypto_mod_get);

void crypto_mod_put(struct crypto_alg *alg)
{
    struct module *module = alg->cra_module;

    crypto_alg_put(alg);
    module_put(module);
}
EXPORT_SYMBOL_GPL(crypto_mod_put);
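
/*
 * Illustrative sketch, not part of the original file: every successful
 * crypto_mod_get() must be balanced by crypto_mod_put(), since the pair
 * pins both the algorithm refcount and the implementing module.
 * example_pin_alg() is a hypothetical caller shown only for clarity.
 */
static inline bool example_pin_alg(struct crypto_alg *alg)
{
    if (!crypto_mod_get(alg))
        return false;    /* owning module is being unloaded */
    /* ... safe to use alg here ... */
    crypto_mod_put(alg);
    return true;
}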

static struct crypto_alg *__crypto_alg_lookup(const char *name, u32 type,
                          u32 mask)
{
    struct crypto_alg *q, *alg = NULL;
    int best = -2;

    list_for_each_entry(q, &crypto_alg_list, cra_list) {
        int exact, fuzzy;

        if (crypto_is_moribund(q))
            continue;

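        /* Reject candidates whose masked flag bits differ from the
         * requested type. */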
        if ((q->cra_flags ^ type) & mask)
            continue;

        if (crypto_is_larval(q) &&
            !crypto_is_test_larval((struct crypto_larval *)q) &&
            ((struct crypto_larval *)q)->mask != mask)
            continue;

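        /* An exact match on the driver name always wins; a match on the
         * generic name wins only if it beats the best priority so far. */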
        exact = !strcmp(q->cra_driver_name, name);
        fuzzy = !strcmp(q->cra_name, name);
        if (!exact && !(fuzzy && q->cra_priority > best))
            continue;

        if (unlikely(!crypto_mod_get(q)))
            continue;

        best = q->cra_priority;
        if (alg)
            crypto_mod_put(alg);
        alg = q;

        if (exact)
            break;
    }

    return alg;
}

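/*
 * A larval is a temporary stand-in kept on crypto_alg_list while the real
 * ("adult") algorithm is being instantiated or tested; concurrent lookups
 * block on its completion until the adult is ready.
 */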
static void crypto_larval_destroy(struct crypto_alg *alg)
{
    struct crypto_larval *larval = (void *)alg;

    BUG_ON(!crypto_is_larval(alg));
    if (!IS_ERR_OR_NULL(larval->adult))
        crypto_mod_put(larval->adult);
    kfree(larval);
}

struct crypto_larval *crypto_larval_alloc(const char *name, u32 type, u32 mask)
{
    struct crypto_larval *larval;

    larval = kzalloc(sizeof(*larval), GFP_KERNEL);
    if (!larval)
        return ERR_PTR(-ENOMEM);

    larval->mask = mask;
    larval->alg.cra_flags = CRYPTO_ALG_LARVAL | type;
    larval->alg.cra_priority = -1;
    larval->alg.cra_destroy = crypto_larval_destroy;

    strlcpy(larval->alg.cra_name, name, CRYPTO_MAX_ALG_NAME);
    init_completion(&larval->completion);

    return larval;
}
EXPORT_SYMBOL_GPL(crypto_larval_alloc);

static struct crypto_alg *crypto_larval_add(const char *name, u32 type,
                        u32 mask)
{
    struct crypto_alg *alg;
    struct crypto_larval *larval;

    larval = crypto_larval_alloc(name, type, mask);
    if (IS_ERR(larval))
        return ERR_CAST(larval);

    refcount_set(&larval->alg.cra_refcnt, 2);

    down_write(&crypto_alg_sem);
    alg = __crypto_alg_lookup(name, type, mask);
    if (!alg) {
        alg = &larval->alg;
        list_add(&alg->cra_list, &crypto_alg_list);
    }
    up_write(&crypto_alg_sem);

    if (alg != &larval->alg) {
        kfree(larval);
        if (crypto_is_larval(alg))
            alg = crypto_larval_wait(alg);
    }

    return alg;
}

void crypto_larval_kill(struct crypto_alg *alg)
{
    struct crypto_larval *larval = (void *)alg;

    down_write(&crypto_alg_sem);
    list_del(&alg->cra_list);
    up_write(&crypto_alg_sem);
    complete_all(&larval->completion);
    crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_larval_kill);

void crypto_wait_for_test(struct crypto_larval *larval)
{
    int err;

    err = crypto_probing_notify(CRYPTO_MSG_ALG_REGISTER, larval->adult);
    if (WARN_ON_ONCE(err != NOTIFY_STOP))
        goto out;

    err = wait_for_completion_killable(&larval->completion);
    WARN_ON(err);
    if (!err)
        crypto_notify(CRYPTO_MSG_ALG_LOADED, larval);

out:
    crypto_larval_kill(&larval->alg);
}
EXPORT_SYMBOL_GPL(crypto_wait_for_test);

static void crypto_start_test(struct crypto_larval *larval)
{
    if (!crypto_is_test_larval(larval))
        return;

    if (larval->test_started)
        return;

    down_write(&crypto_alg_sem);
    if (larval->test_started) {
        up_write(&crypto_alg_sem);
        return;
    }

    larval->test_started = true;
    up_write(&crypto_alg_sem);

    crypto_wait_for_test(larval);
}

static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg)
{
    struct crypto_larval *larval = (void *)alg;
    long timeout;

    if (!static_branch_likely(&crypto_boot_test_finished))
        crypto_start_test(larval);

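    /* Wait up to 60 seconds for the larval to mature into the real
     * algorithm or for its test to resolve. */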
    timeout = wait_for_completion_killable_timeout(
        &larval->completion, 60 * HZ);

    alg = larval->adult;
    if (timeout < 0)
        alg = ERR_PTR(-EINTR);
    else if (!timeout)
        alg = ERR_PTR(-ETIMEDOUT);
    else if (!alg)
        alg = ERR_PTR(-ENOENT);
    else if (IS_ERR(alg))
        ;
    else if (crypto_is_test_larval(larval) &&
         !(alg->cra_flags & CRYPTO_ALG_TESTED))
        alg = ERR_PTR(-EAGAIN);
    else if (alg->cra_flags & CRYPTO_ALG_FIPS_INTERNAL)
        alg = ERR_PTR(-EAGAIN);
    else if (!crypto_mod_get(alg))
        alg = ERR_PTR(-EAGAIN);
    crypto_mod_put(&larval->alg);

    return alg;
}

static struct crypto_alg *crypto_alg_lookup(const char *name, u32 type,
                        u32 mask)
{
    const u32 fips = CRYPTO_ALG_FIPS_INTERNAL;
    struct crypto_alg *alg;
    u32 test = 0;

    if (!((type | mask) & CRYPTO_ALG_TESTED))
        test |= CRYPTO_ALG_TESTED;

    down_read(&crypto_alg_sem);
    alg = __crypto_alg_lookup(name, (type | test) & ~fips,
                  (mask | test) & ~fips);
    if (alg) {
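        /* Unless the caller asked about CRYPTO_ALG_FIPS_INTERNAL
         * explicitly, narrow the mask to that bit so FIPS-internal
         * algorithms stay hidden from ordinary lookups. */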
        if (((type | mask) ^ fips) & fips)
            mask |= fips;
        mask &= fips;

        if (!crypto_is_larval(alg) &&
            ((type ^ alg->cra_flags) & mask)) {
            /* Algorithm is disallowed in FIPS mode. */
            crypto_mod_put(alg);
            alg = ERR_PTR(-ENOENT);
        }
    } else if (test) {
        alg = __crypto_alg_lookup(name, type, mask);
        if (alg && !crypto_is_larval(alg)) {
            /* Test failed */
            crypto_mod_put(alg);
            alg = ERR_PTR(-ELIBBAD);
        }
    }
    up_read(&crypto_alg_sem);

    return alg;
}

static struct crypto_alg *crypto_larval_lookup(const char *name, u32 type,
                           u32 mask)
{
    struct crypto_alg *alg;

    if (!name)
        return ERR_PTR(-ENOENT);

    type &= ~(CRYPTO_ALG_LARVAL | CRYPTO_ALG_DEAD);
    mask &= ~(CRYPTO_ALG_LARVAL | CRYPTO_ALG_DEAD);

    alg = crypto_alg_lookup(name, type, mask);
    if (!alg && !(mask & CRYPTO_NOLOAD)) {
        request_module("crypto-%s", name);

        if (!((type ^ CRYPTO_ALG_NEED_FALLBACK) & mask &
              CRYPTO_ALG_NEED_FALLBACK))
            request_module("crypto-%s-all", name);

        alg = crypto_alg_lookup(name, type, mask);
    }

    if (!IS_ERR_OR_NULL(alg) && crypto_is_larval(alg))
        alg = crypto_larval_wait(alg);
    else if (!alg)
        alg = crypto_larval_add(name, type, mask);

    return alg;
}

int crypto_probing_notify(unsigned long val, void *v)
{
    int ok;

    ok = blocking_notifier_call_chain(&crypto_chain, val, v);
    if (ok == NOTIFY_DONE) {
        request_module("cryptomgr");
        ok = blocking_notifier_call_chain(&crypto_chain, val, v);
    }

    return ok;
}
EXPORT_SYMBOL_GPL(crypto_probing_notify);

struct crypto_alg *crypto_alg_mod_lookup(const char *name, u32 type, u32 mask)
{
    struct crypto_alg *alg;
    struct crypto_alg *larval;
    int ok;

    /*
     * If the internal flag is set for a cipher, require a caller to
     * invoke the cipher with the internal flag to use that cipher.
     * Also, if a caller wants to allocate a cipher that may or may
     * not be an internal cipher, use type | CRYPTO_ALG_INTERNAL and
     * !(mask & CRYPTO_ALG_INTERNAL).
     */
    if (!((type | mask) & CRYPTO_ALG_INTERNAL))
        mask |= CRYPTO_ALG_INTERNAL;

    larval = crypto_larval_lookup(name, type, mask);
    if (IS_ERR(larval) || !crypto_is_larval(larval))
        return larval;

    ok = crypto_probing_notify(CRYPTO_MSG_ALG_REQUEST, larval);

    if (ok == NOTIFY_STOP)
        alg = crypto_larval_wait(larval);
    else {
        crypto_mod_put(larval);
        alg = ERR_PTR(-ENOENT);
    }
    crypto_larval_kill(larval);
    return alg;
}
EXPORT_SYMBOL_GPL(crypto_alg_mod_lookup);
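
/*
 * Illustrative sketch, not part of the original file: a caller that is
 * happy with either an internal or a non-internal implementation sets
 * CRYPTO_ALG_INTERNAL in type but leaves it out of mask, exactly as the
 * comment in crypto_alg_mod_lookup() above describes.  The returned
 * algorithm holds a reference that must be dropped with crypto_mod_put().
 * example_lookup_any() is a hypothetical helper shown only for clarity.
 */
static inline struct crypto_alg *example_lookup_any(const char *name)
{
    return crypto_alg_mod_lookup(name, CRYPTO_ALG_INTERNAL, 0);
}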

static int crypto_init_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
{
    const struct crypto_type *type_obj = tfm->__crt_alg->cra_type;

    if (type_obj)
        return type_obj->init(tfm, type, mask);
    return 0;
}

static void crypto_exit_ops(struct crypto_tfm *tfm)
{
    const struct crypto_type *type = tfm->__crt_alg->cra_type;

    if (type && tfm->exit)
        tfm->exit(tfm);
}

static unsigned int crypto_ctxsize(struct crypto_alg *alg, u32 type, u32 mask)
{
    const struct crypto_type *type_obj = alg->cra_type;
    unsigned int len;

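    /* Start with the slack needed to align the context area to
     * cra_alignmask beyond the default context alignment. */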
    len = alg->cra_alignmask & ~(crypto_tfm_ctx_alignment() - 1);
    if (type_obj)
        return len + type_obj->ctxsize(alg, type, mask);

    switch (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) {
    default:
        BUG();

    case CRYPTO_ALG_TYPE_CIPHER:
        len += crypto_cipher_ctxsize(alg);
        break;

    case CRYPTO_ALG_TYPE_COMPRESS:
        len += crypto_compress_ctxsize(alg);
        break;
    }

    return len;
}

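/*
 * Mark an algorithm as dying after a transform allocation failed with
 * -EAGAIN, so that subsequent lookups stop picking the broken instance.
 */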
void crypto_shoot_alg(struct crypto_alg *alg)
{
    down_write(&crypto_alg_sem);
    alg->cra_flags |= CRYPTO_ALG_DYING;
    up_write(&crypto_alg_sem);
}
EXPORT_SYMBOL_GPL(crypto_shoot_alg);

struct crypto_tfm *__crypto_alloc_tfm(struct crypto_alg *alg, u32 type,
                      u32 mask)
{
    struct crypto_tfm *tfm = NULL;
    unsigned int tfm_size;
    int err = -ENOMEM;

    tfm_size = sizeof(*tfm) + crypto_ctxsize(alg, type, mask);
    tfm = kzalloc(tfm_size, GFP_KERNEL);
    if (tfm == NULL)
        goto out_err;

    tfm->__crt_alg = alg;

    err = crypto_init_ops(tfm, type, mask);
    if (err)
        goto out_free_tfm;

    if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm)))
        goto cra_init_failed;

    goto out;

cra_init_failed:
    crypto_exit_ops(tfm);
out_free_tfm:
    if (err == -EAGAIN)
        crypto_shoot_alg(alg);
    kfree(tfm);
out_err:
    tfm = ERR_PTR(err);
out:
    return tfm;
}
EXPORT_SYMBOL_GPL(__crypto_alloc_tfm);

/**
 *  crypto_alloc_base - Locate algorithm and allocate transform
 *  @alg_name: Name of algorithm
 *  @type: Type of algorithm
 *  @mask: Mask for type comparison
 *
 *  This function should not be used by new algorithm types.
 *  Please use crypto_alloc_tfm instead.
 *
 *  crypto_alloc_base() will first attempt to locate an already loaded
 *  algorithm.  If that fails and the kernel supports dynamically loadable
 *  modules, it will then attempt to load a module of the same name or
 *  alias.  If that fails it will send a query to any loaded crypto manager
 *  to construct an algorithm on the fly.  A refcount is grabbed on the
 *  algorithm which is then associated with the new transform.
 *
 *  The returned transform is of a non-determinate type.  Most people
 *  should use one of the more specific allocation functions such as
 *  crypto_alloc_skcipher().
 *
 *  In case of error the return value is an error pointer.
 */
struct crypto_tfm *crypto_alloc_base(const char *alg_name, u32 type, u32 mask)
{
    struct crypto_tfm *tfm;
    int err;

    for (;;) {
        struct crypto_alg *alg;

        alg = crypto_alg_mod_lookup(alg_name, type, mask);
        if (IS_ERR(alg)) {
            err = PTR_ERR(alg);
            goto err;
        }

        tfm = __crypto_alloc_tfm(alg, type, mask);
        if (!IS_ERR(tfm))
            return tfm;

        crypto_mod_put(alg);
        err = PTR_ERR(tfm);

err:
        if (err != -EAGAIN)
            break;
        if (fatal_signal_pending(current)) {
            err = -EINTR;
            break;
        }
    }

    return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_alloc_base);
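
/*
 * Illustrative sketch, not part of the original file: a minimal
 * crypto_alloc_base() caller requesting an old-style single-block
 * cipher, the case handled by the CRYPTO_ALG_TYPE_CIPHER branch in
 * crypto_ctxsize() above.  "aes" is only an example name, and
 * example_alloc_base() is a hypothetical helper; the final
 * crypto_destroy_tfm() drops the reference taken on the algorithm.
 */
static inline int example_alloc_base(void)
{
    struct crypto_tfm *tfm;

    tfm = crypto_alloc_base("aes", CRYPTO_ALG_TYPE_CIPHER,
                CRYPTO_ALG_TYPE_MASK);
    if (IS_ERR(tfm))
        return PTR_ERR(tfm);

    /* ... drive the transform through a type-specific wrapper ... */

    crypto_destroy_tfm(tfm, tfm);
    return 0;
}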

void *crypto_create_tfm_node(struct crypto_alg *alg,
            const struct crypto_type *frontend,
            int node)
{
    char *mem;
    struct crypto_tfm *tfm = NULL;
    unsigned int tfmsize;
    unsigned int total;
    int err = -ENOMEM;

    tfmsize = frontend->tfmsize;
    total = tfmsize + sizeof(*tfm) + frontend->extsize(alg);

    mem = kzalloc_node(total, GFP_KERNEL, node);
    if (mem == NULL)
        goto out_err;

    tfm = (struct crypto_tfm *)(mem + tfmsize);
    tfm->__crt_alg = alg;
    tfm->node = node;

    err = frontend->init_tfm(tfm);
    if (err)
        goto out_free_tfm;

    if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm)))
        goto cra_init_failed;

    goto out;

cra_init_failed:
    crypto_exit_ops(tfm);
out_free_tfm:
    if (err == -EAGAIN)
        crypto_shoot_alg(alg);
    kfree(mem);
out_err:
    mem = ERR_PTR(err);
out:
    return mem;
}
EXPORT_SYMBOL_GPL(crypto_create_tfm_node);

struct crypto_alg *crypto_find_alg(const char *alg_name,
                   const struct crypto_type *frontend,
                   u32 type, u32 mask)
{
    if (frontend) {
        type &= frontend->maskclear;
        mask &= frontend->maskclear;
        type |= frontend->type;
        mask |= frontend->maskset;
    }

    return crypto_alg_mod_lookup(alg_name, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_find_alg);

/**
 *  crypto_alloc_tfm_node - Locate algorithm and allocate transform
 *  @alg_name: Name of algorithm
 *  @frontend: Frontend algorithm type
 *  @type: Type of algorithm
 *  @mask: Mask for type comparison
 *  @node: NUMA node in which users desire to put requests, if node is
 *      NUMA_NO_NODE, it means users have no special requirement.
 *
 *  crypto_alloc_tfm() will first attempt to locate an already loaded
 *  algorithm.  If that fails and the kernel supports dynamically loadable
 *  modules, it will then attempt to load a module of the same name or
 *  alias.  If that fails it will send a query to any loaded crypto manager
 *  to construct an algorithm on the fly.  A refcount is grabbed on the
 *  algorithm which is then associated with the new transform.
 *
 *  The returned transform is of a non-determinate type.  Most people
 *  should use one of the more specific allocation functions such as
 *  crypto_alloc_skcipher().
 *
 *  In case of error the return value is an error pointer.
 */
void *crypto_alloc_tfm_node(const char *alg_name,
               const struct crypto_type *frontend, u32 type, u32 mask,
               int node)
{
    void *tfm;
    int err;

    for (;;) {
        struct crypto_alg *alg;

        alg = crypto_find_alg(alg_name, frontend, type, mask);
        if (IS_ERR(alg)) {
            err = PTR_ERR(alg);
            goto err;
        }

        tfm = crypto_create_tfm_node(alg, frontend, node);
        if (!IS_ERR(tfm))
            return tfm;

        crypto_mod_put(alg);
        err = PTR_ERR(tfm);

err:
        if (err != -EAGAIN)
            break;
        if (fatal_signal_pending(current)) {
            err = -EINTR;
            break;
        }
    }

    return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_alloc_tfm_node);

/**
 *  crypto_destroy_tfm - Free crypto transform
 *  @mem: Start of tfm slab
 *  @tfm: Transform to free
 *
 *  This function frees up the transform and any associated resources,
 *  then drops the refcount on the associated algorithm.
 */
void crypto_destroy_tfm(void *mem, struct crypto_tfm *tfm)
{
    struct crypto_alg *alg;

    if (IS_ERR_OR_NULL(mem))
        return;

    alg = tfm->__crt_alg;

    if (!tfm->exit && alg->cra_exit)
        alg->cra_exit(tfm);
    crypto_exit_ops(tfm);
    crypto_mod_put(alg);
    kfree_sensitive(mem);
}
EXPORT_SYMBOL_GPL(crypto_destroy_tfm);

int crypto_has_alg(const char *name, u32 type, u32 mask)
{
    int ret = 0;
    struct crypto_alg *alg = crypto_alg_mod_lookup(name, type, mask);

    if (!IS_ERR(alg)) {
        crypto_mod_put(alg);
        ret = 1;
    }

    return ret;
}
EXPORT_SYMBOL_GPL(crypto_has_alg);
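
/*
 * Illustrative sketch, not part of the original file: probing for an
 * algorithm before committing to it.  "aes" is only an example name,
 * and example_have_aes() is hypothetical.  Note that the lookup behind
 * crypto_has_alg() may load modules as a side effect.
 */
static inline bool example_have_aes(void)
{
    return crypto_has_alg("aes", 0, 0) != 0;
}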

void crypto_req_done(struct crypto_async_request *req, int err)
{
    struct crypto_wait *wait = req->data;

    if (err == -EINPROGRESS)
        return;

    wait->err = err;
    complete(&wait->completion);
}
EXPORT_SYMBOL_GPL(crypto_req_done);
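
/*
 * Illustrative sketch, not part of the original file: crypto_req_done()
 * is the completion callback used with struct crypto_wait and the
 * crypto_wait_req() helper from <linux/crypto.h>, roughly as follows
 * (req here is an assumed, already set up skcipher request):
 *
 *    DECLARE_CRYPTO_WAIT(wait);
 *
 *    skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
 *                                  crypto_req_done, &wait);
 *    err = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);
 */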

MODULE_DESCRIPTION("Cryptographic core API");
MODULE_LICENSE("GPL");