/*
 * Scatterlist Cryptographic API.
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2002 David S. Miller (davem@redhat.com)
 * Copyright (c) 2005 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * Portions derived from Cryptoapi, by Alexander Kjeldaas <astor@fast.no>
 * and Nettle, by Niels Möller.
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <linux/err.h>
#include <linux/errno.h>
#include <linux/kernel.h>
#include <linux/kmod.h>
#include <linux/module.h>
#include <linux/param.h>
#include <linux/sched.h>
#include <linux/slab.h>
#include <linux/string.h>
#include "internal.h"

LIST_HEAD(crypto_alg_list);
EXPORT_SYMBOL_GPL(crypto_alg_list);
DECLARE_RWSEM(crypto_alg_sem);
EXPORT_SYMBOL_GPL(crypto_alg_sem);

BLOCKING_NOTIFIER_HEAD(crypto_chain);
EXPORT_SYMBOL_GPL(crypto_chain);

static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg);

struct crypto_alg *crypto_mod_get(struct crypto_alg *alg)
{
    return try_module_get(alg->cra_module) ? crypto_alg_get(alg) : NULL;
}
EXPORT_SYMBOL_GPL(crypto_mod_get);

void crypto_mod_put(struct crypto_alg *alg)
{
    struct module *module = alg->cra_module;

    crypto_alg_put(alg);
    module_put(module);
}
EXPORT_SYMBOL_GPL(crypto_mod_put);

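/*
 * Added commentary: a test larval stands in for an algorithm that has
 * been registered but has not yet passed its self-tests.  Unlike the
 * lookup larvals created by crypto_larval_add() below, test larvals
 * carry the registered algorithm's cra_driver_name, which is what this
 * check keys on.
 */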
static inline int crypto_is_test_larval(struct crypto_larval *larval)
{
    return larval->alg.cra_driver_name[0];
}

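/*
 * Added commentary: find the best registered algorithm matching @name.
 * An exact cra_driver_name match wins immediately; otherwise the
 * highest-priority cra_name match is chosen.  A reference is taken on
 * the returned algorithm.  The caller must hold crypto_alg_sem.
 */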
static struct crypto_alg *__crypto_alg_lookup(const char *name, u32 type,
                          u32 mask)
{
    struct crypto_alg *q, *alg = NULL;
    int best = -2;

    list_for_each_entry(q, &crypto_alg_list, cra_list) {
        int exact, fuzzy;

        if (crypto_is_moribund(q))
            continue;

        if ((q->cra_flags ^ type) & mask)
            continue;

        if (crypto_is_larval(q) &&
            !crypto_is_test_larval((struct crypto_larval *)q) &&
            ((struct crypto_larval *)q)->mask != mask)
            continue;

        exact = !strcmp(q->cra_driver_name, name);
        fuzzy = !strcmp(q->cra_name, name);
        if (!exact && !(fuzzy && q->cra_priority > best))
            continue;

        if (unlikely(!crypto_mod_get(q)))
            continue;

        best = q->cra_priority;
        if (alg)
            crypto_mod_put(alg);
        alg = q;

        if (exact)
            break;
    }

    return alg;
}

static void crypto_larval_destroy(struct crypto_alg *alg)
{
    struct crypto_larval *larval = (void *)alg;

    BUG_ON(!crypto_is_larval(alg));
    if (larval->adult)
        crypto_mod_put(larval->adult);
    kfree(larval);
}

struct crypto_larval *crypto_larval_alloc(const char *name, u32 type, u32 mask)
{
    struct crypto_larval *larval;

    larval = kzalloc(sizeof(*larval), GFP_KERNEL);
    if (!larval)
        return ERR_PTR(-ENOMEM);

    larval->mask = mask;
    larval->alg.cra_flags = CRYPTO_ALG_LARVAL | type;
    larval->alg.cra_priority = -1;
    larval->alg.cra_destroy = crypto_larval_destroy;

    strlcpy(larval->alg.cra_name, name, CRYPTO_MAX_ALG_NAME);
    init_completion(&larval->completion);

    return larval;
}
EXPORT_SYMBOL_GPL(crypto_larval_alloc);

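/*
 * Added commentary: register a lookup larval for @name so that
 * concurrent lookups wait on a single module load/instantiation.  The
 * larval starts with a refcount of 2: one for the list, one for the
 * caller.  If another thread registered the name first, the new larval
 * is freed and the existing entry is used instead (waiting on it if it
 * is itself a larval).
 */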
static struct crypto_alg *crypto_larval_add(const char *name, u32 type,
                        u32 mask)
{
    struct crypto_alg *alg;
    struct crypto_larval *larval;

    larval = crypto_larval_alloc(name, type, mask);
    if (IS_ERR(larval))
        return ERR_CAST(larval);

    atomic_set(&larval->alg.cra_refcnt, 2);

    down_write(&crypto_alg_sem);
    alg = __crypto_alg_lookup(name, type, mask);
    if (!alg) {
        alg = &larval->alg;
        list_add(&alg->cra_list, &crypto_alg_list);
    }
    up_write(&crypto_alg_sem);

    if (alg != &larval->alg) {
        kfree(larval);
        if (crypto_is_larval(alg))
            alg = crypto_larval_wait(alg);
    }

    return alg;
}

void crypto_larval_kill(struct crypto_alg *alg)
{
    struct crypto_larval *larval = (void *)alg;

    down_write(&crypto_alg_sem);
    list_del(&alg->cra_list);
    up_write(&crypto_alg_sem);
    complete_all(&larval->completion);
    crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_larval_kill);

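/*
 * Added commentary: wait (killably, for up to 60 seconds) for a larval
 * to mature, then return the adult algorithm with a new reference, or
 * an ERR_PTR on interruption, timeout, or failed instantiation.  The
 * caller's reference on the larval is dropped in all cases.
 */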
static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg)
{
    struct crypto_larval *larval = (void *)alg;
    long timeout;

    timeout = wait_for_completion_killable_timeout(
        &larval->completion, 60 * HZ);

    alg = larval->adult;
    if (timeout < 0)
        alg = ERR_PTR(-EINTR);
    else if (!timeout)
        alg = ERR_PTR(-ETIMEDOUT);
    else if (!alg)
        alg = ERR_PTR(-ENOENT);
    else if (crypto_is_test_larval(larval) &&
         !(alg->cra_flags & CRYPTO_ALG_TESTED))
        alg = ERR_PTR(-EAGAIN);
    else if (!crypto_mod_get(alg))
        alg = ERR_PTR(-EAGAIN);
    crypto_mod_put(&larval->alg);

    return alg;
}

struct crypto_alg *crypto_alg_lookup(const char *name, u32 type, u32 mask)
{
    struct crypto_alg *alg;

    down_read(&crypto_alg_sem);
    alg = __crypto_alg_lookup(name, type, mask);
    up_read(&crypto_alg_sem);

    return alg;
}
EXPORT_SYMBOL_GPL(crypto_alg_lookup);

struct crypto_alg *crypto_larval_lookup(const char *name, u32 type, u32 mask)
{
    struct crypto_alg *alg;

    if (!name)
        return ERR_PTR(-ENOENT);

    type &= ~(CRYPTO_ALG_LARVAL | CRYPTO_ALG_DEAD);
    mask &= ~(CRYPTO_ALG_LARVAL | CRYPTO_ALG_DEAD);

    alg = crypto_alg_lookup(name, type, mask);
    if (!alg) {
        request_module("crypto-%s", name);

        if (!((type ^ CRYPTO_ALG_NEED_FALLBACK) & mask &
              CRYPTO_ALG_NEED_FALLBACK))
            request_module("crypto-%s-all", name);

        alg = crypto_alg_lookup(name, type, mask);
    }

    if (alg)
        return crypto_is_larval(alg) ? crypto_larval_wait(alg) : alg;

    return crypto_larval_add(name, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_larval_lookup);

int crypto_probing_notify(unsigned long val, void *v)
{
    int ok;

    ok = blocking_notifier_call_chain(&crypto_chain, val, v);
    if (ok == NOTIFY_DONE) {
        request_module("cryptomgr");
        ok = blocking_notifier_call_chain(&crypto_chain, val, v);
    }

    return ok;
}
EXPORT_SYMBOL_GPL(crypto_probing_notify);

struct crypto_alg *crypto_alg_mod_lookup(const char *name, u32 type, u32 mask)
{
    struct crypto_alg *alg;
    struct crypto_alg *larval;
    int ok;

    if (!((type | mask) & CRYPTO_ALG_TESTED)) {
        type |= CRYPTO_ALG_TESTED;
        mask |= CRYPTO_ALG_TESTED;
    }

    /*
     * If the internal flag is set for a cipher, require the caller to
     * invoke the cipher with the internal flag set in order to use it.
     * A caller that wants a cipher which may or may not be internal
     * should pass type | CRYPTO_ALG_INTERNAL together with
     * !(mask & CRYPTO_ALG_INTERNAL).
     */
    if (!((type | mask) & CRYPTO_ALG_INTERNAL))
        mask |= CRYPTO_ALG_INTERNAL;

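    /*
     * Illustrative examples (added commentary, not from the original
     * source): passing type = mask = CRYPTO_ALG_INTERNAL matches only
     * internal implementations; passing type = CRYPTO_ALG_INTERNAL
     * with the bit clear in mask matches both kinds, since the
     * adjustment above is skipped and the bit is never compared.
     */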
    larval = crypto_larval_lookup(name, type, mask);
    if (IS_ERR(larval) || !crypto_is_larval(larval))
        return larval;

    ok = crypto_probing_notify(CRYPTO_MSG_ALG_REQUEST, larval);

    if (ok == NOTIFY_STOP)
        alg = crypto_larval_wait(larval);
    else {
        crypto_mod_put(larval);
        alg = ERR_PTR(-ENOENT);
    }
    crypto_larval_kill(larval);
    return alg;
}
EXPORT_SYMBOL_GPL(crypto_alg_mod_lookup);

static int crypto_init_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
{
    const struct crypto_type *type_obj = tfm->__crt_alg->cra_type;

    if (type_obj)
        return type_obj->init(tfm, type, mask);

    switch (crypto_tfm_alg_type(tfm)) {
    case CRYPTO_ALG_TYPE_CIPHER:
        return crypto_init_cipher_ops(tfm);

    case CRYPTO_ALG_TYPE_COMPRESS:
        return crypto_init_compress_ops(tfm);

    default:
        break;
    }

    BUG();
    return -EINVAL;
}

static void crypto_exit_ops(struct crypto_tfm *tfm)
{
    const struct crypto_type *type = tfm->__crt_alg->cra_type;

    if (type && tfm->exit)
        tfm->exit(tfm);
}

static unsigned int crypto_ctxsize(struct crypto_alg *alg, u32 type, u32 mask)
{
    const struct crypto_type *type_obj = alg->cra_type;
    unsigned int len;

    len = alg->cra_alignmask & ~(crypto_tfm_ctx_alignment() - 1);
    if (type_obj)
        return len + type_obj->ctxsize(alg, type, mask);

    switch (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) {
    default:
        BUG();

    case CRYPTO_ALG_TYPE_CIPHER:
        len += crypto_cipher_ctxsize(alg);
        break;

    case CRYPTO_ALG_TYPE_COMPRESS:
        len += crypto_compress_ctxsize(alg);
        break;
    }

    return len;
}

void crypto_shoot_alg(struct crypto_alg *alg)
{
    down_write(&crypto_alg_sem);
    alg->cra_flags |= CRYPTO_ALG_DYING;
    up_write(&crypto_alg_sem);
}
EXPORT_SYMBOL_GPL(crypto_shoot_alg);

struct crypto_tfm *__crypto_alloc_tfm(struct crypto_alg *alg, u32 type,
                      u32 mask)
{
    struct crypto_tfm *tfm = NULL;
    unsigned int tfm_size;
    int err = -ENOMEM;

    tfm_size = sizeof(*tfm) + crypto_ctxsize(alg, type, mask);
    tfm = kzalloc(tfm_size, GFP_KERNEL);
    if (tfm == NULL)
        goto out_err;

    tfm->__crt_alg = alg;

    err = crypto_init_ops(tfm, type, mask);
    if (err)
        goto out_free_tfm;

    if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm)))
        goto cra_init_failed;

    goto out;

cra_init_failed:
    crypto_exit_ops(tfm);
out_free_tfm:
    if (err == -EAGAIN)
        crypto_shoot_alg(alg);
    kfree(tfm);
out_err:
    tfm = ERR_PTR(err);
out:
    return tfm;
}
EXPORT_SYMBOL_GPL(__crypto_alloc_tfm);

/*
 *  crypto_alloc_base - Locate algorithm and allocate transform
 *  @alg_name: Name of algorithm
 *  @type: Type of algorithm
 *  @mask: Mask for type comparison
 *
 *  This function should not be used by new algorithm types.
 *  Please use crypto_alloc_tfm instead.
 *
 *  crypto_alloc_base() will first attempt to locate an already loaded
 *  algorithm.  If that fails and the kernel supports dynamically loadable
 *  modules, it will then attempt to load a module of the same name or
 *  alias.  If that fails it will send a query to any loaded crypto manager
 *  to construct an algorithm on the fly.  A refcount is grabbed on the
 *  algorithm which is then associated with the new transform.
 *
 *  The returned transform is of an indeterminate type.  Most people
 *  should use one of the more specific allocation functions such as
 *  crypto_alloc_blkcipher.
 *
 *  In case of error the return value is an error pointer.
 */
struct crypto_tfm *crypto_alloc_base(const char *alg_name, u32 type, u32 mask)
{
    struct crypto_tfm *tfm;
    int err;

    for (;;) {
        struct crypto_alg *alg;

        alg = crypto_alg_mod_lookup(alg_name, type, mask);
        if (IS_ERR(alg)) {
            err = PTR_ERR(alg);
            goto err;
        }

        tfm = __crypto_alloc_tfm(alg, type, mask);
        if (!IS_ERR(tfm))
            return tfm;

        crypto_mod_put(alg);
        err = PTR_ERR(tfm);

err:
        if (err != -EAGAIN)
            break;
        if (fatal_signal_pending(current)) {
            err = -EINTR;
            break;
        }
    }

    return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_alloc_base);
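
/*
 * Usage sketch (illustrative, not part of the original file): allocate
 * a transform by name and release it with crypto_free_tfm().  "aes" is
 * assumed here to name a registered cipher.
 *
 *    struct crypto_tfm *tfm;
 *
 *    tfm = crypto_alloc_base("aes", CRYPTO_ALG_TYPE_CIPHER,
 *                            CRYPTO_ALG_TYPE_MASK);
 *    if (IS_ERR(tfm))
 *        return PTR_ERR(tfm);
 *    ...
 *    crypto_free_tfm(tfm);
 */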

void *crypto_create_tfm(struct crypto_alg *alg,
            const struct crypto_type *frontend)
{
    char *mem;
    struct crypto_tfm *tfm = NULL;
    unsigned int tfmsize;
    unsigned int total;
    int err = -ENOMEM;

    tfmsize = frontend->tfmsize;
    total = tfmsize + sizeof(*tfm) + frontend->extsize(alg);

    mem = kzalloc(total, GFP_KERNEL);
    if (mem == NULL)
        goto out_err;

    tfm = (struct crypto_tfm *)(mem + tfmsize);
    tfm->__crt_alg = alg;

    err = frontend->init_tfm(tfm);
    if (err)
        goto out_free_tfm;

    if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm)))
        goto cra_init_failed;

    goto out;

cra_init_failed:
    crypto_exit_ops(tfm);
out_free_tfm:
    if (err == -EAGAIN)
        crypto_shoot_alg(alg);
    kfree(mem);
out_err:
    mem = ERR_PTR(err);
out:
    return mem;
}
EXPORT_SYMBOL_GPL(crypto_create_tfm);

struct crypto_alg *crypto_find_alg(const char *alg_name,
                   const struct crypto_type *frontend,
                   u32 type, u32 mask)
{
    struct crypto_alg *(*lookup)(const char *name, u32 type, u32 mask) =
        crypto_alg_mod_lookup;

    if (frontend) {
        type &= frontend->maskclear;
        mask &= frontend->maskclear;
        type |= frontend->type;
        mask |= frontend->maskset;

        if (frontend->lookup)
            lookup = frontend->lookup;
    }

    return lookup(alg_name, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_find_alg);

/*
 *  crypto_alloc_tfm - Locate algorithm and allocate transform
 *  @alg_name: Name of algorithm
 *  @frontend: Frontend algorithm type
 *  @type: Type of algorithm
 *  @mask: Mask for type comparison
 *
 *  crypto_alloc_tfm() will first attempt to locate an already loaded
 *  algorithm.  If that fails and the kernel supports dynamically loadable
 *  modules, it will then attempt to load a module of the same name or
 *  alias.  If that fails it will send a query to any loaded crypto manager
 *  to construct an algorithm on the fly.  A refcount is grabbed on the
 *  algorithm which is then associated with the new transform.
 *
 *  The returned transform is of an indeterminate type.  Most people
 *  should use one of the more specific allocation functions such as
 *  crypto_alloc_blkcipher.
 *
 *  In case of error the return value is an error pointer.
 */
void *crypto_alloc_tfm(const char *alg_name,
               const struct crypto_type *frontend, u32 type, u32 mask)
{
    void *tfm;
    int err;

    for (;;) {
        struct crypto_alg *alg;

        alg = crypto_find_alg(alg_name, frontend, type, mask);
        if (IS_ERR(alg)) {
            err = PTR_ERR(alg);
            goto err;
        }

        tfm = crypto_create_tfm(alg, frontend);
        if (!IS_ERR(tfm))
            return tfm;

        crypto_mod_put(alg);
        err = PTR_ERR(tfm);

err:
        if (err != -EAGAIN)
            break;
        if (fatal_signal_pending(current)) {
            err = -EINTR;
            break;
        }
    }

    return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_alloc_tfm);
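
/*
 * Usage sketch (illustrative, not part of the original file):
 * type-specific allocators such as crypto_alloc_shash() are thin
 * wrappers that pass their crypto_type frontend to crypto_alloc_tfm().
 * "sha256" is assumed here to name a registered hash.
 *
 *    struct crypto_shash *tfm;
 *
 *    tfm = crypto_alloc_shash("sha256", 0, 0);
 *    if (IS_ERR(tfm))
 *        return PTR_ERR(tfm);
 *    ...
 *    crypto_free_shash(tfm);
 */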

/*
 *  crypto_destroy_tfm - Free crypto transform
 *  @mem: Start of tfm slab
 *  @tfm: Transform to free
 *
 *  This function frees up the transform and any associated resources,
 *  then drops the refcount on the associated algorithm.
 */
void crypto_destroy_tfm(void *mem, struct crypto_tfm *tfm)
{
    struct crypto_alg *alg;

    if (unlikely(!mem))
        return;

    alg = tfm->__crt_alg;

    if (!tfm->exit && alg->cra_exit)
        alg->cra_exit(tfm);
    crypto_exit_ops(tfm);
    crypto_mod_put(alg);
    kzfree(mem);
}
EXPORT_SYMBOL_GPL(crypto_destroy_tfm);
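
/*
 * Added commentary: @mem and @tfm differ when the transform was created
 * by crypto_create_tfm(), where the crypto_tfm sits frontend->tfmsize
 * bytes into the allocation; simple wrappers such as crypto_free_tfm()
 * pass the tfm pointer for both arguments.
 */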

int crypto_has_alg(const char *name, u32 type, u32 mask)
{
    int ret = 0;
    struct crypto_alg *alg = crypto_alg_mod_lookup(name, type, mask);

    if (!IS_ERR(alg)) {
        crypto_mod_put(alg);
        ret = 1;
    }

    return ret;
}
EXPORT_SYMBOL_GPL(crypto_has_alg);
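
/*
 * Usage sketch (illustrative, not part of the original file): probe for
 * an algorithm before depending on it; "deflate" is an assumed name.
 *
 *    if (!crypto_has_alg("deflate", 0, 0))
 *        return -ENOENT;
 */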

MODULE_DESCRIPTION("Cryptographic core API");
MODULE_LICENSE("GPL");