0001 /*
0002  * Synchronous Cryptographic Hash operations.
0003  *
0004  * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
0005  *
0006  * This program is free software; you can redistribute it and/or modify it
0007  * under the terms of the GNU General Public License as published by the Free
0008  * Software Foundation; either version 2 of the License, or (at your option)
0009  * any later version.
0010  *
0011  */
0012 
0013 #include <crypto/scatterwalk.h>
0014 #include <crypto/internal/hash.h>
0015 #include <linux/err.h>
0016 #include <linux/kernel.h>
0017 #include <linux/module.h>
0018 #include <linux/slab.h>
0019 #include <linux/seq_file.h>
0020 #include <linux/cryptouser.h>
0021 #include <net/netlink.h>
0022 
0023 #include "internal.h"
0024 
0025 static const struct crypto_type crypto_shash_type;
0026 
/*
 * Default ->setkey() for algorithms that do not implement one: always
 * reports that keying is unsupported.  This function is also used as a
 * sentinel -- crypto_init_shash_ops_async() compares alg->setkey against
 * it to decide whether the algorithm accepts a key at all, so it must
 * remain a unique, unshared function.
 */
static int shash_no_setkey(struct crypto_shash *tfm, const u8 *key,
               unsigned int keylen)
{
    return -ENOSYS;
}
0032 
0033 static int shash_setkey_unaligned(struct crypto_shash *tfm, const u8 *key,
0034                   unsigned int keylen)
0035 {
0036     struct shash_alg *shash = crypto_shash_alg(tfm);
0037     unsigned long alignmask = crypto_shash_alignmask(tfm);
0038     unsigned long absize;
0039     u8 *buffer, *alignbuffer;
0040     int err;
0041 
0042     absize = keylen + (alignmask & ~(crypto_tfm_ctx_alignment() - 1));
0043     buffer = kmalloc(absize, GFP_KERNEL);
0044     if (!buffer)
0045         return -ENOMEM;
0046 
0047     alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
0048     memcpy(alignbuffer, key, keylen);
0049     err = shash->setkey(tfm, alignbuffer, keylen);
0050     kzfree(buffer);
0051     return err;
0052 }
0053 
0054 int crypto_shash_setkey(struct crypto_shash *tfm, const u8 *key,
0055             unsigned int keylen)
0056 {
0057     struct shash_alg *shash = crypto_shash_alg(tfm);
0058     unsigned long alignmask = crypto_shash_alignmask(tfm);
0059 
0060     if ((unsigned long)key & alignmask)
0061         return shash_setkey_unaligned(tfm, key, keylen);
0062 
0063     return shash->setkey(tfm, key, keylen);
0064 }
0065 EXPORT_SYMBOL_GPL(crypto_shash_setkey);
0066 
/*
 * Size needed for an on-stack buffer so that a pointer inside it can be
 * aligned up to @mask + 1.  The u8_aligned typedef gives the stack array
 * the compiler's default ("natural") alignment, so only the part of
 * @mask beyond that guaranteed alignment must be added as slack.
 */
static inline unsigned int shash_align_buffer_size(unsigned len,
                           unsigned long mask)
{
    typedef u8 __attribute__ ((aligned)) u8_aligned;
    return len + (mask & ~(__alignof__(u8_aligned) - 1));
}
0073 
/*
 * Handle an update whose data pointer violates the algorithm's alignment
 * mask: copy the leading unaligned bytes into an aligned on-stack bounce
 * buffer and hash those, then hash the now-aligned remainder directly
 * from @data.
 */
static int shash_update_unaligned(struct shash_desc *desc, const u8 *data,
                  unsigned int len)
{
    struct crypto_shash *tfm = desc->tfm;
    struct shash_alg *shash = crypto_shash_alg(tfm);
    unsigned long alignmask = crypto_shash_alignmask(tfm);
    /* Bytes from @data up to the next aligned boundary. */
    unsigned int unaligned_len = alignmask + 1 -
                     ((unsigned long)data & alignmask);
    u8 ubuf[shash_align_buffer_size(unaligned_len, alignmask)]
        __attribute__ ((aligned));
    u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1);
    int err;

    if (unaligned_len > len)
        unaligned_len = len;

    memcpy(buf, data, unaligned_len);
    err = shash->update(desc, buf, unaligned_len);
    /* The stack bounce buffer held message bytes -- clear it. */
    memset(buf, 0, unaligned_len);

    /* Hash the aligned tail only if the head succeeded. */
    return err ?:
           shash->update(desc, data + unaligned_len, len - unaligned_len);
}
0097 
0098 int crypto_shash_update(struct shash_desc *desc, const u8 *data,
0099             unsigned int len)
0100 {
0101     struct crypto_shash *tfm = desc->tfm;
0102     struct shash_alg *shash = crypto_shash_alg(tfm);
0103     unsigned long alignmask = crypto_shash_alignmask(tfm);
0104 
0105     if ((unsigned long)data & alignmask)
0106         return shash_update_unaligned(desc, data, len);
0107 
0108     return shash->update(desc, data, len);
0109 }
0110 EXPORT_SYMBOL_GPL(crypto_shash_update);
0111 
0112 static int shash_final_unaligned(struct shash_desc *desc, u8 *out)
0113 {
0114     struct crypto_shash *tfm = desc->tfm;
0115     unsigned long alignmask = crypto_shash_alignmask(tfm);
0116     struct shash_alg *shash = crypto_shash_alg(tfm);
0117     unsigned int ds = crypto_shash_digestsize(tfm);
0118     u8 ubuf[shash_align_buffer_size(ds, alignmask)]
0119         __attribute__ ((aligned));
0120     u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1);
0121     int err;
0122 
0123     err = shash->final(desc, buf);
0124     if (err)
0125         goto out;
0126 
0127     memcpy(out, buf, ds);
0128 
0129 out:
0130     memset(buf, 0, ds);
0131     return err;
0132 }
0133 
0134 int crypto_shash_final(struct shash_desc *desc, u8 *out)
0135 {
0136     struct crypto_shash *tfm = desc->tfm;
0137     struct shash_alg *shash = crypto_shash_alg(tfm);
0138     unsigned long alignmask = crypto_shash_alignmask(tfm);
0139 
0140     if ((unsigned long)out & alignmask)
0141         return shash_final_unaligned(desc, out);
0142 
0143     return shash->final(desc, out);
0144 }
0145 EXPORT_SYMBOL_GPL(crypto_shash_final);
0146 
0147 static int shash_finup_unaligned(struct shash_desc *desc, const u8 *data,
0148                  unsigned int len, u8 *out)
0149 {
0150     return crypto_shash_update(desc, data, len) ?:
0151            crypto_shash_final(desc, out);
0152 }
0153 
0154 int crypto_shash_finup(struct shash_desc *desc, const u8 *data,
0155                unsigned int len, u8 *out)
0156 {
0157     struct crypto_shash *tfm = desc->tfm;
0158     struct shash_alg *shash = crypto_shash_alg(tfm);
0159     unsigned long alignmask = crypto_shash_alignmask(tfm);
0160 
0161     if (((unsigned long)data | (unsigned long)out) & alignmask)
0162         return shash_finup_unaligned(desc, data, len, out);
0163 
0164     return shash->finup(desc, data, len, out);
0165 }
0166 EXPORT_SYMBOL_GPL(crypto_shash_finup);
0167 
0168 static int shash_digest_unaligned(struct shash_desc *desc, const u8 *data,
0169                   unsigned int len, u8 *out)
0170 {
0171     return crypto_shash_init(desc) ?:
0172            crypto_shash_finup(desc, data, len, out);
0173 }
0174 
0175 int crypto_shash_digest(struct shash_desc *desc, const u8 *data,
0176             unsigned int len, u8 *out)
0177 {
0178     struct crypto_shash *tfm = desc->tfm;
0179     struct shash_alg *shash = crypto_shash_alg(tfm);
0180     unsigned long alignmask = crypto_shash_alignmask(tfm);
0181 
0182     if (((unsigned long)data | (unsigned long)out) & alignmask)
0183         return shash_digest_unaligned(desc, data, len, out);
0184 
0185     return shash->digest(desc, data, len, out);
0186 }
0187 EXPORT_SYMBOL_GPL(crypto_shash_digest);
0188 
/*
 * Default ->export(): the exported state is a raw byte copy of the
 * descriptor context.  Installed by shash_prepare_alg() when the
 * algorithm provides no export of its own (statesize is then set equal
 * to descsize); paired with shash_default_import().
 */
static int shash_default_export(struct shash_desc *desc, void *out)
{
    memcpy(out, shash_desc_ctx(desc), crypto_shash_descsize(desc->tfm));
    return 0;
}
0194 
/*
 * Default ->import(): restore the descriptor context from a raw byte
 * copy previously produced by shash_default_export().
 */
static int shash_default_import(struct shash_desc *desc, const void *in)
{
    memcpy(shash_desc_ctx(desc), in, crypto_shash_descsize(desc->tfm));
    return 0;
}
0200 
/*
 * ahash ->setkey() for the async wrapper: forward the key to the
 * underlying shash transform stored in the ahash context.
 */
static int shash_async_setkey(struct crypto_ahash *tfm, const u8 *key,
                  unsigned int keylen)
{
    struct crypto_shash **ctx = crypto_ahash_ctx(tfm);

    return crypto_shash_setkey(*ctx, key, keylen);
}
0208 
/*
 * ahash ->init() for the async wrapper: bind the request's shash
 * descriptor to the underlying shash transform, propagate the request
 * flags, and initialise the hash state.
 */
static int shash_async_init(struct ahash_request *req)
{
    struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
    struct shash_desc *desc = ahash_request_ctx(req);

    desc->tfm = *ctx;
    desc->flags = req->base.flags;

    return crypto_shash_init(desc);
}
0219 
/*
 * Update pass for the ahash wrapper: walk the request's scatterlist and
 * feed each mapped chunk to the underlying shash.  Status and remaining
 * length are multiplexed through @nbytes: crypto_shash_update()'s result
 * is handed to crypto_hash_walk_done(), so the loop stops on the first
 * error (negative) as well as at end of data (zero).
 */
int shash_ahash_update(struct ahash_request *req, struct shash_desc *desc)
{
    struct crypto_hash_walk walk;
    int nbytes;

    for (nbytes = crypto_hash_walk_first(req, &walk); nbytes > 0;
         nbytes = crypto_hash_walk_done(&walk, nbytes))
        nbytes = crypto_shash_update(desc, walk.data, nbytes);

    return nbytes;
}
EXPORT_SYMBOL_GPL(shash_ahash_update);
0232 
/*
 * ahash ->update() for the async wrapper.  Relies on the descriptor in
 * the request context having been set up by shash_async_init().
 */
static int shash_async_update(struct ahash_request *req)
{
    return shash_ahash_update(req, ahash_request_ctx(req));
}
0237 
/*
 * ahash ->final() for the async wrapper: finalise the shash descriptor
 * kept in the request context straight into req->result.
 */
static int shash_async_final(struct ahash_request *req)
{
    return crypto_shash_final(ahash_request_ctx(req), req->result);
}
0242 
/*
 * Finup pass for the ahash wrapper: hash the remaining scatterlist data
 * and write the digest to req->result.  The last chunk of the walk is
 * hashed with finup so no separate final call is needed; earlier chunks
 * go through plain update.  A walk that starts empty degenerates to a
 * bare final.  As with shash_ahash_update(), errors and remaining length
 * share @nbytes.
 */
int shash_ahash_finup(struct ahash_request *req, struct shash_desc *desc)
{
    struct crypto_hash_walk walk;
    int nbytes;

    nbytes = crypto_hash_walk_first(req, &walk);
    if (!nbytes)
        return crypto_shash_final(desc, req->result);

    do {
        nbytes = crypto_hash_walk_last(&walk) ?
             crypto_shash_finup(desc, walk.data, nbytes,
                        req->result) :
             crypto_shash_update(desc, walk.data, nbytes);
        nbytes = crypto_hash_walk_done(&walk, nbytes);
    } while (nbytes > 0);

    return nbytes;
}
EXPORT_SYMBOL_GPL(shash_ahash_finup);
0263 
/*
 * ahash ->finup() for the async wrapper.  Unlike shash_async_update(),
 * finup may be called without a prior init, so the descriptor's tfm and
 * flags are (re)established here before delegating.
 */
static int shash_async_finup(struct ahash_request *req)
{
    struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
    struct shash_desc *desc = ahash_request_ctx(req);

    desc->tfm = *ctx;
    desc->flags = req->base.flags;

    return shash_ahash_finup(req, desc);
}
0274 
0275 int shash_ahash_digest(struct ahash_request *req, struct shash_desc *desc)
0276 {
0277     struct scatterlist *sg = req->src;
0278     unsigned int offset = sg->offset;
0279     unsigned int nbytes = req->nbytes;
0280     int err;
0281 
0282     if (nbytes < min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset)) {
0283         void *data;
0284 
0285         data = kmap_atomic(sg_page(sg));
0286         err = crypto_shash_digest(desc, data + offset, nbytes,
0287                       req->result);
0288         kunmap_atomic(data);
0289         crypto_yield(desc->flags);
0290     } else
0291         err = crypto_shash_init(desc) ?:
0292               shash_ahash_finup(req, desc);
0293 
0294     return err;
0295 }
0296 EXPORT_SYMBOL_GPL(shash_ahash_digest);
0297 
/*
 * ahash ->digest() for the async wrapper: one-shot operation, so the
 * descriptor's tfm and flags are established here before delegating.
 */
static int shash_async_digest(struct ahash_request *req)
{
    struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
    struct shash_desc *desc = ahash_request_ctx(req);

    desc->tfm = *ctx;
    desc->flags = req->base.flags;

    return shash_ahash_digest(req, desc);
}
0308 
/*
 * ahash ->export() for the async wrapper: export the partial hash state
 * of the descriptor kept in the request context.
 */
static int shash_async_export(struct ahash_request *req, void *out)
{
    return crypto_shash_export(ahash_request_ctx(req), out);
}
0313 
/*
 * ahash ->import() for the async wrapper: import may be called on a
 * fresh request, so the descriptor's tfm and flags are established here
 * before restoring the state.
 */
static int shash_async_import(struct ahash_request *req, const void *in)
{
    struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
    struct shash_desc *desc = ahash_request_ctx(req);

    desc->tfm = *ctx;
    desc->flags = req->base.flags;

    return crypto_shash_import(desc, in);
}
0324 
/*
 * Teardown counterpart of crypto_init_shash_ops_async(): free the
 * underlying shash transform stored in the ahash context.
 */
static void crypto_exit_shash_ops_async(struct crypto_tfm *tfm)
{
    struct crypto_shash **ctx = crypto_tfm_ctx(tfm);

    crypto_free_shash(*ctx);
}
0331 
/*
 * Set up the ahash ops of @tfm so that the asynchronous hash interface
 * is serviced by a synchronous shash implementation underneath.  The
 * created shash transform is stored in the ahash context and released
 * via crypto_exit_shash_ops_async().
 */
int crypto_init_shash_ops_async(struct crypto_tfm *tfm)
{
    struct crypto_alg *calg = tfm->__crt_alg;
    struct shash_alg *alg = __crypto_shash_alg(calg);
    struct crypto_ahash *crt = __crypto_ahash_cast(tfm);
    struct crypto_shash **ctx = crypto_tfm_ctx(tfm);
    struct crypto_shash *shash;

    /* Take an extra module reference for the inner shash tfm. */
    if (!crypto_mod_get(calg))
        return -EAGAIN;

    shash = crypto_create_tfm(calg, &crypto_shash_type);
    if (IS_ERR(shash)) {
        crypto_mod_put(calg);
        return PTR_ERR(shash);
    }

    *ctx = shash;
    tfm->exit = crypto_exit_shash_ops_async;

    crt->init = shash_async_init;
    crt->update = shash_async_update;
    crt->final = shash_async_final;
    crt->finup = shash_async_finup;
    crt->digest = shash_async_digest;
    crt->setkey = shash_async_setkey;

    /* shash_no_setkey is the "no key supported" sentinel. */
    crt->has_setkey = alg->setkey != shash_no_setkey;

    /* Only expose export/import if the algorithm implements them. */
    if (alg->export)
        crt->export = shash_async_export;
    if (alg->import)
        crt->import = shash_async_import;

    /* Request context holds a shash descriptor plus its hash state. */
    crt->reqsize = sizeof(struct shash_desc) + crypto_shash_descsize(shash);

    return 0;
}
0370 
/*
 * crypto_type ->init_tfm(): seed the per-tfm descriptor size from the
 * algorithm's declared descsize.
 */
static int crypto_shash_init_tfm(struct crypto_tfm *tfm)
{
    struct crypto_shash *hash = __crypto_shash_cast(tfm);

    hash->descsize = crypto_shash_alg(hash)->descsize;
    return 0;
}
0378 
0379 #ifdef CONFIG_NET
0380 static int crypto_shash_report(struct sk_buff *skb, struct crypto_alg *alg)
0381 {
0382     struct crypto_report_hash rhash;
0383     struct shash_alg *salg = __crypto_shash_alg(alg);
0384 
0385     strncpy(rhash.type, "shash", sizeof(rhash.type));
0386 
0387     rhash.blocksize = alg->cra_blocksize;
0388     rhash.digestsize = salg->digestsize;
0389 
0390     if (nla_put(skb, CRYPTOCFGA_REPORT_HASH,
0391             sizeof(struct crypto_report_hash), &rhash))
0392         goto nla_put_failure;
0393     return 0;
0394 
0395 nla_put_failure:
0396     return -EMSGSIZE;
0397 }
0398 #else
/* !CONFIG_NET stub: netlink reporting is unavailable. */
static int crypto_shash_report(struct sk_buff *skb, struct crypto_alg *alg)
{
    return -ENOSYS;
}
0403 #endif
0404 
/*
 * /proc/crypto ->show() handler.  Marked unused because the reference in
 * crypto_shash_type is compiled out when CONFIG_PROC_FS is disabled.
 */
static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
    __attribute__ ((unused));
static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
{
    struct shash_alg *salg = __crypto_shash_alg(alg);

    seq_printf(m, "type         : shash\n");
    seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
    seq_printf(m, "digestsize   : %u\n", salg->digestsize);
}
0415 
/* crypto_type glue binding shash algorithms into the crypto core. */
static const struct crypto_type crypto_shash_type = {
    .extsize = crypto_alg_extsize,
    .init_tfm = crypto_shash_init_tfm,
#ifdef CONFIG_PROC_FS
    .show = crypto_shash_show,
#endif
    .report = crypto_shash_report,
    .maskclear = ~CRYPTO_ALG_TYPE_MASK,
    .maskset = CRYPTO_ALG_TYPE_MASK,
    .type = CRYPTO_ALG_TYPE_SHASH,
    .tfmsize = offsetof(struct crypto_shash, base),
};
0428 
/*
 * Allocate a synchronous hash transform for algorithm @alg_name.
 * Returns an ERR_PTR on failure (propagated from crypto_alloc_tfm()).
 */
struct crypto_shash *crypto_alloc_shash(const char *alg_name, u32 type,
                    u32 mask)
{
    return crypto_alloc_tfm(alg_name, &crypto_shash_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_alloc_shash);
0435 
/*
 * Validate an shash algorithm and fill in default operations before
 * registration: sanity-limit the declared sizes, stamp the crypto type
 * and flags, and install generic fallbacks for any optional op the
 * algorithm left NULL.  When export is defaulted, import and statesize
 * are defaulted together with it so the three stay consistent.
 */
static int shash_prepare_alg(struct shash_alg *alg)
{
    struct crypto_alg *base = &alg->base;

    if (alg->digestsize > PAGE_SIZE / 8 ||
        alg->descsize > PAGE_SIZE / 8 ||
        alg->statesize > PAGE_SIZE / 8)
        return -EINVAL;

    base->cra_type = &crypto_shash_type;
    base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK;
    base->cra_flags |= CRYPTO_ALG_TYPE_SHASH;

    if (!alg->finup)
        alg->finup = shash_finup_unaligned;
    if (!alg->digest)
        alg->digest = shash_digest_unaligned;
    if (!alg->export) {
        alg->export = shash_default_export;
        alg->import = shash_default_import;
        alg->statesize = alg->descsize;
    }
    /* Sentinel setkey; crypto_init_shash_ops_async() tests for it. */
    if (!alg->setkey)
        alg->setkey = shash_no_setkey;

    return 0;
}
0463 
0464 int crypto_register_shash(struct shash_alg *alg)
0465 {
0466     struct crypto_alg *base = &alg->base;
0467     int err;
0468 
0469     err = shash_prepare_alg(alg);
0470     if (err)
0471         return err;
0472 
0473     return crypto_register_alg(base);
0474 }
0475 EXPORT_SYMBOL_GPL(crypto_register_shash);
0476 
/*
 * Remove a previously registered shash algorithm from the crypto core.
 */
int crypto_unregister_shash(struct shash_alg *alg)
{
    return crypto_unregister_alg(&alg->base);
}
EXPORT_SYMBOL_GPL(crypto_unregister_shash);
0482 
0483 int crypto_register_shashes(struct shash_alg *algs, int count)
0484 {
0485     int i, ret;
0486 
0487     for (i = 0; i < count; i++) {
0488         ret = crypto_register_shash(&algs[i]);
0489         if (ret)
0490             goto err;
0491     }
0492 
0493     return 0;
0494 
0495 err:
0496     for (--i; i >= 0; --i)
0497         crypto_unregister_shash(&algs[i]);
0498 
0499     return ret;
0500 }
0501 EXPORT_SYMBOL_GPL(crypto_register_shashes);
0502 
/*
 * Unregister an array of shash algorithms in reverse registration
 * order.  Individual failures are logged but do not stop the loop, and
 * the function always reports success.
 */
int crypto_unregister_shashes(struct shash_alg *algs, int count)
{
    int i, ret;

    for (i = count - 1; i >= 0; --i) {
        ret = crypto_unregister_shash(&algs[i]);
        if (ret)
            pr_err("Failed to unregister %s %s: %d\n",
                   algs[i].base.cra_driver_name,
                   algs[i].base.cra_name, ret);
    }

    return 0;
}
EXPORT_SYMBOL_GPL(crypto_unregister_shashes);
0518 
0519 int shash_register_instance(struct crypto_template *tmpl,
0520                 struct shash_instance *inst)
0521 {
0522     int err;
0523 
0524     err = shash_prepare_alg(&inst->alg);
0525     if (err)
0526         return err;
0527 
0528     return crypto_register_instance(tmpl, shash_crypto_instance(inst));
0529 }
0530 EXPORT_SYMBOL_GPL(shash_register_instance);
0531 
/*
 * Generic destructor for shash instances: drop the spawn held in the
 * instance context, then free the instance itself.
 */
void shash_free_instance(struct crypto_instance *inst)
{
    crypto_drop_spawn(crypto_instance_ctx(inst));
    kfree(shash_instance(inst));
}
EXPORT_SYMBOL_GPL(shash_free_instance);
0538 
/*
 * Initialise a spawn that ties @inst to the shash algorithm @alg,
 * constrained to the shash crypto type.
 */
int crypto_init_shash_spawn(struct crypto_shash_spawn *spawn,
                struct shash_alg *alg,
                struct crypto_instance *inst)
{
    return crypto_init_spawn2(&spawn->base, &alg->base, inst,
                  &crypto_shash_type);
}
EXPORT_SYMBOL_GPL(crypto_init_shash_spawn);
0547 
0548 struct shash_alg *shash_attr_alg(struct rtattr *rta, u32 type, u32 mask)
0549 {
0550     struct crypto_alg *alg;
0551 
0552     alg = crypto_attr_alg2(rta, &crypto_shash_type, type, mask);
0553     return IS_ERR(alg) ? ERR_CAST(alg) :
0554            container_of(alg, struct shash_alg, base);
0555 }
0556 EXPORT_SYMBOL_GPL(shash_attr_alg);
0557 
0558 MODULE_LICENSE("GPL");
0559 MODULE_DESCRIPTION("Synchronous cryptographic hash type");