// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Asynchronous Cryptographic Hash operations.
 *
 * This is the asynchronous version of hash.c with notification of
 * completion via a callback.
 *
 * Copyright (c) 2008 Loc Ho <lho@amcc.com>
 */

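/*
 * Typical caller flow, shown only as an illustrative sketch (the algorithm
 * name "sha256" and the src_sg/digest/nbytes variables are placeholders
 * supplied by the caller, not part of this file):
 *
 *     struct crypto_ahash *tfm = crypto_alloc_ahash("sha256", 0, 0);
 *     struct ahash_request *req;
 *     DECLARE_CRYPTO_WAIT(wait);
 *     int err;
 *
 *     if (IS_ERR(tfm))
 *         return PTR_ERR(tfm);
 *     req = ahash_request_alloc(tfm, GFP_KERNEL);
 *     if (!req) {
 *         crypto_free_ahash(tfm);
 *         return -ENOMEM;
 *     }
 *     ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
 *                    crypto_req_done, &wait);
 *     ahash_request_set_crypt(req, src_sg, digest, nbytes);
 *     err = crypto_wait_req(crypto_ahash_digest(req), &wait);
 *     ahash_request_free(req);
 *     crypto_free_ahash(tfm);
 *     return err;
 */
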
#include <crypto/internal/hash.h>
#include <crypto/scatterwalk.h>
#include <linux/err.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/sched.h>
#include <linux/slab.h>
#include <linux/seq_file.h>
#include <linux/cryptouser.h>
#include <linux/compiler.h>
#include <net/netlink.h>

#include "internal.h"

static const struct crypto_type crypto_ahash_type;

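/*
 * Per-request backup used while a request is diverted to an internal,
 * properly aligned result buffer: ahash_save_req() stores the caller's
 * completion callback, callback data, result pointer and flags here, and
 * ->ubuf provides the temporary digest buffer; ahash_restore_req() puts
 * everything back.
 */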
struct ahash_request_priv {
    crypto_completion_t complete;
    void *data;
    u8 *result;
    u32 flags;
    void *ubuf[] CRYPTO_MINALIGN_ATTR;
};

static inline struct ahash_alg *crypto_ahash_alg(struct crypto_ahash *hash)
{
    return container_of(crypto_hash_alg_common(hash), struct ahash_alg,
                halg);
}

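/*
 * Map the current walk page and return how many bytes may be hashed from it
 * in this step: at most the remainder of the page, and no more than what is
 * needed to reach the next alignment boundary if the offset is misaligned.
 */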
static int hash_walk_next(struct crypto_hash_walk *walk)
{
    unsigned int alignmask = walk->alignmask;
    unsigned int offset = walk->offset;
    unsigned int nbytes = min(walk->entrylen,
                  ((unsigned int)(PAGE_SIZE)) - offset);

    walk->data = kmap_atomic(walk->pg);
    walk->data += offset;

    if (offset & alignmask) {
        unsigned int unaligned = alignmask + 1 - (offset & alignmask);

        if (nbytes > unaligned)
            nbytes = unaligned;
    }

    walk->entrylen -= nbytes;
    return nbytes;
}

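/*
 * Load the next scatterlist entry into the walk state (page, in-page offset
 * and length, clamped to the remaining total) and map its first chunk.
 */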
static int hash_walk_new_entry(struct crypto_hash_walk *walk)
{
    struct scatterlist *sg;

    sg = walk->sg;
    walk->offset = sg->offset;
    walk->pg = sg_page(walk->sg) + (walk->offset >> PAGE_SHIFT);
    walk->offset = offset_in_page(walk->offset);
    walk->entrylen = sg->length;

    if (walk->entrylen > walk->total)
        walk->entrylen = walk->total;
    walk->total -= walk->entrylen;

    return hash_walk_next(walk);
}

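/*
 * Finish one walk step.  If the previous step stopped early only to reach an
 * alignment boundary, continue within the same mapping; otherwise unmap the
 * page, optionally yield, and advance to the next page or scatterlist entry.
 * Returns the size of the next chunk, 0 once the walk is complete, or the
 * error passed in by the caller.
 */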
int crypto_hash_walk_done(struct crypto_hash_walk *walk, int err)
{
    unsigned int alignmask = walk->alignmask;

    walk->data -= walk->offset;

    if (walk->entrylen && (walk->offset & alignmask) && !err) {
        unsigned int nbytes;

        walk->offset = ALIGN(walk->offset, alignmask + 1);
        nbytes = min(walk->entrylen,
                 (unsigned int)(PAGE_SIZE - walk->offset));
        if (nbytes) {
            walk->entrylen -= nbytes;
            walk->data += walk->offset;
            return nbytes;
        }
    }

    kunmap_atomic(walk->data);
    crypto_yield(walk->flags);

    if (err)
        return err;

    if (walk->entrylen) {
        walk->offset = 0;
        walk->pg++;
        return hash_walk_next(walk);
    }

    if (!walk->total)
        return 0;

    walk->sg = sg_next(walk->sg);

    return hash_walk_new_entry(walk);
}
EXPORT_SYMBOL_GPL(crypto_hash_walk_done);

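/*
 * Start walking the source scatterlist of @req.  Returns the size of the
 * first chunk to hash, or 0 if the request carries no data.
 */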
int crypto_hash_walk_first(struct ahash_request *req,
               struct crypto_hash_walk *walk)
{
    walk->total = req->nbytes;

    if (!walk->total) {
        walk->entrylen = 0;
        return 0;
    }

    walk->alignmask = crypto_ahash_alignmask(crypto_ahash_reqtfm(req));
    walk->sg = req->src;
    walk->flags = req->base.flags;

    return hash_walk_new_entry(walk);
}
EXPORT_SYMBOL_GPL(crypto_hash_walk_first);

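/*
 * Copy a misaligned key into a freshly allocated buffer aligned to the
 * algorithm's alignmask, set the key from that copy, then erase and free
 * the copy.
 */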
static int ahash_setkey_unaligned(struct crypto_ahash *tfm, const u8 *key,
                unsigned int keylen)
{
    unsigned long alignmask = crypto_ahash_alignmask(tfm);
    int ret;
    u8 *buffer, *alignbuffer;
    unsigned long absize;

    absize = keylen + alignmask;
    buffer = kmalloc(absize, GFP_KERNEL);
    if (!buffer)
        return -ENOMEM;

    alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
    memcpy(alignbuffer, key, keylen);
    ret = tfm->setkey(tfm, alignbuffer, keylen);
    kfree_sensitive(buffer);
    return ret;
}

static int ahash_nosetkey(struct crypto_ahash *tfm, const u8 *key,
              unsigned int keylen)
{
    return -ENOSYS;
}

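/* Mark the tfm as needing a key unless the algorithm takes no key or declares it optional. */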
static void ahash_set_needkey(struct crypto_ahash *tfm)
{
    const struct hash_alg_common *alg = crypto_hash_alg_common(tfm);

    if (tfm->setkey != ahash_nosetkey &&
        !(alg->base.cra_flags & CRYPTO_ALG_OPTIONAL_KEY))
        crypto_ahash_set_flags(tfm, CRYPTO_TFM_NEED_KEY);
}

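/*
 * Set the key, bouncing it through an aligned buffer when the caller's copy
 * does not satisfy the algorithm's alignmask.  On success the NEED_KEY flag
 * is cleared; on failure it is asserted again so the tfm cannot be used.
 */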
int crypto_ahash_setkey(struct crypto_ahash *tfm, const u8 *key,
            unsigned int keylen)
{
    unsigned long alignmask = crypto_ahash_alignmask(tfm);
    int err;

    if ((unsigned long)key & alignmask)
        err = ahash_setkey_unaligned(tfm, key, keylen);
    else
        err = tfm->setkey(tfm, key, keylen);

    if (unlikely(err)) {
        ahash_set_needkey(tfm);
        return err;
    }

    crypto_ahash_clear_flags(tfm, CRYPTO_TFM_NEED_KEY);
    return 0;
}
EXPORT_SYMBOL_GPL(crypto_ahash_setkey);

static inline unsigned int ahash_align_buffer_size(unsigned len,
                           unsigned long mask)
{
    return len + (mask & ~(crypto_tfm_ctx_alignment() - 1));
}

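/*
 * Divert @req to an internal, aligned result buffer: allocate the private
 * backup structure (plus room for the aligned digest), save the caller's
 * result pointer, completion callback, data and flags, and point the request
 * at the new buffer and at @cplt instead.
 */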
static int ahash_save_req(struct ahash_request *req, crypto_completion_t cplt)
{
    struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
    unsigned long alignmask = crypto_ahash_alignmask(tfm);
    unsigned int ds = crypto_ahash_digestsize(tfm);
    struct ahash_request_priv *priv;

    priv = kmalloc(sizeof(*priv) + ahash_align_buffer_size(ds, alignmask),
               (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
               GFP_KERNEL : GFP_ATOMIC);
    if (!priv)
        return -ENOMEM;

    /*
     * WARNING: Voodoo programming below!
     *
     * The code below is obscure and hard to understand, so an explanation
     * is necessary. See include/crypto/hash.h and include/linux/crypto.h
     * to understand the layout of the structures used here!
     *
     * The code here will replace portions of the ORIGINAL request with
     * pointers to new code and buffers so the hashing operation can store
     * the result in an aligned buffer. We will call the modified request
     * an ADJUSTED request.
     *
     * The newly mangled request will look like this:
     *
     * req {
     *   .result        = ADJUSTED[new aligned buffer]
     *   .base.complete = ADJUSTED[pointer to completion function]
     *   .base.data     = ADJUSTED[*req (pointer to self)]
     *   .priv          = ADJUSTED[new priv] {
     *           .result   = ORIGINAL(result)
     *           .complete = ORIGINAL(base.complete)
     *           .data     = ORIGINAL(base.data)
     *   }
     * }
     */

    priv->result = req->result;
    priv->complete = req->base.complete;
    priv->data = req->base.data;
    priv->flags = req->base.flags;

    /*
     * WARNING: We do not back up req->priv here! The req->priv
     *          is for internal use of the Crypto API and the
     *          user must _NOT_ _EVER_ depend on its content!
     */

    req->result = PTR_ALIGN((u8 *)priv->ubuf, alignmask + 1);
    req->base.complete = cplt;
    req->base.data = req;
    req->priv = priv;

    return 0;
}

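/*
 * Undo ahash_save_req(): on success copy the digest from the internal
 * aligned buffer back into the caller's result buffer, restore the original
 * result pointer, callback, data and flags, and free the private state.
 */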
static void ahash_restore_req(struct ahash_request *req, int err)
{
    struct ahash_request_priv *priv = req->priv;

    if (!err)
        memcpy(priv->result, req->result,
               crypto_ahash_digestsize(crypto_ahash_reqtfm(req)));

    /* Restore the original crypto request. */
    req->result = priv->result;

    ahash_request_set_callback(req, priv->flags,
                   priv->complete, priv->data);
    req->priv = NULL;

    /* Free the req->priv.priv from the ADJUSTED request. */
    kfree_sensitive(priv);
}

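/* Forward an -EINPROGRESS notification to the caller's original callback. */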
static void ahash_notify_einprogress(struct ahash_request *req)
{
    struct ahash_request_priv *priv = req->priv;
    struct crypto_async_request oreq;

    oreq.data = priv->data;

    priv->complete(&oreq, -EINPROGRESS);
}

static void ahash_op_unaligned_done(struct crypto_async_request *req, int err)
{
    struct ahash_request *areq = req->data;

    if (err == -EINPROGRESS) {
        ahash_notify_einprogress(areq);
        return;
    }

    /*
     * Restore the original request, see ahash_op_unaligned() for what
     * goes where.
     *
     * The "struct ahash_request *req" here is in fact the "req.base"
     * from the ADJUSTED request from ahash_op_unaligned(), thus as it
     * is a pointer to self, it is also the ADJUSTED "req".
     */

    /* First copy req->result into req->priv.result */
    ahash_restore_req(areq, err);

    /* Complete the ORIGINAL request. */
    areq->base.complete(&areq->base, err);
}

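/*
 * Run @op on a request whose result buffer is misaligned: divert the result
 * into an internal aligned buffer first, and restore the original request if
 * the operation completes synchronously.  Asynchronous completion is handled
 * by ahash_op_unaligned_done().
 */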
static int ahash_op_unaligned(struct ahash_request *req,
                  int (*op)(struct ahash_request *))
{
    int err;

    err = ahash_save_req(req, ahash_op_unaligned_done);
    if (err)
        return err;

    err = op(req);
    if (err == -EINPROGRESS || err == -EBUSY)
        return err;

    ahash_restore_req(req, err);

    return err;
}

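/*
 * Dispatch a hash operation, taking the bounce-buffer path when the caller's
 * result buffer does not meet the algorithm's alignment requirement.
 */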
static int crypto_ahash_op(struct ahash_request *req,
               int (*op)(struct ahash_request *))
{
    struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
    unsigned long alignmask = crypto_ahash_alignmask(tfm);

    if ((unsigned long)req->result & alignmask)
        return ahash_op_unaligned(req, op);

    return op(req);
}

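/*
 * The exported entry points below wrap the algorithm callbacks with the
 * crypto statistics hooks; crypto_ahash_digest() additionally refuses to run
 * while the tfm still needs a key.
 */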
int crypto_ahash_final(struct ahash_request *req)
{
    struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
    struct crypto_alg *alg = tfm->base.__crt_alg;
    unsigned int nbytes = req->nbytes;
    int ret;

    crypto_stats_get(alg);
    ret = crypto_ahash_op(req, crypto_ahash_reqtfm(req)->final);
    crypto_stats_ahash_final(nbytes, ret, alg);
    return ret;
}
EXPORT_SYMBOL_GPL(crypto_ahash_final);

int crypto_ahash_finup(struct ahash_request *req)
{
    struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
    struct crypto_alg *alg = tfm->base.__crt_alg;
    unsigned int nbytes = req->nbytes;
    int ret;

    crypto_stats_get(alg);
    ret = crypto_ahash_op(req, crypto_ahash_reqtfm(req)->finup);
    crypto_stats_ahash_final(nbytes, ret, alg);
    return ret;
}
EXPORT_SYMBOL_GPL(crypto_ahash_finup);

int crypto_ahash_digest(struct ahash_request *req)
{
    struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
    struct crypto_alg *alg = tfm->base.__crt_alg;
    unsigned int nbytes = req->nbytes;
    int ret;

    crypto_stats_get(alg);
    if (crypto_ahash_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
        ret = -ENOKEY;
    else
        ret = crypto_ahash_op(req, tfm->digest);
    crypto_stats_ahash_final(nbytes, ret, alg);
    return ret;
}
EXPORT_SYMBOL_GPL(crypto_ahash_digest);

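/*
 * Default ->finup path for algorithms that only provide ->update and ->final:
 * run the update first and the final afterwards, saving and restoring the
 * request around the asynchronous completion points handled below.
 */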
static void ahash_def_finup_done2(struct crypto_async_request *req, int err)
{
    struct ahash_request *areq = req->data;

    if (err == -EINPROGRESS)
        return;

    ahash_restore_req(areq, err);

    areq->base.complete(&areq->base, err);
}

static int ahash_def_finup_finish1(struct ahash_request *req, int err)
{
    if (err)
        goto out;

    req->base.complete = ahash_def_finup_done2;

    err = crypto_ahash_reqtfm(req)->final(req);
    if (err == -EINPROGRESS || err == -EBUSY)
        return err;

out:
    ahash_restore_req(req, err);
    return err;
}

static void ahash_def_finup_done1(struct crypto_async_request *req, int err)
{
    struct ahash_request *areq = req->data;

    if (err == -EINPROGRESS) {
        ahash_notify_einprogress(areq);
        return;
    }

    areq->base.flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

    err = ahash_def_finup_finish1(areq, err);
    if (areq->priv)
        return;

    areq->base.complete(&areq->base, err);
}

static int ahash_def_finup(struct ahash_request *req)
{
    struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
    int err;

    err = ahash_save_req(req, ahash_def_finup_done1);
    if (err)
        return err;

    err = tfm->update(req);
    if (err == -EINPROGRESS || err == -EBUSY)
        return err;

    return ahash_def_finup_finish1(req, err);
}

static void crypto_ahash_exit_tfm(struct crypto_tfm *tfm)
{
    struct crypto_ahash *hash = __crypto_ahash_cast(tfm);
    struct ahash_alg *alg = crypto_ahash_alg(hash);

    alg->exit_tfm(hash);
}

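/*
 * Instantiate an ahash tfm.  Algorithms registered as shash are wrapped with
 * the asynchronous shash adaptor; native ahash algorithms have their
 * callbacks copied into the tfm, are flagged as needing a key where
 * appropriate, and run their optional ->init_tfm hook.
 */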
static int crypto_ahash_init_tfm(struct crypto_tfm *tfm)
{
    struct crypto_ahash *hash = __crypto_ahash_cast(tfm);
    struct ahash_alg *alg = crypto_ahash_alg(hash);

    hash->setkey = ahash_nosetkey;

    if (tfm->__crt_alg->cra_type != &crypto_ahash_type)
        return crypto_init_shash_ops_async(tfm);

    hash->init = alg->init;
    hash->update = alg->update;
    hash->final = alg->final;
    hash->finup = alg->finup ?: ahash_def_finup;
    hash->digest = alg->digest;
    hash->export = alg->export;
    hash->import = alg->import;

    if (alg->setkey) {
        hash->setkey = alg->setkey;
        ahash_set_needkey(hash);
    }

    if (alg->exit_tfm)
        tfm->exit = crypto_ahash_exit_tfm;

    return alg->init_tfm ? alg->init_tfm(hash) : 0;
}

static unsigned int crypto_ahash_extsize(struct crypto_alg *alg)
{
    if (alg->cra_type != &crypto_ahash_type)
        return sizeof(struct crypto_shash *);

    return crypto_alg_extsize(alg);
}

static void crypto_ahash_free_instance(struct crypto_instance *inst)
{
    struct ahash_instance *ahash = ahash_instance(inst);

    ahash->free(ahash);
}

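/* Report ahash algorithm parameters over the crypto_user netlink interface. */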
#ifdef CONFIG_NET
static int crypto_ahash_report(struct sk_buff *skb, struct crypto_alg *alg)
{
    struct crypto_report_hash rhash;

    memset(&rhash, 0, sizeof(rhash));

    strscpy(rhash.type, "ahash", sizeof(rhash.type));

    rhash.blocksize = alg->cra_blocksize;
    rhash.digestsize = __crypto_hash_alg_common(alg)->digestsize;

    return nla_put(skb, CRYPTOCFGA_REPORT_HASH, sizeof(rhash), &rhash);
}
#else
static int crypto_ahash_report(struct sk_buff *skb, struct crypto_alg *alg)
{
    return -ENOSYS;
}
#endif

static void crypto_ahash_show(struct seq_file *m, struct crypto_alg *alg)
    __maybe_unused;
static void crypto_ahash_show(struct seq_file *m, struct crypto_alg *alg)
{
    seq_printf(m, "type         : ahash\n");
    seq_printf(m, "async        : %s\n", alg->cra_flags & CRYPTO_ALG_ASYNC ?
                         "yes" : "no");
    seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
    seq_printf(m, "digestsize   : %u\n",
           __crypto_hash_alg_common(alg)->digestsize);
}

static const struct crypto_type crypto_ahash_type = {
    .extsize = crypto_ahash_extsize,
    .init_tfm = crypto_ahash_init_tfm,
    .free = crypto_ahash_free_instance,
#ifdef CONFIG_PROC_FS
    .show = crypto_ahash_show,
#endif
    .report = crypto_ahash_report,
    .maskclear = ~CRYPTO_ALG_TYPE_MASK,
    .maskset = CRYPTO_ALG_TYPE_AHASH_MASK,
    .type = CRYPTO_ALG_TYPE_AHASH,
    .tfmsize = offsetof(struct crypto_ahash, base),
};

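/* Bind an ahash spawn to the named algorithm on behalf of a template instance. */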
int crypto_grab_ahash(struct crypto_ahash_spawn *spawn,
              struct crypto_instance *inst,
              const char *name, u32 type, u32 mask)
{
    spawn->base.frontend = &crypto_ahash_type;
    return crypto_grab_spawn(&spawn->base, inst, name, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_grab_ahash);

struct crypto_ahash *crypto_alloc_ahash(const char *alg_name, u32 type,
                    u32 mask)
{
    return crypto_alloc_tfm(alg_name, &crypto_ahash_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_alloc_ahash);

int crypto_has_ahash(const char *alg_name, u32 type, u32 mask)
{
    return crypto_type_has_alg(alg_name, &crypto_ahash_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_has_ahash);

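/*
 * Sanity-check an ahash algorithm before registration (digest and state size
 * limits) and stamp the common type and flag fields.
 */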
static int ahash_prepare_alg(struct ahash_alg *alg)
{
    struct crypto_alg *base = &alg->halg.base;

    if (alg->halg.digestsize > HASH_MAX_DIGESTSIZE ||
        alg->halg.statesize > HASH_MAX_STATESIZE ||
        alg->halg.statesize == 0)
        return -EINVAL;

    base->cra_type = &crypto_ahash_type;
    base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK;
    base->cra_flags |= CRYPTO_ALG_TYPE_AHASH;

    return 0;
}

int crypto_register_ahash(struct ahash_alg *alg)
{
    struct crypto_alg *base = &alg->halg.base;
    int err;

    err = ahash_prepare_alg(alg);
    if (err)
        return err;

    return crypto_register_alg(base);
}
EXPORT_SYMBOL_GPL(crypto_register_ahash);

void crypto_unregister_ahash(struct ahash_alg *alg)
{
    crypto_unregister_alg(&alg->halg.base);
}
EXPORT_SYMBOL_GPL(crypto_unregister_ahash);

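/*
 * Register an array of ahash algorithms, unregistering any already-registered
 * entries if one of them fails.
 */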
int crypto_register_ahashes(struct ahash_alg *algs, int count)
{
    int i, ret;

    for (i = 0; i < count; i++) {
        ret = crypto_register_ahash(&algs[i]);
        if (ret)
            goto err;
    }

    return 0;

err:
    for (--i; i >= 0; --i)
        crypto_unregister_ahash(&algs[i]);

    return ret;
}
EXPORT_SYMBOL_GPL(crypto_register_ahashes);

void crypto_unregister_ahashes(struct ahash_alg *algs, int count)
{
    int i;

    for (i = count - 1; i >= 0; --i)
        crypto_unregister_ahash(&algs[i]);
}
EXPORT_SYMBOL_GPL(crypto_unregister_ahashes);

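/* Register a template-generated ahash instance; a ->free callback is required. */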
int ahash_register_instance(struct crypto_template *tmpl,
                struct ahash_instance *inst)
{
    int err;

    if (WARN_ON(!inst->free))
        return -EINVAL;

    err = ahash_prepare_alg(&inst->alg);
    if (err)
        return err;

    return crypto_register_instance(tmpl, ahash_crypto_instance(inst));
}
EXPORT_SYMBOL_GPL(ahash_register_instance);

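/*
 * Return true if the hash algorithm behind @halg accepts a key, whether it is
 * a native ahash or an shash exposed through the ahash interface.
 */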
bool crypto_hash_alg_has_setkey(struct hash_alg_common *halg)
{
    struct crypto_alg *alg = &halg->base;

    if (alg->cra_type != &crypto_ahash_type)
        return crypto_shash_alg_has_setkey(__crypto_shash_alg(alg));

    return __crypto_ahash_alg(alg)->setkey != NULL;
}
EXPORT_SYMBOL_GPL(crypto_hash_alg_has_setkey);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Asynchronous cryptographic hash type");