// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * seqiv: Sequence Number IV Generator
 *
 * This generator produces an IV by XORing a sequence number with a salt.
 * This algorithm is mainly useful for CTR and similar modes.
 *
 * Copyright (c) 2007 Herbert Xu <herbert@gondor.apana.org.au>
 */

#include <crypto/internal/geniv.h>
#include <crypto/scatterwalk.h>
#include <crypto/skcipher.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>
#include <linux/string.h>

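/*
 * Completion handling for the encrypt path when the caller's IV buffer had
 * to be bounced through a heap copy (see seqiv_aead_encrypt() below): on
 * success the generated IV is copied back into the original request, and
 * the temporary copy is freed.  -EINPROGRESS only means the request is
 * still in flight, so nothing is released at that point.
 */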
static void seqiv_aead_encrypt_complete2(struct aead_request *req, int err)
{
    struct aead_request *subreq = aead_request_ctx(req);
    struct crypto_aead *geniv;

    if (err == -EINPROGRESS)
        return;

    if (err)
        goto out;

    geniv = crypto_aead_reqtfm(req);
    memcpy(req->iv, subreq->iv, crypto_aead_ivsize(geniv));

out:
    kfree_sensitive(subreq->iv);
}

static void seqiv_aead_encrypt_complete(struct crypto_async_request *base,
                    int err)
{
    struct aead_request *req = base->data;

    seqiv_aead_encrypt_complete2(req, err);
    aead_request_complete(req, err);
}

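/*
 * Encrypt: req->iv carries the 64-bit sequence number.  When source and
 * destination differ, the associated data and plaintext are first copied
 * over with the null cipher.  The IV is formed by XORing the sequence
 * number with the salt, stored in the destination directly after the
 * associated data, and the remaining payload is encrypted in place by the
 * child AEAD, which authenticates the associated data plus the IV.  A
 * misaligned IV buffer is duplicated on the heap and cleaned up by the
 * completion helpers above.
 */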
static int seqiv_aead_encrypt(struct aead_request *req)
{
    struct crypto_aead *geniv = crypto_aead_reqtfm(req);
    struct aead_geniv_ctx *ctx = crypto_aead_ctx(geniv);
    struct aead_request *subreq = aead_request_ctx(req);
    crypto_completion_t compl;
    void *data;
    u8 *info;
    unsigned int ivsize = 8;
    int err;

    if (req->cryptlen < ivsize)
        return -EINVAL;

    aead_request_set_tfm(subreq, ctx->child);

    compl = req->base.complete;
    data = req->base.data;
    info = req->iv;

    if (req->src != req->dst) {
        SYNC_SKCIPHER_REQUEST_ON_STACK(nreq, ctx->sknull);

        skcipher_request_set_sync_tfm(nreq, ctx->sknull);
        skcipher_request_set_callback(nreq, req->base.flags,
                          NULL, NULL);
        skcipher_request_set_crypt(nreq, req->src, req->dst,
                       req->assoclen + req->cryptlen,
                       NULL);

        err = crypto_skcipher_encrypt(nreq);
        if (err)
            return err;
    }

    if (unlikely(!IS_ALIGNED((unsigned long)info,
                 crypto_aead_alignmask(geniv) + 1))) {
        info = kmemdup(req->iv, ivsize, req->base.flags &
                   CRYPTO_TFM_REQ_MAY_SLEEP ? GFP_KERNEL :
                   GFP_ATOMIC);
        if (!info)
            return -ENOMEM;

        compl = seqiv_aead_encrypt_complete;
        data = req;
    }

    aead_request_set_callback(subreq, req->base.flags, compl, data);
    aead_request_set_crypt(subreq, req->dst, req->dst,
                   req->cryptlen - ivsize, info);
    aead_request_set_ad(subreq, req->assoclen + ivsize);

    crypto_xor(info, ctx->salt, ivsize);
    scatterwalk_map_and_copy(info, req->dst, req->assoclen, ivsize, 1);

    err = crypto_aead_encrypt(subreq);
    if (unlikely(info != req->iv))
        seqiv_aead_encrypt_complete2(req, err);
    return err;
}

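/*
 * Decrypt: the 8-byte IV sits directly after the associated data in the
 * source buffer.  It is copied into req->iv and the remaining payload is
 * passed to the child AEAD, with the associated data plus the IV treated
 * as authenticated data.
 */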
static int seqiv_aead_decrypt(struct aead_request *req)
{
    struct crypto_aead *geniv = crypto_aead_reqtfm(req);
    struct aead_geniv_ctx *ctx = crypto_aead_ctx(geniv);
    struct aead_request *subreq = aead_request_ctx(req);
    crypto_completion_t compl;
    void *data;
    unsigned int ivsize = 8;

    if (req->cryptlen < ivsize + crypto_aead_authsize(geniv))
        return -EINVAL;

    aead_request_set_tfm(subreq, ctx->child);

    compl = req->base.complete;
    data = req->base.data;

    aead_request_set_callback(subreq, req->base.flags, compl, data);
    aead_request_set_crypt(subreq, req->src, req->dst,
                   req->cryptlen - ivsize, req->iv);
    aead_request_set_ad(subreq, req->assoclen + ivsize);

    scatterwalk_map_and_copy(req->iv, req->src, req->assoclen, ivsize, 0);

    return crypto_aead_decrypt(subreq);
}

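/*
 * Instantiate "seqiv(<aead>)" around an inner AEAD via the generic geniv
 * helpers.  Only a 64-bit IV is supported, and the instance context is
 * enlarged by ivsize bytes to hold the salt that aead_init_geniv() sets up.
 */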
static int seqiv_aead_create(struct crypto_template *tmpl, struct rtattr **tb)
{
    struct aead_instance *inst;
    int err;

    inst = aead_geniv_alloc(tmpl, tb);

    if (IS_ERR(inst))
        return PTR_ERR(inst);

    err = -EINVAL;
    if (inst->alg.ivsize != sizeof(u64))
        goto free_inst;

    inst->alg.encrypt = seqiv_aead_encrypt;
    inst->alg.decrypt = seqiv_aead_decrypt;

    inst->alg.init = aead_init_geniv;
    inst->alg.exit = aead_exit_geniv;

    inst->alg.base.cra_ctxsize = sizeof(struct aead_geniv_ctx);
    inst->alg.base.cra_ctxsize += inst->alg.ivsize;

    err = aead_register_instance(tmpl, inst);
    if (err) {
free_inst:
        inst->free(inst);
    }
    return err;
}

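/*
 * Template registration.  As an illustrative example (not taken from this
 * file), a user would typically obtain a seqiv-wrapped transform by name,
 * e.g. for IPsec ESP:
 *
 *     crypto_alloc_aead("seqiv(rfc4106(gcm(aes)))", 0, 0);
 *
 * which works because rfc4106 exposes the 8-byte IV that this generator
 * requires.
 */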
static struct crypto_template seqiv_tmpl = {
    .name = "seqiv",
    .create = seqiv_aead_create,
    .module = THIS_MODULE,
};

static int __init seqiv_module_init(void)
{
    return crypto_register_template(&seqiv_tmpl);
}

static void __exit seqiv_module_exit(void)
{
    crypto_unregister_template(&seqiv_tmpl);
}

subsys_initcall(seqiv_module_init);
module_exit(seqiv_module_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Sequence Number IV Generator");
MODULE_ALIAS_CRYPTO("seqiv");