0001 /*
0002  * seqiv: Sequence Number IV Generator
0003  *
0004  * This generator generates an IV based on a sequence number by xoring it
0005  * with a salt.  This algorithm is mainly useful for CTR and similar modes.
0006  *
0007  * Copyright (c) 2007 Herbert Xu <herbert@gondor.apana.org.au>
0008  *
0009  * This program is free software; you can redistribute it and/or modify it
0010  * under the terms of the GNU General Public License as published by the Free
0011  * Software Foundation; either version 2 of the License, or (at your option)
0012  * any later version.
0013  *
0014  */
0015 
0016 #include <crypto/internal/geniv.h>
0017 #include <crypto/scatterwalk.h>
0018 #include <crypto/skcipher.h>
0019 #include <linux/err.h>
0020 #include <linux/init.h>
0021 #include <linux/kernel.h>
0022 #include <linux/module.h>
0023 #include <linux/slab.h>
0024 #include <linux/string.h>
0025 
0026 static void seqiv_free(struct crypto_instance *inst);
0027 
0028 static void seqiv_aead_encrypt_complete2(struct aead_request *req, int err)
0029 {
0030     struct aead_request *subreq = aead_request_ctx(req);
0031     struct crypto_aead *geniv;
0032 
0033     if (err == -EINPROGRESS)
0034         return;
0035 
0036     if (err)
0037         goto out;
0038 
0039     geniv = crypto_aead_reqtfm(req);
0040     memcpy(req->iv, subreq->iv, crypto_aead_ivsize(geniv));
0041 
0042 out:
0043     kzfree(subreq->iv);
0044 }
0045 
0046 static void seqiv_aead_encrypt_complete(struct crypto_async_request *base,
0047                     int err)
0048 {
0049     struct aead_request *req = base->data;
0050 
0051     seqiv_aead_encrypt_complete2(req, err);
0052     aead_request_complete(req, err);
0053 }
0054 
/*
 * Encrypt with a sequence-number-derived IV.
 *
 * The 8-byte IV is formed by xoring the caller-supplied sequence number
 * (req->iv) with the per-tfm salt, written in front of the payload in
 * the destination buffer, and folded into the associated data of the
 * inner AEAD request.  Returns 0 or a negative errno; may complete
 * asynchronously via the registered callback.
 */
static int seqiv_aead_encrypt(struct aead_request *req)
{
    struct crypto_aead *geniv = crypto_aead_reqtfm(req);
    struct aead_geniv_ctx *ctx = crypto_aead_ctx(geniv);
    struct aead_request *subreq = aead_request_ctx(req);
    crypto_completion_t compl;
    void *data;
    u8 *info;
    unsigned int ivsize = 8;	/* seqiv IVs are always 64 bits */
    int err;

    /* The output must have room for the IV in front of the payload. */
    if (req->cryptlen < ivsize)
        return -EINVAL;

    aead_request_set_tfm(subreq, ctx->child);

    compl = req->base.complete;
    data = req->base.data;
    info = req->iv;

    if (req->src != req->dst) {
        /* Not in-place: copy assoc data + plaintext from src to dst
         * with the null cipher, then encrypt dst in place below. */
        SKCIPHER_REQUEST_ON_STACK(nreq, ctx->sknull);

        skcipher_request_set_tfm(nreq, ctx->sknull);
        skcipher_request_set_callback(nreq, req->base.flags,
                          NULL, NULL);
        skcipher_request_set_crypt(nreq, req->src, req->dst,
                       req->assoclen + req->cryptlen,
                       NULL);

        err = crypto_skcipher_encrypt(nreq);
        if (err)
            return err;
    }

    if (unlikely(!IS_ALIGNED((unsigned long)info,
                 crypto_aead_alignmask(geniv) + 1))) {
        /* req->iv is not aligned for the child transform: bounce
         * through a kmalloc'd buffer and swap in the completion
         * handler that copies the IV back and frees the buffer. */
        info = kmalloc(ivsize, req->base.flags &
                       CRYPTO_TFM_REQ_MAY_SLEEP ? GFP_KERNEL:
                                  GFP_ATOMIC);
        if (!info)
            return -ENOMEM;

        memcpy(info, req->iv, ivsize);
        compl = seqiv_aead_encrypt_complete;
        data = req;
    }

    aead_request_set_callback(subreq, req->base.flags, compl, data);
    aead_request_set_crypt(subreq, req->dst, req->dst,
                   req->cryptlen - ivsize, info);
    aead_request_set_ad(subreq, req->assoclen + ivsize);

    /* IV = sequence number XOR salt; store it ahead of the payload. */
    crypto_xor(info, ctx->salt, ivsize);
    scatterwalk_map_and_copy(info, req->dst, req->assoclen, ivsize, 1);

    err = crypto_aead_encrypt(subreq);
    /* Synchronous finish with a bounce buffer: run the epilogue now. */
    if (unlikely(info != req->iv))
        seqiv_aead_encrypt_complete2(req, err);
    return err;
}
0116 
0117 static int seqiv_aead_decrypt(struct aead_request *req)
0118 {
0119     struct crypto_aead *geniv = crypto_aead_reqtfm(req);
0120     struct aead_geniv_ctx *ctx = crypto_aead_ctx(geniv);
0121     struct aead_request *subreq = aead_request_ctx(req);
0122     crypto_completion_t compl;
0123     void *data;
0124     unsigned int ivsize = 8;
0125 
0126     if (req->cryptlen < ivsize + crypto_aead_authsize(geniv))
0127         return -EINVAL;
0128 
0129     aead_request_set_tfm(subreq, ctx->child);
0130 
0131     compl = req->base.complete;
0132     data = req->base.data;
0133 
0134     aead_request_set_callback(subreq, req->base.flags, compl, data);
0135     aead_request_set_crypt(subreq, req->src, req->dst,
0136                    req->cryptlen - ivsize, req->iv);
0137     aead_request_set_ad(subreq, req->assoclen + ivsize);
0138 
0139     scatterwalk_map_and_copy(req->iv, req->src, req->assoclen, ivsize, 0);
0140 
0141     return crypto_aead_decrypt(subreq);
0142 }
0143 
0144 static int seqiv_aead_create(struct crypto_template *tmpl, struct rtattr **tb)
0145 {
0146     struct aead_instance *inst;
0147     struct crypto_aead_spawn *spawn;
0148     struct aead_alg *alg;
0149     int err;
0150 
0151     inst = aead_geniv_alloc(tmpl, tb, 0, 0);
0152 
0153     if (IS_ERR(inst))
0154         return PTR_ERR(inst);
0155 
0156     inst->alg.base.cra_alignmask |= __alignof__(u32) - 1;
0157 
0158     spawn = aead_instance_ctx(inst);
0159     alg = crypto_spawn_aead_alg(spawn);
0160 
0161     err = -EINVAL;
0162     if (inst->alg.ivsize != sizeof(u64))
0163         goto free_inst;
0164 
0165     inst->alg.encrypt = seqiv_aead_encrypt;
0166     inst->alg.decrypt = seqiv_aead_decrypt;
0167 
0168     inst->alg.init = aead_init_geniv;
0169     inst->alg.exit = aead_exit_geniv;
0170 
0171     inst->alg.base.cra_ctxsize = sizeof(struct aead_geniv_ctx);
0172     inst->alg.base.cra_ctxsize += inst->alg.ivsize;
0173 
0174     err = aead_register_instance(tmpl, inst);
0175     if (err)
0176         goto free_inst;
0177 
0178 out:
0179     return err;
0180 
0181 free_inst:
0182     aead_geniv_free(inst);
0183     goto out;
0184 }
0185 
0186 static int seqiv_create(struct crypto_template *tmpl, struct rtattr **tb)
0187 {
0188     struct crypto_attr_type *algt;
0189 
0190     algt = crypto_get_attr_type(tb);
0191     if (IS_ERR(algt))
0192         return PTR_ERR(algt);
0193 
0194     if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & CRYPTO_ALG_TYPE_MASK)
0195         return -EINVAL;
0196 
0197     return seqiv_aead_create(tmpl, tb);
0198 }
0199 
/* Release a seqiv instance through the generic geniv destructor. */
static void seqiv_free(struct crypto_instance *inst)
{
    struct aead_instance *aead = aead_instance(inst);

    aead_geniv_free(aead);
}
0204 
/* Template descriptor registered with the crypto core as "seqiv". */
static struct crypto_template seqiv_tmpl = {
    .name = "seqiv",
    .create = seqiv_create,
    .free = seqiv_free,
    .module = THIS_MODULE,
};
0211 
0212 static int __init seqiv_module_init(void)
0213 {
0214     return crypto_register_template(&seqiv_tmpl);
0215 }
0216 
/* Unregister the "seqiv" template on module unload. */
static void __exit seqiv_module_exit(void)
{
    crypto_unregister_template(&seqiv_tmpl);
}
0221 
0222 module_init(seqiv_module_init);
0223 module_exit(seqiv_module_exit);
0224 
0225 MODULE_LICENSE("GPL");
0226 MODULE_DESCRIPTION("Sequence Number IV Generator");
0227 MODULE_ALIAS_CRYPTO("seqiv");