Back to home page

LXR

 
 

    


0001 /*
0002  * CTR: Counter mode
0003  *
0004  * (C) Copyright IBM Corp. 2007 - Joy Latten <latten@us.ibm.com>
0005  *
0006  * This program is free software; you can redistribute it and/or modify it
0007  * under the terms of the GNU General Public License as published by the Free
0008  * Software Foundation; either version 2 of the License, or (at your option)
0009  * any later version.
0010  *
0011  */
0012 
0013 #include <crypto/algapi.h>
0014 #include <crypto/ctr.h>
0015 #include <crypto/internal/skcipher.h>
0016 #include <linux/err.h>
0017 #include <linux/init.h>
0018 #include <linux/kernel.h>
0019 #include <linux/module.h>
0020 #include <linux/random.h>
0021 #include <linux/scatterlist.h>
0022 #include <linux/slab.h>
0023 
/* tfm context for the plain "ctr" template: just the spawned block cipher. */
struct crypto_ctr_ctx {
	struct crypto_cipher *child;
};
0027 
/*
 * tfm context for "rfc3686": the inner skcipher plus the per-key nonce
 * that crypto_rfc3686_setkey() peels off the end of the supplied key.
 */
struct crypto_rfc3686_ctx {
	struct crypto_skcipher *child;
	u8 nonce[CTR_RFC3686_NONCE_SIZE];
};
0032 
/*
 * Per-request state for "rfc3686": the full counter block handed to the
 * inner ctr cipher, followed by the child subrequest (aligned so the
 * child's request context lands on a crypto-API-aligned boundary).
 */
struct crypto_rfc3686_req_ctx {
	u8 iv[CTR_RFC3686_BLOCK_SIZE];
	struct skcipher_request subreq CRYPTO_MINALIGN_ATTR;
};
0037 
0038 static int crypto_ctr_setkey(struct crypto_tfm *parent, const u8 *key,
0039                  unsigned int keylen)
0040 {
0041     struct crypto_ctr_ctx *ctx = crypto_tfm_ctx(parent);
0042     struct crypto_cipher *child = ctx->child;
0043     int err;
0044 
0045     crypto_cipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
0046     crypto_cipher_set_flags(child, crypto_tfm_get_flags(parent) &
0047                 CRYPTO_TFM_REQ_MASK);
0048     err = crypto_cipher_setkey(child, key, keylen);
0049     crypto_tfm_set_flags(parent, crypto_cipher_get_flags(child) &
0050                  CRYPTO_TFM_RES_MASK);
0051 
0052     return err;
0053 }
0054 
0055 static void crypto_ctr_crypt_final(struct blkcipher_walk *walk,
0056                    struct crypto_cipher *tfm)
0057 {
0058     unsigned int bsize = crypto_cipher_blocksize(tfm);
0059     unsigned long alignmask = crypto_cipher_alignmask(tfm);
0060     u8 *ctrblk = walk->iv;
0061     u8 tmp[bsize + alignmask];
0062     u8 *keystream = PTR_ALIGN(tmp + 0, alignmask + 1);
0063     u8 *src = walk->src.virt.addr;
0064     u8 *dst = walk->dst.virt.addr;
0065     unsigned int nbytes = walk->nbytes;
0066 
0067     crypto_cipher_encrypt_one(tfm, keystream, ctrblk);
0068     crypto_xor(keystream, src, nbytes);
0069     memcpy(dst, keystream, nbytes);
0070 
0071     crypto_inc(ctrblk, bsize);
0072 }
0073 
0074 static int crypto_ctr_crypt_segment(struct blkcipher_walk *walk,
0075                     struct crypto_cipher *tfm)
0076 {
0077     void (*fn)(struct crypto_tfm *, u8 *, const u8 *) =
0078            crypto_cipher_alg(tfm)->cia_encrypt;
0079     unsigned int bsize = crypto_cipher_blocksize(tfm);
0080     u8 *ctrblk = walk->iv;
0081     u8 *src = walk->src.virt.addr;
0082     u8 *dst = walk->dst.virt.addr;
0083     unsigned int nbytes = walk->nbytes;
0084 
0085     do {
0086         /* create keystream */
0087         fn(crypto_cipher_tfm(tfm), dst, ctrblk);
0088         crypto_xor(dst, src, bsize);
0089 
0090         /* increment counter in counterblock */
0091         crypto_inc(ctrblk, bsize);
0092 
0093         src += bsize;
0094         dst += bsize;
0095     } while ((nbytes -= bsize) >= bsize);
0096 
0097     return nbytes;
0098 }
0099 
0100 static int crypto_ctr_crypt_inplace(struct blkcipher_walk *walk,
0101                     struct crypto_cipher *tfm)
0102 {
0103     void (*fn)(struct crypto_tfm *, u8 *, const u8 *) =
0104            crypto_cipher_alg(tfm)->cia_encrypt;
0105     unsigned int bsize = crypto_cipher_blocksize(tfm);
0106     unsigned long alignmask = crypto_cipher_alignmask(tfm);
0107     unsigned int nbytes = walk->nbytes;
0108     u8 *ctrblk = walk->iv;
0109     u8 *src = walk->src.virt.addr;
0110     u8 tmp[bsize + alignmask];
0111     u8 *keystream = PTR_ALIGN(tmp + 0, alignmask + 1);
0112 
0113     do {
0114         /* create keystream */
0115         fn(crypto_cipher_tfm(tfm), keystream, ctrblk);
0116         crypto_xor(src, keystream, bsize);
0117 
0118         /* increment counter in counterblock */
0119         crypto_inc(ctrblk, bsize);
0120 
0121         src += bsize;
0122     } while ((nbytes -= bsize) >= bsize);
0123 
0124     return nbytes;
0125 }
0126 
/*
 * Top-level encrypt/decrypt for the "ctr" blkcipher.  CTR mode is its
 * own inverse, so this one routine serves both directions.
 *
 * Walks the scatterlists in virtually-mapped chunks of at least one
 * cipher block; full blocks go through the segment/in-place helpers and
 * any trailing partial block is finished by crypto_ctr_crypt_final().
 */
static int crypto_ctr_crypt(struct blkcipher_desc *desc,
			    struct scatterlist *dst, struct scatterlist *src,
			    unsigned int nbytes)
{
	struct blkcipher_walk walk;
	struct crypto_blkcipher *tfm = desc->tfm;
	struct crypto_ctr_ctx *ctx = crypto_blkcipher_ctx(tfm);
	struct crypto_cipher *child = ctx->child;
	unsigned int bsize = crypto_cipher_blocksize(child);
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt_block(desc, &walk, bsize);

	while (walk.nbytes >= bsize) {
		/* In-place and out-of-place chunks need different handling. */
		if (walk.src.virt.addr == walk.dst.virt.addr)
			nbytes = crypto_ctr_crypt_inplace(&walk, child);
		else
			nbytes = crypto_ctr_crypt_segment(&walk, child);

		/* Report leftover bytes; the walk may hand them back later. */
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}

	/* Final sub-block chunk, if any. */
	if (walk.nbytes) {
		crypto_ctr_crypt_final(&walk, child);
		err = blkcipher_walk_done(desc, &walk, 0);
	}

	return err;
}
0157 
0158 static int crypto_ctr_init_tfm(struct crypto_tfm *tfm)
0159 {
0160     struct crypto_instance *inst = (void *)tfm->__crt_alg;
0161     struct crypto_spawn *spawn = crypto_instance_ctx(inst);
0162     struct crypto_ctr_ctx *ctx = crypto_tfm_ctx(tfm);
0163     struct crypto_cipher *cipher;
0164 
0165     cipher = crypto_spawn_cipher(spawn);
0166     if (IS_ERR(cipher))
0167         return PTR_ERR(cipher);
0168 
0169     ctx->child = cipher;
0170 
0171     return 0;
0172 }
0173 
/* tfm destructor: drop the child cipher from crypto_ctr_init_tfm(). */
static void crypto_ctr_exit_tfm(struct crypto_tfm *tfm)
{
	struct crypto_ctr_ctx *ctx = crypto_tfm_ctx(tfm);

	crypto_free_cipher(ctx->child);
}
0180 
/*
 * Template constructor for "ctr(cipher)".  Wraps a simple block cipher
 * as a blkcipher that behaves like a stream cipher (cra_blocksize = 1)
 * with an IV the size of the underlying cipher's block.
 */
static struct crypto_instance *crypto_ctr_alloc(struct rtattr **tb)
{
	struct crypto_instance *inst;
	struct crypto_alg *alg;
	int err;

	err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_BLKCIPHER);
	if (err)
		return ERR_PTR(err);

	alg = crypto_attr_alg(tb[1], CRYPTO_ALG_TYPE_CIPHER,
			      CRYPTO_ALG_TYPE_MASK);
	if (IS_ERR(alg))
		return ERR_CAST(alg);

	/* Block size must be >= 4 bytes. */
	err = -EINVAL;
	if (alg->cra_blocksize < 4)
		goto out_put_alg;

	/* If this is false we'd fail the alignment of crypto_inc. */
	if (alg->cra_blocksize % 4)
		goto out_put_alg;

	inst = crypto_alloc_instance("ctr", alg);
	if (IS_ERR(inst))
		goto out;

	inst->alg.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER;
	inst->alg.cra_priority = alg->cra_priority;
	/* Stream-cipher semantics: any length is valid. */
	inst->alg.cra_blocksize = 1;
	/* crypto_inc works on u32 words, so force at least u32 alignment. */
	inst->alg.cra_alignmask = alg->cra_alignmask | (__alignof__(u32) - 1);
	inst->alg.cra_type = &crypto_blkcipher_type;

	/* The IV is the full counter block of the wrapped cipher. */
	inst->alg.cra_blkcipher.ivsize = alg->cra_blocksize;
	inst->alg.cra_blkcipher.min_keysize = alg->cra_cipher.cia_min_keysize;
	inst->alg.cra_blkcipher.max_keysize = alg->cra_cipher.cia_max_keysize;

	inst->alg.cra_ctxsize = sizeof(struct crypto_ctr_ctx);

	inst->alg.cra_init = crypto_ctr_init_tfm;
	inst->alg.cra_exit = crypto_ctr_exit_tfm;

	inst->alg.cra_blkcipher.setkey = crypto_ctr_setkey;
	/* CTR is its own inverse: one routine for both directions. */
	inst->alg.cra_blkcipher.encrypt = crypto_ctr_crypt;
	inst->alg.cra_blkcipher.decrypt = crypto_ctr_crypt;

	inst->alg.cra_blkcipher.geniv = "chainiv";

out:
	/* crypto_alloc_instance() holds its own ref; drop ours either way. */
	crypto_mod_put(alg);
	return inst;

out_put_alg:
	inst = ERR_PTR(err);
	goto out;
}
0238 
/* Instance destructor: release the cipher spawn, then the instance. */
static void crypto_ctr_free(struct crypto_instance *inst)
{
	crypto_drop_spawn(crypto_instance_ctx(inst));
	kfree(inst);
}
0244 
/* The "ctr" template, registered at module init. */
static struct crypto_template crypto_ctr_tmpl = {
	.name = "ctr",
	.alloc = crypto_ctr_alloc,
	.free = crypto_ctr_free,
	.module = THIS_MODULE,
};
0251 
0252 static int crypto_rfc3686_setkey(struct crypto_skcipher *parent,
0253                  const u8 *key, unsigned int keylen)
0254 {
0255     struct crypto_rfc3686_ctx *ctx = crypto_skcipher_ctx(parent);
0256     struct crypto_skcipher *child = ctx->child;
0257     int err;
0258 
0259     /* the nonce is stored in bytes at end of key */
0260     if (keylen < CTR_RFC3686_NONCE_SIZE)
0261         return -EINVAL;
0262 
0263     memcpy(ctx->nonce, key + (keylen - CTR_RFC3686_NONCE_SIZE),
0264            CTR_RFC3686_NONCE_SIZE);
0265 
0266     keylen -= CTR_RFC3686_NONCE_SIZE;
0267 
0268     crypto_skcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
0269     crypto_skcipher_set_flags(child, crypto_skcipher_get_flags(parent) &
0270                      CRYPTO_TFM_REQ_MASK);
0271     err = crypto_skcipher_setkey(child, key, keylen);
0272     crypto_skcipher_set_flags(parent, crypto_skcipher_get_flags(child) &
0273                       CRYPTO_TFM_RES_MASK);
0274 
0275     return err;
0276 }
0277 
/*
 * Encrypt/decrypt for "rfc3686" (one routine, CTR is its own inverse).
 *
 * Builds the full 16-byte counter block -- nonce (4) || per-request IV
 * (8) || block counter (4, big-endian, starting at 1) -- and forwards
 * the request to the inner ctr skcipher with that block as its IV.
 */
static int crypto_rfc3686_crypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_rfc3686_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_skcipher *child = ctx->child;
	unsigned long align = crypto_skcipher_alignmask(tfm);
	/* Request ctx was over-allocated in init_tfm to allow this align-up. */
	struct crypto_rfc3686_req_ctx *rctx =
		(void *)PTR_ALIGN((u8 *)skcipher_request_ctx(req), align + 1);
	struct skcipher_request *subreq = &rctx->subreq;
	u8 *iv = rctx->iv;

	/* set up counter block */
	memcpy(iv, ctx->nonce, CTR_RFC3686_NONCE_SIZE);
	memcpy(iv + CTR_RFC3686_NONCE_SIZE, req->iv, CTR_RFC3686_IV_SIZE);

	/* initialize counter portion of counter block */
	*(__be32 *)(iv + CTR_RFC3686_NONCE_SIZE + CTR_RFC3686_IV_SIZE) =
		cpu_to_be32(1);

	/* Reuse the caller's completion so async children report through. */
	skcipher_request_set_tfm(subreq, child);
	skcipher_request_set_callback(subreq, req->base.flags,
				      req->base.complete, req->base.data);
	skcipher_request_set_crypt(subreq, req->src, req->dst,
				   req->cryptlen, iv);

	return crypto_skcipher_encrypt(subreq);
}
0305 
/*
 * tfm constructor: instantiate the inner ctr skcipher and size our
 * request context to hold an aligned crypto_rfc3686_req_ctx plus the
 * child's own request context.
 */
static int crypto_rfc3686_init_tfm(struct crypto_skcipher *tfm)
{
	struct skcipher_instance *inst = skcipher_alg_instance(tfm);
	struct crypto_skcipher_spawn *spawn = skcipher_instance_ctx(inst);
	struct crypto_rfc3686_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_skcipher *cipher;
	unsigned long align;
	unsigned int reqsize;

	cipher = crypto_spawn_skcipher(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = cipher;

	/*
	 * Extra slack beyond the ctx alignment the API already provides,
	 * so crypto_rfc3686_crypt() can PTR_ALIGN the request context.
	 */
	align = crypto_skcipher_alignmask(tfm);
	align &= ~(crypto_tfm_ctx_alignment() - 1);
	reqsize = align + sizeof(struct crypto_rfc3686_req_ctx) +
		  crypto_skcipher_reqsize(cipher);
	crypto_skcipher_set_reqsize(tfm, reqsize);

	return 0;
}
0329 
/* tfm destructor: drop the inner skcipher from init_tfm(). */
static void crypto_rfc3686_exit_tfm(struct crypto_skcipher *tfm)
{
	struct crypto_rfc3686_ctx *ctx = crypto_skcipher_ctx(tfm);

	crypto_free_skcipher(ctx->child);
}
0336 
/* Instance destructor: release the skcipher spawn, then the instance. */
static void crypto_rfc3686_free(struct skcipher_instance *inst)
{
	crypto_drop_skcipher(skcipher_instance_ctx(inst));
	kfree(inst);
}
0344 
/*
 * Template constructor for "rfc3686(ctr-alg)".  Wraps an skcipher whose
 * IV is a full 16-byte counter block, exposing the RFC 3686 convention:
 * an 8-byte per-request IV plus a 4-byte nonce carried in the key.
 */
static int crypto_rfc3686_create(struct crypto_template *tmpl,
				 struct rtattr **tb)
{
	struct crypto_attr_type *algt;
	struct skcipher_instance *inst;
	struct skcipher_alg *alg;
	struct crypto_skcipher_spawn *spawn;
	const char *cipher_name;
	int err;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	/* Caller must be asking for an skcipher. */
	if ((algt->type ^ CRYPTO_ALG_TYPE_SKCIPHER) & algt->mask)
		return -EINVAL;

	cipher_name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(cipher_name))
		return PTR_ERR(cipher_name);

	/* Spawn lives in the instance context, right after the instance. */
	inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;

	spawn = skcipher_instance_ctx(inst);

	crypto_set_skcipher_spawn(spawn, skcipher_crypto_instance(inst));
	err = crypto_grab_skcipher(spawn, cipher_name, 0,
				   crypto_requires_sync(algt->type,
							algt->mask));
	if (err)
		goto err_free_inst;

	alg = crypto_spawn_skcipher_alg(spawn);

	/* We only support 16-byte blocks. */
	err = -EINVAL;
	if (crypto_skcipher_alg_ivsize(alg) != CTR_RFC3686_BLOCK_SIZE)
		goto err_drop_spawn;

	/* Not a stream cipher? */
	if (alg->base.cra_blocksize != 1)
		goto err_drop_spawn;

	err = -ENAMETOOLONG;
	if (snprintf(inst->alg.base.cra_name, CRYPTO_MAX_ALG_NAME,
		     "rfc3686(%s)", alg->base.cra_name) >= CRYPTO_MAX_ALG_NAME)
		goto err_drop_spawn;
	if (snprintf(inst->alg.base.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "rfc3686(%s)", alg->base.cra_driver_name) >=
	    CRYPTO_MAX_ALG_NAME)
		goto err_drop_spawn;

	inst->alg.base.cra_priority = alg->base.cra_priority;
	inst->alg.base.cra_blocksize = 1;
	inst->alg.base.cra_alignmask = alg->base.cra_alignmask;

	/* Inherit only the async flag from the inner algorithm. */
	inst->alg.base.cra_flags = alg->base.cra_flags & CRYPTO_ALG_ASYNC;

	/* Callers supply 8 IV bytes; the rest of the counter block is ours. */
	inst->alg.ivsize = CTR_RFC3686_IV_SIZE;
	inst->alg.chunksize = crypto_skcipher_alg_chunksize(alg);
	/* Key sizes grow by the nonce the key carries at its end. */
	inst->alg.min_keysize = crypto_skcipher_alg_min_keysize(alg) +
				CTR_RFC3686_NONCE_SIZE;
	inst->alg.max_keysize = crypto_skcipher_alg_max_keysize(alg) +
				CTR_RFC3686_NONCE_SIZE;

	inst->alg.setkey = crypto_rfc3686_setkey;
	inst->alg.encrypt = crypto_rfc3686_crypt;
	inst->alg.decrypt = crypto_rfc3686_crypt;

	inst->alg.base.cra_ctxsize = sizeof(struct crypto_rfc3686_ctx);

	inst->alg.init = crypto_rfc3686_init_tfm;
	inst->alg.exit = crypto_rfc3686_exit_tfm;

	inst->free = crypto_rfc3686_free;

	err = skcipher_register_instance(tmpl, inst);
	if (err)
		goto err_drop_spawn;

out:
	return err;

err_drop_spawn:
	crypto_drop_skcipher(spawn);
err_free_inst:
	kfree(inst);
	goto out;
}
0436 
/* The "rfc3686" template, registered at module init. */
static struct crypto_template crypto_rfc3686_tmpl = {
	.name = "rfc3686",
	.create = crypto_rfc3686_create,
	.module = THIS_MODULE,
};
0442 
0443 static int __init crypto_ctr_module_init(void)
0444 {
0445     int err;
0446 
0447     err = crypto_register_template(&crypto_ctr_tmpl);
0448     if (err)
0449         goto out;
0450 
0451     err = crypto_register_template(&crypto_rfc3686_tmpl);
0452     if (err)
0453         goto out_drop_ctr;
0454 
0455 out:
0456     return err;
0457 
0458 out_drop_ctr:
0459     crypto_unregister_template(&crypto_ctr_tmpl);
0460     goto out;
0461 }
0462 
/* Unregister the templates in reverse order of registration. */
static void __exit crypto_ctr_module_exit(void)
{
	crypto_unregister_template(&crypto_rfc3686_tmpl);
	crypto_unregister_template(&crypto_ctr_tmpl);
}
0468 
module_init(crypto_ctr_module_init);
module_exit(crypto_ctr_module_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("CTR Counter block mode");
/* Allow auto-loading when "ctr" or "rfc3686" algorithms are requested. */
MODULE_ALIAS_CRYPTO("rfc3686");
MODULE_ALIAS_CRYPTO("ctr");