/* (extraction note: LXR web-viewer navigation chrome removed from this copy) */
0001 /* XTS: as defined in IEEE1619/D16
0002  *  http://grouper.ieee.org/groups/1619/email/pdf00086.pdf
0003  *  (sector sizes which are not a multiple of 16 bytes are,
0004  *  however currently unsupported)
0005  *
0006  * Copyright (c) 2007 Rik Snel <rsnel@cube.dyndns.org>
0007  *
0008  * Based on ecb.c
0009  * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
0010  *
0011  * This program is free software; you can redistribute it and/or modify it
0012  * under the terms of the GNU General Public License as published by the Free
0013  * Software Foundation; either version 2 of the License, or (at your option)
0014  * any later version.
0015  */
0016 #include <crypto/internal/skcipher.h>
0017 #include <crypto/scatterwalk.h>
0018 #include <linux/err.h>
0019 #include <linux/init.h>
0020 #include <linux/kernel.h>
0021 #include <linux/module.h>
0022 #include <linux/scatterlist.h>
0023 #include <linux/slab.h>
0024 
0025 #include <crypto/xts.h>
0026 #include <crypto/b128ops.h>
0027 #include <crypto/gf128mul.h>
0028 
0029 #define XTS_BUFFER_SIZE 128u
0030 
/* Per-tfm context: the two keyed transforms XTS needs. */
struct priv {
    struct crypto_skcipher *child;  /* data path (Key1); ECB child used by encrypt/decrypt */
    struct crypto_cipher *tweak;    /* tweak path (Key2); encrypts the IV into the first T */
};
0035 
/* Per-instance context created by create(). */
struct xts_instance_ctx {
    struct crypto_skcipher_spawn spawn;  /* grabbed ECB child algorithm */
    char name[CRYPTO_MAX_ALG_NAME];      /* bare cipher name, used to allocate the tweak cipher in init_tfm() */
};
0040 
/* Per-request state, kept in the skcipher request context. */
struct rctx {
    /* on-stack stash of tweak values, one per block of the current chunk */
    be128 buf[XTS_BUFFER_SIZE / sizeof(be128)];

    /* running tweak value T, carried across chunks */
    be128 t;

    /* heap stash of tweaks for chunks larger than buf, or NULL */
    be128 *ext;

    /* chained scatterlist heads used to resume a partially-walked request */
    struct scatterlist srcbuf[2];
    struct scatterlist dstbuf[2];
    struct scatterlist *src;
    struct scatterlist *dst;

    /* bytes of the request not yet processed */
    unsigned int left;

    /* sub-request issued to the ECB child; kept last — the child's own
     * request context appears to live in the tail space reserved by
     * init_tfm()'s reqsize calculation (NOTE(review): confirm) */
    struct skcipher_request subreq;
};
0057 
0058 static int setkey(struct crypto_skcipher *parent, const u8 *key,
0059           unsigned int keylen)
0060 {
0061     struct priv *ctx = crypto_skcipher_ctx(parent);
0062     struct crypto_skcipher *child;
0063     struct crypto_cipher *tweak;
0064     int err;
0065 
0066     err = xts_verify_key(parent, key, keylen);
0067     if (err)
0068         return err;
0069 
0070     keylen /= 2;
0071 
0072     /* we need two cipher instances: one to compute the initial 'tweak'
0073      * by encrypting the IV (usually the 'plain' iv) and the other
0074      * one to encrypt and decrypt the data */
0075 
0076     /* tweak cipher, uses Key2 i.e. the second half of *key */
0077     tweak = ctx->tweak;
0078     crypto_cipher_clear_flags(tweak, CRYPTO_TFM_REQ_MASK);
0079     crypto_cipher_set_flags(tweak, crypto_skcipher_get_flags(parent) &
0080                        CRYPTO_TFM_REQ_MASK);
0081     err = crypto_cipher_setkey(tweak, key + keylen, keylen);
0082     crypto_skcipher_set_flags(parent, crypto_cipher_get_flags(tweak) &
0083                       CRYPTO_TFM_RES_MASK);
0084     if (err)
0085         return err;
0086 
0087     /* data cipher, uses Key1 i.e. the first half of *key */
0088     child = ctx->child;
0089     crypto_skcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
0090     crypto_skcipher_set_flags(child, crypto_skcipher_get_flags(parent) &
0091                      CRYPTO_TFM_REQ_MASK);
0092     err = crypto_skcipher_setkey(child, key, keylen);
0093     crypto_skcipher_set_flags(parent, crypto_skcipher_get_flags(child) &
0094                       CRYPTO_TFM_RES_MASK);
0095 
0096     return err;
0097 }
0098 
/*
 * Second half of one chunk: XOR the child cipher's output with the tweak
 * values stashed by pre_crypt() (C = CC xor T).  If more data remains,
 * rebuild rctx->dst so the next chunk resumes at the current scatterlist
 * position.
 */
static int post_crypt(struct skcipher_request *req)
{
    struct rctx *rctx = skcipher_request_ctx(req);
    be128 *buf = rctx->ext ?: rctx->buf;  /* stashed per-block tweaks */
    struct skcipher_request *subreq;
    const int bs = XTS_BLOCK_SIZE;
    struct skcipher_walk w;
    struct scatterlist *sg;
    unsigned offset;
    int err;

    subreq = &rctx->subreq;
    err = skcipher_walk_virt(&w, subreq, false);

    while (w.nbytes) {
        unsigned int avail = w.nbytes;
        be128 *wdst;

        wdst = w.dst.virt.addr;

        do {
            /* C <- CC xor T */
            be128_xor(wdst, buf++, wdst);
            wdst++;
        } while ((avail -= bs) >= bs);

        err = skcipher_walk_done(&w, avail);
    }

    rctx->left -= subreq->cryptlen;

    if (err || !rctx->left)
        goto out;

    /* more data pending: re-point dst at the unprocessed remainder */
    rctx->dst = rctx->dstbuf;

    scatterwalk_done(&w.out, 0, 1);
    sg = w.out.sg;
    offset = w.out.offset;

    /* clone the current sg entry and chain the rest behind it, then
     * trim it to start at the resume offset */
    if (rctx->dst != sg) {
        rctx->dst[0] = *sg;
        sg_unmark_end(rctx->dst);
        scatterwalk_crypto_chain(rctx->dst, sg_next(sg), 0, 2);
    }
    rctx->dst[0].length -= offset - sg->offset;
    rctx->dst[0].offset = offset;

out:
    return err;
}
0149 
/*
 * First half of one chunk: for each block, stash the current tweak T,
 * compute PP = P xor T into dst, and advance T by multiplying by x in
 * GF(2^128).  Afterwards the subreq is set up dst->dst so the child ECB
 * cipher transforms PP in place; post_crypt() then applies the final
 * xor.  If more data remains, rctx->src is rebuilt to resume the walk.
 */
static int pre_crypt(struct skcipher_request *req)
{
    struct rctx *rctx = skcipher_request_ctx(req);
    be128 *buf = rctx->ext ?: rctx->buf;  /* per-block tweak stash */
    struct skcipher_request *subreq;
    const int bs = XTS_BLOCK_SIZE;
    struct skcipher_walk w;
    struct scatterlist *sg;
    unsigned cryptlen;
    unsigned offset;
    bool more;
    int err;

    subreq = &rctx->subreq;
    cryptlen = subreq->cryptlen;

    /* clamp the last chunk to whatever is left of the request */
    more = rctx->left > cryptlen;
    if (!more)
        cryptlen = rctx->left;

    skcipher_request_set_crypt(subreq, rctx->src, rctx->dst,
                   cryptlen, NULL);

    err = skcipher_walk_virt(&w, subreq, false);

    while (w.nbytes) {
        unsigned int avail = w.nbytes;
        be128 *wsrc;
        be128 *wdst;

        wsrc = w.src.virt.addr;
        wdst = w.dst.virt.addr;

        do {
            /* stash T, PP <- P xor T, T <- T * x */
            *buf++ = rctx->t;
            be128_xor(wdst++, &rctx->t, wsrc++);
            gf128mul_x_ble(&rctx->t, &rctx->t);
        } while ((avail -= bs) >= bs);

        err = skcipher_walk_done(&w, avail);
    }

    /* child cipher works on dst in place */
    skcipher_request_set_crypt(subreq, rctx->dst, rctx->dst,
                   cryptlen, NULL);

    if (err || !more)
        goto out;

    /* more data pending: re-point src at the unprocessed remainder */
    rctx->src = rctx->srcbuf;

    scatterwalk_done(&w.in, 0, 1);
    sg = w.in.sg;
    offset = w.in.offset;

    if (rctx->src != sg) {
        rctx->src[0] = *sg;
        sg_unmark_end(rctx->src);
        scatterwalk_crypto_chain(rctx->src, sg_next(sg), 0, 2);
    }
    rctx->src[0].length -= offset - sg->offset;
    rctx->src[0].offset = offset;

out:
    return err;
}
0215 
0216 static int init_crypt(struct skcipher_request *req, crypto_completion_t done)
0217 {
0218     struct priv *ctx = crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));
0219     struct rctx *rctx = skcipher_request_ctx(req);
0220     struct skcipher_request *subreq;
0221     gfp_t gfp;
0222 
0223     subreq = &rctx->subreq;
0224     skcipher_request_set_tfm(subreq, ctx->child);
0225     skcipher_request_set_callback(subreq, req->base.flags, done, req);
0226 
0227     gfp = req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP ? GFP_KERNEL :
0228                                GFP_ATOMIC;
0229     rctx->ext = NULL;
0230 
0231     subreq->cryptlen = XTS_BUFFER_SIZE;
0232     if (req->cryptlen > XTS_BUFFER_SIZE) {
0233         subreq->cryptlen = min(req->cryptlen, (unsigned)PAGE_SIZE);
0234         rctx->ext = kmalloc(subreq->cryptlen, gfp);
0235     }
0236 
0237     rctx->src = req->src;
0238     rctx->dst = req->dst;
0239     rctx->left = req->cryptlen;
0240 
0241     /* calculate first value of T */
0242     crypto_cipher_encrypt_one(ctx->tweak, (u8 *)&rctx->t, req->iv);
0243 
0244     return 0;
0245 }
0246 
0247 static void exit_crypt(struct skcipher_request *req)
0248 {
0249     struct rctx *rctx = skcipher_request_ctx(req);
0250 
0251     rctx->left = 0;
0252 
0253     if (rctx->ext)
0254         kzfree(rctx->ext);
0255 }
0256 
0257 static int do_encrypt(struct skcipher_request *req, int err)
0258 {
0259     struct rctx *rctx = skcipher_request_ctx(req);
0260     struct skcipher_request *subreq;
0261 
0262     subreq = &rctx->subreq;
0263 
0264     while (!err && rctx->left) {
0265         err = pre_crypt(req) ?:
0266               crypto_skcipher_encrypt(subreq) ?:
0267               post_crypt(req);
0268 
0269         if (err == -EINPROGRESS ||
0270             (err == -EBUSY &&
0271              req->base.flags & CRYPTO_TFM_REQ_MAY_BACKLOG))
0272             return err;
0273     }
0274 
0275     exit_crypt(req);
0276     return err;
0277 }
0278 
0279 static void encrypt_done(struct crypto_async_request *areq, int err)
0280 {
0281     struct skcipher_request *req = areq->data;
0282     struct skcipher_request *subreq;
0283     struct rctx *rctx;
0284 
0285     rctx = skcipher_request_ctx(req);
0286     subreq = &rctx->subreq;
0287     subreq->base.flags &= CRYPTO_TFM_REQ_MAY_BACKLOG;
0288 
0289     err = do_encrypt(req, err ?: post_crypt(req));
0290     if (rctx->left)
0291         return;
0292 
0293     skcipher_request_complete(req, err);
0294 }
0295 
0296 static int encrypt(struct skcipher_request *req)
0297 {
0298     return do_encrypt(req, init_crypt(req, encrypt_done));
0299 }
0300 
0301 static int do_decrypt(struct skcipher_request *req, int err)
0302 {
0303     struct rctx *rctx = skcipher_request_ctx(req);
0304     struct skcipher_request *subreq;
0305 
0306     subreq = &rctx->subreq;
0307 
0308     while (!err && rctx->left) {
0309         err = pre_crypt(req) ?:
0310               crypto_skcipher_decrypt(subreq) ?:
0311               post_crypt(req);
0312 
0313         if (err == -EINPROGRESS ||
0314             (err == -EBUSY &&
0315              req->base.flags & CRYPTO_TFM_REQ_MAY_BACKLOG))
0316             return err;
0317     }
0318 
0319     exit_crypt(req);
0320     return err;
0321 }
0322 
0323 static void decrypt_done(struct crypto_async_request *areq, int err)
0324 {
0325     struct skcipher_request *req = areq->data;
0326     struct skcipher_request *subreq;
0327     struct rctx *rctx;
0328 
0329     rctx = skcipher_request_ctx(req);
0330     subreq = &rctx->subreq;
0331     subreq->base.flags &= CRYPTO_TFM_REQ_MAY_BACKLOG;
0332 
0333     err = do_decrypt(req, err ?: post_crypt(req));
0334     if (rctx->left)
0335         return;
0336 
0337     skcipher_request_complete(req, err);
0338 }
0339 
0340 static int decrypt(struct skcipher_request *req)
0341 {
0342     return do_decrypt(req, init_crypt(req, decrypt_done));
0343 }
0344 
/*
 * Legacy blkcipher helper exported for drivers that implement XTS on
 * top of their own single-block cipher callbacks (req->tweak_fn /
 * req->crypt_fn).  Processes up to req->tbuflen / 16 blocks at a time,
 * precomputing the tweak sequence into req->tbuf.
 *
 * Note the goto into the loop: the first iteration skips the tweak
 * multiplication because t_buf[0] already holds the initial T.
 */
int xts_crypt(struct blkcipher_desc *desc, struct scatterlist *sdst,
          struct scatterlist *ssrc, unsigned int nbytes,
          struct xts_crypt_req *req)
{
    const unsigned int bsize = XTS_BLOCK_SIZE;
    const unsigned int max_blks = req->tbuflen / bsize;
    struct blkcipher_walk walk;
    unsigned int nblocks;
    be128 *src, *dst, *t;
    be128 *t_buf = req->tbuf;
    int err, i;

    /* the caller must supply room for at least one tweak */
    BUG_ON(max_blks < 1);

    blkcipher_walk_init(&walk, sdst, ssrc, nbytes);

    err = blkcipher_walk_virt(desc, &walk);
    nbytes = walk.nbytes;
    if (!nbytes)
        return err;

    nblocks = min(nbytes / bsize, max_blks);
    src = (be128 *)walk.src.virt.addr;
    dst = (be128 *)walk.dst.virt.addr;

    /* calculate first value of T */
    req->tweak_fn(req->tweak_ctx, (u8 *)&t_buf[0], walk.iv);

    i = 0;
    goto first;

    for (;;) {
        do {
            for (i = 0; i < nblocks; i++) {
                /* T_i <- T_{i-1} * x in GF(2^128) */
                gf128mul_x_ble(&t_buf[i], t);
first:
                t = &t_buf[i];

                /* PP <- T xor P */
                be128_xor(dst + i, t, src + i);
            }

            /* CC <- E(Key2,PP) */
            req->crypt_fn(req->crypt_ctx, (u8 *)dst,
                      nblocks * bsize);

            /* C <- T xor CC */
            for (i = 0; i < nblocks; i++)
                be128_xor(dst + i, dst + i, &t_buf[i]);

            src += nblocks;
            dst += nblocks;
            nbytes -= nblocks * bsize;
            nblocks = min(nbytes / bsize, max_blks);
        } while (nblocks > 0);

        /* carry the last tweak over to the next walk segment */
        *(be128 *)walk.iv = *t;

        err = blkcipher_walk_done(desc, &walk, nbytes);
        nbytes = walk.nbytes;
        if (!nbytes)
            break;

        nblocks = min(nbytes / bsize, max_blks);
        src = (be128 *)walk.src.virt.addr;
        dst = (be128 *)walk.dst.virt.addr;
    }

    return err;
}
EXPORT_SYMBOL_GPL(xts_crypt);
0416 
0417 static int init_tfm(struct crypto_skcipher *tfm)
0418 {
0419     struct skcipher_instance *inst = skcipher_alg_instance(tfm);
0420     struct xts_instance_ctx *ictx = skcipher_instance_ctx(inst);
0421     struct priv *ctx = crypto_skcipher_ctx(tfm);
0422     struct crypto_skcipher *child;
0423     struct crypto_cipher *tweak;
0424 
0425     child = crypto_spawn_skcipher(&ictx->spawn);
0426     if (IS_ERR(child))
0427         return PTR_ERR(child);
0428 
0429     ctx->child = child;
0430 
0431     tweak = crypto_alloc_cipher(ictx->name, 0, 0);
0432     if (IS_ERR(tweak)) {
0433         crypto_free_skcipher(ctx->child);
0434         return PTR_ERR(tweak);
0435     }
0436 
0437     ctx->tweak = tweak;
0438 
0439     crypto_skcipher_set_reqsize(tfm, crypto_skcipher_reqsize(child) +
0440                      sizeof(struct rctx));
0441 
0442     return 0;
0443 }
0444 
/* Per-tfm teardown: release both child transforms. */
static void exit_tfm(struct crypto_skcipher *tfm)
{
    struct priv *ctx = crypto_skcipher_ctx(tfm);

    crypto_free_skcipher(ctx->child);
    crypto_free_cipher(ctx->tweak);
}
0452 
/* Instance destructor: drop the spawn reference and free the instance. */
static void free(struct skcipher_instance *inst)
{
    crypto_drop_skcipher(skcipher_instance_ctx(inst));
    kfree(inst);
}
0458 
0459 static int create(struct crypto_template *tmpl, struct rtattr **tb)
0460 {
0461     struct skcipher_instance *inst;
0462     struct crypto_attr_type *algt;
0463     struct xts_instance_ctx *ctx;
0464     struct skcipher_alg *alg;
0465     const char *cipher_name;
0466     int err;
0467 
0468     algt = crypto_get_attr_type(tb);
0469     if (IS_ERR(algt))
0470         return PTR_ERR(algt);
0471 
0472     if ((algt->type ^ CRYPTO_ALG_TYPE_SKCIPHER) & algt->mask)
0473         return -EINVAL;
0474 
0475     cipher_name = crypto_attr_alg_name(tb[1]);
0476     if (IS_ERR(cipher_name))
0477         return PTR_ERR(cipher_name);
0478 
0479     inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
0480     if (!inst)
0481         return -ENOMEM;
0482 
0483     ctx = skcipher_instance_ctx(inst);
0484 
0485     crypto_set_skcipher_spawn(&ctx->spawn, skcipher_crypto_instance(inst));
0486     err = crypto_grab_skcipher(&ctx->spawn, cipher_name, 0,
0487                    crypto_requires_sync(algt->type,
0488                             algt->mask));
0489     if (err == -ENOENT) {
0490         err = -ENAMETOOLONG;
0491         if (snprintf(ctx->name, CRYPTO_MAX_ALG_NAME, "ecb(%s)",
0492                  cipher_name) >= CRYPTO_MAX_ALG_NAME)
0493             goto err_free_inst;
0494 
0495         err = crypto_grab_skcipher(&ctx->spawn, ctx->name, 0,
0496                        crypto_requires_sync(algt->type,
0497                                 algt->mask));
0498     }
0499 
0500     if (err)
0501         goto err_free_inst;
0502 
0503     alg = crypto_skcipher_spawn_alg(&ctx->spawn);
0504 
0505     err = -EINVAL;
0506     if (alg->base.cra_blocksize != XTS_BLOCK_SIZE)
0507         goto err_drop_spawn;
0508 
0509     if (crypto_skcipher_alg_ivsize(alg))
0510         goto err_drop_spawn;
0511 
0512     err = crypto_inst_setname(skcipher_crypto_instance(inst), "xts",
0513                   &alg->base);
0514     if (err)
0515         goto err_drop_spawn;
0516 
0517     err = -EINVAL;
0518     cipher_name = alg->base.cra_name;
0519 
0520     /* Alas we screwed up the naming so we have to mangle the
0521      * cipher name.
0522      */
0523     if (!strncmp(cipher_name, "ecb(", 4)) {
0524         unsigned len;
0525 
0526         len = strlcpy(ctx->name, cipher_name + 4, sizeof(ctx->name));
0527         if (len < 2 || len >= sizeof(ctx->name))
0528             goto err_drop_spawn;
0529 
0530         if (ctx->name[len - 1] != ')')
0531             goto err_drop_spawn;
0532 
0533         ctx->name[len - 1] = 0;
0534 
0535         if (snprintf(inst->alg.base.cra_name, CRYPTO_MAX_ALG_NAME,
0536                  "xts(%s)", ctx->name) >= CRYPTO_MAX_ALG_NAME)
0537             return -ENAMETOOLONG;
0538     } else
0539         goto err_drop_spawn;
0540 
0541     inst->alg.base.cra_flags = alg->base.cra_flags & CRYPTO_ALG_ASYNC;
0542     inst->alg.base.cra_priority = alg->base.cra_priority;
0543     inst->alg.base.cra_blocksize = XTS_BLOCK_SIZE;
0544     inst->alg.base.cra_alignmask = alg->base.cra_alignmask |
0545                        (__alignof__(u64) - 1);
0546 
0547     inst->alg.ivsize = XTS_BLOCK_SIZE;
0548     inst->alg.min_keysize = crypto_skcipher_alg_min_keysize(alg) * 2;
0549     inst->alg.max_keysize = crypto_skcipher_alg_max_keysize(alg) * 2;
0550 
0551     inst->alg.base.cra_ctxsize = sizeof(struct priv);
0552 
0553     inst->alg.init = init_tfm;
0554     inst->alg.exit = exit_tfm;
0555 
0556     inst->alg.setkey = setkey;
0557     inst->alg.encrypt = encrypt;
0558     inst->alg.decrypt = decrypt;
0559 
0560     inst->free = free;
0561 
0562     err = skcipher_register_instance(tmpl, inst);
0563     if (err)
0564         goto err_drop_spawn;
0565 
0566 out:
0567     return err;
0568 
0569 err_drop_spawn:
0570     crypto_drop_skcipher(&ctx->spawn);
0571 err_free_inst:
0572     kfree(inst);
0573     goto out;
0574 }
0575 
/* The "xts" template registered with the crypto API. */
static struct crypto_template crypto_tmpl = {
    .name = "xts",
    .create = create,
    .module = THIS_MODULE,
};
0581 
/* Module entry point: register the xts template. */
static int __init crypto_module_init(void)
{
    return crypto_register_template(&crypto_tmpl);
}
0586 
/* Module exit point: unregister the xts template. */
static void __exit crypto_module_exit(void)
{
    crypto_unregister_template(&crypto_tmpl);
}
0591 
0592 module_init(crypto_module_init);
0593 module_exit(crypto_module_exit);
0594 
0595 MODULE_LICENSE("GPL");
0596 MODULE_DESCRIPTION("XTS block cipher mode");
0597 MODULE_ALIAS_CRYPTO("xts");