// SPDX-License-Identifier: GPL-2.0-or-later
/* XTS: as defined in IEEE1619/D16
 *  http://grouper.ieee.org/groups/1619/email/pdf00086.pdf
 *
 * Copyright (c) 2007 Rik Snel <rsnel@cube.dyndns.org>
 *
 * Based on ecb.c
 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 */
#include <crypto/internal/cipher.h>
#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>

#include <crypto/xts.h>
#include <crypto/b128ops.h>
#include <crypto/gf128mul.h>

struct xts_tfm_ctx {
    struct crypto_skcipher *child;
    struct crypto_cipher *tweak;
};

struct xts_instance_ctx {
    struct crypto_skcipher_spawn spawn;
    char name[CRYPTO_MAX_ALG_NAME];
};

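/*
 * Per-request state: 't' holds the current tweak value, 'tail' and 'sg'
 * address the final partial block during ciphertext stealing, and
 * 'subreq' is the request handed to the underlying 'ecb(...)' instance.
 */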
struct xts_request_ctx {
    le128 t;
    struct scatterlist *tail;
    struct scatterlist sg[2];
    struct skcipher_request subreq;
};

static int xts_setkey(struct crypto_skcipher *parent, const u8 *key,
              unsigned int keylen)
{
    struct xts_tfm_ctx *ctx = crypto_skcipher_ctx(parent);
    struct crypto_skcipher *child;
    struct crypto_cipher *tweak;
    int err;

    err = xts_verify_key(parent, key, keylen);
    if (err)
        return err;

    keylen /= 2;

    /* we need two cipher instances: one to compute the initial 'tweak'
     * by encrypting the IV (usually the 'plain' iv) and the other
     * one to encrypt and decrypt the data */

    /* tweak cipher, uses Key2 i.e. the second half of *key */
    tweak = ctx->tweak;
    crypto_cipher_clear_flags(tweak, CRYPTO_TFM_REQ_MASK);
    crypto_cipher_set_flags(tweak, crypto_skcipher_get_flags(parent) &
                       CRYPTO_TFM_REQ_MASK);
    err = crypto_cipher_setkey(tweak, key + keylen, keylen);
    if (err)
        return err;

    /* data cipher, uses Key1 i.e. the first half of *key */
    child = ctx->child;
    crypto_skcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
    crypto_skcipher_set_flags(child, crypto_skcipher_get_flags(parent) &
                     CRYPTO_TFM_REQ_MASK);
    return crypto_skcipher_setkey(child, key, keylen);
}

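/*
 * Illustration (not from the original source): callers supply the two
 * keys concatenated, so for "xts(aes)" a 64-byte key means bytes 0-31
 * are Key1 (data cipher, AES-256) and bytes 32-63 are Key2 (tweak
 * cipher).
 */
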
/*
 * We compute the tweak masks twice (both before and after the ECB encryption or
 * decryption) to avoid having to allocate a temporary buffer and/or make
 * multiple calls to the 'ecb(..)' instance, which usually would be slower than
 * just doing the gf128mul_x_ble() calls again.
 */
static int xts_xor_tweak(struct skcipher_request *req, bool second_pass,
             bool enc)
{
    struct xts_request_ctx *rctx = skcipher_request_ctx(req);
    struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
    const bool cts = (req->cryptlen % XTS_BLOCK_SIZE);
    const int bs = XTS_BLOCK_SIZE;
    struct skcipher_walk w;
    le128 t = rctx->t;
    int err;

    if (second_pass) {
        req = &rctx->subreq;
        /* set to our TFM to enforce correct alignment: */
        skcipher_request_set_tfm(req, tfm);
    }
    err = skcipher_walk_virt(&w, req, false);

    while (w.nbytes) {
        unsigned int avail = w.nbytes;
        le128 *wsrc;
        le128 *wdst;

        wsrc = w.src.virt.addr;
        wdst = w.dst.virt.addr;

        do {
            if (unlikely(cts) &&
                w.total - w.nbytes + avail < 2 * XTS_BLOCK_SIZE) {
                if (!enc) {
                    if (second_pass)
                        rctx->t = t;
                    gf128mul_x_ble(&t, &t);
                }
                le128_xor(wdst, &t, wsrc);
                if (enc && second_pass)
                    gf128mul_x_ble(&rctx->t, &t);
                skcipher_walk_done(&w, avail - bs);
                return 0;
            }

            le128_xor(wdst++, &t, wsrc++);
            gf128mul_x_ble(&t, &t);
        } while ((avail -= bs) >= bs);

        err = skcipher_walk_done(&w, avail);
    }

    return err;
}

static int xts_xor_tweak_pre(struct skcipher_request *req, bool enc)
{
    return xts_xor_tweak(req, false, enc);
}

static int xts_xor_tweak_post(struct skcipher_request *req, bool enc)
{
    return xts_xor_tweak(req, true, enc);
}

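/*
 * Completion callback for the ciphertext-stealing sub-request: once the
 * final full block has been processed asynchronously, XOR the saved
 * tweak into it in place.
 */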
static void xts_cts_done(struct crypto_async_request *areq, int err)
{
    struct skcipher_request *req = areq->data;
    le128 b;

    if (!err) {
        struct xts_request_ctx *rctx = skcipher_request_ctx(req);

        scatterwalk_map_and_copy(&b, rctx->tail, 0, XTS_BLOCK_SIZE, 0);
        le128_xor(&b, &rctx->t, &b);
        scatterwalk_map_and_copy(&b, rctx->tail, 0, XTS_BLOCK_SIZE, 1);
    }

    skcipher_request_complete(req, err);
}

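/*
 * Handle the final partial block via ciphertext stealing: the last full
 * block of output is combined with the trailing req->cryptlen %
 * XTS_BLOCK_SIZE bytes and run through the cipher once more, so the
 * message length need not be a multiple of the block size.
 */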
static int xts_cts_final(struct skcipher_request *req,
             int (*crypt)(struct skcipher_request *req))
{
    const struct xts_tfm_ctx *ctx =
        crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));
    int offset = req->cryptlen & ~(XTS_BLOCK_SIZE - 1);
    struct xts_request_ctx *rctx = skcipher_request_ctx(req);
    struct skcipher_request *subreq = &rctx->subreq;
    int tail = req->cryptlen % XTS_BLOCK_SIZE;
    le128 b[2];
    int err;

    rctx->tail = scatterwalk_ffwd(rctx->sg, req->dst,
                      offset - XTS_BLOCK_SIZE);

    scatterwalk_map_and_copy(b, rctx->tail, 0, XTS_BLOCK_SIZE, 0);
    b[1] = b[0];
    scatterwalk_map_and_copy(b, req->src, offset, tail, 0);

    le128_xor(b, &rctx->t, b);

    scatterwalk_map_and_copy(b, rctx->tail, 0, XTS_BLOCK_SIZE + tail, 1);

    skcipher_request_set_tfm(subreq, ctx->child);
    skcipher_request_set_callback(subreq, req->base.flags, xts_cts_done,
                      req);
    skcipher_request_set_crypt(subreq, rctx->tail, rctx->tail,
                   XTS_BLOCK_SIZE, NULL);

    err = crypt(subreq);
    if (err)
        return err;

    scatterwalk_map_and_copy(b, rctx->tail, 0, XTS_BLOCK_SIZE, 0);
    le128_xor(b, &rctx->t, b);
    scatterwalk_map_and_copy(b, rctx->tail, 0, XTS_BLOCK_SIZE, 1);

    return 0;
}

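/*
 * Completion callbacks for the asynchronous inner ECB request: run the
 * second tweak pass over the result, then fall through to ciphertext
 * stealing if the length is not block-aligned.
 */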
static void xts_encrypt_done(struct crypto_async_request *areq, int err)
{
    struct skcipher_request *req = areq->data;

    if (!err) {
        struct xts_request_ctx *rctx = skcipher_request_ctx(req);

        rctx->subreq.base.flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
        err = xts_xor_tweak_post(req, true);

        if (!err && unlikely(req->cryptlen % XTS_BLOCK_SIZE)) {
            err = xts_cts_final(req, crypto_skcipher_encrypt);
            if (err == -EINPROGRESS)
                return;
        }
    }

    skcipher_request_complete(req, err);
}

static void xts_decrypt_done(struct crypto_async_request *areq, int err)
{
    struct skcipher_request *req = areq->data;

    if (!err) {
        struct xts_request_ctx *rctx = skcipher_request_ctx(req);

        rctx->subreq.base.flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
        err = xts_xor_tweak_post(req, false);

        if (!err && unlikely(req->cryptlen % XTS_BLOCK_SIZE)) {
            err = xts_cts_final(req, crypto_skcipher_decrypt);
            if (err == -EINPROGRESS)
                return;
        }
    }

    skcipher_request_complete(req, err);
}

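/*
 * Common setup for encryption and decryption: point the sub-request at
 * the child cipher over all full blocks, then derive the initial tweak
 * T by encrypting the IV with the tweak cipher (Key2).
 */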
static int xts_init_crypt(struct skcipher_request *req,
              crypto_completion_t compl)
{
    const struct xts_tfm_ctx *ctx =
        crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));
    struct xts_request_ctx *rctx = skcipher_request_ctx(req);
    struct skcipher_request *subreq = &rctx->subreq;

    if (req->cryptlen < XTS_BLOCK_SIZE)
        return -EINVAL;

    skcipher_request_set_tfm(subreq, ctx->child);
    skcipher_request_set_callback(subreq, req->base.flags, compl, req);
    skcipher_request_set_crypt(subreq, req->dst, req->dst,
                   req->cryptlen & ~(XTS_BLOCK_SIZE - 1), NULL);

    /* calculate first value of T */
    crypto_cipher_encrypt_one(ctx->tweak, (u8 *)&rctx->t, req->iv);

    return 0;
}

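/*
 * XTS proper is tweak-XOR, ECB, tweak-XOR; the '?:' chains below stop at
 * the first step returning nonzero (including -EINPROGRESS for async
 * completion), and a trailing partial block is finished with ciphertext
 * stealing.
 */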
static int xts_encrypt(struct skcipher_request *req)
{
    struct xts_request_ctx *rctx = skcipher_request_ctx(req);
    struct skcipher_request *subreq = &rctx->subreq;
    int err;

    err = xts_init_crypt(req, xts_encrypt_done) ?:
          xts_xor_tweak_pre(req, true) ?:
          crypto_skcipher_encrypt(subreq) ?:
          xts_xor_tweak_post(req, true);

    if (err || likely((req->cryptlen % XTS_BLOCK_SIZE) == 0))
        return err;

    return xts_cts_final(req, crypto_skcipher_encrypt);
}

static int xts_decrypt(struct skcipher_request *req)
{
    struct xts_request_ctx *rctx = skcipher_request_ctx(req);
    struct skcipher_request *subreq = &rctx->subreq;
    int err;

    err = xts_init_crypt(req, xts_decrypt_done) ?:
          xts_xor_tweak_pre(req, false) ?:
          crypto_skcipher_decrypt(subreq) ?:
          xts_xor_tweak_post(req, false);

    if (err || likely((req->cryptlen % XTS_BLOCK_SIZE) == 0))
        return err;

    return xts_cts_final(req, crypto_skcipher_decrypt);
}

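/*
 * Instantiate the two underlying transforms: the 'ecb(...)' skcipher
 * from the spawn for the data, and a single-block cipher (looked up by
 * name) for the tweak.
 */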
static int xts_init_tfm(struct crypto_skcipher *tfm)
{
    struct skcipher_instance *inst = skcipher_alg_instance(tfm);
    struct xts_instance_ctx *ictx = skcipher_instance_ctx(inst);
    struct xts_tfm_ctx *ctx = crypto_skcipher_ctx(tfm);
    struct crypto_skcipher *child;
    struct crypto_cipher *tweak;

    child = crypto_spawn_skcipher(&ictx->spawn);
    if (IS_ERR(child))
        return PTR_ERR(child);

    ctx->child = child;

    tweak = crypto_alloc_cipher(ictx->name, 0, 0);
    if (IS_ERR(tweak)) {
        crypto_free_skcipher(ctx->child);
        return PTR_ERR(tweak);
    }

    ctx->tweak = tweak;

    crypto_skcipher_set_reqsize(tfm, crypto_skcipher_reqsize(child) +
                     sizeof(struct xts_request_ctx));

    return 0;
}

static void xts_exit_tfm(struct crypto_skcipher *tfm)
{
    struct xts_tfm_ctx *ctx = crypto_skcipher_ctx(tfm);

    crypto_free_skcipher(ctx->child);
    crypto_free_cipher(ctx->tweak);
}

static void xts_free_instance(struct skcipher_instance *inst)
{
    struct xts_instance_ctx *ictx = skcipher_instance_ctx(inst);

    crypto_drop_skcipher(&ictx->spawn);
    kfree(inst);
}

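/*
 * Template constructor for "xts(cipher)": grab the named algorithm
 * (falling back to wrapping it in "ecb(...)" if it is a bare cipher),
 * check that it is a 16-byte-block cipher without an IV of its own, and
 * derive the instance names from the inner algorithm.
 */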
static int xts_create(struct crypto_template *tmpl, struct rtattr **tb)
{
    struct skcipher_instance *inst;
    struct xts_instance_ctx *ctx;
    struct skcipher_alg *alg;
    const char *cipher_name;
    u32 mask;
    int err;

    err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_SKCIPHER, &mask);
    if (err)
        return err;

    cipher_name = crypto_attr_alg_name(tb[1]);
    if (IS_ERR(cipher_name))
        return PTR_ERR(cipher_name);

    inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
    if (!inst)
        return -ENOMEM;

    ctx = skcipher_instance_ctx(inst);

    err = crypto_grab_skcipher(&ctx->spawn, skcipher_crypto_instance(inst),
                   cipher_name, 0, mask);
    if (err == -ENOENT) {
        err = -ENAMETOOLONG;
        if (snprintf(ctx->name, CRYPTO_MAX_ALG_NAME, "ecb(%s)",
                 cipher_name) >= CRYPTO_MAX_ALG_NAME)
            goto err_free_inst;

        err = crypto_grab_skcipher(&ctx->spawn,
                       skcipher_crypto_instance(inst),
                       ctx->name, 0, mask);
    }

    if (err)
        goto err_free_inst;

    alg = crypto_skcipher_spawn_alg(&ctx->spawn);

    err = -EINVAL;
    if (alg->base.cra_blocksize != XTS_BLOCK_SIZE)
        goto err_free_inst;

    if (crypto_skcipher_alg_ivsize(alg))
        goto err_free_inst;

    err = crypto_inst_setname(skcipher_crypto_instance(inst), "xts",
                  &alg->base);
    if (err)
        goto err_free_inst;

    err = -EINVAL;
    cipher_name = alg->base.cra_name;

    /* Alas we screwed up the naming so we have to mangle the
     * cipher name.
     */
    if (!strncmp(cipher_name, "ecb(", 4)) {
        unsigned len;

        len = strlcpy(ctx->name, cipher_name + 4, sizeof(ctx->name));
        if (len < 2 || len >= sizeof(ctx->name))
            goto err_free_inst;

        if (ctx->name[len - 1] != ')')
            goto err_free_inst;

        ctx->name[len - 1] = 0;

        if (snprintf(inst->alg.base.cra_name, CRYPTO_MAX_ALG_NAME,
                 "xts(%s)", ctx->name) >= CRYPTO_MAX_ALG_NAME) {
            err = -ENAMETOOLONG;
            goto err_free_inst;
        }
    } else
        goto err_free_inst;

    inst->alg.base.cra_priority = alg->base.cra_priority;
    inst->alg.base.cra_blocksize = XTS_BLOCK_SIZE;
    inst->alg.base.cra_alignmask = alg->base.cra_alignmask |
                       (__alignof__(u64) - 1);

    inst->alg.ivsize = XTS_BLOCK_SIZE;
    inst->alg.min_keysize = crypto_skcipher_alg_min_keysize(alg) * 2;
    inst->alg.max_keysize = crypto_skcipher_alg_max_keysize(alg) * 2;

    inst->alg.base.cra_ctxsize = sizeof(struct xts_tfm_ctx);

    inst->alg.init = xts_init_tfm;
    inst->alg.exit = xts_exit_tfm;

    inst->alg.setkey = xts_setkey;
    inst->alg.encrypt = xts_encrypt;
    inst->alg.decrypt = xts_decrypt;

    inst->free = xts_free_instance;

    err = skcipher_register_instance(tmpl, inst);
    if (err) {
err_free_inst:
        xts_free_instance(inst);
    }
    return err;
}

static struct crypto_template xts_tmpl = {
    .name = "xts",
    .create = xts_create,
    .module = THIS_MODULE,
};

static int __init xts_module_init(void)
{
    return crypto_register_template(&xts_tmpl);
}

static void __exit xts_module_exit(void)
{
    crypto_unregister_template(&xts_tmpl);
}

subsys_initcall(xts_module_init);
module_exit(xts_module_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("XTS block cipher mode");
MODULE_ALIAS_CRYPTO("xts");
MODULE_IMPORT_NS(CRYPTO_INTERNAL);
MODULE_SOFTDEP("pre: ecb");
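
/*
 * Illustrative usage sketch (not part of this file): how another kernel
 * module might drive this template through the standard skcipher API.
 * The buffer, key, and length names are hypothetical; the calls are the
 * usual crypto API ones.
 *
 *    struct crypto_skcipher *tfm;
 *    struct skcipher_request *req;
 *    struct scatterlist sg;
 *    u8 iv[16] = { 0 };    // typically encodes the sector number
 *    int err;
 *
 *    tfm = crypto_alloc_skcipher("xts(aes)", 0, 0);
 *    if (IS_ERR(tfm))
 *        return PTR_ERR(tfm);
 *    // 64 bytes = Key1 || Key2, i.e. two AES-256 halves
 *    err = crypto_skcipher_setkey(tfm, key, 64);
 *    req = skcipher_request_alloc(tfm, GFP_KERNEL);
 *    sg_init_one(&sg, buf, buflen);
 *    skcipher_request_set_callback(req, 0, NULL, NULL);
 *    skcipher_request_set_crypt(req, &sg, &sg, buflen, iv);
 *    err = crypto_skcipher_encrypt(req);  // or wrap in crypto_wait_req()
 *    skcipher_request_free(req);
 *    crypto_free_skcipher(tfm);
 */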