0001
0002
0003
0004
0005
0006
0007
0008
0009
0010
0011
0012
0013
0014
0015
0016
0017
0018
0019
0020
0021
0022
0023
0024
0025
0026
0027
0028
0029
0030
0031
0032
0033
0034
0035
0036
0037
0038
0039
0040
0041
0042
0043 #include <crypto/algapi.h>
0044 #include <crypto/internal/skcipher.h>
0045 #include <linux/err.h>
0046 #include <linux/init.h>
0047 #include <linux/kernel.h>
0048 #include <linux/log2.h>
0049 #include <linux/module.h>
0050 #include <linux/scatterlist.h>
0051 #include <crypto/scatterwalk.h>
0052 #include <linux/slab.h>
0053 #include <linux/compiler.h>
0054
/* Per-transform context: just a handle on the underlying CBC skcipher. */
struct crypto_cts_ctx {
	struct crypto_skcipher *child;	/* the "cbc(...)" cipher we wrap */
};
0058
/*
 * Per-request context.  The child cipher's request context and an aligned
 * scratch block follow this struct in memory — see crypto_cts_reqctx_space()
 * and the reqsize computation in crypto_cts_init_tfm().
 */
struct crypto_cts_reqctx {
	struct scatterlist sg[2];	/* fast-forwarded view of dst's tail */
	unsigned offset;		/* byte offset of the final partial block */
	struct skcipher_request subreq;	/* request handed to the child cipher */
};
0064
0065 static inline u8 *crypto_cts_reqctx_space(struct skcipher_request *req)
0066 {
0067 struct crypto_cts_reqctx *rctx = skcipher_request_ctx(req);
0068 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
0069 struct crypto_cts_ctx *ctx = crypto_skcipher_ctx(tfm);
0070 struct crypto_skcipher *child = ctx->child;
0071
0072 return PTR_ALIGN((u8 *)(rctx + 1) + crypto_skcipher_reqsize(child),
0073 crypto_skcipher_alignmask(tfm) + 1);
0074 }
0075
0076 static int crypto_cts_setkey(struct crypto_skcipher *parent, const u8 *key,
0077 unsigned int keylen)
0078 {
0079 struct crypto_cts_ctx *ctx = crypto_skcipher_ctx(parent);
0080 struct crypto_skcipher *child = ctx->child;
0081
0082 crypto_skcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
0083 crypto_skcipher_set_flags(child, crypto_skcipher_get_flags(parent) &
0084 CRYPTO_TFM_REQ_MASK);
0085 return crypto_skcipher_setkey(child, key, keylen);
0086 }
0087
0088 static void cts_cbc_crypt_done(struct crypto_async_request *areq, int err)
0089 {
0090 struct skcipher_request *req = areq->data;
0091
0092 if (err == -EINPROGRESS)
0093 return;
0094
0095 skcipher_request_complete(req, err);
0096 }
0097
/*
 * Encrypt the final two (stolen) blocks.  On entry the first rctx->offset
 * bytes have already been CBC-encrypted into req->dst, so the last full
 * ciphertext block Cn-1 sits at offset - bsize there, and req->iv holds the
 * running CBC IV for the next block (CBC contract — the child is required
 * to be a "cbc(...)" algorithm by crypto_cts_create()).
 */
static int cts_cbc_encrypt(struct skcipher_request *req)
{
	struct crypto_cts_reqctx *rctx = skcipher_request_ctx(req);
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct skcipher_request *subreq = &rctx->subreq;
	int bsize = crypto_skcipher_blocksize(tfm);
	u8 d[MAX_CIPHER_BLOCKSIZE * 2] __aligned(__alignof__(u32));
	struct scatterlist *sg;
	unsigned int offset;
	int lastn;

	offset = rctx->offset;
	lastn = req->cryptlen - offset;	/* length of the final partial block */

	/* sg points at Cn-1, the last ciphertext block already written. */
	sg = scatterwalk_ffwd(rctx->sg, req->dst, offset - bsize);
	scatterwalk_map_and_copy(d + bsize, sg, 0, bsize, 0);

	/* Zero-pad the final partial plaintext block Pn to a full block. */
	memset(d, 0, bsize);
	scatterwalk_map_and_copy(d, req->src, offset, lastn, 0);

	/*
	 * Write padded Pn over Cn-1's slot, followed by the first lastn
	 * bytes of Cn-1 (the "stolen" tail ciphertext).  Encrypting the
	 * first block in place below then yields the new final full block.
	 */
	scatterwalk_map_and_copy(d, sg, 0, bsize + lastn, 1);
	memzero_explicit(d, sizeof(d));	/* don't leave plaintext on the stack */

	skcipher_request_set_callback(subreq, req->base.flags &
					      CRYPTO_TFM_REQ_MAY_BACKLOG,
				      cts_cbc_crypt_done, req);
	skcipher_request_set_crypt(subreq, sg, sg, bsize, req->iv);
	return crypto_skcipher_encrypt(subreq);
}
0127
0128 static void crypto_cts_encrypt_done(struct crypto_async_request *areq, int err)
0129 {
0130 struct skcipher_request *req = areq->data;
0131
0132 if (err)
0133 goto out;
0134
0135 err = cts_cbc_encrypt(req);
0136 if (err == -EINPROGRESS || err == -EBUSY)
0137 return;
0138
0139 out:
0140 skcipher_request_complete(req, err);
0141 }
0142
/*
 * Encrypt req->cryptlen bytes of CBC with ciphertext stealing.
 *
 * Less than one block is rejected with -EINVAL; exactly one block
 * degenerates to plain CBC.  Otherwise everything up to the final partial
 * block is CBC-encrypted first and cts_cbc_encrypt() finishes the last
 * two blocks (synchronously here, or via crypto_cts_encrypt_done()).
 */
static int crypto_cts_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_cts_reqctx *rctx = skcipher_request_ctx(req);
	struct crypto_cts_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_request *subreq = &rctx->subreq;
	int bsize = crypto_skcipher_blocksize(tfm);
	unsigned int nbytes = req->cryptlen;
	unsigned int offset;

	skcipher_request_set_tfm(subreq, ctx->child);

	if (nbytes < bsize)
		return -EINVAL;

	if (nbytes == bsize) {
		/* Single block: plain CBC, complete directly to the caller. */
		skcipher_request_set_callback(subreq, req->base.flags,
					      req->base.complete,
					      req->base.data);
		skcipher_request_set_crypt(subreq, req->src, req->dst, nbytes,
					   req->iv);
		return crypto_skcipher_encrypt(subreq);
	}

	/*
	 * offset = start of the trailing partial block.  The "- 1" keeps a
	 * full block back when nbytes is an exact multiple of bsize, so the
	 * stealing step always has 1..bsize bytes to work with.
	 */
	offset = rounddown(nbytes - 1, bsize);
	rctx->offset = offset;

	skcipher_request_set_callback(subreq, req->base.flags,
				      crypto_cts_encrypt_done, req);
	skcipher_request_set_crypt(subreq, req->src, req->dst,
				   offset, req->iv);

	/* Sync path chains straight into the stealing step. */
	return crypto_skcipher_encrypt(subreq) ?:
	       cts_cbc_encrypt(req);
}
0178
/*
 * Decrypt the final two (stolen) blocks.  On entry the first rctx->offset
 * bytes of ciphertext have been CBC-decrypted into req->dst, and the
 * scratch space (filled by crypto_cts_decrypt()) holds the ciphertext
 * block — or original IV — that preceded Cn-1.  Step numbering follows
 * the usual CTS-CS3 description.
 */
static int cts_cbc_decrypt(struct skcipher_request *req)
{
	struct crypto_cts_reqctx *rctx = skcipher_request_ctx(req);
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct skcipher_request *subreq = &rctx->subreq;
	int bsize = crypto_skcipher_blocksize(tfm);
	u8 d[MAX_CIPHER_BLOCKSIZE * 2] __aligned(__alignof__(u32));
	struct scatterlist *sg;
	unsigned int offset;
	u8 *space;
	int lastn;

	offset = rctx->offset;
	lastn = req->cryptlen - offset;	/* length of the final partial block */

	sg = scatterwalk_ffwd(rctx->sg, req->dst, offset - bsize);

	/* 1. Recover Dn = Decrypt(Cn-1): undo the CBC chaining XOR that the
	 *    bulk pass applied using the saved predecessor block/IV. */
	scatterwalk_map_and_copy(d + bsize, sg, 0, bsize, 0);
	space = crypto_cts_reqctx_space(req);
	crypto_xor(d + bsize, space, bsize);

	/* 2. Zero-pad the partial ciphertext block Cn to a full block. */
	memset(d, 0, bsize);
	scatterwalk_map_and_copy(d, req->src, offset, lastn, 0);

	/* 3. XOR Dn with the padded Cn to create Xn.
	 * 4. The first lastn bytes of Xn are the final plaintext Pn. */
	crypto_xor(d + bsize, d, lastn);

	/* 5. Append the tail (bsize - lastn) bytes of Xn to Cn to rebuild
	 *    the full block En. */
	memcpy(d + lastn, d + bsize + lastn, bsize - lastn);

	/* 6. Write En followed by Pn back to dst; decrypting En in place
	 *    below (with the saved IV) yields Pn-1. */
	scatterwalk_map_and_copy(d, sg, 0, bsize + lastn, 1);
	memzero_explicit(d, sizeof(d));	/* don't leave plaintext on the stack */

	skcipher_request_set_callback(subreq, req->base.flags &
					      CRYPTO_TFM_REQ_MAY_BACKLOG,
				      cts_cbc_crypt_done, req);

	skcipher_request_set_crypt(subreq, sg, sg, bsize, space);
	return crypto_skcipher_decrypt(subreq);
}
0221
0222 static void crypto_cts_decrypt_done(struct crypto_async_request *areq, int err)
0223 {
0224 struct skcipher_request *req = areq->data;
0225
0226 if (err)
0227 goto out;
0228
0229 err = cts_cbc_decrypt(req);
0230 if (err == -EINPROGRESS || err == -EBUSY)
0231 return;
0232
0233 out:
0234 skcipher_request_complete(req, err);
0235 }
0236
/*
 * Decrypt req->cryptlen bytes of CBC with ciphertext stealing.
 *
 * Mirrors crypto_cts_encrypt(): less than one block is -EINVAL, exactly
 * one block is plain CBC, anything longer CBC-decrypts up to the final
 * partial block and lets cts_cbc_decrypt() unscramble the stolen tail.
 */
static int crypto_cts_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_cts_reqctx *rctx = skcipher_request_ctx(req);
	struct crypto_cts_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_request *subreq = &rctx->subreq;
	int bsize = crypto_skcipher_blocksize(tfm);
	unsigned int nbytes = req->cryptlen;
	unsigned int offset;
	u8 *space;

	skcipher_request_set_tfm(subreq, ctx->child);

	if (nbytes < bsize)
		return -EINVAL;

	if (nbytes == bsize) {
		/* Single block: plain CBC, complete directly to the caller. */
		skcipher_request_set_callback(subreq, req->base.flags,
					      req->base.complete,
					      req->base.data);
		skcipher_request_set_crypt(subreq, req->src, req->dst, nbytes,
					   req->iv);
		return crypto_skcipher_decrypt(subreq);
	}

	skcipher_request_set_callback(subreq, req->base.flags,
				      crypto_cts_decrypt_done, req);

	space = crypto_cts_reqctx_space(req);

	/* Start of the final partial block; see crypto_cts_encrypt(). */
	offset = rounddown(nbytes - 1, bsize);
	rctx->offset = offset;

	/*
	 * Save the block that precedes Cn-1 in the CBC chain (the IV itself
	 * when Cn-1 is the first block) BEFORE the bulk decrypt runs:
	 * src and dst may alias, and cts_cbc_decrypt() needs this value to
	 * strip the chaining XOR and to decrypt the reassembled block.
	 */
	if (offset <= bsize)
		memcpy(space, req->iv, bsize);
	else
		scatterwalk_map_and_copy(space, req->src, offset - 2 * bsize,
					 bsize, 0);

	skcipher_request_set_crypt(subreq, req->src, req->dst,
				   offset, req->iv);

	/* Sync path chains straight into the stealing fixup. */
	return crypto_skcipher_decrypt(subreq) ?:
	       cts_cbc_decrypt(req);
}
0282
/*
 * Transform init: acquire the child CBC cipher and size our request
 * context to hold the reqctx struct, the child's request context, and one
 * aligned block of scratch space (consumed by crypto_cts_reqctx_space()).
 */
static int crypto_cts_init_tfm(struct crypto_skcipher *tfm)
{
	struct skcipher_instance *inst = skcipher_alg_instance(tfm);
	struct crypto_skcipher_spawn *spawn = skcipher_instance_ctx(inst);
	struct crypto_cts_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_skcipher *cipher;
	unsigned reqsize;
	unsigned bsize;
	unsigned align;

	cipher = crypto_spawn_skcipher(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = cipher;

	align = crypto_skcipher_alignmask(tfm);
	bsize = crypto_skcipher_blocksize(cipher);
	/*
	 * struct + child reqsize (rounded to ctx alignment), plus worst-case
	 * padding to realign the scratch area, plus the scratch block itself.
	 * This must stay in sync with crypto_cts_reqctx_space().
	 */
	reqsize = ALIGN(sizeof(struct crypto_cts_reqctx) +
			crypto_skcipher_reqsize(cipher),
			crypto_tfm_ctx_alignment()) +
		  (align & ~(crypto_tfm_ctx_alignment() - 1)) + bsize;

	crypto_skcipher_set_reqsize(tfm, reqsize);

	return 0;
}
0310
/* Transform teardown: release the child acquired in crypto_cts_init_tfm(). */
static void crypto_cts_exit_tfm(struct crypto_skcipher *tfm)
{
	struct crypto_cts_ctx *ctx = crypto_skcipher_ctx(tfm);

	crypto_free_skcipher(ctx->child);
}
0317
/* Instance destructor: drop the spawn's reference, then free the instance. */
static void crypto_cts_free(struct skcipher_instance *inst)
{
	crypto_drop_skcipher(skcipher_instance_ctx(inst));
	kfree(inst);
}
0323
/*
 * Template constructor: build a "cts(<cbc-alg>)" skcipher instance.
 *
 * Validates that the wrapped algorithm is a CBC mode whose IV size equals
 * its block size, then registers an instance that inherits the child's
 * priority, block size, alignment, chunk size, and key-size limits.
 * Returns 0 on success or a negative errno.
 */
static int crypto_cts_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct crypto_skcipher_spawn *spawn;
	struct skcipher_instance *inst;
	struct skcipher_alg *alg;
	u32 mask;
	int err;

	err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_SKCIPHER, &mask);
	if (err)
		return err;

	inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;

	spawn = skcipher_instance_ctx(inst);

	err = crypto_grab_skcipher(spawn, skcipher_crypto_instance(inst),
				   crypto_attr_alg_name(tb[1]), 0, mask);
	if (err)
		goto err_free_inst;

	alg = crypto_spawn_skcipher_alg(spawn);

	/* CTS needs a full-block IV and only makes sense on top of CBC. */
	err = -EINVAL;
	if (crypto_skcipher_alg_ivsize(alg) != alg->base.cra_blocksize)
		goto err_free_inst;

	if (strncmp(alg->base.cra_name, "cbc(", 4))
		goto err_free_inst;

	err = crypto_inst_setname(skcipher_crypto_instance(inst), "cts",
				  &alg->base);
	if (err)
		goto err_free_inst;

	/* Inherit the child's properties; CTS itself adds no overhead. */
	inst->alg.base.cra_priority = alg->base.cra_priority;
	inst->alg.base.cra_blocksize = alg->base.cra_blocksize;
	inst->alg.base.cra_alignmask = alg->base.cra_alignmask;

	inst->alg.ivsize = alg->base.cra_blocksize;
	inst->alg.chunksize = crypto_skcipher_alg_chunksize(alg);
	inst->alg.min_keysize = crypto_skcipher_alg_min_keysize(alg);
	inst->alg.max_keysize = crypto_skcipher_alg_max_keysize(alg);

	inst->alg.base.cra_ctxsize = sizeof(struct crypto_cts_ctx);

	inst->alg.init = crypto_cts_init_tfm;
	inst->alg.exit = crypto_cts_exit_tfm;

	inst->alg.setkey = crypto_cts_setkey;
	inst->alg.encrypt = crypto_cts_encrypt;
	inst->alg.decrypt = crypto_cts_decrypt;

	inst->free = crypto_cts_free;

	err = skcipher_register_instance(tmpl, inst);
	if (err) {
		/* Shared error path; the gotos above jump into this if. */
err_free_inst:
		crypto_cts_free(inst);
	}
	return err;
}
0388
/* The "cts" template: instantiates cts(cbc(...)) algorithms on demand. */
static struct crypto_template crypto_cts_tmpl = {
	.name = "cts",
	.create = crypto_cts_create,
	.module = THIS_MODULE,
};
0394
/* Module init: register the "cts" template with the crypto API. */
static int __init crypto_cts_module_init(void)
{
	return crypto_register_template(&crypto_cts_tmpl);
}
0399
/* Module exit: unregister the "cts" template. */
static void __exit crypto_cts_module_exit(void)
{
	crypto_unregister_template(&crypto_cts_tmpl);
}
0404
/* Registered at subsys_initcall time so the template is available early. */
subsys_initcall(crypto_cts_module_init);
module_exit(crypto_cts_module_exit);

MODULE_LICENSE("Dual BSD/GPL");
MODULE_DESCRIPTION("CTS-CBC CipherText Stealing for CBC");
MODULE_ALIAS_CRYPTO("cts");