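// SPDX-License-Identifier: GPL-2.0
/*
 * Adiantum length-preserving encryption mode
 *
 * Copyright 2018 Google LLC
 */

/*
 * Adiantum is a tweakable, length-preserving encryption mode designed for
 * fast and secure disk encryption, especially on CPUs without dedicated
 * crypto instructions.  Each message is encrypted using an XChaCha stream
 * cipher, a single invocation of a 128-bit block cipher on one 16-byte
 * block, and two passes of an ε-almost-∆-universal hash function based on
 * NH and Poly1305.  See the paper for details:
 *
 *      Adiantum: length-preserving encryption for entry-level processors
 *      (https://eprint.iacr.org/2018/720.pdf)
 *
 * As the code below enforces, this template accepts XChaCha12 or XChaCha20
 * as the stream cipher and any block cipher with a 128-bit block size and
 * 256-bit key (e.g. AES-256), with NHPoly1305 as the hash function.
 */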
#include <crypto/b128ops.h>
#include <crypto/chacha.h>
#include <crypto/internal/cipher.h>
#include <crypto/internal/hash.h>
#include <crypto/internal/poly1305.h>
#include <crypto/internal/skcipher.h>
#include <crypto/nhpoly1305.h>
#include <crypto/scatterwalk.h>
#include <linux/module.h>
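
/*
 * Size of the block cipher's block, in bytes.  This is also the size of the
 * 16-byte "right-hand part" of the message that goes through the block
 * cipher, and the size of the hash function's output.
 */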
#define BLOCKCIPHER_BLOCK_SIZE		16
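
/* Size of the block cipher key (K_E), in bytes */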
#define BLOCKCIPHER_KEY_SIZE		32
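
/* Size of the hash key (K_H), in bytes: one Poly1305 key plus the NHPoly1305 key */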
#define HASH_KEY_SIZE		(POLY1305_BLOCK_SIZE + NHPOLY1305_KEY_SIZE)
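
/*
 * Size of the tweak, in bytes.  The Adiantum specification allows
 * variable-length tweaks, but the kernel crypto API requires a single IV
 * size per algorithm, so a fixed 32-byte tweak (two Poly1305 blocks) is
 * exposed as the IV.
 */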
#define TWEAK_SIZE		32

struct adiantum_instance_ctx {
	struct crypto_skcipher_spawn streamcipher_spawn;
	struct crypto_cipher_spawn blockcipher_spawn;
	struct crypto_shash_spawn hash_spawn;
};

struct adiantum_tfm_ctx {
	struct crypto_skcipher *streamcipher;
	struct crypto_cipher *blockcipher;
	struct crypto_shash *hash;
	struct poly1305_core_key header_hash_key; /* Poly1305 key for the header hash */
};

struct adiantum_request_ctx {
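	/*
	 * Buffer for the 16-byte block that passes through the block cipher
	 * (P_M/C_M in the Adiantum paper).  It doubles as the 32-byte IV for
	 * the XChaCha stream cipher step; the block occupies the first half.
	 */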
	union {
		u8 bytes[XCHACHA_IV_SIZE];
		__le32 words[XCHACHA_IV_SIZE / sizeof(__le32)];
		le128 bignum;	/* interpret as element of Z/(2^{128}) */
	} rbuf;

	bool enc; /* true if encrypting, false if decrypting */
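
	/*
	 * The result of the Poly1305 ε-∆U hash function applied to
	 * (bulk length, tweak)
	 */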
	le128 header_hash;

	/* Sub-requests, must be last */
	union {
		struct shash_desc hash_desc;
		struct skcipher_request streamcipher_req;
	} u;
};
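
/*
 * Given the XChaCha stream cipher key K_S, derive the block cipher key K_E
 * and the hash key K_H.  The sub-keys are the first bytes of the XChaCha
 * keystream with nonce 1, obtained here by encrypting a zero-filled buffer.
 */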
static int adiantum_setkey(struct crypto_skcipher *tfm, const u8 *key,
			   unsigned int keylen)
{
	struct adiantum_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);
	struct {
		u8 iv[XCHACHA_IV_SIZE];
		u8 derived_keys[BLOCKCIPHER_KEY_SIZE + HASH_KEY_SIZE];
		struct scatterlist sg;
		struct crypto_wait wait;
		struct skcipher_request req; /* must be last */
	} *data;
	u8 *keyp;
	int err;

	/* Set the stream cipher key (K_S) */
	crypto_skcipher_clear_flags(tctx->streamcipher, CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(tctx->streamcipher,
				  crypto_skcipher_get_flags(tfm) &
				  CRYPTO_TFM_REQ_MASK);
	err = crypto_skcipher_setkey(tctx->streamcipher, key, keylen);
	if (err)
		return err;

	/* Derive the sub-keys */
	data = kzalloc(sizeof(*data) +
		       crypto_skcipher_reqsize(tctx->streamcipher), GFP_KERNEL);
	if (!data)
		return -ENOMEM;
	data->iv[0] = 1;
	sg_init_one(&data->sg, data->derived_keys, sizeof(data->derived_keys));
	crypto_init_wait(&data->wait);
	skcipher_request_set_tfm(&data->req, tctx->streamcipher);
	skcipher_request_set_callback(&data->req, CRYPTO_TFM_REQ_MAY_SLEEP |
						  CRYPTO_TFM_REQ_MAY_BACKLOG,
				      crypto_req_done, &data->wait);
	skcipher_request_set_crypt(&data->req, &data->sg, &data->sg,
				   sizeof(data->derived_keys), data->iv);
	err = crypto_wait_req(crypto_skcipher_encrypt(&data->req), &data->wait);
	if (err)
		goto out;
	keyp = data->derived_keys;

	/* Set the block cipher key (K_E) */
	crypto_cipher_clear_flags(tctx->blockcipher, CRYPTO_TFM_REQ_MASK);
	crypto_cipher_set_flags(tctx->blockcipher,
				crypto_skcipher_get_flags(tfm) &
				CRYPTO_TFM_REQ_MASK);
	err = crypto_cipher_setkey(tctx->blockcipher, keyp,
				   BLOCKCIPHER_KEY_SIZE);
	if (err)
		goto out;
	keyp += BLOCKCIPHER_KEY_SIZE;

	/* Set the hash key (K_H) */
	poly1305_core_setkey(&tctx->header_hash_key, keyp);
	keyp += POLY1305_BLOCK_SIZE;

	crypto_shash_clear_flags(tctx->hash, CRYPTO_TFM_REQ_MASK);
	crypto_shash_set_flags(tctx->hash, crypto_skcipher_get_flags(tfm) &
				CRYPTO_TFM_REQ_MASK);
	err = crypto_shash_setkey(tctx->hash, keyp, NHPOLY1305_KEY_SIZE);
	keyp += NHPOLY1305_KEY_SIZE;
	WARN_ON(keyp != &data->derived_keys[ARRAY_SIZE(data->derived_keys)]);
out:
	kfree_sensitive(data);
	return err;
}

/* Addition in Z/(2^{128}) */
static inline void le128_add(le128 *r, const le128 *v1, const le128 *v2)
{
	u64 x = le64_to_cpu(v1->b);
	u64 y = le64_to_cpu(v2->b);

	r->b = cpu_to_le64(x + y);
	r->a = cpu_to_le64(le64_to_cpu(v1->a) + le64_to_cpu(v2->a) +
			   (x + y < x));
}

/* Subtraction in Z/(2^{128}) */
static inline void le128_sub(le128 *r, const le128 *v1, const le128 *v2)
{
	u64 x = le64_to_cpu(v1->b);
	u64 y = le64_to_cpu(v2->b);

	r->b = cpu_to_le64(x - y);
	r->a = cpu_to_le64(le64_to_cpu(v1->a) - le64_to_cpu(v2->a) -
			   (x - y > x));
}
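
/*
 * Apply the Poly1305 ε-∆U hash function to (bulk length, tweak) and save the
 * result to rctx->header_hash.  This value is reused in both hash steps: it
 * is added to the NHPoly1305 hash of the bulk data to form the overall hash
 * of the (tweak, left-hand part) pair.
 */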
static void adiantum_hash_header(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	const struct adiantum_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);
	struct adiantum_request_ctx *rctx = skcipher_request_ctx(req);
	const unsigned int bulk_len = req->cryptlen - BLOCKCIPHER_BLOCK_SIZE;
	struct {
		__le64 message_bits;
		__le64 padding;
	} header = {
		.message_bits = cpu_to_le64((u64)bulk_len * 8)
	};
	struct poly1305_state state;

	poly1305_core_init(&state);

	BUILD_BUG_ON(sizeof(header) % POLY1305_BLOCK_SIZE != 0);
	poly1305_core_blocks(&state, &tctx->header_hash_key,
			     &header, sizeof(header) / POLY1305_BLOCK_SIZE, 1);

	BUILD_BUG_ON(TWEAK_SIZE % POLY1305_BLOCK_SIZE != 0);
	poly1305_core_blocks(&state, &tctx->header_hash_key, req->iv,
			     TWEAK_SIZE / POLY1305_BLOCK_SIZE, 1);

	poly1305_core_emit(&state, NULL, &rctx->header_hash);
}

/* Hash the left-hand part (the "bulk") of the message using NHPoly1305 */
static int adiantum_hash_message(struct skcipher_request *req,
				 struct scatterlist *sgl, le128 *digest)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	const struct adiantum_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);
	struct adiantum_request_ctx *rctx = skcipher_request_ctx(req);
	const unsigned int bulk_len = req->cryptlen - BLOCKCIPHER_BLOCK_SIZE;
	struct shash_desc *hash_desc = &rctx->u.hash_desc;
	struct sg_mapping_iter miter;
	unsigned int i, n;
	int err;

	hash_desc->tfm = tctx->hash;

	err = crypto_shash_init(hash_desc);
	if (err)
		return err;

	sg_miter_start(&miter, sgl, sg_nents(sgl),
		       SG_MITER_FROM_SG | SG_MITER_ATOMIC);
	for (i = 0; i < bulk_len; i += n) {
		sg_miter_next(&miter);
		n = min_t(unsigned int, miter.length, bulk_len - i);
		err = crypto_shash_update(hash_desc, miter.addr, n);
		if (err)
			break;
	}
	sg_miter_stop(&miter);
	if (err)
		return err;

	return crypto_shash_final(hash_desc, (u8 *)digest);
}

/* Continue Adiantum encryption/decryption after the stream cipher step */
static int adiantum_finish(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	const struct adiantum_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);
	struct adiantum_request_ctx *rctx = skcipher_request_ctx(req);
	const unsigned int bulk_len = req->cryptlen - BLOCKCIPHER_BLOCK_SIZE;
	le128 digest;
	int err;

	/* If decrypting, decrypt C_M with the block cipher to get P_M */
	if (!rctx->enc)
		crypto_cipher_decrypt_one(tctx->blockcipher, rctx->rbuf.bytes,
					  rctx->rbuf.bytes);
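
	/*
	 * Second hash step
	 *	enc: C_R = C_M - H_{K_H}(T, C_L)
	 *	dec: P_R = P_M - H_{K_H}(T, P_L)
	 */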
	err = adiantum_hash_message(req, req->dst, &digest);
	if (err)
		return err;
	le128_add(&digest, &digest, &rctx->header_hash);
	le128_sub(&rctx->rbuf.bignum, &rctx->rbuf.bignum, &digest);
	scatterwalk_map_and_copy(&rctx->rbuf.bignum, req->dst,
				 bulk_len, BLOCKCIPHER_BLOCK_SIZE, 1);
	return 0;
}

static void adiantum_streamcipher_done(struct crypto_async_request *areq,
				       int err)
{
	struct skcipher_request *req = areq->data;

	if (!err)
		err = adiantum_finish(req);

	skcipher_request_complete(req, err);
}

static int adiantum_crypt(struct skcipher_request *req, bool enc)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	const struct adiantum_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);
	struct adiantum_request_ctx *rctx = skcipher_request_ctx(req);
	const unsigned int bulk_len = req->cryptlen - BLOCKCIPHER_BLOCK_SIZE;
	unsigned int stream_len;
	le128 digest;
	int err;

	if (req->cryptlen < BLOCKCIPHER_BLOCK_SIZE)
		return -EINVAL;

	rctx->enc = enc;
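
	/*
	 * First hash step
	 *	enc: P_M = P_R + H_{K_H}(T, P_L)
	 *	dec: C_M = C_R + H_{K_H}(T, C_L)
	 */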
	adiantum_hash_header(req);
	err = adiantum_hash_message(req, req->src, &digest);
	if (err)
		return err;
	le128_add(&digest, &digest, &rctx->header_hash);
	scatterwalk_map_and_copy(&rctx->rbuf.bignum, req->src,
				 bulk_len, BLOCKCIPHER_BLOCK_SIZE, 0);
	le128_add(&rctx->rbuf.bignum, &rctx->rbuf.bignum, &digest);

	/* If encrypting, encrypt P_M with the block cipher to get C_M */
	if (enc)
		crypto_cipher_encrypt_one(tctx->blockcipher, rctx->rbuf.bytes,
					  rctx->rbuf.bytes);

	/* Initialize the rest of the XChaCha IV (the first part is C_M) */
	BUILD_BUG_ON(BLOCKCIPHER_BLOCK_SIZE != 16);
	BUILD_BUG_ON(XCHACHA_IV_SIZE != 32);
	rctx->rbuf.words[4] = cpu_to_le32(1);
	rctx->rbuf.words[5] = 0;
	rctx->rbuf.words[6] = 0;
	rctx->rbuf.words[7] = 0;
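
	/*
	 * The stream cipher needs to cover all the data except the last 16
	 * bytes.  ChaCha implementations tend to be fastest when fed whole
	 * 64-byte blocks, and the last 16 bytes of the destination are
	 * overwritten by the second hash step anyway, so round the stream
	 * cipher length up to a 64-byte boundary whenever the rounded length
	 * still fits within the message.
	 */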
	stream_len = bulk_len;
	if (round_up(stream_len, CHACHA_BLOCK_SIZE) <= req->cryptlen)
		stream_len = round_up(stream_len, CHACHA_BLOCK_SIZE);

	skcipher_request_set_tfm(&rctx->u.streamcipher_req, tctx->streamcipher);
	skcipher_request_set_crypt(&rctx->u.streamcipher_req, req->src,
				   req->dst, stream_len, &rctx->rbuf);
	skcipher_request_set_callback(&rctx->u.streamcipher_req,
				      req->base.flags,
				      adiantum_streamcipher_done, req);
	return crypto_skcipher_encrypt(&rctx->u.streamcipher_req) ?:
	       adiantum_finish(req);
}

static int adiantum_encrypt(struct skcipher_request *req)
{
	return adiantum_crypt(req, true);
}

static int adiantum_decrypt(struct skcipher_request *req)
{
	return adiantum_crypt(req, false);
}

static int adiantum_init_tfm(struct crypto_skcipher *tfm)
{
	struct skcipher_instance *inst = skcipher_alg_instance(tfm);
	struct adiantum_instance_ctx *ictx = skcipher_instance_ctx(inst);
	struct adiantum_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);
	struct crypto_skcipher *streamcipher;
	struct crypto_cipher *blockcipher;
	struct crypto_shash *hash;
	unsigned int subreq_size;
	int err;

	streamcipher = crypto_spawn_skcipher(&ictx->streamcipher_spawn);
	if (IS_ERR(streamcipher))
		return PTR_ERR(streamcipher);

	blockcipher = crypto_spawn_cipher(&ictx->blockcipher_spawn);
	if (IS_ERR(blockcipher)) {
		err = PTR_ERR(blockcipher);
		goto err_free_streamcipher;
	}

	hash = crypto_spawn_shash(&ictx->hash_spawn);
	if (IS_ERR(hash)) {
		err = PTR_ERR(hash);
		goto err_free_blockcipher;
	}

	tctx->streamcipher = streamcipher;
	tctx->blockcipher = blockcipher;
	tctx->hash = hash;

	BUILD_BUG_ON(offsetofend(struct adiantum_request_ctx, u) !=
		     sizeof(struct adiantum_request_ctx));
	subreq_size = max(sizeof_field(struct adiantum_request_ctx,
				       u.hash_desc) +
			  crypto_shash_descsize(hash),
			  sizeof_field(struct adiantum_request_ctx,
				       u.streamcipher_req) +
			  crypto_skcipher_reqsize(streamcipher));

	crypto_skcipher_set_reqsize(tfm,
				    offsetof(struct adiantum_request_ctx, u) +
				    subreq_size);
	return 0;

err_free_blockcipher:
	crypto_free_cipher(blockcipher);
err_free_streamcipher:
	crypto_free_skcipher(streamcipher);
	return err;
}

static void adiantum_exit_tfm(struct crypto_skcipher *tfm)
{
	struct adiantum_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);

	crypto_free_skcipher(tctx->streamcipher);
	crypto_free_cipher(tctx->blockcipher);
	crypto_free_shash(tctx->hash);
}

static void adiantum_free_instance(struct skcipher_instance *inst)
{
	struct adiantum_instance_ctx *ictx = skcipher_instance_ctx(inst);

	crypto_drop_skcipher(&ictx->streamcipher_spawn);
	crypto_drop_cipher(&ictx->blockcipher_spawn);
	crypto_drop_shash(&ictx->hash_spawn);
	kfree(inst);
}
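
/*
 * Check for a supported set of inner algorithms.  See the comment at the top
 * of the file.
 */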
static bool adiantum_supported_algorithms(struct skcipher_alg *streamcipher_alg,
					  struct crypto_alg *blockcipher_alg,
					  struct shash_alg *hash_alg)
{
	if (strcmp(streamcipher_alg->base.cra_name, "xchacha12") != 0 &&
	    strcmp(streamcipher_alg->base.cra_name, "xchacha20") != 0)
		return false;

	if (blockcipher_alg->cra_cipher.cia_min_keysize > BLOCKCIPHER_KEY_SIZE ||
	    blockcipher_alg->cra_cipher.cia_max_keysize < BLOCKCIPHER_KEY_SIZE)
		return false;
	if (blockcipher_alg->cra_blocksize != BLOCKCIPHER_BLOCK_SIZE)
		return false;

	if (strcmp(hash_alg->base.cra_name, "nhpoly1305") != 0)
		return false;

	return true;
}

static int adiantum_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	u32 mask;
	const char *nhpoly1305_name;
	struct skcipher_instance *inst;
	struct adiantum_instance_ctx *ictx;
	struct skcipher_alg *streamcipher_alg;
	struct crypto_alg *blockcipher_alg;
	struct shash_alg *hash_alg;
	int err;

	err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_SKCIPHER, &mask);
	if (err)
		return err;

	inst = kzalloc(sizeof(*inst) + sizeof(*ictx), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;
	ictx = skcipher_instance_ctx(inst);

	/* Stream cipher, e.g. "xchacha12" */
	err = crypto_grab_skcipher(&ictx->streamcipher_spawn,
				   skcipher_crypto_instance(inst),
				   crypto_attr_alg_name(tb[1]), 0, mask);
	if (err)
		goto err_free_inst;
	streamcipher_alg = crypto_spawn_skcipher_alg(&ictx->streamcipher_spawn);

	/* Block cipher, e.g. "aes" */
	err = crypto_grab_cipher(&ictx->blockcipher_spawn,
				 skcipher_crypto_instance(inst),
				 crypto_attr_alg_name(tb[2]), 0, mask);
	if (err)
		goto err_free_inst;
	blockcipher_alg = crypto_spawn_cipher_alg(&ictx->blockcipher_spawn);

	/* NHPoly1305 ε-∆U hash function (optional third argument) */
	nhpoly1305_name = crypto_attr_alg_name(tb[3]);
	if (nhpoly1305_name == ERR_PTR(-ENOENT))
		nhpoly1305_name = "nhpoly1305";
	err = crypto_grab_shash(&ictx->hash_spawn,
				skcipher_crypto_instance(inst),
				nhpoly1305_name, 0, mask);
	if (err)
		goto err_free_inst;
	hash_alg = crypto_spawn_shash_alg(&ictx->hash_spawn);

	/* Check the set of algorithms */
	if (!adiantum_supported_algorithms(streamcipher_alg, blockcipher_alg,
					   hash_alg)) {
		pr_warn("Unsupported Adiantum instantiation: (%s,%s,%s)\n",
			streamcipher_alg->base.cra_name,
			blockcipher_alg->cra_name, hash_alg->base.cra_name);
		err = -EINVAL;
		goto err_free_inst;
	}

	/* Instance fields */
	err = -ENAMETOOLONG;
	if (snprintf(inst->alg.base.cra_name, CRYPTO_MAX_ALG_NAME,
		     "adiantum(%s,%s)", streamcipher_alg->base.cra_name,
		     blockcipher_alg->cra_name) >= CRYPTO_MAX_ALG_NAME)
		goto err_free_inst;
	if (snprintf(inst->alg.base.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "adiantum(%s,%s,%s)",
		     streamcipher_alg->base.cra_driver_name,
		     blockcipher_alg->cra_driver_name,
		     hash_alg->base.cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
		goto err_free_inst;

	inst->alg.base.cra_blocksize = BLOCKCIPHER_BLOCK_SIZE;
	inst->alg.base.cra_ctxsize = sizeof(struct adiantum_tfm_ctx);
	inst->alg.base.cra_alignmask = streamcipher_alg->base.cra_alignmask |
				       hash_alg->base.cra_alignmask;
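
	/*
	 * The block cipher is invoked only once per message, so its speed
	 * matters much less than that of the stream cipher and hash function.
	 * Weight its priority accordingly in the weighted average below.
	 */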
	inst->alg.base.cra_priority = (4 * streamcipher_alg->base.cra_priority +
				       2 * hash_alg->base.cra_priority +
				       blockcipher_alg->cra_priority) / 7;

	inst->alg.setkey = adiantum_setkey;
	inst->alg.encrypt = adiantum_encrypt;
	inst->alg.decrypt = adiantum_decrypt;
	inst->alg.init = adiantum_init_tfm;
	inst->alg.exit = adiantum_exit_tfm;
	inst->alg.min_keysize = crypto_skcipher_alg_min_keysize(streamcipher_alg);
	inst->alg.max_keysize = crypto_skcipher_alg_max_keysize(streamcipher_alg);
	inst->alg.ivsize = TWEAK_SIZE;

	inst->free = adiantum_free_instance;

	err = skcipher_register_instance(tmpl, inst);
	if (err) {
err_free_inst:
		adiantum_free_instance(inst);
	}
	return err;
}

/* adiantum(streamcipher_name, blockcipher_name [, nhpoly1305_name]) */
static struct crypto_template adiantum_tmpl = {
	.name = "adiantum",
	.create = adiantum_create,
	.module = THIS_MODULE,
};

static int __init adiantum_module_init(void)
{
	return crypto_register_template(&adiantum_tmpl);
}

static void __exit adiantum_module_exit(void)
{
	crypto_unregister_template(&adiantum_tmpl);
}

subsys_initcall(adiantum_module_init);
module_exit(adiantum_module_exit);

MODULE_DESCRIPTION("Adiantum length-preserving encryption mode");
MODULE_LICENSE("GPL v2");
MODULE_AUTHOR("Eric Biggers <ebiggers@google.com>");
MODULE_ALIAS_CRYPTO("adiantum");
MODULE_IMPORT_NS(CRYPTO_INTERNAL);