0001
0002
0003
0004
0005
0006
0007
0008
0009
0010
0011
0012
0013
0014
0015
0016
0017
0018
0019
0020
0021
0022
0023
0024
0025
0026
0027
0028
0029
0030
0031 #include <crypto/authenc.h>
0032 #include <crypto/internal/aead.h>
0033 #include <crypto/internal/cipher.h>
0034 #include <crypto/internal/hash.h>
0035 #include <crypto/internal/skcipher.h>
0036 #include <crypto/scatterwalk.h>
0037 #include <linux/module.h>
0038
0039 #include "internal.h"
0040
/*
 * Per-instance context for the essiv() template.
 *
 * Exactly one member of @u is in use, depending on whether the template
 * was instantiated around a skcipher or an AEAD (see essiv_create()).
 * The name strings record which ESSIV block cipher and which hash driver
 * to allocate when a transform is instantiated (essiv_init_tfm()).
 */
struct essiv_instance_ctx {
	union {
		struct crypto_skcipher_spawn skcipher_spawn;
		struct crypto_aead_spawn aead_spawn;
	} u;
	char essiv_cipher_name[CRYPTO_MAX_ALG_NAME];	/* e.g. "aes", parsed from the inner cra_name */
	char shash_driver_name[CRYPTO_MAX_ALG_NAME];	/* exact hash driver to instantiate */
};
0049
/*
 * Per-transform context: the inner skcipher or AEAD (one of the two),
 * the ESSIV block cipher used to encrypt the IV, and the hash used to
 * derive that cipher's key from the bulk encryption key.
 */
struct essiv_tfm_ctx {
	union {
		struct crypto_skcipher *skcipher;
		struct crypto_aead *aead;
	} u;
	struct crypto_cipher *essiv_cipher;
	struct crypto_shash *hash;
	/* offset of the converted-IV copy inside the AEAD request context,
	 * computed in essiv_aead_init_tfm() */
	int ivoffset;
};
0059
/*
 * Per-request context for the AEAD flavour.
 *
 * @sg:       scatterlist used to splice the converted IV into the AAD
 *            when operating out of place (see essiv_aead_crypt())
 * @assoc:    optional bounce buffer holding linearized associated data
 * @aead_req: subrequest for the inner AEAD; must remain the LAST member,
 *            since the child's request context is appended to it
 *            (see the BUILD_BUG_ON() in essiv_aead_init_tfm())
 */
struct essiv_aead_request_ctx {
	struct scatterlist sg[4];
	u8 *assoc;
	struct aead_request aead_req;
};
0065
/*
 * Set the key for the skcipher instantiation of essiv().
 *
 * The key is passed through unchanged to the inner skcipher, and its
 * digest (the "salt") is used to key the ESSIV block cipher that encrypts
 * the IVs.  Returns 0 or a negative errno from any of the child setkey or
 * digest operations.
 */
static int essiv_skcipher_setkey(struct crypto_skcipher *tfm,
				 const u8 *key, unsigned int keylen)
{
	struct essiv_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);
	u8 salt[HASH_MAX_DIGESTSIZE];
	int err;

	/* Propagate the key-related request flags to the child cipher. */
	crypto_skcipher_clear_flags(tctx->u.skcipher, CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(tctx->u.skcipher,
				  crypto_skcipher_get_flags(tfm) &
				  CRYPTO_TFM_REQ_MASK);
	err = crypto_skcipher_setkey(tctx->u.skcipher, key, keylen);
	if (err)
		return err;

	/* salt = H(key); essiv_supported_algorithms() checked at instance
	 * creation that the digest size is a valid key size for the cipher. */
	err = crypto_shash_tfm_digest(tctx->hash, key, keylen, salt);
	if (err)
		return err;

	crypto_cipher_clear_flags(tctx->essiv_cipher, CRYPTO_TFM_REQ_MASK);
	crypto_cipher_set_flags(tctx->essiv_cipher,
				crypto_skcipher_get_flags(tfm) &
				CRYPTO_TFM_REQ_MASK);
	return crypto_cipher_setkey(tctx->essiv_cipher, salt,
				    crypto_shash_digestsize(tctx->hash));
}
0092
/*
 * Set the key for the AEAD instantiation of essiv().
 *
 * The key blob is in authenc() format.  It is passed through unchanged to
 * the inner authenc() AEAD; the ESSIV salt is computed as the digest of
 * enckey || authkey after splitting the blob with
 * crypto_authenc_extractkeys().
 */
static int essiv_aead_setkey(struct crypto_aead *tfm, const u8 *key,
			     unsigned int keylen)
{
	struct essiv_tfm_ctx *tctx = crypto_aead_ctx(tfm);
	SHASH_DESC_ON_STACK(desc, tctx->hash);
	struct crypto_authenc_keys keys;
	u8 salt[HASH_MAX_DIGESTSIZE];
	int err;

	/* Propagate the key-related request flags to the child AEAD. */
	crypto_aead_clear_flags(tctx->u.aead, CRYPTO_TFM_REQ_MASK);
	crypto_aead_set_flags(tctx->u.aead, crypto_aead_get_flags(tfm) &
					    CRYPTO_TFM_REQ_MASK);
	err = crypto_aead_setkey(tctx->u.aead, key, keylen);
	if (err)
		return err;

	/* Split the authenc key blob so the salt can cover both halves. */
	if (crypto_authenc_extractkeys(&keys, key, keylen) != 0)
		return -EINVAL;

	/* salt = H(enckey || authkey) */
	desc->tfm = tctx->hash;
	err = crypto_shash_init(desc) ?:
	      crypto_shash_update(desc, keys.enckey, keys.enckeylen) ?:
	      crypto_shash_finup(desc, keys.authkey, keys.authkeylen, salt);
	if (err)
		return err;

	crypto_cipher_clear_flags(tctx->essiv_cipher, CRYPTO_TFM_REQ_MASK);
	crypto_cipher_set_flags(tctx->essiv_cipher, crypto_aead_get_flags(tfm) &
						    CRYPTO_TFM_REQ_MASK);
	return crypto_cipher_setkey(tctx->essiv_cipher, salt,
				    crypto_shash_digestsize(tctx->hash));
}
0125
0126 static int essiv_aead_setauthsize(struct crypto_aead *tfm,
0127 unsigned int authsize)
0128 {
0129 struct essiv_tfm_ctx *tctx = crypto_aead_ctx(tfm);
0130
0131 return crypto_aead_setauthsize(tctx->u.aead, authsize);
0132 }
0133
0134 static void essiv_skcipher_done(struct crypto_async_request *areq, int err)
0135 {
0136 struct skcipher_request *req = areq->data;
0137
0138 skcipher_request_complete(req, err);
0139 }
0140
/*
 * Common en/decrypt path for the skcipher flavour: convert the IV in
 * place with the ESSIV block cipher, then forward the request unchanged
 * to the inner skcipher using the converted IV.
 */
static int essiv_skcipher_crypt(struct skcipher_request *req, bool enc)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	const struct essiv_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);
	struct skcipher_request *subreq = skcipher_request_ctx(req);

	/* iv = E(salt, iv); the caller's IV is typically a sector number */
	crypto_cipher_encrypt_one(tctx->essiv_cipher, req->iv, req->iv);

	skcipher_request_set_tfm(subreq, tctx->u.skcipher);
	skcipher_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
				   req->iv);
	skcipher_request_set_callback(subreq, skcipher_request_flags(req),
				      essiv_skcipher_done, req);

	return enc ? crypto_skcipher_encrypt(subreq) :
		     crypto_skcipher_decrypt(subreq);
}
0158
/* Encrypt entry point for the essiv() skcipher instance. */
static int essiv_skcipher_encrypt(struct skcipher_request *req)
{
	return essiv_skcipher_crypt(req, true);
}
0163
/* Decrypt entry point for the essiv() skcipher instance. */
static int essiv_skcipher_decrypt(struct skcipher_request *req)
{
	return essiv_skcipher_crypt(req, false);
}
0168
0169 static void essiv_aead_done(struct crypto_async_request *areq, int err)
0170 {
0171 struct aead_request *req = areq->data;
0172 struct essiv_aead_request_ctx *rctx = aead_request_ctx(req);
0173
0174 kfree(rctx->assoc);
0175 aead_request_complete(req, err);
0176 }
0177
0178 static int essiv_aead_crypt(struct aead_request *req, bool enc)
0179 {
0180 struct crypto_aead *tfm = crypto_aead_reqtfm(req);
0181 const struct essiv_tfm_ctx *tctx = crypto_aead_ctx(tfm);
0182 struct essiv_aead_request_ctx *rctx = aead_request_ctx(req);
0183 struct aead_request *subreq = &rctx->aead_req;
0184 struct scatterlist *src = req->src;
0185 int err;
0186
0187 crypto_cipher_encrypt_one(tctx->essiv_cipher, req->iv, req->iv);
0188
0189
0190
0191
0192
0193
0194 rctx->assoc = NULL;
0195 if (req->src == req->dst || !enc) {
0196 scatterwalk_map_and_copy(req->iv, req->dst,
0197 req->assoclen - crypto_aead_ivsize(tfm),
0198 crypto_aead_ivsize(tfm), 1);
0199 } else {
0200 u8 *iv = (u8 *)aead_request_ctx(req) + tctx->ivoffset;
0201 int ivsize = crypto_aead_ivsize(tfm);
0202 int ssize = req->assoclen - ivsize;
0203 struct scatterlist *sg;
0204 int nents;
0205
0206 if (ssize < 0)
0207 return -EINVAL;
0208
0209 nents = sg_nents_for_len(req->src, ssize);
0210 if (nents < 0)
0211 return -EINVAL;
0212
0213 memcpy(iv, req->iv, ivsize);
0214 sg_init_table(rctx->sg, 4);
0215
0216 if (unlikely(nents > 1)) {
0217
0218
0219
0220
0221 rctx->assoc = kmalloc(ssize, GFP_ATOMIC);
0222 if (!rctx->assoc)
0223 return -ENOMEM;
0224
0225 scatterwalk_map_and_copy(rctx->assoc, req->src, 0,
0226 ssize, 0);
0227 sg_set_buf(rctx->sg, rctx->assoc, ssize);
0228 } else {
0229 sg_set_page(rctx->sg, sg_page(req->src), ssize,
0230 req->src->offset);
0231 }
0232
0233 sg_set_buf(rctx->sg + 1, iv, ivsize);
0234 sg = scatterwalk_ffwd(rctx->sg + 2, req->src, req->assoclen);
0235 if (sg != rctx->sg + 2)
0236 sg_chain(rctx->sg, 3, sg);
0237
0238 src = rctx->sg;
0239 }
0240
0241 aead_request_set_tfm(subreq, tctx->u.aead);
0242 aead_request_set_ad(subreq, req->assoclen);
0243 aead_request_set_callback(subreq, aead_request_flags(req),
0244 essiv_aead_done, req);
0245 aead_request_set_crypt(subreq, src, req->dst, req->cryptlen, req->iv);
0246
0247 err = enc ? crypto_aead_encrypt(subreq) :
0248 crypto_aead_decrypt(subreq);
0249
0250 if (rctx->assoc && err != -EINPROGRESS)
0251 kfree(rctx->assoc);
0252 return err;
0253 }
0254
/* Encrypt entry point for the essiv() AEAD instance. */
static int essiv_aead_encrypt(struct aead_request *req)
{
	return essiv_aead_crypt(req, true);
}
0259
/* Decrypt entry point for the essiv() AEAD instance. */
static int essiv_aead_decrypt(struct aead_request *req)
{
	return essiv_aead_crypt(req, false);
}
0264
0265 static int essiv_init_tfm(struct essiv_instance_ctx *ictx,
0266 struct essiv_tfm_ctx *tctx)
0267 {
0268 struct crypto_cipher *essiv_cipher;
0269 struct crypto_shash *hash;
0270 int err;
0271
0272 essiv_cipher = crypto_alloc_cipher(ictx->essiv_cipher_name, 0, 0);
0273 if (IS_ERR(essiv_cipher))
0274 return PTR_ERR(essiv_cipher);
0275
0276 hash = crypto_alloc_shash(ictx->shash_driver_name, 0, 0);
0277 if (IS_ERR(hash)) {
0278 err = PTR_ERR(hash);
0279 goto err_free_essiv_cipher;
0280 }
0281
0282 tctx->essiv_cipher = essiv_cipher;
0283 tctx->hash = hash;
0284
0285 return 0;
0286
0287 err_free_essiv_cipher:
0288 crypto_free_cipher(essiv_cipher);
0289 return err;
0290 }
0291
0292 static int essiv_skcipher_init_tfm(struct crypto_skcipher *tfm)
0293 {
0294 struct skcipher_instance *inst = skcipher_alg_instance(tfm);
0295 struct essiv_instance_ctx *ictx = skcipher_instance_ctx(inst);
0296 struct essiv_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);
0297 struct crypto_skcipher *skcipher;
0298 int err;
0299
0300 skcipher = crypto_spawn_skcipher(&ictx->u.skcipher_spawn);
0301 if (IS_ERR(skcipher))
0302 return PTR_ERR(skcipher);
0303
0304 crypto_skcipher_set_reqsize(tfm, sizeof(struct skcipher_request) +
0305 crypto_skcipher_reqsize(skcipher));
0306
0307 err = essiv_init_tfm(ictx, tctx);
0308 if (err) {
0309 crypto_free_skcipher(skcipher);
0310 return err;
0311 }
0312
0313 tctx->u.skcipher = skcipher;
0314 return 0;
0315 }
0316
/*
 * Transform constructor for the AEAD flavour.
 *
 * The per-request memory is laid out as:
 *
 *	struct essiv_aead_request_ctx		(sg[], assoc, aead_req)
 *	<inner AEAD's request context>		(appended to aead_req)
 *	<ivsize bytes>				(converted-IV copy)
 *
 * tctx->ivoffset records where the IV copy lives within that area, for
 * use by essiv_aead_crypt().
 */
static int essiv_aead_init_tfm(struct crypto_aead *tfm)
{
	struct aead_instance *inst = aead_alg_instance(tfm);
	struct essiv_instance_ctx *ictx = aead_instance_ctx(inst);
	struct essiv_tfm_ctx *tctx = crypto_aead_ctx(tfm);
	struct crypto_aead *aead;
	unsigned int subreq_size;
	int err;

	/* aead_req must be the last member, since the child ctx follows it */
	BUILD_BUG_ON(offsetofend(struct essiv_aead_request_ctx, aead_req) !=
		     sizeof(struct essiv_aead_request_ctx));

	aead = crypto_spawn_aead(&ictx->u.aead_spawn);
	if (IS_ERR(aead))
		return PTR_ERR(aead);

	subreq_size = sizeof_field(struct essiv_aead_request_ctx, aead_req) +
		      crypto_aead_reqsize(aead);

	tctx->ivoffset = offsetof(struct essiv_aead_request_ctx, aead_req) +
			 subreq_size;
	crypto_aead_set_reqsize(tfm, tctx->ivoffset + crypto_aead_ivsize(aead));

	err = essiv_init_tfm(ictx, tctx);
	if (err) {
		crypto_free_aead(aead);
		return err;
	}

	tctx->u.aead = aead;
	return 0;
}
0349
/* Release the child skcipher and the shared ESSIV cipher/hash pair. */
static void essiv_skcipher_exit_tfm(struct crypto_skcipher *tfm)
{
	struct essiv_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);

	crypto_free_skcipher(tctx->u.skcipher);
	crypto_free_cipher(tctx->essiv_cipher);
	crypto_free_shash(tctx->hash);
}
0358
/* Release the child AEAD and the shared ESSIV cipher/hash pair. */
static void essiv_aead_exit_tfm(struct crypto_aead *tfm)
{
	struct essiv_tfm_ctx *tctx = crypto_aead_ctx(tfm);

	crypto_free_aead(tctx->u.aead);
	crypto_free_cipher(tctx->essiv_cipher);
	crypto_free_shash(tctx->hash);
}
0367
/* Drop the skcipher spawn and free the instance allocated in essiv_create(). */
static void essiv_skcipher_free_instance(struct skcipher_instance *inst)
{
	struct essiv_instance_ctx *ictx = skcipher_instance_ctx(inst);

	crypto_drop_skcipher(&ictx->u.skcipher_spawn);
	kfree(inst);
}
0375
/* Drop the AEAD spawn and free the instance allocated in essiv_create(). */
static void essiv_aead_free_instance(struct aead_instance *inst)
{
	struct essiv_instance_ctx *ictx = aead_instance_ctx(inst);

	crypto_drop_aead(&ictx->u.aead_spawn);
	kfree(inst);
}
0383
0384 static bool parse_cipher_name(char *essiv_cipher_name, const char *cra_name)
0385 {
0386 const char *p, *q;
0387 int len;
0388
0389
0390 p = strrchr(cra_name, '(');
0391 if (!p++)
0392 return false;
0393
0394
0395 q = strchr(p, ')');
0396 if (!q)
0397 return false;
0398
0399 len = q - p;
0400 if (len >= CRYPTO_MAX_ALG_NAME)
0401 return false;
0402
0403 memcpy(essiv_cipher_name, p, len);
0404 essiv_cipher_name[len] = '\0';
0405 return true;
0406 }
0407
/*
 * Check whether this combination of algorithms can implement ESSIV:
 *  - the hash digest must be a valid key size for the ESSIV block cipher,
 *    since the digest of the bulk key keys that cipher;
 *  - the cipher's block size must equal the IV size, since the IV is
 *    converted with a single crypto_cipher_encrypt_one() call;
 *  - the shash must not itself require a key.
 */
static bool essiv_supported_algorithms(const char *essiv_cipher_name,
				       struct shash_alg *hash_alg,
				       int ivsize)
{
	struct crypto_alg *alg;
	bool ret = false;

	alg = crypto_alg_mod_lookup(essiv_cipher_name,
				    CRYPTO_ALG_TYPE_CIPHER,
				    CRYPTO_ALG_TYPE_MASK);
	if (IS_ERR(alg))
		return false;

	if (hash_alg->digestsize < alg->cra_cipher.cia_min_keysize ||
	    hash_alg->digestsize > alg->cra_cipher.cia_max_keysize)
		goto out;

	if (ivsize != alg->cra_blocksize)
		goto out;

	if (crypto_shash_alg_needs_key(hash_alg))
		goto out;

	ret = true;

out:
	/* drop the reference taken by crypto_alg_mod_lookup() */
	crypto_mod_put(alg);
	return ret;
}
0437
0438 static int essiv_create(struct crypto_template *tmpl, struct rtattr **tb)
0439 {
0440 struct crypto_attr_type *algt;
0441 const char *inner_cipher_name;
0442 const char *shash_name;
0443 struct skcipher_instance *skcipher_inst = NULL;
0444 struct aead_instance *aead_inst = NULL;
0445 struct crypto_instance *inst;
0446 struct crypto_alg *base, *block_base;
0447 struct essiv_instance_ctx *ictx;
0448 struct skcipher_alg *skcipher_alg = NULL;
0449 struct aead_alg *aead_alg = NULL;
0450 struct crypto_alg *_hash_alg;
0451 struct shash_alg *hash_alg;
0452 int ivsize;
0453 u32 type;
0454 u32 mask;
0455 int err;
0456
0457 algt = crypto_get_attr_type(tb);
0458 if (IS_ERR(algt))
0459 return PTR_ERR(algt);
0460
0461 inner_cipher_name = crypto_attr_alg_name(tb[1]);
0462 if (IS_ERR(inner_cipher_name))
0463 return PTR_ERR(inner_cipher_name);
0464
0465 shash_name = crypto_attr_alg_name(tb[2]);
0466 if (IS_ERR(shash_name))
0467 return PTR_ERR(shash_name);
0468
0469 type = algt->type & algt->mask;
0470 mask = crypto_algt_inherited_mask(algt);
0471
0472 switch (type) {
0473 case CRYPTO_ALG_TYPE_SKCIPHER:
0474 skcipher_inst = kzalloc(sizeof(*skcipher_inst) +
0475 sizeof(*ictx), GFP_KERNEL);
0476 if (!skcipher_inst)
0477 return -ENOMEM;
0478 inst = skcipher_crypto_instance(skcipher_inst);
0479 base = &skcipher_inst->alg.base;
0480 ictx = crypto_instance_ctx(inst);
0481
0482
0483 err = crypto_grab_skcipher(&ictx->u.skcipher_spawn, inst,
0484 inner_cipher_name, 0, mask);
0485 if (err)
0486 goto out_free_inst;
0487 skcipher_alg = crypto_spawn_skcipher_alg(&ictx->u.skcipher_spawn);
0488 block_base = &skcipher_alg->base;
0489 ivsize = crypto_skcipher_alg_ivsize(skcipher_alg);
0490 break;
0491
0492 case CRYPTO_ALG_TYPE_AEAD:
0493 aead_inst = kzalloc(sizeof(*aead_inst) +
0494 sizeof(*ictx), GFP_KERNEL);
0495 if (!aead_inst)
0496 return -ENOMEM;
0497 inst = aead_crypto_instance(aead_inst);
0498 base = &aead_inst->alg.base;
0499 ictx = crypto_instance_ctx(inst);
0500
0501
0502 err = crypto_grab_aead(&ictx->u.aead_spawn, inst,
0503 inner_cipher_name, 0, mask);
0504 if (err)
0505 goto out_free_inst;
0506 aead_alg = crypto_spawn_aead_alg(&ictx->u.aead_spawn);
0507 block_base = &aead_alg->base;
0508 if (!strstarts(block_base->cra_name, "authenc(")) {
0509 pr_warn("Only authenc() type AEADs are supported by ESSIV\n");
0510 err = -EINVAL;
0511 goto out_drop_skcipher;
0512 }
0513 ivsize = aead_alg->ivsize;
0514 break;
0515
0516 default:
0517 return -EINVAL;
0518 }
0519
0520 if (!parse_cipher_name(ictx->essiv_cipher_name, block_base->cra_name)) {
0521 pr_warn("Failed to parse ESSIV cipher name from skcipher cra_name\n");
0522 err = -EINVAL;
0523 goto out_drop_skcipher;
0524 }
0525
0526
0527 _hash_alg = crypto_alg_mod_lookup(shash_name,
0528 CRYPTO_ALG_TYPE_SHASH,
0529 CRYPTO_ALG_TYPE_MASK | mask);
0530 if (IS_ERR(_hash_alg)) {
0531 err = PTR_ERR(_hash_alg);
0532 goto out_drop_skcipher;
0533 }
0534 hash_alg = __crypto_shash_alg(_hash_alg);
0535
0536
0537 if (!essiv_supported_algorithms(ictx->essiv_cipher_name, hash_alg,
0538 ivsize)) {
0539 pr_warn("Unsupported essiv instantiation: essiv(%s,%s)\n",
0540 block_base->cra_name, hash_alg->base.cra_name);
0541 err = -EINVAL;
0542 goto out_free_hash;
0543 }
0544
0545
0546 strlcpy(ictx->shash_driver_name, hash_alg->base.cra_driver_name,
0547 CRYPTO_MAX_ALG_NAME);
0548
0549
0550
0551 err = -ENAMETOOLONG;
0552 if (snprintf(base->cra_name, CRYPTO_MAX_ALG_NAME,
0553 "essiv(%s,%s)", block_base->cra_name,
0554 hash_alg->base.cra_name) >= CRYPTO_MAX_ALG_NAME)
0555 goto out_free_hash;
0556 if (snprintf(base->cra_driver_name, CRYPTO_MAX_ALG_NAME,
0557 "essiv(%s,%s)", block_base->cra_driver_name,
0558 hash_alg->base.cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
0559 goto out_free_hash;
0560
0561
0562
0563
0564
0565 base->cra_flags |= (hash_alg->base.cra_flags &
0566 CRYPTO_ALG_INHERITED_FLAGS);
0567 base->cra_blocksize = block_base->cra_blocksize;
0568 base->cra_ctxsize = sizeof(struct essiv_tfm_ctx);
0569 base->cra_alignmask = block_base->cra_alignmask;
0570 base->cra_priority = block_base->cra_priority;
0571
0572 if (type == CRYPTO_ALG_TYPE_SKCIPHER) {
0573 skcipher_inst->alg.setkey = essiv_skcipher_setkey;
0574 skcipher_inst->alg.encrypt = essiv_skcipher_encrypt;
0575 skcipher_inst->alg.decrypt = essiv_skcipher_decrypt;
0576 skcipher_inst->alg.init = essiv_skcipher_init_tfm;
0577 skcipher_inst->alg.exit = essiv_skcipher_exit_tfm;
0578
0579 skcipher_inst->alg.min_keysize = crypto_skcipher_alg_min_keysize(skcipher_alg);
0580 skcipher_inst->alg.max_keysize = crypto_skcipher_alg_max_keysize(skcipher_alg);
0581 skcipher_inst->alg.ivsize = ivsize;
0582 skcipher_inst->alg.chunksize = crypto_skcipher_alg_chunksize(skcipher_alg);
0583 skcipher_inst->alg.walksize = crypto_skcipher_alg_walksize(skcipher_alg);
0584
0585 skcipher_inst->free = essiv_skcipher_free_instance;
0586
0587 err = skcipher_register_instance(tmpl, skcipher_inst);
0588 } else {
0589 aead_inst->alg.setkey = essiv_aead_setkey;
0590 aead_inst->alg.setauthsize = essiv_aead_setauthsize;
0591 aead_inst->alg.encrypt = essiv_aead_encrypt;
0592 aead_inst->alg.decrypt = essiv_aead_decrypt;
0593 aead_inst->alg.init = essiv_aead_init_tfm;
0594 aead_inst->alg.exit = essiv_aead_exit_tfm;
0595
0596 aead_inst->alg.ivsize = ivsize;
0597 aead_inst->alg.maxauthsize = crypto_aead_alg_maxauthsize(aead_alg);
0598 aead_inst->alg.chunksize = crypto_aead_alg_chunksize(aead_alg);
0599
0600 aead_inst->free = essiv_aead_free_instance;
0601
0602 err = aead_register_instance(tmpl, aead_inst);
0603 }
0604
0605 if (err)
0606 goto out_free_hash;
0607
0608 crypto_mod_put(_hash_alg);
0609 return 0;
0610
0611 out_free_hash:
0612 crypto_mod_put(_hash_alg);
0613 out_drop_skcipher:
0614 if (type == CRYPTO_ALG_TYPE_SKCIPHER)
0615 crypto_drop_skcipher(&ictx->u.skcipher_spawn);
0616 else
0617 crypto_drop_aead(&ictx->u.aead_spawn);
0618 out_free_inst:
0619 kfree(skcipher_inst);
0620 kfree(aead_inst);
0621 return err;
0622 }
0623
0624
/* The "essiv" template, instantiated by name as essiv(inner, hash). */
static struct crypto_template essiv_tmpl = {
	.name	= "essiv",
	.create	= essiv_create,
	.module	= THIS_MODULE,
};
0630
/* Register the essiv template with the crypto API at module load. */
static int __init essiv_module_init(void)
{
	return crypto_register_template(&essiv_tmpl);
}
0635
/* Unregister the essiv template at module unload. */
static void __exit essiv_module_exit(void)
{
	crypto_unregister_template(&essiv_tmpl);
}
0640
/*
 * Registered via subsys_initcall rather than module_init so the template
 * is available early in boot — presumably for users such as dm-crypt root
 * volumes; confirm against the original commit message.
 */
subsys_initcall(essiv_module_init);
module_exit(essiv_module_exit);

MODULE_DESCRIPTION("ESSIV skcipher/aead wrapper for block encryption");
MODULE_LICENSE("GPL v2");
MODULE_ALIAS_CRYPTO("essiv");
MODULE_IMPORT_NS(CRYPTO_INTERNAL);