// SPDX-License-Identifier: GPL-2.0
/*
 * HCTR2 length-preserving encryption mode
 *
 * Copyright 2021 Google LLC
 */


/*
 * HCTR2 is a length-preserving encryption mode that is efficient on
 * processors with instructions to accelerate AES and carryless
 * multiplication, e.g. x86 processors with AES-NI and CLMUL, and ARM
 * processors with the ARMv8 crypto extensions.
 *
 * For more details, see the paper: "Length-preserving encryption with HCTR2"
 * (https://eprint.iacr.org/2021/1441.pdf)
 */
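
/*
 * Illustrative usage sketch (not part of this file): callers typically
 * allocate an instance such as "hctr2(aes)" through the skcipher API and
 * pass the 32-byte tweak via the IV field, roughly:
 *
 *	tfm = crypto_alloc_skcipher("hctr2(aes)", 0, 0);
 *	crypto_skcipher_setkey(tfm, key, keylen);
 *	skcipher_request_set_crypt(req, src, dst, len, tweak);
 *	crypto_skcipher_encrypt(req);
 *
 * Request allocation and error handling are omitted; messages must be at
 * least one 16-byte block long.
 */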

#include <crypto/internal/cipher.h>
#include <crypto/internal/hash.h>
#include <crypto/internal/skcipher.h>
#include <crypto/polyval.h>
#include <crypto/scatterwalk.h>
#include <linux/module.h>

#define BLOCKCIPHER_BLOCK_SIZE      16

/*
 * The specification allows variable-length tweaks, but Linux's crypto API
 * currently only allows algorithms to support a single length.  The "natural"
 * tweak length for HCTR2 is 16, since that fits into one POLYVAL block for
 * the best performance.  But longer tweaks are useful for fscrypt, to avoid
 * needing to derive per-file keys.  So instead we use two blocks, or 32 bytes.
 */
#define TWEAK_SIZE      32

struct hctr2_instance_ctx {
    struct crypto_cipher_spawn blockcipher_spawn;
    struct crypto_skcipher_spawn xctr_spawn;
    struct crypto_shash_spawn polyval_spawn;
};

struct hctr2_tfm_ctx {
    struct crypto_cipher *blockcipher;
    struct crypto_skcipher *xctr;
    struct crypto_shash *polyval;
    u8 L[BLOCKCIPHER_BLOCK_SIZE];
    int hashed_tweak_offset;
    /*
     * This struct is allocated with extra space for two exported hash
     * states.  Since the hash state size is not known at compile-time, we
     * can't add these to the struct directly.
     *
     * hashed_tweaklen_divisible;
     * hashed_tweaklen_remainder;
     */
};

struct hctr2_request_ctx {
    u8 first_block[BLOCKCIPHER_BLOCK_SIZE];
    u8 xctr_iv[BLOCKCIPHER_BLOCK_SIZE];
    struct scatterlist *bulk_part_dst;
    struct scatterlist *bulk_part_src;
    struct scatterlist sg_src[2];
    struct scatterlist sg_dst[2];
    /*
     * Sub-request sizes are unknown at compile-time, so they need to go
     * after the members with known sizes.
     */
    union {
        struct shash_desc hash_desc;
        struct skcipher_request xctr_req;
    } u;
    /*
     * This struct is allocated with extra space for one exported hash
     * state.  Since the hash state size is not known at compile-time, we
     * can't add it to the struct directly.
     *
     * hashed_tweak;
     */
};

static inline u8 *hctr2_hashed_tweaklen(const struct hctr2_tfm_ctx *tctx,
                    bool has_remainder)
{
    u8 *p = (u8 *)tctx + sizeof(*tctx);

    if (has_remainder) /* For messages not a multiple of block length */
        p += crypto_shash_statesize(tctx->polyval);
    return p;
}

static inline u8 *hctr2_hashed_tweak(const struct hctr2_tfm_ctx *tctx,
                     struct hctr2_request_ctx *rctx)
{
    return (u8 *)rctx + tctx->hashed_tweak_offset;
}

/*
 * The input data for each HCTR2 hash step begins with a 16-byte block that
 * contains the tweak length and a flag indicating whether the input is evenly
 * divisible into blocks.  Since this implementation only supports one tweak
 * length, we precompute the two hash states resulting from hashing the two
 * possible values of this initial block.  This reduces by one block the
 * amount of data that needs to be hashed for each encryption/decryption.
 *
 * These precomputed hashes are stored in hctr2_tfm_ctx.
 */
static int hctr2_hash_tweaklen(struct hctr2_tfm_ctx *tctx, bool has_remainder)
{
    SHASH_DESC_ON_STACK(shash, tctx->polyval);
    __le64 tweak_length_block[2];
    int err;

    shash->tfm = tctx->polyval;
    memset(tweak_length_block, 0, sizeof(tweak_length_block));

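    /*
     * The length block encodes TWEAK_SIZE * 8 * 2 + 2, plus 1 if the message
     * has a partial final block (per the HCTR2 spec, twice the tweak length
     * in bits, plus 2 or 3).  With TWEAK_SIZE == 32 this is 514 or 515.
     */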
    tweak_length_block[0] = cpu_to_le64(TWEAK_SIZE * 8 * 2 + 2 + has_remainder);
    err = crypto_shash_init(shash);
    if (err)
        return err;
    err = crypto_shash_update(shash, (u8 *)tweak_length_block,
                  POLYVAL_BLOCK_SIZE);
    if (err)
        return err;
    return crypto_shash_export(shash, hctr2_hashed_tweaklen(tctx, has_remainder));
}

static int hctr2_setkey(struct crypto_skcipher *tfm, const u8 *key,
            unsigned int keylen)
{
    struct hctr2_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);
    u8 hbar[BLOCKCIPHER_BLOCK_SIZE];
    int err;

    crypto_cipher_clear_flags(tctx->blockcipher, CRYPTO_TFM_REQ_MASK);
    crypto_cipher_set_flags(tctx->blockcipher,
                crypto_skcipher_get_flags(tfm) &
                CRYPTO_TFM_REQ_MASK);
    err = crypto_cipher_setkey(tctx->blockcipher, key, keylen);
    if (err)
        return err;

    crypto_skcipher_clear_flags(tctx->xctr, CRYPTO_TFM_REQ_MASK);
    crypto_skcipher_set_flags(tctx->xctr,
                  crypto_skcipher_get_flags(tfm) &
                  CRYPTO_TFM_REQ_MASK);
    err = crypto_skcipher_setkey(tctx->xctr, key, keylen);
    if (err)
        return err;

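    /*
     * Derive the POLYVAL hash key h = E_K(bin(0)) and the constant
     * L = E_K(bin(1)) used when computing S, as in the HCTR2 specification.
     */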
    memset(hbar, 0, sizeof(hbar));
    crypto_cipher_encrypt_one(tctx->blockcipher, hbar, hbar);

    memset(tctx->L, 0, sizeof(tctx->L));
    tctx->L[0] = 0x01;
    crypto_cipher_encrypt_one(tctx->blockcipher, tctx->L, tctx->L);

    crypto_shash_clear_flags(tctx->polyval, CRYPTO_TFM_REQ_MASK);
    crypto_shash_set_flags(tctx->polyval, crypto_skcipher_get_flags(tfm) &
                   CRYPTO_TFM_REQ_MASK);
    err = crypto_shash_setkey(tctx->polyval, hbar, BLOCKCIPHER_BLOCK_SIZE);
    if (err)
        return err;
    memzero_explicit(hbar, sizeof(hbar));

    return hctr2_hash_tweaklen(tctx, true) ?: hctr2_hash_tweaklen(tctx, false);
}

static int hctr2_hash_tweak(struct skcipher_request *req)
{
    struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
    const struct hctr2_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);
    struct hctr2_request_ctx *rctx = skcipher_request_ctx(req);
    struct shash_desc *hash_desc = &rctx->u.hash_desc;
    int err;
    bool has_remainder = req->cryptlen % POLYVAL_BLOCK_SIZE;

    hash_desc->tfm = tctx->polyval;
    err = crypto_shash_import(hash_desc, hctr2_hashed_tweaklen(tctx, has_remainder));
    if (err)
        return err;
    err = crypto_shash_update(hash_desc, req->iv, TWEAK_SIZE);
    if (err)
        return err;

    // Store the hashed tweak, since we need it when computing both
    // H(T || N) and H(T || V).
    return crypto_shash_export(hash_desc, hctr2_hashed_tweak(tctx, rctx));
}

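/*
 * Hash the bulk part of the message (N on encryption, V on decryption) from a
 * scatterlist into the POLYVAL state that already contains the hashed tweak.
 * A partial final block is padded with a 0x01 byte followed by zeroes before
 * the digest is produced.
 */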
static int hctr2_hash_message(struct skcipher_request *req,
                  struct scatterlist *sgl,
                  u8 digest[POLYVAL_DIGEST_SIZE])
{
    static const u8 padding[BLOCKCIPHER_BLOCK_SIZE] = { 0x1 };
    struct hctr2_request_ctx *rctx = skcipher_request_ctx(req);
    struct shash_desc *hash_desc = &rctx->u.hash_desc;
    const unsigned int bulk_len = req->cryptlen - BLOCKCIPHER_BLOCK_SIZE;
    struct sg_mapping_iter miter;
    unsigned int remainder = bulk_len % BLOCKCIPHER_BLOCK_SIZE;
    int i;
    int err = 0;
    int n = 0;

    sg_miter_start(&miter, sgl, sg_nents(sgl),
               SG_MITER_FROM_SG | SG_MITER_ATOMIC);
    for (i = 0; i < bulk_len; i += n) {
        sg_miter_next(&miter);
        n = min_t(unsigned int, miter.length, bulk_len - i);
        err = crypto_shash_update(hash_desc, miter.addr, n);
        if (err)
            break;
    }
    sg_miter_stop(&miter);

    if (err)
        return err;

    if (remainder) {
        err = crypto_shash_update(hash_desc, padding,
                      BLOCKCIPHER_BLOCK_SIZE - remainder);
        if (err)
            return err;
    }
    return crypto_shash_final(hash_desc, digest);
}

static int hctr2_finish(struct skcipher_request *req)
{
    struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
    const struct hctr2_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);
    struct hctr2_request_ctx *rctx = skcipher_request_ctx(req);
    u8 digest[POLYVAL_DIGEST_SIZE];
    struct shash_desc *hash_desc = &rctx->u.hash_desc;
    int err;

    // U = UU ^ H(T || V)
    // or M = MM ^ H(T || N)
    hash_desc->tfm = tctx->polyval;
    err = crypto_shash_import(hash_desc, hctr2_hashed_tweak(tctx, rctx));
    if (err)
        return err;
    err = hctr2_hash_message(req, rctx->bulk_part_dst, digest);
    if (err)
        return err;
    crypto_xor(rctx->first_block, digest, BLOCKCIPHER_BLOCK_SIZE);

    // Copy U (or M) into dst scatterlist
    scatterwalk_map_and_copy(rctx->first_block, req->dst,
                 0, BLOCKCIPHER_BLOCK_SIZE, 1);
    return 0;
}

static void hctr2_xctr_done(struct crypto_async_request *areq,
                    int err)
{
    struct skcipher_request *req = areq->data;

    if (!err)
        err = hctr2_finish(req);

    skcipher_request_complete(req, err);
}

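/*
 * Core HCTR2 transform.  For encryption: split the message into one block M
 * and the bulk N, then compute MM = M ^ H(T || N), UU = E(MM),
 * S = MM ^ UU ^ L, V = XCTR(S, N) and U = UU ^ H(T || V), producing U || V.
 * Decryption runs the same steps with the block cipher inverted and the roles
 * of (M, N) and (U, V) swapped, as noted in the inline comments below.
 */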
static int hctr2_crypt(struct skcipher_request *req, bool enc)
{
    struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
    const struct hctr2_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);
    struct hctr2_request_ctx *rctx = skcipher_request_ctx(req);
    u8 digest[POLYVAL_DIGEST_SIZE];
    int bulk_len = req->cryptlen - BLOCKCIPHER_BLOCK_SIZE;
    int err;

    // Requests must be at least one block
    if (req->cryptlen < BLOCKCIPHER_BLOCK_SIZE)
        return -EINVAL;

    // Copy M (or U) into a temporary buffer
    scatterwalk_map_and_copy(rctx->first_block, req->src,
                 0, BLOCKCIPHER_BLOCK_SIZE, 0);

    // Create scatterlists for N and V
    rctx->bulk_part_src = scatterwalk_ffwd(rctx->sg_src, req->src,
                           BLOCKCIPHER_BLOCK_SIZE);
    rctx->bulk_part_dst = scatterwalk_ffwd(rctx->sg_dst, req->dst,
                           BLOCKCIPHER_BLOCK_SIZE);

    // MM = M ^ H(T || N)
    // or UU = U ^ H(T || V)
    err = hctr2_hash_tweak(req);
    if (err)
        return err;
    err = hctr2_hash_message(req, rctx->bulk_part_src, digest);
    if (err)
        return err;
    crypto_xor(digest, rctx->first_block, BLOCKCIPHER_BLOCK_SIZE);

    // UU = E(MM)
    // or MM = D(UU)
    if (enc)
        crypto_cipher_encrypt_one(tctx->blockcipher, rctx->first_block,
                      digest);
    else
        crypto_cipher_decrypt_one(tctx->blockcipher, rctx->first_block,
                      digest);

    // S = MM ^ UU ^ L
    crypto_xor(digest, rctx->first_block, BLOCKCIPHER_BLOCK_SIZE);
    crypto_xor_cpy(rctx->xctr_iv, digest, tctx->L, BLOCKCIPHER_BLOCK_SIZE);

    // V = XCTR(S, N)
    // or N = XCTR(S, V)
    skcipher_request_set_tfm(&rctx->u.xctr_req, tctx->xctr);
    skcipher_request_set_crypt(&rctx->u.xctr_req, rctx->bulk_part_src,
                   rctx->bulk_part_dst, bulk_len,
                   rctx->xctr_iv);
    skcipher_request_set_callback(&rctx->u.xctr_req,
                      req->base.flags,
                      hctr2_xctr_done, req);
    return crypto_skcipher_encrypt(&rctx->u.xctr_req) ?:
        hctr2_finish(req);
}

static int hctr2_encrypt(struct skcipher_request *req)
{
    return hctr2_crypt(req, true);
}

static int hctr2_decrypt(struct skcipher_request *req)
{
    return hctr2_crypt(req, false);
}

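/*
 * Instantiate the three sub-transforms (xctr, raw block cipher, polyval) and
 * size the request context: the union of the hash descriptor and the XCTR
 * sub-request is followed by one exported hash state for the hashed tweak.
 */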
static int hctr2_init_tfm(struct crypto_skcipher *tfm)
{
    struct skcipher_instance *inst = skcipher_alg_instance(tfm);
    struct hctr2_instance_ctx *ictx = skcipher_instance_ctx(inst);
    struct hctr2_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);
    struct crypto_skcipher *xctr;
    struct crypto_cipher *blockcipher;
    struct crypto_shash *polyval;
    unsigned int subreq_size;
    int err;

    xctr = crypto_spawn_skcipher(&ictx->xctr_spawn);
    if (IS_ERR(xctr))
        return PTR_ERR(xctr);

    blockcipher = crypto_spawn_cipher(&ictx->blockcipher_spawn);
    if (IS_ERR(blockcipher)) {
        err = PTR_ERR(blockcipher);
        goto err_free_xctr;
    }

    polyval = crypto_spawn_shash(&ictx->polyval_spawn);
    if (IS_ERR(polyval)) {
        err = PTR_ERR(polyval);
        goto err_free_blockcipher;
    }

    tctx->xctr = xctr;
    tctx->blockcipher = blockcipher;
    tctx->polyval = polyval;

    BUILD_BUG_ON(offsetofend(struct hctr2_request_ctx, u) !=
                 sizeof(struct hctr2_request_ctx));
    subreq_size = max(sizeof_field(struct hctr2_request_ctx, u.hash_desc) +
              crypto_shash_descsize(polyval),
              sizeof_field(struct hctr2_request_ctx, u.xctr_req) +
              crypto_skcipher_reqsize(xctr));

    tctx->hashed_tweak_offset = offsetof(struct hctr2_request_ctx, u) +
                    subreq_size;
    crypto_skcipher_set_reqsize(tfm, tctx->hashed_tweak_offset +
                    crypto_shash_statesize(polyval));
    return 0;

err_free_blockcipher:
    crypto_free_cipher(blockcipher);
err_free_xctr:
    crypto_free_skcipher(xctr);
    return err;
}

static void hctr2_exit_tfm(struct crypto_skcipher *tfm)
{
    struct hctr2_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);

    crypto_free_cipher(tctx->blockcipher);
    crypto_free_skcipher(tctx->xctr);
    crypto_free_shash(tctx->polyval);
}

static void hctr2_free_instance(struct skcipher_instance *inst)
{
    struct hctr2_instance_ctx *ictx = skcipher_instance_ctx(inst);

    crypto_drop_cipher(&ictx->blockcipher_spawn);
    crypto_drop_skcipher(&ictx->xctr_spawn);
    crypto_drop_shash(&ictx->polyval_spawn);
    kfree(inst);
}

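/*
 * Build an hctr2 instance from an xctr implementation and a polyval
 * implementation.  The raw block cipher is derived from the xctr algorithm's
 * name, which must have the form "xctr(<blockcipher>)", and the block cipher
 * must have a 16-byte block size.
 */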
static int hctr2_create_common(struct crypto_template *tmpl,
                   struct rtattr **tb,
                   const char *xctr_name,
                   const char *polyval_name)
{
    u32 mask;
    struct skcipher_instance *inst;
    struct hctr2_instance_ctx *ictx;
    struct skcipher_alg *xctr_alg;
    struct crypto_alg *blockcipher_alg;
    struct shash_alg *polyval_alg;
    char blockcipher_name[CRYPTO_MAX_ALG_NAME];
    int len;
    int err;

    err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_SKCIPHER, &mask);
    if (err)
        return err;

    inst = kzalloc(sizeof(*inst) + sizeof(*ictx), GFP_KERNEL);
    if (!inst)
        return -ENOMEM;
    ictx = skcipher_instance_ctx(inst);

    /* Stream cipher, xctr(block_cipher) */
    err = crypto_grab_skcipher(&ictx->xctr_spawn,
                   skcipher_crypto_instance(inst),
                   xctr_name, 0, mask);
    if (err)
        goto err_free_inst;
    xctr_alg = crypto_spawn_skcipher_alg(&ictx->xctr_spawn);

    err = -EINVAL;
    if (strncmp(xctr_alg->base.cra_name, "xctr(", 5))
        goto err_free_inst;
    len = strscpy(blockcipher_name, xctr_alg->base.cra_name + 5,
              sizeof(blockcipher_name));
    if (len < 1)
        goto err_free_inst;
    if (blockcipher_name[len - 1] != ')')
        goto err_free_inst;
    blockcipher_name[len - 1] = 0;

    /* Block cipher, e.g. "aes" */
    err = crypto_grab_cipher(&ictx->blockcipher_spawn,
                 skcipher_crypto_instance(inst),
                 blockcipher_name, 0, mask);
    if (err)
        goto err_free_inst;
    blockcipher_alg = crypto_spawn_cipher_alg(&ictx->blockcipher_spawn);

    /* Require blocksize of 16 bytes */
    err = -EINVAL;
    if (blockcipher_alg->cra_blocksize != BLOCKCIPHER_BLOCK_SIZE)
        goto err_free_inst;

    /* Polyval ε-∆U hash function */
    err = crypto_grab_shash(&ictx->polyval_spawn,
                skcipher_crypto_instance(inst),
                polyval_name, 0, mask);
    if (err)
        goto err_free_inst;
    polyval_alg = crypto_spawn_shash_alg(&ictx->polyval_spawn);

    /* Ensure Polyval is being used */
    err = -EINVAL;
    if (strcmp(polyval_alg->base.cra_name, "polyval") != 0)
        goto err_free_inst;

    /* Instance fields */

    err = -ENAMETOOLONG;
    if (snprintf(inst->alg.base.cra_name, CRYPTO_MAX_ALG_NAME, "hctr2(%s)",
             blockcipher_alg->cra_name) >= CRYPTO_MAX_ALG_NAME)
        goto err_free_inst;
    if (snprintf(inst->alg.base.cra_driver_name, CRYPTO_MAX_ALG_NAME,
             "hctr2_base(%s,%s)",
             xctr_alg->base.cra_driver_name,
             polyval_alg->base.cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
        goto err_free_inst;

    inst->alg.base.cra_blocksize = BLOCKCIPHER_BLOCK_SIZE;
    inst->alg.base.cra_ctxsize = sizeof(struct hctr2_tfm_ctx) +
                     polyval_alg->statesize * 2;
    inst->alg.base.cra_alignmask = xctr_alg->base.cra_alignmask |
                       polyval_alg->base.cra_alignmask;
    /*
     * The hash function is called twice, so it is weighted higher than the
     * xctr and blockcipher.
     */
    inst->alg.base.cra_priority = (2 * xctr_alg->base.cra_priority +
                       4 * polyval_alg->base.cra_priority +
                       blockcipher_alg->cra_priority) / 7;

    inst->alg.setkey = hctr2_setkey;
    inst->alg.encrypt = hctr2_encrypt;
    inst->alg.decrypt = hctr2_decrypt;
    inst->alg.init = hctr2_init_tfm;
    inst->alg.exit = hctr2_exit_tfm;
    inst->alg.min_keysize = crypto_skcipher_alg_min_keysize(xctr_alg);
    inst->alg.max_keysize = crypto_skcipher_alg_max_keysize(xctr_alg);
    inst->alg.ivsize = TWEAK_SIZE;

    inst->free = hctr2_free_instance;

    err = skcipher_register_instance(tmpl, inst);
    if (err) {
err_free_inst:
        hctr2_free_instance(inst);
    }
    return err;
}

static int hctr2_create_base(struct crypto_template *tmpl, struct rtattr **tb)
{
    const char *xctr_name;
    const char *polyval_name;

    xctr_name = crypto_attr_alg_name(tb[1]);
    if (IS_ERR(xctr_name))
        return PTR_ERR(xctr_name);

    polyval_name = crypto_attr_alg_name(tb[2]);
    if (IS_ERR(polyval_name))
        return PTR_ERR(polyval_name);

    return hctr2_create_common(tmpl, tb, xctr_name, polyval_name);
}

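/* "hctr2(X)" is shorthand for "hctr2_base(xctr(X), polyval)". */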
static int hctr2_create(struct crypto_template *tmpl, struct rtattr **tb)
{
    const char *blockcipher_name;
    char xctr_name[CRYPTO_MAX_ALG_NAME];

    blockcipher_name = crypto_attr_alg_name(tb[1]);
    if (IS_ERR(blockcipher_name))
        return PTR_ERR(blockcipher_name);

    if (snprintf(xctr_name, CRYPTO_MAX_ALG_NAME, "xctr(%s)",
            blockcipher_name) >= CRYPTO_MAX_ALG_NAME)
        return -ENAMETOOLONG;

    return hctr2_create_common(tmpl, tb, xctr_name, "polyval");
}

static struct crypto_template hctr2_tmpls[] = {
    {
        /* hctr2_base(xctr_name, polyval_name) */
        .name = "hctr2_base",
        .create = hctr2_create_base,
        .module = THIS_MODULE,
    }, {
        /* hctr2(blockcipher_name) */
        .name = "hctr2",
        .create = hctr2_create,
        .module = THIS_MODULE,
    }
};

static int __init hctr2_module_init(void)
{
    return crypto_register_templates(hctr2_tmpls, ARRAY_SIZE(hctr2_tmpls));
}

static void __exit hctr2_module_exit(void)
{
    return crypto_unregister_templates(hctr2_tmpls,
                       ARRAY_SIZE(hctr2_tmpls));
}

subsys_initcall(hctr2_module_init);
module_exit(hctr2_module_exit);

MODULE_DESCRIPTION("HCTR2 length-preserving encryption mode");
MODULE_LICENSE("GPL v2");
MODULE_ALIAS_CRYPTO("hctr2");
MODULE_IMPORT_NS(CRYPTO_INTERNAL);