Back to home page

OSCL-LXR

 
 

    


0001 // SPDX-License-Identifier: GPL-2.0-or-later
0002 /*
0003  * The AEGIS-128 Authenticated-Encryption Algorithm
0004  *
0005  * Copyright (c) 2017-2018 Ondrej Mosnacek <omosnacek@gmail.com>
0006  * Copyright (C) 2017-2018 Red Hat, Inc. All rights reserved.
0007  */
0008 
0009 #include <crypto/algapi.h>
0010 #include <crypto/internal/aead.h>
0011 #include <crypto/internal/simd.h>
0012 #include <crypto/internal/skcipher.h>
0013 #include <crypto/scatterwalk.h>
0014 #include <linux/err.h>
0015 #include <linux/init.h>
0016 #include <linux/jump_label.h>
0017 #include <linux/kernel.h>
0018 #include <linux/module.h>
0019 #include <linux/scatterlist.h>
0020 
0021 #include <asm/simd.h>
0022 
0023 #include "aegis.h"
0024 
/* AEGIS-128 algorithm parameters (all sizes in bytes). */
#define AEGIS128_NONCE_SIZE 16
#define AEGIS128_STATE_BLOCKS 5	/* state is five 16-byte AES blocks */
#define AEGIS128_KEY_SIZE 16
#define AEGIS128_MIN_AUTH_SIZE 8	/* smallest tag accepted by setauthsize */
#define AEGIS128_MAX_AUTH_SIZE 16	/* full 128-bit tag */
0030 
/* The 80-byte AEGIS-128 cipher state: blocks S0..S4. */
struct aegis_state {
	union aegis_block blocks[AEGIS128_STATE_BLOCKS];
};
0034 
/* Per-transform context: only the 128-bit key installed by ->setkey(). */
struct aegis_ctx {
	union aegis_block key;
};
0038 
/* Enabled once at module init when a usable SIMD implementation is detected. */
static __ro_after_init DEFINE_STATIC_KEY_FALSE(have_simd);
0040 
/*
 * The two 128-bit constants from the AEGIS specification (the byte
 * sequence is the Fibonacci sequence mod 256: 00 01 01 02 03 05 08 0d ...),
 * stored as little-endian 64-bit words.
 */
static const union aegis_block crypto_aegis_const[2] = {
	{ .words64 = {
		cpu_to_le64(U64_C(0x0d08050302010100)),
		cpu_to_le64(U64_C(0x6279e99059372215)),
	} },
	{ .words64 = {
		cpu_to_le64(U64_C(0xf12fc26d55183ddb)),
		cpu_to_le64(U64_C(0xdd28b57342311120)),
	} },
};
0051 
0052 static bool aegis128_do_simd(void)
0053 {
0054 #ifdef CONFIG_CRYPTO_AEGIS128_SIMD
0055     if (static_branch_likely(&have_simd))
0056         return crypto_simd_usable();
0057 #endif
0058     return false;
0059 }
0060 
0061 static void crypto_aegis128_update(struct aegis_state *state)
0062 {
0063     union aegis_block tmp;
0064     unsigned int i;
0065 
0066     tmp = state->blocks[AEGIS128_STATE_BLOCKS - 1];
0067     for (i = AEGIS128_STATE_BLOCKS - 1; i > 0; i--)
0068         crypto_aegis_aesenc(&state->blocks[i], &state->blocks[i - 1],
0069                     &state->blocks[i]);
0070     crypto_aegis_aesenc(&state->blocks[0], &tmp, &state->blocks[0]);
0071 }
0072 
0073 static void crypto_aegis128_update_a(struct aegis_state *state,
0074                      const union aegis_block *msg,
0075                      bool do_simd)
0076 {
0077     if (IS_ENABLED(CONFIG_CRYPTO_AEGIS128_SIMD) && do_simd) {
0078         crypto_aegis128_update_simd(state, msg);
0079         return;
0080     }
0081 
0082     crypto_aegis128_update(state);
0083     crypto_aegis_block_xor(&state->blocks[0], msg);
0084 }
0085 
0086 static void crypto_aegis128_update_u(struct aegis_state *state, const void *msg,
0087                      bool do_simd)
0088 {
0089     if (IS_ENABLED(CONFIG_CRYPTO_AEGIS128_SIMD) && do_simd) {
0090         crypto_aegis128_update_simd(state, msg);
0091         return;
0092     }
0093 
0094     crypto_aegis128_update(state);
0095     crypto_xor(state->blocks[0].bytes, msg, AEGIS_BLOCK_SIZE);
0096 }
0097 
/*
 * Initialize the five-block state from the key and the 16-byte nonce
 * (@iv), then run the initialization schedule: five loop iterations,
 * each absorbing the key and then key^nonce (ten update rounds total).
 */
static void crypto_aegis128_init(struct aegis_state *state,
				 const union aegis_block *key,
				 const u8 *iv)
{
	union aegis_block key_iv;
	unsigned int i;

	key_iv = *key;
	crypto_xor(key_iv.bytes, iv, AEGIS_BLOCK_SIZE);

	/* S0 = key ^ nonce, S1/S2 = spec constants, S3/S4 = key ^ constants */
	state->blocks[0] = key_iv;
	state->blocks[1] = crypto_aegis_const[1];
	state->blocks[2] = crypto_aegis_const[0];
	state->blocks[3] = *key;
	state->blocks[4] = *key;

	crypto_aegis_block_xor(&state->blocks[3], &crypto_aegis_const[0]);
	crypto_aegis_block_xor(&state->blocks[4], &crypto_aegis_const[1]);

	for (i = 0; i < 5; i++) {
		crypto_aegis128_update_a(state, key, false);
		crypto_aegis128_update_a(state, &key_iv, false);
	}
}
0122 
0123 static void crypto_aegis128_ad(struct aegis_state *state,
0124                    const u8 *src, unsigned int size,
0125                    bool do_simd)
0126 {
0127     if (AEGIS_ALIGNED(src)) {
0128         const union aegis_block *src_blk =
0129                 (const union aegis_block *)src;
0130 
0131         while (size >= AEGIS_BLOCK_SIZE) {
0132             crypto_aegis128_update_a(state, src_blk, do_simd);
0133 
0134             size -= AEGIS_BLOCK_SIZE;
0135             src_blk++;
0136         }
0137     } else {
0138         while (size >= AEGIS_BLOCK_SIZE) {
0139             crypto_aegis128_update_u(state, src, do_simd);
0140 
0141             size -= AEGIS_BLOCK_SIZE;
0142             src += AEGIS_BLOCK_SIZE;
0143         }
0144     }
0145 }
0146 
/*
 * "Crypt" callback used after a failed tag check: overwrites the
 * already-written plaintext with zeros so it is never disclosed.
 * @state and @src are unused; the signature only exists to match the
 * callback type taken by crypto_aegis128_process_crypt().
 */
static void crypto_aegis128_wipe_chunk(struct aegis_state *state, u8 *dst,
				       const u8 *src, unsigned int size)
{
	memzero_explicit(dst, size);
}
0152 
/*
 * Encrypt a contiguous chunk with the scalar code.  Per 16-byte block:
 * keystream = S1 ^ S4 ^ (S2 & S3), ciphertext = plaintext ^ keystream,
 * and the *plaintext* block is absorbed into the state.  The aligned
 * fast path copies whole aegis_block unions; the generic path uses
 * crypto_xor()/memcpy().  A trailing partial block is zero-padded
 * before absorption and only @size bytes of its output are stored.
 */
static void crypto_aegis128_encrypt_chunk(struct aegis_state *state, u8 *dst,
					  const u8 *src, unsigned int size)
{
	union aegis_block tmp;

	if (AEGIS_ALIGNED(src) && AEGIS_ALIGNED(dst)) {
		while (size >= AEGIS_BLOCK_SIZE) {
			union aegis_block *dst_blk =
					(union aegis_block *)dst;
			const union aegis_block *src_blk =
					(const union aegis_block *)src;

			/* tmp = keystream ^ plaintext = ciphertext */
			tmp = state->blocks[2];
			crypto_aegis_block_and(&tmp, &state->blocks[3]);
			crypto_aegis_block_xor(&tmp, &state->blocks[4]);
			crypto_aegis_block_xor(&tmp, &state->blocks[1]);
			crypto_aegis_block_xor(&tmp, src_blk);

			/* absorb the plaintext block before moving on */
			crypto_aegis128_update_a(state, src_blk, false);

			*dst_blk = tmp;

			size -= AEGIS_BLOCK_SIZE;
			src += AEGIS_BLOCK_SIZE;
			dst += AEGIS_BLOCK_SIZE;
		}
	} else {
		while (size >= AEGIS_BLOCK_SIZE) {
			tmp = state->blocks[2];
			crypto_aegis_block_and(&tmp, &state->blocks[3]);
			crypto_aegis_block_xor(&tmp, &state->blocks[4]);
			crypto_aegis_block_xor(&tmp, &state->blocks[1]);
			crypto_xor(tmp.bytes, src, AEGIS_BLOCK_SIZE);

			crypto_aegis128_update_u(state, src, false);

			memcpy(dst, tmp.bytes, AEGIS_BLOCK_SIZE);

			size -= AEGIS_BLOCK_SIZE;
			src += AEGIS_BLOCK_SIZE;
			dst += AEGIS_BLOCK_SIZE;
		}
	}

	if (size > 0) {
		/* partial tail: zero-pad the plaintext to a full block */
		union aegis_block msg = {};
		memcpy(msg.bytes, src, size);

		/* keystream only; plaintext is XORed in below */
		tmp = state->blocks[2];
		crypto_aegis_block_and(&tmp, &state->blocks[3]);
		crypto_aegis_block_xor(&tmp, &state->blocks[4]);
		crypto_aegis_block_xor(&tmp, &state->blocks[1]);

		crypto_aegis128_update_a(state, &msg, false);

		crypto_aegis_block_xor(&msg, &tmp);

		/* emit only the bytes that were actually present */
		memcpy(dst, msg.bytes, size);
	}
}
0213 
/*
 * Decrypt a contiguous chunk with the scalar code.  Mirrors
 * crypto_aegis128_encrypt_chunk(): keystream = S1 ^ S4 ^ (S2 & S3),
 * plaintext = ciphertext ^ keystream — but here it is the recovered
 * *plaintext* (tmp/msg) that gets absorbed into the state.  For a
 * trailing partial block the decrypted bytes beyond @size are zeroed
 * before absorption, so the state matches what encryption of the same
 * (zero-padded) partial block would have produced.
 */
static void crypto_aegis128_decrypt_chunk(struct aegis_state *state, u8 *dst,
					  const u8 *src, unsigned int size)
{
	union aegis_block tmp;

	if (AEGIS_ALIGNED(src) && AEGIS_ALIGNED(dst)) {
		while (size >= AEGIS_BLOCK_SIZE) {
			union aegis_block *dst_blk =
					(union aegis_block *)dst;
			const union aegis_block *src_blk =
					(const union aegis_block *)src;

			/* tmp = keystream ^ ciphertext = plaintext */
			tmp = state->blocks[2];
			crypto_aegis_block_and(&tmp, &state->blocks[3]);
			crypto_aegis_block_xor(&tmp, &state->blocks[4]);
			crypto_aegis_block_xor(&tmp, &state->blocks[1]);
			crypto_aegis_block_xor(&tmp, src_blk);

			/* absorb the recovered plaintext */
			crypto_aegis128_update_a(state, &tmp, false);

			*dst_blk = tmp;

			size -= AEGIS_BLOCK_SIZE;
			src += AEGIS_BLOCK_SIZE;
			dst += AEGIS_BLOCK_SIZE;
		}
	} else {
		while (size >= AEGIS_BLOCK_SIZE) {
			tmp = state->blocks[2];
			crypto_aegis_block_and(&tmp, &state->blocks[3]);
			crypto_aegis_block_xor(&tmp, &state->blocks[4]);
			crypto_aegis_block_xor(&tmp, &state->blocks[1]);
			crypto_xor(tmp.bytes, src, AEGIS_BLOCK_SIZE);

			crypto_aegis128_update_a(state, &tmp, false);

			memcpy(dst, tmp.bytes, AEGIS_BLOCK_SIZE);

			size -= AEGIS_BLOCK_SIZE;
			src += AEGIS_BLOCK_SIZE;
			dst += AEGIS_BLOCK_SIZE;
		}
	}

	if (size > 0) {
		union aegis_block msg = {};
		memcpy(msg.bytes, src, size);

		tmp = state->blocks[2];
		crypto_aegis_block_and(&tmp, &state->blocks[3]);
		crypto_aegis_block_xor(&tmp, &state->blocks[4]);
		crypto_aegis_block_xor(&tmp, &state->blocks[1]);
		crypto_aegis_block_xor(&msg, &tmp);

		/* drop keystream residue past @size before absorbing */
		memset(msg.bytes + size, 0, AEGIS_BLOCK_SIZE - size);

		crypto_aegis128_update_a(state, &msg, false);

		memcpy(dst, msg.bytes, size);
	}
}
0275 
/*
 * Absorb the associated data from a scatterlist.  The AD may be split
 * arbitrarily across scatterlist elements, so partial blocks are
 * accumulated in @buf (@pos valid bytes) until a full 16-byte block can
 * be absorbed; runs of whole blocks within one mapped element go
 * straight to crypto_aegis128_ad().  A final partial block is
 * zero-padded before absorption.
 */
static void crypto_aegis128_process_ad(struct aegis_state *state,
				       struct scatterlist *sg_src,
				       unsigned int assoclen,
				       bool do_simd)
{
	struct scatter_walk walk;
	union aegis_block buf;
	unsigned int pos = 0;

	scatterwalk_start(&walk, sg_src);
	while (assoclen != 0) {
		unsigned int size = scatterwalk_clamp(&walk, assoclen);
		unsigned int left = size;
		void *mapped = scatterwalk_map(&walk);
		const u8 *src = (const u8 *)mapped;

		if (pos + size >= AEGIS_BLOCK_SIZE) {
			/* complete a previously buffered partial block */
			if (pos > 0) {
				unsigned int fill = AEGIS_BLOCK_SIZE - pos;
				memcpy(buf.bytes + pos, src, fill);
				crypto_aegis128_update_a(state, &buf, do_simd);
				pos = 0;
				left -= fill;
				src += fill;
			}

			/* absorb the whole blocks in this element directly */
			crypto_aegis128_ad(state, src, left, do_simd);
			src += left & ~(AEGIS_BLOCK_SIZE - 1);
			left &= AEGIS_BLOCK_SIZE - 1;
		}

		/* stash the remainder (< 16 bytes) for the next element */
		memcpy(buf.bytes + pos, src, left);

		pos += left;
		assoclen -= size;
		scatterwalk_unmap(mapped);
		scatterwalk_advance(&walk, size);
		scatterwalk_done(&walk, 0, assoclen);
	}

	if (pos > 0) {
		/* zero-pad and absorb the final partial block */
		memset(buf.bytes + pos, 0, AEGIS_BLOCK_SIZE - pos);
		crypto_aegis128_update_a(state, &buf, do_simd);
	}
}
0321 
/*
 * Drive an skcipher walk over the request's src/dst, invoking @crypt on
 * each mapped span.  While more data remains the span is rounded down
 * to the walk stride, so @crypt only ever sees a sub-block tail on the
 * final call.  Marked __always_inline so each caller is specialized for
 * its particular @crypt callback (presumably letting the compiler
 * replace the indirect call with a direct one).  Returns the last
 * skcipher_walk_done() result (0 if the walk had no data).
 */
static __always_inline
int crypto_aegis128_process_crypt(struct aegis_state *state,
				  struct skcipher_walk *walk,
				  void (*crypt)(struct aegis_state *state,
						u8 *dst, const u8 *src,
						unsigned int size))
{
	int err = 0;

	while (walk->nbytes) {
		unsigned int nbytes = walk->nbytes;

		if (nbytes < walk->total)
			nbytes = round_down(nbytes, walk->stride);

		crypt(state, walk->dst.virt.addr, walk->src.virt.addr, nbytes);

		err = skcipher_walk_done(walk, walk->nbytes - nbytes);
	}
	return err;
}
0343 
/*
 * Finalization: absorb the AD and message lengths (in bits, little
 * endian) XORed with state block S3, run seven update rounds, then XOR
 * all five state blocks into @tag_xor.  @tag_xor is in/out: encryption
 * passes a zeroed block and receives the tag; decryption passes the
 * received tag, so a correct tag leaves @tag_xor all-zero (see the
 * crypto_memneq() check in crypto_aegis128_decrypt_generic()).
 */
static void crypto_aegis128_final(struct aegis_state *state,
				  union aegis_block *tag_xor,
				  u64 assoclen, u64 cryptlen)
{
	u64 assocbits = assoclen * 8;
	u64 cryptbits = cryptlen * 8;

	union aegis_block tmp;
	unsigned int i;

	tmp.words64[0] = cpu_to_le64(assocbits);
	tmp.words64[1] = cpu_to_le64(cryptbits);

	crypto_aegis_block_xor(&tmp, &state->blocks[3]);

	for (i = 0; i < 7; i++)
		crypto_aegis128_update_a(state, &tmp, false);

	for (i = 0; i < AEGIS128_STATE_BLOCKS; i++)
		crypto_aegis_block_xor(tag_xor, &state->blocks[i]);
}
0365 
0366 static int crypto_aegis128_setkey(struct crypto_aead *aead, const u8 *key,
0367                   unsigned int keylen)
0368 {
0369     struct aegis_ctx *ctx = crypto_aead_ctx(aead);
0370 
0371     if (keylen != AEGIS128_KEY_SIZE)
0372         return -EINVAL;
0373 
0374     memcpy(ctx->key.bytes, key, AEGIS128_KEY_SIZE);
0375     return 0;
0376 }
0377 
0378 static int crypto_aegis128_setauthsize(struct crypto_aead *tfm,
0379                        unsigned int authsize)
0380 {
0381     if (authsize > AEGIS128_MAX_AUTH_SIZE)
0382         return -EINVAL;
0383     if (authsize < AEGIS128_MIN_AUTH_SIZE)
0384         return -EINVAL;
0385     return 0;
0386 }
0387 
/*
 * Scalar AEAD encryption: initialize the state from key + nonce, absorb
 * the AD from req->src, encrypt the payload via the skcipher walk, then
 * compute the tag (into an initially zero block) and store it after the
 * ciphertext in req->dst.  Always returns 0.
 */
static int crypto_aegis128_encrypt_generic(struct aead_request *req)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	union aegis_block tag = {};
	unsigned int authsize = crypto_aead_authsize(tfm);
	struct aegis_ctx *ctx = crypto_aead_ctx(tfm);
	unsigned int cryptlen = req->cryptlen;
	struct skcipher_walk walk;
	struct aegis_state state;

	skcipher_walk_aead_encrypt(&walk, req, false);
	crypto_aegis128_init(&state, &ctx->key, req->iv);
	crypto_aegis128_process_ad(&state, req->src, req->assoclen, false);
	crypto_aegis128_process_crypt(&state, &walk,
				      crypto_aegis128_encrypt_chunk);
	crypto_aegis128_final(&state, &tag, req->assoclen, cryptlen);

	/* append the tag right after the ciphertext */
	scatterwalk_map_and_copy(tag.bytes, req->dst, req->assoclen + cryptlen,
				 authsize, 1);
	return 0;
}
0409 
/*
 * Scalar AEAD decryption.  The expected tag is read from the end of
 * req->src before decrypting; crypto_aegis128_final() XORs the computed
 * tag into it, so a valid tag leaves all-zero bytes, checked in
 * constant time with crypto_memneq().  On mismatch the plaintext
 * already written to req->dst is wiped and -EBADMSG is returned.
 */
static int crypto_aegis128_decrypt_generic(struct aead_request *req)
{
	static const u8 zeros[AEGIS128_MAX_AUTH_SIZE] = {};
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	union aegis_block tag;
	unsigned int authsize = crypto_aead_authsize(tfm);
	unsigned int cryptlen = req->cryptlen - authsize;
	struct aegis_ctx *ctx = crypto_aead_ctx(tfm);
	struct skcipher_walk walk;
	struct aegis_state state;

	scatterwalk_map_and_copy(tag.bytes, req->src, req->assoclen + cryptlen,
				 authsize, 0);

	skcipher_walk_aead_decrypt(&walk, req, false);
	crypto_aegis128_init(&state, &ctx->key, req->iv);
	crypto_aegis128_process_ad(&state, req->src, req->assoclen, false);
	crypto_aegis128_process_crypt(&state, &walk,
				      crypto_aegis128_decrypt_chunk);
	crypto_aegis128_final(&state, &tag, req->assoclen, cryptlen);

	if (unlikely(crypto_memneq(tag.bytes, zeros, authsize))) {
		/*
		 * From Chapter 4. 'Security Analysis' of the AEGIS spec [0]
		 *
		 * "3. If verification fails, the decrypted plaintext and the
		 *     wrong authentication tag should not be given as output."
		 *
		 * [0] https://competitions.cr.yp.to/round3/aegisv11.pdf
		 */
		skcipher_walk_aead_decrypt(&walk, req, false);
		crypto_aegis128_process_crypt(NULL, &walk,
					      crypto_aegis128_wipe_chunk);
		memzero_explicit(&tag, sizeof(tag));
		return -EBADMSG;
	}
	return 0;
}
0448 
/*
 * SIMD AEAD encryption entry point.  Falls back to the generic
 * implementation when SIMD is unavailable in the current context
 * (aegis128_do_simd()); otherwise mirrors the generic flow using the
 * _simd helpers.  The trailing 0 passed to crypto_aegis128_final_simd()
 * contrasts with the authsize used on the decrypt path — presumably it
 * selects tag generation rather than verification; confirm against the
 * arch implementation.
 */
static int crypto_aegis128_encrypt_simd(struct aead_request *req)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	union aegis_block tag = {};
	unsigned int authsize = crypto_aead_authsize(tfm);
	struct aegis_ctx *ctx = crypto_aead_ctx(tfm);
	unsigned int cryptlen = req->cryptlen;
	struct skcipher_walk walk;
	struct aegis_state state;

	if (!aegis128_do_simd())
		return crypto_aegis128_encrypt_generic(req);

	skcipher_walk_aead_encrypt(&walk, req, false);
	crypto_aegis128_init_simd(&state, &ctx->key, req->iv);
	crypto_aegis128_process_ad(&state, req->src, req->assoclen, true);
	crypto_aegis128_process_crypt(&state, &walk,
				      crypto_aegis128_encrypt_chunk_simd);
	crypto_aegis128_final_simd(&state, &tag, req->assoclen, cryptlen, 0);

	scatterwalk_map_and_copy(tag.bytes, req->dst, req->assoclen + cryptlen,
				 authsize, 1);
	return 0;
}
0473 
/*
 * SIMD AEAD decryption entry point.  Falls back to the generic code
 * when SIMD is unavailable.  Unlike the generic path, the tag check is
 * performed inside crypto_aegis128_final_simd() (nonzero return means
 * mismatch); on failure the already-written plaintext in req->dst is
 * wiped and -EBADMSG returned, matching the spec's requirement not to
 * release unverified plaintext.
 */
static int crypto_aegis128_decrypt_simd(struct aead_request *req)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	union aegis_block tag;
	unsigned int authsize = crypto_aead_authsize(tfm);
	unsigned int cryptlen = req->cryptlen - authsize;
	struct aegis_ctx *ctx = crypto_aead_ctx(tfm);
	struct skcipher_walk walk;
	struct aegis_state state;

	if (!aegis128_do_simd())
		return crypto_aegis128_decrypt_generic(req);

	scatterwalk_map_and_copy(tag.bytes, req->src, req->assoclen + cryptlen,
				 authsize, 0);

	skcipher_walk_aead_decrypt(&walk, req, false);
	crypto_aegis128_init_simd(&state, &ctx->key, req->iv);
	crypto_aegis128_process_ad(&state, req->src, req->assoclen, true);
	crypto_aegis128_process_crypt(&state, &walk,
				      crypto_aegis128_decrypt_chunk_simd);

	if (unlikely(crypto_aegis128_final_simd(&state, &tag, req->assoclen,
						cryptlen, authsize))) {
		skcipher_walk_aead_decrypt(&walk, req, false);
		crypto_aegis128_process_crypt(NULL, &walk,
					      crypto_aegis128_wipe_chunk);
		return -EBADMSG;
	}
	return 0;
}
0505 
/*
 * Portable C implementation.  cra_priority 100 so the SIMD variant
 * (priority 200) wins when both are registered; cra_blocksize 1 with a
 * 16-byte chunksize advertises the internal block granularity.
 */
static struct aead_alg crypto_aegis128_alg_generic = {
	.setkey			= crypto_aegis128_setkey,
	.setauthsize		= crypto_aegis128_setauthsize,
	.encrypt		= crypto_aegis128_encrypt_generic,
	.decrypt		= crypto_aegis128_decrypt_generic,

	.ivsize			= AEGIS128_NONCE_SIZE,
	.maxauthsize		= AEGIS128_MAX_AUTH_SIZE,
	.chunksize		= AEGIS_BLOCK_SIZE,

	.base.cra_blocksize	= 1,
	.base.cra_ctxsize	= sizeof(struct aegis_ctx),
	.base.cra_alignmask	= 0,
	.base.cra_priority	= 100,
	.base.cra_name		= "aegis128",
	.base.cra_driver_name	= "aegis128-generic",
	.base.cra_module	= THIS_MODULE,
};
0524 
/*
 * SIMD-accelerated variant, registered only when
 * crypto_aegis128_have_simd() reports support.  Same "aegis128" algo
 * name as the generic variant but higher priority (200), so it is
 * selected by default when present.
 */
static struct aead_alg crypto_aegis128_alg_simd = {
	.setkey			= crypto_aegis128_setkey,
	.setauthsize		= crypto_aegis128_setauthsize,
	.encrypt		= crypto_aegis128_encrypt_simd,
	.decrypt		= crypto_aegis128_decrypt_simd,

	.ivsize			= AEGIS128_NONCE_SIZE,
	.maxauthsize		= AEGIS128_MAX_AUTH_SIZE,
	.chunksize		= AEGIS_BLOCK_SIZE,

	.base.cra_blocksize	= 1,
	.base.cra_ctxsize	= sizeof(struct aegis_ctx),
	.base.cra_alignmask	= 0,
	.base.cra_priority	= 200,
	.base.cra_name		= "aegis128",
	.base.cra_driver_name	= "aegis128-simd",
	.base.cra_module	= THIS_MODULE,
};
0543 
0544 static int __init crypto_aegis128_module_init(void)
0545 {
0546     int ret;
0547 
0548     ret = crypto_register_aead(&crypto_aegis128_alg_generic);
0549     if (ret)
0550         return ret;
0551 
0552     if (IS_ENABLED(CONFIG_CRYPTO_AEGIS128_SIMD) &&
0553         crypto_aegis128_have_simd()) {
0554         ret = crypto_register_aead(&crypto_aegis128_alg_simd);
0555         if (ret) {
0556             crypto_unregister_aead(&crypto_aegis128_alg_generic);
0557             return ret;
0558         }
0559         static_branch_enable(&have_simd);
0560     }
0561     return 0;
0562 }
0563 
0564 static void __exit crypto_aegis128_module_exit(void)
0565 {
0566     if (IS_ENABLED(CONFIG_CRYPTO_AEGIS128_SIMD) &&
0567         crypto_aegis128_have_simd())
0568         crypto_unregister_aead(&crypto_aegis128_alg_simd);
0569 
0570     crypto_unregister_aead(&crypto_aegis128_alg_generic);
0571 }
0572 
/* Registered via subsys_initcall (earlier than module_init) when built in;
 * the aliases let the crypto layer auto-load this module by name. */
subsys_initcall(crypto_aegis128_module_init);
module_exit(crypto_aegis128_module_exit);

MODULE_LICENSE("GPL");
MODULE_AUTHOR("Ondrej Mosnacek <omosnacek@gmail.com>");
MODULE_DESCRIPTION("AEGIS-128 AEAD algorithm");
MODULE_ALIAS_CRYPTO("aegis128");
MODULE_ALIAS_CRYPTO("aegis128-generic");
MODULE_ALIAS_CRYPTO("aegis128-simd");