#include <crypto/algapi.h>
#include <crypto/cast5.h>
#include <crypto/internal/simd.h>
#include <linux/crypto.h>
#include <linux/err.h>
#include <linux/module.h>
#include <linux/types.h>

#include "ecb_cbc_helpers.h"

#define CAST5_PARALLEL_BLOCKS 16

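/* 16-way parallel cipher routines, implemented in the accompanying AVX assembly */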
asmlinkage void cast5_ecb_enc_16way(struct cast5_ctx *ctx, u8 *dst,
                                    const u8 *src);
asmlinkage void cast5_ecb_dec_16way(struct cast5_ctx *ctx, u8 *dst,
                                    const u8 *src);
asmlinkage void cast5_cbc_dec_16way(struct cast5_ctx *ctx, u8 *dst,
                                    const u8 *src);

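/* Forward the skcipher key to the generic CAST5 key schedule. */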
static int cast5_setkey_skcipher(struct crypto_skcipher *tfm, const u8 *key,
                                 unsigned int keylen)
{
        return cast5_setkey(&tfm->base, key, keylen);
}

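/*
 * ECB: process 16 blocks at a time with the AVX routine, then handle any
 * remaining blocks one at a time with the generic single-block cipher.
 */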
static int ecb_encrypt(struct skcipher_request *req)
{
        ECB_WALK_START(req, CAST5_BLOCK_SIZE, CAST5_PARALLEL_BLOCKS);
        ECB_BLOCK(CAST5_PARALLEL_BLOCKS, cast5_ecb_enc_16way);
        ECB_BLOCK(1, __cast5_encrypt);
        ECB_WALK_END();
}

static int ecb_decrypt(struct skcipher_request *req)
{
        ECB_WALK_START(req, CAST5_BLOCK_SIZE, CAST5_PARALLEL_BLOCKS);
        ECB_BLOCK(CAST5_PARALLEL_BLOCKS, cast5_ecb_dec_16way);
        ECB_BLOCK(1, __cast5_decrypt);
        ECB_WALK_END();
}

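/*
 * CBC encryption is inherently serial (each block depends on the previous
 * ciphertext block), so only the generic single-block routine is used.
 */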
static int cbc_encrypt(struct skcipher_request *req)
{
        CBC_WALK_START(req, CAST5_BLOCK_SIZE, -1);
        CBC_ENC_BLOCK(__cast5_encrypt);
        CBC_WALK_END();
}

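/*
 * CBC decryption has no such dependency, so 16 blocks are decrypted in
 * parallel where possible.
 */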
static int cbc_decrypt(struct skcipher_request *req)
{
        CBC_WALK_START(req, CAST5_BLOCK_SIZE, CAST5_PARALLEL_BLOCKS);
        CBC_DEC_BLOCK(CAST5_PARALLEL_BLOCKS, cast5_cbc_dec_16way);
        CBC_DEC_BLOCK(1, __cast5_decrypt);
        CBC_WALK_END();
}

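/*
 * The "__" names and CRYPTO_ALG_INTERNAL flag mark these algorithms as
 * internal-only; users reach them through the simd wrappers registered
 * below, which take care of the FPU context around the AVX code.
 */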
static struct skcipher_alg cast5_algs[] = {
        {
                .base.cra_name = "__ecb(cast5)",
                .base.cra_driver_name = "__ecb-cast5-avx",
                .base.cra_priority = 200,
                .base.cra_flags = CRYPTO_ALG_INTERNAL,
                .base.cra_blocksize = CAST5_BLOCK_SIZE,
                .base.cra_ctxsize = sizeof(struct cast5_ctx),
                .base.cra_module = THIS_MODULE,
                .min_keysize = CAST5_MIN_KEY_SIZE,
                .max_keysize = CAST5_MAX_KEY_SIZE,
                .setkey = cast5_setkey_skcipher,
                .encrypt = ecb_encrypt,
                .decrypt = ecb_decrypt,
        }, {
                .base.cra_name = "__cbc(cast5)",
                .base.cra_driver_name = "__cbc-cast5-avx",
                .base.cra_priority = 200,
                .base.cra_flags = CRYPTO_ALG_INTERNAL,
                .base.cra_blocksize = CAST5_BLOCK_SIZE,
                .base.cra_ctxsize = sizeof(struct cast5_ctx),
                .base.cra_module = THIS_MODULE,
                .min_keysize = CAST5_MIN_KEY_SIZE,
                .max_keysize = CAST5_MAX_KEY_SIZE,
                .ivsize = CAST5_BLOCK_SIZE,
                .setkey = cast5_setkey_skcipher,
                .encrypt = cbc_encrypt,
                .decrypt = cbc_decrypt,
        }
};

static struct simd_skcipher_alg *cast5_simd_algs[ARRAY_SIZE(cast5_algs)];

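/*
 * Only register when the CPU exposes XSAVE-managed SSE and YMM (AVX) state;
 * otherwise bail out with -ENODEV.
 */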
static int __init cast5_init(void)
{
        const char *feature_name;

        if (!cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM,
                               &feature_name)) {
                pr_info("CPU feature '%s' is not supported.\n", feature_name);
                return -ENODEV;
        }

        return simd_register_skciphers_compat(cast5_algs,
                                              ARRAY_SIZE(cast5_algs),
                                              cast5_simd_algs);
}

static void __exit cast5_exit(void)
{
        simd_unregister_skciphers(cast5_algs, ARRAY_SIZE(cast5_algs),
                                  cast5_simd_algs);
}

module_init(cast5_init);
module_exit(cast5_exit);

MODULE_DESCRIPTION("Cast5 Cipher Algorithm, AVX optimized");
MODULE_LICENSE("GPL");
MODULE_ALIAS_CRYPTO("cast5");