/*
 * Glue code for the SHA-1 Secure Hash Algorithm assembler implementations
 * using Supplemental SSE3 (SSSE3), AVX, AVX2 and, when available, the
 * SHA-NI instruction set extensions.
 */
#define pr_fmt(fmt) KBUILD_MODNAME ": " fmt

#include <crypto/internal/hash.h>
#include <crypto/internal/simd.h>
#include <linux/init.h>
#include <linux/module.h>
#include <linux/mm.h>
#include <linux/types.h>
#include <crypto/sha1.h>
#include <crypto/sha1_base.h>
#include <asm/simd.h>

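/*
 * Common update path for all implementations: fall back to the generic C
 * code when the FPU/SIMD registers cannot be used in this context, or when
 * the buffered data plus @len still does not complete a full block;
 * otherwise run the assembler transform between kernel_fpu_begin() and
 * kernel_fpu_end().
 */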
static int sha1_update(struct shash_desc *desc, const u8 *data,
		unsigned int len, sha1_block_fn *sha1_xform)
{
	struct sha1_state *sctx = shash_desc_ctx(desc);

	if (!crypto_simd_usable() ||
	    (sctx->count % SHA1_BLOCK_SIZE) + len < SHA1_BLOCK_SIZE)
		return crypto_sha1_update(desc, data, len);

	/*
	 * Make sure struct sha1_state begins directly with the SHA1
	 * 160-bit internal state, as this is what the asm functions expect.
	 */
	BUILD_BUG_ON(offsetof(struct sha1_state, state) != 0);

	kernel_fpu_begin();
	sha1_base_do_update(desc, data, len, sha1_xform);
	kernel_fpu_end();

	return 0;
}

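/*
 * Common finup path: outside of a SIMD-usable context, defer entirely to
 * the generic crypto_sha1_finup(); otherwise process any remaining data
 * and the final padded block with the assembler transform under
 * kernel_fpu_begin()/kernel_fpu_end(), then copy out the digest.
 */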
static int sha1_finup(struct shash_desc *desc, const u8 *data,
		unsigned int len, u8 *out, sha1_block_fn *sha1_xform)
{
	if (!crypto_simd_usable())
		return crypto_sha1_finup(desc, data, len, out);

	kernel_fpu_begin();
	if (len)
		sha1_base_do_update(desc, data, len, sha1_xform);
	sha1_base_do_finalize(desc, sha1_xform);
	kernel_fpu_end();

	return sha1_base_finish(desc, out);
}

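/*
 * The assembler routines below all share the sha1_block_fn signature:
 * they consume @blocks full SHA1_BLOCK_SIZE blocks from @data and update
 * the 160-bit state in place.
 */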
asmlinkage void sha1_transform_ssse3(struct sha1_state *state,
		const u8 *data, int blocks);

static int sha1_ssse3_update(struct shash_desc *desc, const u8 *data,
		unsigned int len)
{
	return sha1_update(desc, data, len, sha1_transform_ssse3);
}

static int sha1_ssse3_finup(struct shash_desc *desc, const u8 *data,
		unsigned int len, u8 *out)
{
	return sha1_finup(desc, data, len, out, sha1_transform_ssse3);
}

static int sha1_ssse3_final(struct shash_desc *desc, u8 *out)
{
	return sha1_ssse3_finup(desc, NULL, 0, out);
}

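/*
 * The cra_priority values (150 for SSSE3, 160 for AVX, 170 for AVX2 and
 * 250 for SHA-NI) ensure that, of the variants registered on a given CPU,
 * the crypto API prefers the highest-priority implementation for the
 * "sha1" algorithm name.
 */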
static struct shash_alg sha1_ssse3_alg = {
	.digestsize = SHA1_DIGEST_SIZE,
	.init = sha1_base_init,
	.update = sha1_ssse3_update,
	.final = sha1_ssse3_final,
	.finup = sha1_ssse3_finup,
	.descsize = sizeof(struct sha1_state),
	.base = {
		.cra_name = "sha1",
		.cra_driver_name = "sha1-ssse3",
		.cra_priority = 150,
		.cra_blocksize = SHA1_BLOCK_SIZE,
		.cra_module = THIS_MODULE,
	}
};

static int register_sha1_ssse3(void)
{
	if (boot_cpu_has(X86_FEATURE_SSSE3))
		return crypto_register_shash(&sha1_ssse3_alg);
	return 0;
}

static void unregister_sha1_ssse3(void)
{
	if (boot_cpu_has(X86_FEATURE_SSSE3))
		crypto_unregister_shash(&sha1_ssse3_alg);
}

asmlinkage void sha1_transform_avx(struct sha1_state *state,
		const u8 *data, int blocks);

static int sha1_avx_update(struct shash_desc *desc, const u8 *data,
		unsigned int len)
{
	return sha1_update(desc, data, len, sha1_transform_avx);
}

static int sha1_avx_finup(struct shash_desc *desc, const u8 *data,
		unsigned int len, u8 *out)
{
	return sha1_finup(desc, data, len, out, sha1_transform_avx);
}

static int sha1_avx_final(struct shash_desc *desc, u8 *out)
{
	return sha1_avx_finup(desc, NULL, 0, out);
}

static struct shash_alg sha1_avx_alg = {
	.digestsize = SHA1_DIGEST_SIZE,
	.init = sha1_base_init,
	.update = sha1_avx_update,
	.final = sha1_avx_final,
	.finup = sha1_avx_finup,
	.descsize = sizeof(struct sha1_state),
	.base = {
		.cra_name = "sha1",
		.cra_driver_name = "sha1-avx",
		.cra_priority = 160,
		.cra_blocksize = SHA1_BLOCK_SIZE,
		.cra_module = THIS_MODULE,
	}
};

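/*
 * The AVX CPUID bit alone is not enough: the kernel must also have enabled
 * saving/restoring of the SSE and YMM register state, which is what
 * cpu_has_xfeatures() verifies below.
 */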
static bool avx_usable(void)
{
	if (!cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM, NULL)) {
		if (boot_cpu_has(X86_FEATURE_AVX))
			pr_info("AVX detected but unusable.\n");
		return false;
	}

	return true;
}

static int register_sha1_avx(void)
{
	if (avx_usable())
		return crypto_register_shash(&sha1_avx_alg);
	return 0;
}

static void unregister_sha1_avx(void)
{
	if (avx_usable())
		crypto_unregister_shash(&sha1_avx_alg);
}

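/*
 * The AVX2 transform only pays off for sufficiently large inputs; for
 * fewer than this many blocks the plain AVX transform is used instead
 * (see sha1_apply_transform_avx2() below).
 */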
#define SHA1_AVX2_BLOCK_OPTSIZE 4

asmlinkage void sha1_transform_avx2(struct sha1_state *state,
		const u8 *data, int blocks);

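/*
 * The AVX2 path also requires BMI1 and BMI2 support, as checked below,
 * in addition to a usable AVX state.
 */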
static bool avx2_usable(void)
{
	if (avx_usable() && boot_cpu_has(X86_FEATURE_AVX2)
	    && boot_cpu_has(X86_FEATURE_BMI1)
	    && boot_cpu_has(X86_FEATURE_BMI2))
		return true;

	return false;
}

static void sha1_apply_transform_avx2(struct sha1_state *state,
		const u8 *data, int blocks)
{
	/* Select the optimal transform based on the number of blocks. */
	if (blocks >= SHA1_AVX2_BLOCK_OPTSIZE)
		sha1_transform_avx2(state, data, blocks);
	else
		sha1_transform_avx(state, data, blocks);
}

static int sha1_avx2_update(struct shash_desc *desc, const u8 *data,
		unsigned int len)
{
	return sha1_update(desc, data, len, sha1_apply_transform_avx2);
}

static int sha1_avx2_finup(struct shash_desc *desc, const u8 *data,
		unsigned int len, u8 *out)
{
	return sha1_finup(desc, data, len, out, sha1_apply_transform_avx2);
}

static int sha1_avx2_final(struct shash_desc *desc, u8 *out)
{
	return sha1_avx2_finup(desc, NULL, 0, out);
}

static struct shash_alg sha1_avx2_alg = {
	.digestsize = SHA1_DIGEST_SIZE,
	.init = sha1_base_init,
	.update = sha1_avx2_update,
	.final = sha1_avx2_final,
	.finup = sha1_avx2_finup,
	.descsize = sizeof(struct sha1_state),
	.base = {
		.cra_name = "sha1",
		.cra_driver_name = "sha1-avx2",
		.cra_priority = 170,
		.cra_blocksize = SHA1_BLOCK_SIZE,
		.cra_module = THIS_MODULE,
	}
};

static int register_sha1_avx2(void)
{
	if (avx2_usable())
		return crypto_register_shash(&sha1_avx2_alg);
	return 0;
}

static void unregister_sha1_avx2(void)
{
	if (avx2_usable())
		crypto_unregister_shash(&sha1_avx2_alg);
}

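/*
 * The SHA-NI variant can only be built when the assembler supports the
 * SHA-NI instructions (CONFIG_AS_SHA1_NI); otherwise the register/
 * unregister helpers below compile to no-op stubs.
 */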
#ifdef CONFIG_AS_SHA1_NI
asmlinkage void sha1_ni_transform(struct sha1_state *digest, const u8 *data,
		int rounds);

static int sha1_ni_update(struct shash_desc *desc, const u8 *data,
		unsigned int len)
{
	return sha1_update(desc, data, len, sha1_ni_transform);
}

static int sha1_ni_finup(struct shash_desc *desc, const u8 *data,
		unsigned int len, u8 *out)
{
	return sha1_finup(desc, data, len, out, sha1_ni_transform);
}

static int sha1_ni_final(struct shash_desc *desc, u8 *out)
{
	return sha1_ni_finup(desc, NULL, 0, out);
}

static struct shash_alg sha1_ni_alg = {
	.digestsize = SHA1_DIGEST_SIZE,
	.init = sha1_base_init,
	.update = sha1_ni_update,
	.final = sha1_ni_final,
	.finup = sha1_ni_finup,
	.descsize = sizeof(struct sha1_state),
	.base = {
		.cra_name = "sha1",
		.cra_driver_name = "sha1-ni",
		.cra_priority = 250,
		.cra_blocksize = SHA1_BLOCK_SIZE,
		.cra_module = THIS_MODULE,
	}
};

static int register_sha1_ni(void)
{
	if (boot_cpu_has(X86_FEATURE_SHA_NI))
		return crypto_register_shash(&sha1_ni_alg);
	return 0;
}

static void unregister_sha1_ni(void)
{
	if (boot_cpu_has(X86_FEATURE_SHA_NI))
		crypto_unregister_shash(&sha1_ni_alg);
}

#else
static inline int register_sha1_ni(void) { return 0; }
static inline void unregister_sha1_ni(void) { }
#endif

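/*
 * Register every variant the CPU supports; if any registration fails,
 * unwind the ones already registered and fail the module load with
 * -ENODEV.
 */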
static int __init sha1_ssse3_mod_init(void)
{
	if (register_sha1_ssse3())
		goto fail;

	if (register_sha1_avx()) {
		unregister_sha1_ssse3();
		goto fail;
	}

	if (register_sha1_avx2()) {
		unregister_sha1_avx();
		unregister_sha1_ssse3();
		goto fail;
	}

	if (register_sha1_ni()) {
		unregister_sha1_avx2();
		unregister_sha1_avx();
		unregister_sha1_ssse3();
		goto fail;
	}

	return 0;
fail:
	return -ENODEV;
}

static void __exit sha1_ssse3_mod_fini(void)
{
	unregister_sha1_ni();
	unregister_sha1_avx2();
	unregister_sha1_avx();
	unregister_sha1_ssse3();
}

module_init(sha1_ssse3_mod_init);
module_exit(sha1_ssse3_mod_fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("SHA1 Secure Hash Algorithm, Supplemental SSE3 accelerated");

MODULE_ALIAS_CRYPTO("sha1");
MODULE_ALIAS_CRYPTO("sha1-ssse3");
MODULE_ALIAS_CRYPTO("sha1-avx");
MODULE_ALIAS_CRYPTO("sha1-avx2");
#ifdef CONFIG_AS_SHA1_NI
MODULE_ALIAS_CRYPTO("sha1-ni");
#endif