0001
0002
0003
0004
0005
0006
0007
0008
0009
0010
0011
0012
0013
0014
0015
0016
0017
0018
0019
0020
0021
0022
0023
0024
0025
0026
0027
0028
0029
0030 #define pr_fmt(fmt) KBUILD_MODNAME ": " fmt
0031
0032 #include <crypto/internal/hash.h>
0033 #include <crypto/internal/simd.h>
0034 #include <linux/init.h>
0035 #include <linux/module.h>
0036 #include <linux/mm.h>
0037 #include <linux/types.h>
0038 #include <crypto/sha2.h>
0039 #include <crypto/sha256_base.h>
0040 #include <linux/string.h>
0041 #include <asm/simd.h>
0042
/*
 * SSSE3 assembly routine: processes 'blocks' full SHA-256 blocks from 'data'
 * into 'state'. Must only run inside a kernel_fpu_begin()/end() section.
 */
asmlinkage void sha256_transform_ssse3(struct sha256_state *state,
				       const u8 *data, int blocks);
0045
/*
 * Common ->update() implementation shared by all the SIMD variants.
 * Falls back to the generic C code when the FPU is not usable in this
 * context, or when the data is too small to complete even one block.
 */
static int _sha256_update(struct shash_desc *desc, const u8 *data,
			  unsigned int len, sha256_block_fn *sha256_xform)
{
	struct sha256_state *sctx = shash_desc_ctx(desc);

	if (!crypto_simd_usable() ||
	    (sctx->count % SHA256_BLOCK_SIZE) + len < SHA256_BLOCK_SIZE)
		return crypto_sha256_update(desc, data, len);

	/*
	 * Make sure struct sha256_state begins directly with the SHA-256
	 * internal state, as the asm transform functions assume this layout.
	 */
	BUILD_BUG_ON(offsetof(struct sha256_state, state) != 0);

	kernel_fpu_begin();
	sha256_base_do_update(desc, data, len, sha256_xform);
	kernel_fpu_end();

	return 0;
}
0067
/*
 * Common ->finup() implementation: absorb any trailing data and emit the
 * final digest, keeping the whole SIMD transform inside one FPU section.
 * Falls back to the generic C code when the FPU is not usable.
 */
static int sha256_finup(struct shash_desc *desc, const u8 *data,
	      unsigned int len, u8 *out, sha256_block_fn *sha256_xform)
{
	if (!crypto_simd_usable())
		return crypto_sha256_finup(desc, data, len, out);

	kernel_fpu_begin();
	if (len)
		sha256_base_do_update(desc, data, len, sha256_xform);
	sha256_base_do_finalize(desc, sha256_xform);
	kernel_fpu_end();

	/* Byte-swap the state into the output buffer; no FPU needed. */
	return sha256_base_finish(desc, out);
}
0082
/* ->update() hook for the SSSE3 drivers. */
static int sha256_ssse3_update(struct shash_desc *desc, const u8 *data,
			 unsigned int len)
{
	return _sha256_update(desc, data, len, sha256_transform_ssse3);
}
0088
/* ->finup() hook for the SSSE3 drivers. */
static int sha256_ssse3_finup(struct shash_desc *desc, const u8 *data,
	      unsigned int len, u8 *out)
{
	return sha256_finup(desc, data, len, out, sha256_transform_ssse3);
}
0094
0095
0096 static int sha256_ssse3_final(struct shash_desc *desc, u8 *out)
0097 {
0098 return sha256_ssse3_finup(desc, NULL, 0, out);
0099 }
0100
/* SHA-256 and SHA-224 shash drivers backed by the SSSE3 transform. */
static struct shash_alg sha256_ssse3_algs[] = { {
	.digestsize	=	SHA256_DIGEST_SIZE,
	.init		=	sha256_base_init,
	.update		=	sha256_ssse3_update,
	.final		=	sha256_ssse3_final,
	.finup		=	sha256_ssse3_finup,
	.descsize	=	sizeof(struct sha256_state),
	.base		=	{
		.cra_name	=	"sha256",
		.cra_driver_name =	"sha256-ssse3",
		.cra_priority	=	150,
		.cra_blocksize	=	SHA256_BLOCK_SIZE,
		.cra_module	=	THIS_MODULE,
	}
}, {
	/* SHA-224 shares the transform; only init and digest size differ. */
	.digestsize	=	SHA224_DIGEST_SIZE,
	.init		=	sha224_base_init,
	.update		=	sha256_ssse3_update,
	.final		=	sha256_ssse3_final,
	.finup		=	sha256_ssse3_finup,
	.descsize	=	sizeof(struct sha256_state),
	.base		=	{
		.cra_name	=	"sha224",
		.cra_driver_name =	"sha224-ssse3",
		.cra_priority	=	150,
		.cra_blocksize	=	SHA224_BLOCK_SIZE,
		.cra_module	=	THIS_MODULE,
	}
} };
0130
0131 static int register_sha256_ssse3(void)
0132 {
0133 if (boot_cpu_has(X86_FEATURE_SSSE3))
0134 return crypto_register_shashes(sha256_ssse3_algs,
0135 ARRAY_SIZE(sha256_ssse3_algs));
0136 return 0;
0137 }
0138
0139 static void unregister_sha256_ssse3(void)
0140 {
0141 if (boot_cpu_has(X86_FEATURE_SSSE3))
0142 crypto_unregister_shashes(sha256_ssse3_algs,
0143 ARRAY_SIZE(sha256_ssse3_algs));
0144 }
0145
/*
 * AVX assembly routine: same contract as sha256_transform_ssse3().
 * Must only run inside a kernel_fpu_begin()/end() section.
 */
asmlinkage void sha256_transform_avx(struct sha256_state *state,
				     const u8 *data, int blocks);
0148
/* ->update() hook for the AVX drivers. */
static int sha256_avx_update(struct shash_desc *desc, const u8 *data,
			 unsigned int len)
{
	return _sha256_update(desc, data, len, sha256_transform_avx);
}
0154
/* ->finup() hook for the AVX drivers. */
static int sha256_avx_finup(struct shash_desc *desc, const u8 *data,
		      unsigned int len, u8 *out)
{
	return sha256_finup(desc, data, len, out, sha256_transform_avx);
}
0160
0161 static int sha256_avx_final(struct shash_desc *desc, u8 *out)
0162 {
0163 return sha256_avx_finup(desc, NULL, 0, out);
0164 }
0165
/* SHA-256 and SHA-224 shash drivers backed by the AVX transform. */
static struct shash_alg sha256_avx_algs[] = { {
	.digestsize	=	SHA256_DIGEST_SIZE,
	.init		=	sha256_base_init,
	.update		=	sha256_avx_update,
	.final		=	sha256_avx_final,
	.finup		=	sha256_avx_finup,
	.descsize	=	sizeof(struct sha256_state),
	.base		=	{
		.cra_name	=	"sha256",
		.cra_driver_name =	"sha256-avx",
		.cra_priority	=	160,	/* preferred over ssse3 (150) */
		.cra_blocksize	=	SHA256_BLOCK_SIZE,
		.cra_module	=	THIS_MODULE,
	}
}, {
	.digestsize	=	SHA224_DIGEST_SIZE,
	.init		=	sha224_base_init,
	.update		=	sha256_avx_update,
	.final		=	sha256_avx_final,
	.finup		=	sha256_avx_finup,
	.descsize	=	sizeof(struct sha256_state),
	.base		=	{
		.cra_name	=	"sha224",
		.cra_driver_name =	"sha224-avx",
		.cra_priority	=	160,
		.cra_blocksize	=	SHA224_BLOCK_SIZE,
		.cra_module	=	THIS_MODULE,
	}
} };
0195
0196 static bool avx_usable(void)
0197 {
0198 if (!cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM, NULL)) {
0199 if (boot_cpu_has(X86_FEATURE_AVX))
0200 pr_info("AVX detected but unusable.\n");
0201 return false;
0202 }
0203
0204 return true;
0205 }
0206
0207 static int register_sha256_avx(void)
0208 {
0209 if (avx_usable())
0210 return crypto_register_shashes(sha256_avx_algs,
0211 ARRAY_SIZE(sha256_avx_algs));
0212 return 0;
0213 }
0214
0215 static void unregister_sha256_avx(void)
0216 {
0217 if (avx_usable())
0218 crypto_unregister_shashes(sha256_avx_algs,
0219 ARRAY_SIZE(sha256_avx_algs));
0220 }
0221
/*
 * AVX2 assembly routine (uses the BMI2 RORX instruction); same contract as
 * sha256_transform_ssse3(). Must run inside kernel_fpu_begin()/end().
 */
asmlinkage void sha256_transform_rorx(struct sha256_state *state,
				      const u8 *data, int blocks);
0224
/* ->update() hook for the AVX2 drivers. */
static int sha256_avx2_update(struct shash_desc *desc, const u8 *data,
			 unsigned int len)
{
	return _sha256_update(desc, data, len, sha256_transform_rorx);
}
0230
/* ->finup() hook for the AVX2 drivers. */
static int sha256_avx2_finup(struct shash_desc *desc, const u8 *data,
		      unsigned int len, u8 *out)
{
	return sha256_finup(desc, data, len, out, sha256_transform_rorx);
}
0236
0237 static int sha256_avx2_final(struct shash_desc *desc, u8 *out)
0238 {
0239 return sha256_avx2_finup(desc, NULL, 0, out);
0240 }
0241
/* SHA-256 and SHA-224 shash drivers backed by the AVX2/RORX transform. */
static struct shash_alg sha256_avx2_algs[] = { {
	.digestsize	=	SHA256_DIGEST_SIZE,
	.init		=	sha256_base_init,
	.update		=	sha256_avx2_update,
	.final		=	sha256_avx2_final,
	.finup		=	sha256_avx2_finup,
	.descsize	=	sizeof(struct sha256_state),
	.base		=	{
		.cra_name	=	"sha256",
		.cra_driver_name =	"sha256-avx2",
		.cra_priority	=	170,	/* preferred over avx (160) */
		.cra_blocksize	=	SHA256_BLOCK_SIZE,
		.cra_module	=	THIS_MODULE,
	}
}, {
	.digestsize	=	SHA224_DIGEST_SIZE,
	.init		=	sha224_base_init,
	.update		=	sha256_avx2_update,
	.final		=	sha256_avx2_final,
	.finup		=	sha256_avx2_finup,
	.descsize	=	sizeof(struct sha256_state),
	.base		=	{
		.cra_name	=	"sha224",
		.cra_driver_name =	"sha224-avx2",
		.cra_priority	=	170,
		.cra_blocksize	=	SHA224_BLOCK_SIZE,
		.cra_module	=	THIS_MODULE,
	}
} };
0271
0272 static bool avx2_usable(void)
0273 {
0274 if (avx_usable() && boot_cpu_has(X86_FEATURE_AVX2) &&
0275 boot_cpu_has(X86_FEATURE_BMI2))
0276 return true;
0277
0278 return false;
0279 }
0280
0281 static int register_sha256_avx2(void)
0282 {
0283 if (avx2_usable())
0284 return crypto_register_shashes(sha256_avx2_algs,
0285 ARRAY_SIZE(sha256_avx2_algs));
0286 return 0;
0287 }
0288
0289 static void unregister_sha256_avx2(void)
0290 {
0291 if (avx2_usable())
0292 crypto_unregister_shashes(sha256_avx2_algs,
0293 ARRAY_SIZE(sha256_avx2_algs));
0294 }
0295
0296 #ifdef CONFIG_AS_SHA256_NI
/*
 * SHA-NI assembly routine; same contract as sha256_transform_ssse3().
 * Must only run inside a kernel_fpu_begin()/end() section.
 */
asmlinkage void sha256_ni_transform(struct sha256_state *digest,
				    const u8 *data, int rounds);
0299
/* ->update() hook for the SHA-NI drivers. */
static int sha256_ni_update(struct shash_desc *desc, const u8 *data,
			 unsigned int len)
{
	return _sha256_update(desc, data, len, sha256_ni_transform);
}
0305
/* ->finup() hook for the SHA-NI drivers. */
static int sha256_ni_finup(struct shash_desc *desc, const u8 *data,
		      unsigned int len, u8 *out)
{
	return sha256_finup(desc, data, len, out, sha256_ni_transform);
}
0311
0312 static int sha256_ni_final(struct shash_desc *desc, u8 *out)
0313 {
0314 return sha256_ni_finup(desc, NULL, 0, out);
0315 }
0316
/* SHA-256 and SHA-224 shash drivers backed by the SHA-NI transform. */
static struct shash_alg sha256_ni_algs[] = { {
	.digestsize	=	SHA256_DIGEST_SIZE,
	.init		=	sha256_base_init,
	.update		=	sha256_ni_update,
	.final		=	sha256_ni_final,
	.finup		=	sha256_ni_finup,
	.descsize	=	sizeof(struct sha256_state),
	.base		=	{
		.cra_name	=	"sha256",
		.cra_driver_name =	"sha256-ni",
		.cra_priority	=	250,	/* highest of the variants here */
		.cra_blocksize	=	SHA256_BLOCK_SIZE,
		.cra_module	=	THIS_MODULE,
	}
}, {
	.digestsize	=	SHA224_DIGEST_SIZE,
	.init		=	sha224_base_init,
	.update		=	sha256_ni_update,
	.final		=	sha256_ni_final,
	.finup		=	sha256_ni_finup,
	.descsize	=	sizeof(struct sha256_state),
	.base		=	{
		.cra_name	=	"sha224",
		.cra_driver_name =	"sha224-ni",
		.cra_priority	=	250,
		.cra_blocksize	=	SHA224_BLOCK_SIZE,
		.cra_module	=	THIS_MODULE,
	}
} };
0346
0347 static int register_sha256_ni(void)
0348 {
0349 if (boot_cpu_has(X86_FEATURE_SHA_NI))
0350 return crypto_register_shashes(sha256_ni_algs,
0351 ARRAY_SIZE(sha256_ni_algs));
0352 return 0;
0353 }
0354
0355 static void unregister_sha256_ni(void)
0356 {
0357 if (boot_cpu_has(X86_FEATURE_SHA_NI))
0358 crypto_unregister_shashes(sha256_ni_algs,
0359 ARRAY_SIZE(sha256_ni_algs));
0360 }
0361
0362 #else
/* Stubs used when the assembler cannot emit SHA-NI instructions. */
static inline int register_sha256_ni(void) { return 0; }
static inline void unregister_sha256_ni(void) { }
0365 #endif
0366
0367 static int __init sha256_ssse3_mod_init(void)
0368 {
0369 if (register_sha256_ssse3())
0370 goto fail;
0371
0372 if (register_sha256_avx()) {
0373 unregister_sha256_ssse3();
0374 goto fail;
0375 }
0376
0377 if (register_sha256_avx2()) {
0378 unregister_sha256_avx();
0379 unregister_sha256_ssse3();
0380 goto fail;
0381 }
0382
0383 if (register_sha256_ni()) {
0384 unregister_sha256_avx2();
0385 unregister_sha256_avx();
0386 unregister_sha256_ssse3();
0387 goto fail;
0388 }
0389
0390 return 0;
0391 fail:
0392 return -ENODEV;
0393 }
0394
/* Module exit: unregister every variant, in reverse registration order. */
static void __exit sha256_ssse3_mod_fini(void)
{
	unregister_sha256_ni();
	unregister_sha256_avx2();
	unregister_sha256_avx();
	unregister_sha256_ssse3();
}
0402
module_init(sha256_ssse3_mod_init);
module_exit(sha256_ssse3_mod_fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("SHA256 Secure Hash Algorithm, Supplemental SSE3 accelerated");

/* Aliases so the module autoloads for each algorithm/driver name. */
MODULE_ALIAS_CRYPTO("sha256");
MODULE_ALIAS_CRYPTO("sha256-ssse3");
MODULE_ALIAS_CRYPTO("sha256-avx");
MODULE_ALIAS_CRYPTO("sha256-avx2");
MODULE_ALIAS_CRYPTO("sha224");
MODULE_ALIAS_CRYPTO("sha224-ssse3");
MODULE_ALIAS_CRYPTO("sha224-avx");
MODULE_ALIAS_CRYPTO("sha224-avx2");
#ifdef CONFIG_AS_SHA256_NI
MODULE_ALIAS_CRYPTO("sha256-ni");
MODULE_ALIAS_CRYPTO("sha224-ni");
#endif