// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * SM3 Secure Hash Algorithm, AVX assembler accelerated.
 *
 * Copyright (C) 2021 Tianjia Zhang <tianjia.zhang@linux.alibaba.com>
 */

#define pr_fmt(fmt) KBUILD_MODNAME ": " fmt

#include <crypto/internal/hash.h>
#include <crypto/internal/simd.h>
#include <linux/init.h>
#include <linux/module.h>
#include <linux/types.h>
#include <crypto/sm3.h>
#include <crypto/sm3_base.h>
#include <asm/simd.h>

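/*
 * SM3 block transform implemented in assembly with AVX and BMI2
 * instructions; processes @nblocks 64-byte blocks starting at @data.
 */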
asmlinkage void sm3_transform_avx(struct sm3_state *state,
				  const u8 *data, int nblocks);

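/*
 * Feed @len bytes at @data into the running hash.  Fall back to the
 * generic C implementation when the SIMD unit may not be used, or when
 * the buffered data plus @len still does not complete a 64-byte block.
 */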
static int sm3_avx_update(struct shash_desc *desc, const u8 *data,
			  unsigned int len)
{
	struct sm3_state *sctx = shash_desc_ctx(desc);

	if (!crypto_simd_usable() ||
	    (sctx->count % SM3_BLOCK_SIZE) + len < SM3_BLOCK_SIZE) {
		sm3_update(sctx, data, len);
		return 0;
	}

	/*
	 * Make sure struct sm3_state begins directly with the SM3
	 * 256-bit internal state, as this is what the asm function expects.
	 */
	BUILD_BUG_ON(offsetof(struct sm3_state, state) != 0);

	kernel_fpu_begin();
	sm3_base_do_update(desc, data, len, sm3_transform_avx);
	kernel_fpu_end();

	return 0;
}

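/*
 * Hash any trailing @data, then pad and write the final digest to @out.
 * Uses the generic C implementation when the SIMD unit may not be used.
 */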
static int sm3_avx_finup(struct shash_desc *desc, const u8 *data,
			 unsigned int len, u8 *out)
{
	if (!crypto_simd_usable()) {
		struct sm3_state *sctx = shash_desc_ctx(desc);

		if (len)
			sm3_update(sctx, data, len);

		sm3_final(sctx, out);
		return 0;
	}

	kernel_fpu_begin();
	if (len)
		sm3_base_do_update(desc, data, len, sm3_transform_avx);
	sm3_base_do_finalize(desc, sm3_transform_avx);
	kernel_fpu_end();

	return sm3_base_finish(desc, out);
}

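/* Finalize the digest without any additional input data. */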
static int sm3_avx_final(struct shash_desc *desc, u8 *out)
{
	if (!crypto_simd_usable()) {
		sm3_final(shash_desc_ctx(desc), out);
		return 0;
	}

	kernel_fpu_begin();
	sm3_base_do_finalize(desc, sm3_transform_avx);
	kernel_fpu_end();

	return sm3_base_finish(desc, out);
}

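/*
 * The higher cra_priority lets this driver take precedence over the
 * generic "sm3" implementation when both are registered.
 */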
static struct shash_alg sm3_avx_alg = {
	.digestsize	= SM3_DIGEST_SIZE,
	.init		= sm3_base_init,
	.update		= sm3_avx_update,
	.final		= sm3_avx_final,
	.finup		= sm3_avx_finup,
	.descsize	= sizeof(struct sm3_state),
	.base		= {
		.cra_name	 = "sm3",
		.cra_driver_name = "sm3-avx",
		.cra_priority	 = 300,
		.cra_blocksize	 = SM3_BLOCK_SIZE,
		.cra_module	 = THIS_MODULE,
	}
};

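/*
 * Only register the driver when the CPU supports AVX and BMI2 and the
 * kernel has enabled the SSE/YMM xstate needed to use the YMM registers.
 */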
static int __init sm3_avx_mod_init(void)
{
	const char *feature_name;

	if (!boot_cpu_has(X86_FEATURE_AVX)) {
		pr_info("AVX instructions are not detected.\n");
		return -ENODEV;
	}

	if (!boot_cpu_has(X86_FEATURE_BMI2)) {
		pr_info("BMI2 instructions are not detected.\n");
		return -ENODEV;
	}

	if (!cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM,
			       &feature_name)) {
		pr_info("CPU feature '%s' is not supported.\n", feature_name);
		return -ENODEV;
	}

	return crypto_register_shash(&sm3_avx_alg);
}

static void __exit sm3_avx_mod_exit(void)
{
	crypto_unregister_shash(&sm3_avx_alg);
}

module_init(sm3_avx_mod_init);
module_exit(sm3_avx_mod_exit);

MODULE_LICENSE("GPL v2");
MODULE_AUTHOR("Tianjia Zhang <tianjia.zhang@linux.alibaba.com>");
MODULE_DESCRIPTION("SM3 Secure Hash Algorithm, AVX assembler accelerated");
MODULE_ALIAS_CRYPTO("sm3");
MODULE_ALIAS_CRYPTO("sm3-avx");