// SPDX-License-Identifier: GPL-2.0-only
/*
 * Linux/arm64 port of the OpenSSL SHA256 implementation for AArch64
 *
 * Copyright (c) 2016 Linaro Ltd. <ard.biesheuvel@linaro.org>
 */

#include <asm/hwcap.h>
#include <asm/neon.h>
#include <asm/simd.h>
#include <crypto/internal/hash.h>
#include <crypto/internal/simd.h>
#include <crypto/sha2.h>
#include <crypto/sha256_base.h>
#include <linux/module.h>
#include <linux/string.h>
#include <linux/types.h>

MODULE_DESCRIPTION("SHA-224/SHA-256 secure hash for arm64");
MODULE_AUTHOR("Andy Polyakov <appro@openssl.org>");
MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL v2");
MODULE_ALIAS_CRYPTO("sha224");
MODULE_ALIAS_CRYPTO("sha256");
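
/*
 * The block transform is implemented in assembly, derived from Andy
 * Polyakov's OpenSSL code. The scalar version below uses only
 * general-purpose registers, so it is safe to call from any context.
 */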
asmlinkage void sha256_block_data_order(u32 *digest, const void *data,
					unsigned int num_blks);
EXPORT_SYMBOL(sha256_block_data_order);

static void __sha256_block_data_order(struct sha256_state *sst, u8 const *src,
				      int blocks)
{
	sha256_block_data_order(sst->state, src, blocks);
}
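
/*
 * NEON version of the block transform; it may only be called between
 * kernel_neon_begin() and kernel_neon_end().
 */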
asmlinkage void sha256_block_neon(u32 *digest, const void *data,
				  unsigned int num_blks);

static void __sha256_block_neon(struct sha256_state *sst, u8 const *src,
				int blocks)
{
	sha256_block_neon(sst->state, src, blocks);
}
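
/* shash entry points backed by the scalar transform; usable in any context. */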
static int crypto_sha256_arm64_update(struct shash_desc *desc, const u8 *data,
				      unsigned int len)
{
	return sha256_base_do_update(desc, data, len,
				     __sha256_block_data_order);
}

static int crypto_sha256_arm64_finup(struct shash_desc *desc, const u8 *data,
				     unsigned int len, u8 *out)
{
	if (len)
		sha256_base_do_update(desc, data, len,
				      __sha256_block_data_order);
	sha256_base_do_finalize(desc, __sha256_block_data_order);

	return sha256_base_finish(desc, out);
}

static int crypto_sha256_arm64_final(struct shash_desc *desc, u8 *out)
{
	return crypto_sha256_arm64_finup(desc, NULL, 0, out);
}
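
/*
 * Scalar fallback algorithms; cra_priority 125 keeps them above the
 * generic C implementation (100) but below the NEON variants (150).
 */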
static struct shash_alg algs[] = { {
	.digestsize		= SHA256_DIGEST_SIZE,
	.init			= sha256_base_init,
	.update			= crypto_sha256_arm64_update,
	.final			= crypto_sha256_arm64_final,
	.finup			= crypto_sha256_arm64_finup,
	.descsize		= sizeof(struct sha256_state),
	.base.cra_name		= "sha256",
	.base.cra_driver_name	= "sha256-arm64",
	.base.cra_priority	= 125,
	.base.cra_blocksize	= SHA256_BLOCK_SIZE,
	.base.cra_module	= THIS_MODULE,
}, {
	.digestsize		= SHA224_DIGEST_SIZE,
	.init			= sha224_base_init,
	.update			= crypto_sha256_arm64_update,
	.final			= crypto_sha256_arm64_final,
	.finup			= crypto_sha256_arm64_finup,
	.descsize		= sizeof(struct sha256_state),
	.base.cra_name		= "sha224",
	.base.cra_driver_name	= "sha224-arm64",
	.base.cra_priority	= 125,
	.base.cra_blocksize	= SHA224_BLOCK_SIZE,
	.base.cra_module	= THIS_MODULE,
} };
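
/*
 * NEON glue: fall back to the scalar code when the NEON unit is not
 * usable (e.g. in hardirq context), and bound the time spent with
 * preemption disabled on preemptible kernels.
 */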

static int sha256_update_neon(struct shash_desc *desc, const u8 *data,
			      unsigned int len)
{
	struct sha256_state *sctx = shash_desc_ctx(desc);

	if (!crypto_simd_usable())
		return sha256_base_do_update(desc, data, len,
					     __sha256_block_data_order);

	while (len > 0) {
		unsigned int chunk = len;

		/*
		 * Don't hog the CPU for the entire time it takes
		 * to process all input when running on a preemptible
		 * kernel, but process the data block by block
		 * instead.
		 */
		if (IS_ENABLED(CONFIG_PREEMPTION) &&
		    chunk + sctx->count % SHA256_BLOCK_SIZE > SHA256_BLOCK_SIZE)
			chunk = SHA256_BLOCK_SIZE -
				sctx->count % SHA256_BLOCK_SIZE;

		kernel_neon_begin();
		sha256_base_do_update(desc, data, chunk, __sha256_block_neon);
		kernel_neon_end();
		data += chunk;
		len -= chunk;
	}
	return 0;
}

static int sha256_finup_neon(struct shash_desc *desc, const u8 *data,
			     unsigned int len, u8 *out)
{
	if (!crypto_simd_usable()) {
		if (len)
			sha256_base_do_update(desc, data, len,
					      __sha256_block_data_order);
		sha256_base_do_finalize(desc, __sha256_block_data_order);
	} else {
		if (len)
			sha256_update_neon(desc, data, len);
		kernel_neon_begin();
		sha256_base_do_finalize(desc, __sha256_block_neon);
		kernel_neon_end();
	}
	return sha256_base_finish(desc, out);
}

static int sha256_final_neon(struct shash_desc *desc, u8 *out)
{
	return sha256_finup_neon(desc, NULL, 0, out);
}
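
/*
 * NEON-accelerated variants; the higher cra_priority (150) makes the
 * crypto core prefer these over the scalar versions above.
 */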
static struct shash_alg neon_algs[] = { {
	.digestsize		= SHA256_DIGEST_SIZE,
	.init			= sha256_base_init,
	.update			= sha256_update_neon,
	.final			= sha256_final_neon,
	.finup			= sha256_finup_neon,
	.descsize		= sizeof(struct sha256_state),
	.base.cra_name		= "sha256",
	.base.cra_driver_name	= "sha256-arm64-neon",
	.base.cra_priority	= 150,
	.base.cra_blocksize	= SHA256_BLOCK_SIZE,
	.base.cra_module	= THIS_MODULE,
}, {
	.digestsize		= SHA224_DIGEST_SIZE,
	.init			= sha224_base_init,
	.update			= sha256_update_neon,
	.final			= sha256_final_neon,
	.finup			= sha256_finup_neon,
	.descsize		= sizeof(struct sha256_state),
	.base.cra_name		= "sha224",
	.base.cra_driver_name	= "sha224-arm64-neon",
	.base.cra_priority	= 150,
	.base.cra_blocksize	= SHA224_BLOCK_SIZE,
	.base.cra_module	= THIS_MODULE,
} };
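
/*
 * Register the scalar algorithms unconditionally; the NEON variants are
 * only registered when the CPU advertises Advanced SIMD support.
 */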
static int __init sha256_mod_init(void)
{
	int ret = crypto_register_shashes(algs, ARRAY_SIZE(algs));

	if (ret)
		return ret;

	if (cpu_have_named_feature(ASIMD)) {
		ret = crypto_register_shashes(neon_algs, ARRAY_SIZE(neon_algs));
		if (ret)
			crypto_unregister_shashes(algs, ARRAY_SIZE(algs));
	}
	return ret;
}

static void __exit sha256_mod_fini(void)
{
	if (cpu_have_named_feature(ASIMD))
		crypto_unregister_shashes(neon_algs, ARRAY_SIZE(neon_algs));
	crypto_unregister_shashes(algs, ARRAY_SIZE(algs));
}

module_init(sha256_mod_init);
module_exit(sha256_mod_fini);
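
/*
 * Illustrative sketch only (not part of this driver): a kernel consumer
 * reaches these implementations through the generic shash API, and the
 * crypto core picks the highest-priority "sha256" provider, so the NEON
 * variant wins on ASIMD-capable CPUs. For example:
 *
 *	struct crypto_shash *tfm = crypto_alloc_shash("sha256", 0, 0);
 *	u8 digest[SHA256_DIGEST_SIZE];
 *
 *	if (!IS_ERR(tfm)) {
 *		SHASH_DESC_ON_STACK(desc, tfm);
 *
 *		desc->tfm = tfm;
 *		crypto_shash_digest(desc, data, len, digest);
 *		crypto_free_shash(tfm);
 *	}
 */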