/*
 * Cryptographic API.
 *
 * Glue code for the SHA512 Secure Hash Algorithm assembler
 * implementation using supplemental SSE3 / AVX / AVX2 instructions.
 *
 * This file is based on sha512_generic.c
 *
 * Copyright (C) 2013 Intel Corporation
 * Author: Tim Chen <tim.c.chen@linux.intel.com>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
 * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
 * BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
 * ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
 * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 *
 */

#define pr_fmt(fmt) KBUILD_MODNAME ": " fmt

#include <crypto/internal/hash.h>
#include <crypto/internal/simd.h>
#include <linux/init.h>
#include <linux/module.h>
#include <linux/mm.h>
#include <linux/string.h>
#include <linux/types.h>
#include <crypto/sha2.h>
#include <crypto/sha512_base.h>
#include <asm/simd.h>

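/*
 * Assembler block function: processes 'blocks' complete 128-byte
 * SHA-512 blocks from 'data' and updates the hash words in 'state'
 * in place.
 */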
asmlinkage void sha512_transform_ssse3(struct sha512_state *state,
                       const u8 *data, int blocks);

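/*
 * Common update path shared by the SSSE3, AVX and AVX2 variants.  Fall
 * back to the generic C implementation when the SIMD registers cannot
 * be used in this context, or when the buffered bytes plus 'len' still
 * do not complete a block (the assembler routine would not be invoked
 * anyway).  Otherwise run the block function inside a
 * kernel_fpu_begin()/kernel_fpu_end() section.
 */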
static int sha512_update(struct shash_desc *desc, const u8 *data,
               unsigned int len, sha512_block_fn *sha512_xform)
{
    struct sha512_state *sctx = shash_desc_ctx(desc);

    if (!crypto_simd_usable() ||
        (sctx->count[0] % SHA512_BLOCK_SIZE) + len < SHA512_BLOCK_SIZE)
        return crypto_sha512_update(desc, data, len);

    /*
     * Make sure struct sha512_state begins directly with the SHA512
     * 512-bit internal state, as this is what the asm functions expect.
     */
    BUILD_BUG_ON(offsetof(struct sha512_state, state) != 0);

    kernel_fpu_begin();
    sha512_base_do_update(desc, data, len, sha512_xform);
    kernel_fpu_end();

    return 0;
}

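/*
 * Common finup path: hash any trailing data and the final padding in a
 * single FPU section, then copy out the digest.  As above, defer to the
 * generic implementation when SIMD is not usable here.
 */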
static int sha512_finup(struct shash_desc *desc, const u8 *data,
          unsigned int len, u8 *out, sha512_block_fn *sha512_xform)
{
    if (!crypto_simd_usable())
        return crypto_sha512_finup(desc, data, len, out);

    kernel_fpu_begin();
    if (len)
        sha512_base_do_update(desc, data, len, sha512_xform);
    sha512_base_do_finalize(desc, sha512_xform);
    kernel_fpu_end();

    return sha512_base_finish(desc, out);
}

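/* Thin wrappers binding the shared helpers to the SSSE3 block function. */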
static int sha512_ssse3_update(struct shash_desc *desc, const u8 *data,
               unsigned int len)
{
    return sha512_update(desc, data, len, sha512_transform_ssse3);
}

static int sha512_ssse3_finup(struct shash_desc *desc, const u8 *data,
          unsigned int len, u8 *out)
{
    return sha512_finup(desc, data, len, out, sha512_transform_ssse3);
}

/* Add padding and return the message digest. */
static int sha512_ssse3_final(struct shash_desc *desc, u8 *out)
{
    return sha512_ssse3_finup(desc, NULL, 0, out);
}

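/*
 * SHA-512 and SHA-384 share the block function and descriptor layout;
 * they differ only in their initial hash values and digest length.
 * Priority 150 makes these drivers preferred over the generic C ones.
 */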
static struct shash_alg sha512_ssse3_algs[] = { {
    .digestsize =   SHA512_DIGEST_SIZE,
    .init       =   sha512_base_init,
    .update     =   sha512_ssse3_update,
    .final      =   sha512_ssse3_final,
    .finup      =   sha512_ssse3_finup,
    .descsize   =   sizeof(struct sha512_state),
    .base       =   {
        .cra_name   =   "sha512",
        .cra_driver_name =  "sha512-ssse3",
        .cra_priority   =   150,
        .cra_blocksize  =   SHA512_BLOCK_SIZE,
        .cra_module =   THIS_MODULE,
    }
},  {
    .digestsize =   SHA384_DIGEST_SIZE,
    .init       =   sha384_base_init,
    .update     =   sha512_ssse3_update,
    .final      =   sha512_ssse3_final,
    .finup      =   sha512_ssse3_finup,
    .descsize   =   sizeof(struct sha512_state),
    .base       =   {
        .cra_name   =   "sha384",
        .cra_driver_name =  "sha384-ssse3",
        .cra_priority   =   150,
        .cra_blocksize  =   SHA384_BLOCK_SIZE,
        .cra_module =   THIS_MODULE,
    }
} };

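/* Register/unregister the SSSE3 variants only if the CPU has SSSE3. */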
static int register_sha512_ssse3(void)
{
    if (boot_cpu_has(X86_FEATURE_SSSE3))
        return crypto_register_shashes(sha512_ssse3_algs,
            ARRAY_SIZE(sha512_ssse3_algs));
    return 0;
}

static void unregister_sha512_ssse3(void)
{
    if (boot_cpu_has(X86_FEATURE_SSSE3))
        crypto_unregister_shashes(sha512_ssse3_algs,
            ARRAY_SIZE(sha512_ssse3_algs));
}

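/*
 * AVX variant.  Besides the CPUID feature bit, the kernel must have
 * enabled XSAVE support for the SSE and YMM register state, which is
 * what cpu_has_xfeatures() checks below.
 */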
asmlinkage void sha512_transform_avx(struct sha512_state *state,
                     const u8 *data, int blocks);
static bool avx_usable(void)
{
    if (!cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM, NULL)) {
        if (boot_cpu_has(X86_FEATURE_AVX))
            pr_info("AVX detected but unusable.\n");
        return false;
    }

    return true;
}

static int sha512_avx_update(struct shash_desc *desc, const u8 *data,
               unsigned int len)
{
    return sha512_update(desc, data, len, sha512_transform_avx);
}

static int sha512_avx_finup(struct shash_desc *desc, const u8 *data,
          unsigned int len, u8 *out)
{
    return sha512_finup(desc, data, len, out, sha512_transform_avx);
}

/* Add padding and return the message digest. */
static int sha512_avx_final(struct shash_desc *desc, u8 *out)
{
    return sha512_avx_finup(desc, NULL, 0, out);
}

static struct shash_alg sha512_avx_algs[] = { {
    .digestsize =   SHA512_DIGEST_SIZE,
    .init       =   sha512_base_init,
    .update     =   sha512_avx_update,
    .final      =   sha512_avx_final,
    .finup      =   sha512_avx_finup,
    .descsize   =   sizeof(struct sha512_state),
    .base       =   {
        .cra_name   =   "sha512",
        .cra_driver_name =  "sha512-avx",
        .cra_priority   =   160,
        .cra_blocksize  =   SHA512_BLOCK_SIZE,
        .cra_module =   THIS_MODULE,
    }
},  {
    .digestsize =   SHA384_DIGEST_SIZE,
    .init       =   sha384_base_init,
    .update     =   sha512_avx_update,
    .final      =   sha512_avx_final,
    .finup      =   sha512_avx_finup,
    .descsize   =   sizeof(struct sha512_state),
    .base       =   {
        .cra_name   =   "sha384",
        .cra_driver_name =  "sha384-avx",
        .cra_priority   =   160,
        .cra_blocksize  =   SHA384_BLOCK_SIZE,
        .cra_module =   THIS_MODULE,
    }
} };

static int register_sha512_avx(void)
{
    if (avx_usable())
        return crypto_register_shashes(sha512_avx_algs,
            ARRAY_SIZE(sha512_avx_algs));
    return 0;
}

static void unregister_sha512_avx(void)
{
    if (avx_usable())
        crypto_unregister_shashes(sha512_avx_algs,
            ARRAY_SIZE(sha512_avx_algs));
}

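/*
 * AVX2 variant.  The assembler routine also relies on the BMI2 RORX
 * instruction, so avx2_usable() checks for BMI2 in addition to AVX2.
 */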
asmlinkage void sha512_transform_rorx(struct sha512_state *state,
                      const u8 *data, int blocks);

static int sha512_avx2_update(struct shash_desc *desc, const u8 *data,
               unsigned int len)
{
    return sha512_update(desc, data, len, sha512_transform_rorx);
}

static int sha512_avx2_finup(struct shash_desc *desc, const u8 *data,
          unsigned int len, u8 *out)
{
    return sha512_finup(desc, data, len, out, sha512_transform_rorx);
}

/* Add padding and return the message digest. */
static int sha512_avx2_final(struct shash_desc *desc, u8 *out)
{
    return sha512_avx2_finup(desc, NULL, 0, out);
}

static struct shash_alg sha512_avx2_algs[] = { {
    .digestsize =   SHA512_DIGEST_SIZE,
    .init       =   sha512_base_init,
    .update     =   sha512_avx2_update,
    .final      =   sha512_avx2_final,
    .finup      =   sha512_avx2_finup,
    .descsize   =   sizeof(struct sha512_state),
    .base       =   {
        .cra_name   =   "sha512",
        .cra_driver_name =  "sha512-avx2",
        .cra_priority   =   170,
        .cra_blocksize  =   SHA512_BLOCK_SIZE,
        .cra_module =   THIS_MODULE,
    }
},  {
    .digestsize =   SHA384_DIGEST_SIZE,
    .init       =   sha384_base_init,
    .update     =   sha512_avx2_update,
    .final      =   sha512_avx2_final,
    .finup      =   sha512_avx2_finup,
    .descsize   =   sizeof(struct sha512_state),
    .base       =   {
        .cra_name   =   "sha384",
        .cra_driver_name =  "sha384-avx2",
        .cra_priority   =   170,
        .cra_blocksize  =   SHA384_BLOCK_SIZE,
        .cra_module =   THIS_MODULE,
    }
} };

static bool avx2_usable(void)
{
    if (avx_usable() && boot_cpu_has(X86_FEATURE_AVX2) &&
            boot_cpu_has(X86_FEATURE_BMI2))
        return true;

    return false;
}

static int register_sha512_avx2(void)
{
    if (avx2_usable())
        return crypto_register_shashes(sha512_avx2_algs,
            ARRAY_SIZE(sha512_avx2_algs));
    return 0;
}

static void unregister_sha512_avx2(void)
{
    if (avx2_usable())
        crypto_unregister_shashes(sha512_avx2_algs,
            ARRAY_SIZE(sha512_avx2_algs));
}

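/*
 * Register every variant the CPU supports; the crypto core then picks
 * the highest-priority driver for "sha512"/"sha384".  If a registration
 * fails, undo the earlier ones and refuse to load.
 */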
static int __init sha512_ssse3_mod_init(void)
{
    if (register_sha512_ssse3())
        goto fail;

    if (register_sha512_avx()) {
        unregister_sha512_ssse3();
        goto fail;
    }

    if (register_sha512_avx2()) {
        unregister_sha512_avx();
        unregister_sha512_ssse3();
        goto fail;
    }

    return 0;
fail:
    return -ENODEV;
}

static void __exit sha512_ssse3_mod_fini(void)
{
    unregister_sha512_avx2();
    unregister_sha512_avx();
    unregister_sha512_ssse3();
}

module_init(sha512_ssse3_mod_init);
module_exit(sha512_ssse3_mod_fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("SHA512 Secure Hash Algorithm, Supplemental SSE3 accelerated");

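/*
 * Aliases so the module can be auto-loaded when one of these algorithm
 * names is requested.
 */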
MODULE_ALIAS_CRYPTO("sha512");
MODULE_ALIAS_CRYPTO("sha512-ssse3");
MODULE_ALIAS_CRYPTO("sha512-avx");
MODULE_ALIAS_CRYPTO("sha512-avx2");
MODULE_ALIAS_CRYPTO("sha384");
MODULE_ALIAS_CRYPTO("sha384-ssse3");
MODULE_ALIAS_CRYPTO("sha384-avx");
MODULE_ALIAS_CRYPTO("sha384-avx2");