// SPDX-License-Identifier: GPL-2.0-only
/*
 * Accelerated CRC32(C) using ARM CRC, NEON and Crypto Extensions instructions
 *
 * Copyright (C) 2016 Linaro Ltd <ard.biesheuvel@linaro.org>
 */

#include <linux/cpufeature.h>
#include <linux/crc32.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/string.h>

#include <crypto/internal/hash.h>
#include <crypto/internal/simd.h>

#include <asm/hwcap.h>
#include <asm/neon.h>
#include <asm/simd.h>
#include <asm/unaligned.h>

#define PMULL_MIN_LEN       64L /* minimum size of buffer
                                 * for crc32_pmull_le_16 */
#define SCALE_F             16L /* size of NEON register */

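/*
 * Assembly helpers: the *_pmull_le routines use 64x64-bit polynomial
 * multiplication (PMULL) and are handed buffers that the glue code below
 * has already aligned to SCALE_F and trimmed to a multiple of SCALE_F;
 * the *_armv8_le routines use the ARMv8 CRC32/CRC32C instructions and
 * accept arbitrary buffers.
 */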
asmlinkage u32 crc32_pmull_le(const u8 buf[], u32 len, u32 init_crc);
asmlinkage u32 crc32_armv8_le(u32 init_crc, const u8 buf[], u32 len);

asmlinkage u32 crc32c_pmull_le(const u8 buf[], u32 len, u32 init_crc);
asmlinkage u32 crc32c_armv8_le(u32 init_crc, const u8 buf[], u32 len);

static u32 (*fallback_crc32)(u32 init_crc, const u8 buf[], u32 len);
static u32 (*fallback_crc32c)(u32 init_crc, const u8 buf[], u32 len);

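/*
 * Default seeds: plain CRC32 starts from 0, while CRC32C starts from ~0
 * and inverts the result in crc32c_final(), matching the usual CRC-32C
 * (Castagnoli) convention.
 */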
static int crc32_cra_init(struct crypto_tfm *tfm)
{
    u32 *key = crypto_tfm_ctx(tfm);

    *key = 0;
    return 0;
}

static int crc32c_cra_init(struct crypto_tfm *tfm)
{
    u32 *key = crypto_tfm_ctx(tfm);

    *key = ~0;
    return 0;
}

static int crc32_setkey(struct crypto_shash *hash, const u8 *key,
            unsigned int keylen)
{
    u32 *mctx = crypto_shash_ctx(hash);

    if (keylen != sizeof(u32))
        return -EINVAL;
    *mctx = le32_to_cpup((__le32 *)key);
    return 0;
}

static int crc32_init(struct shash_desc *desc)
{
    u32 *mctx = crypto_shash_ctx(desc->tfm);
    u32 *crc = shash_desc_ctx(desc);

    *crc = *mctx;
    return 0;
}

static int crc32_update(struct shash_desc *desc, const u8 *data,
            unsigned int length)
{
    u32 *crc = shash_desc_ctx(desc);

    *crc = crc32_armv8_le(*crc, data, length);
    return 0;
}

static int crc32c_update(struct shash_desc *desc, const u8 *data,
             unsigned int length)
{
    u32 *crc = shash_desc_ctx(desc);

    *crc = crc32c_armv8_le(*crc, data, length);
    return 0;
}

static int crc32_final(struct shash_desc *desc, u8 *out)
{
    u32 *crc = shash_desc_ctx(desc);

    put_unaligned_le32(*crc, out);
    return 0;
}

static int crc32c_final(struct shash_desc *desc, u8 *out)
{
    u32 *crc = shash_desc_ctx(desc);

    put_unaligned_le32(~*crc, out);
    return 0;
}

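/*
 * PMULL update path, used when the CPU has the 64x64 polynomial multiply:
 *  1. if NEON may be used in this context and the buffer is not aligned
 *     to SCALE_F, feed the unaligned head to the scalar fallback;
 *  2. hand the largest SCALE_F-multiple chunk of at least PMULL_MIN_LEN
 *     bytes to the NEON/PMULL routine, bracketed by kernel_neon_begin()/
 *     kernel_neon_end();
 *  3. finish any remaining tail with the scalar fallback.
 */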
static int crc32_pmull_update(struct shash_desc *desc, const u8 *data,
                  unsigned int length)
{
    u32 *crc = shash_desc_ctx(desc);
    unsigned int l;

    if (crypto_simd_usable()) {
        if ((u32)data % SCALE_F) {
            l = min_t(u32, length, SCALE_F - ((u32)data % SCALE_F));

            *crc = fallback_crc32(*crc, data, l);

            data += l;
            length -= l;
        }

        if (length >= PMULL_MIN_LEN) {
            l = round_down(length, SCALE_F);

            kernel_neon_begin();
            *crc = crc32_pmull_le(data, l, *crc);
            kernel_neon_end();

            data += l;
            length -= l;
        }
    }

    if (length > 0)
        *crc = fallback_crc32(*crc, data, length);

    return 0;
}

static int crc32c_pmull_update(struct shash_desc *desc, const u8 *data,
                   unsigned int length)
{
    u32 *crc = shash_desc_ctx(desc);
    unsigned int l;

    if (crypto_simd_usable()) {
        if ((u32)data % SCALE_F) {
            l = min_t(u32, length, SCALE_F - ((u32)data % SCALE_F));

            *crc = fallback_crc32c(*crc, data, l);

            data += l;
            length -= l;
        }

        if (length >= PMULL_MIN_LEN) {
            l = round_down(length, SCALE_F);

            kernel_neon_begin();
            *crc = crc32c_pmull_le(data, l, *crc);
            kernel_neon_end();

            data += l;
            length -= l;
        }
    }

    if (length > 0)
        *crc = fallback_crc32c(*crc, data, length);

    return 0;
}

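/*
 * Two shash algorithms are registered: "crc32" and "crc32c". Both keep a
 * 4-byte running CRC as the per-request state (descsize) and a 4-byte
 * default seed in the tfm context (cra_ctxsize). CRYPTO_ALG_OPTIONAL_KEY
 * means callers may skip setkey(), in which case the seed installed by
 * cra_init is used. The .update hooks default to the plain ARMv8 CRC
 * routines and are switched to the PMULL variants at module init when
 * the hardware supports it.
 */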
static struct shash_alg crc32_pmull_algs[] = { {
    .setkey         = crc32_setkey,
    .init           = crc32_init,
    .update         = crc32_update,
    .final          = crc32_final,
    .descsize       = sizeof(u32),
    .digestsize     = sizeof(u32),

    .base.cra_ctxsize   = sizeof(u32),
    .base.cra_init      = crc32_cra_init,
    .base.cra_name      = "crc32",
    .base.cra_driver_name   = "crc32-arm-ce",
    .base.cra_priority  = 200,
    .base.cra_flags     = CRYPTO_ALG_OPTIONAL_KEY,
    .base.cra_blocksize = 1,
    .base.cra_module    = THIS_MODULE,
}, {
    .setkey         = crc32_setkey,
    .init           = crc32_init,
    .update         = crc32c_update,
    .final          = crc32c_final,
    .descsize       = sizeof(u32),
    .digestsize     = sizeof(u32),

    .base.cra_ctxsize   = sizeof(u32),
    .base.cra_init      = crc32c_cra_init,
    .base.cra_name      = "crc32c",
    .base.cra_driver_name   = "crc32c-arm-ce",
    .base.cra_priority  = 200,
    .base.cra_flags     = CRYPTO_ALG_OPTIONAL_KEY,
    .base.cra_blocksize = 1,
    .base.cra_module    = THIS_MODULE,
} };

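/*
 * Feature selection at load time:
 *  - PMULL present: use the PMULL update paths; prefer the ARMv8 CRC
 *    instructions as the scalar fallback, otherwise fall back to the
 *    generic crc32_le()/__crc32c_le() library routines.
 *  - no PMULL but CRC32 present: keep the plain crc32_armv8_le()/
 *    crc32c_armv8_le() update paths.
 *  - neither extension: refuse to load with -ENODEV.
 */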
static int __init crc32_pmull_mod_init(void)
{
    if (elf_hwcap2 & HWCAP2_PMULL) {
        crc32_pmull_algs[0].update = crc32_pmull_update;
        crc32_pmull_algs[1].update = crc32c_pmull_update;

        if (elf_hwcap2 & HWCAP2_CRC32) {
            fallback_crc32 = crc32_armv8_le;
            fallback_crc32c = crc32c_armv8_le;
        } else {
            fallback_crc32 = crc32_le;
            fallback_crc32c = __crc32c_le;
        }
    } else if (!(elf_hwcap2 & HWCAP2_CRC32)) {
        return -ENODEV;
    }

    return crypto_register_shashes(crc32_pmull_algs,
                       ARRAY_SIZE(crc32_pmull_algs));
}

static void __exit crc32_pmull_mod_exit(void)
{
    crypto_unregister_shashes(crc32_pmull_algs,
                  ARRAY_SIZE(crc32_pmull_algs));
}

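/*
 * Exposing the CPU feature table lets the module be autoloaded on systems
 * that advertise either the CRC32 or the PMULL extension.
 */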
static const struct cpu_feature __maybe_unused crc32_cpu_feature[] = {
    { cpu_feature(CRC32) }, { cpu_feature(PMULL) }, { }
};
MODULE_DEVICE_TABLE(cpu, crc32_cpu_feature);

module_init(crc32_pmull_mod_init);
module_exit(crc32_pmull_mod_exit);

MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL v2");
MODULE_ALIAS_CRYPTO("crc32");
MODULE_ALIAS_CRYPTO("crc32c");
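
/*
 * Illustrative usage sketch (not part of the original file): one way a
 * kernel caller could drive the "crc32" shash registered above through
 * the generic crypto API. The function name example_crc32() is
 * hypothetical and error handling is abbreviated; the digest written by
 * crc32_final() is the CRC in little-endian byte order.
 */
#if 0
#include <crypto/hash.h>

static int example_crc32(const u8 *data, unsigned int len, u32 *result)
{
    struct crypto_shash *tfm;
    int err;

    /* Pick the highest-priority "crc32" implementation, e.g. this one. */
    tfm = crypto_alloc_shash("crc32", 0, 0);
    if (IS_ERR(tfm))
        return PTR_ERR(tfm);

    {
        SHASH_DESC_ON_STACK(desc, tfm);

        desc->tfm = tfm;
        /* crypto_shash_digest() = init + update + final in one call. */
        err = crypto_shash_digest(desc, data, len, (u8 *)result);
    }

    crypto_free_shash(tfm);
    return err;
}
#endif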