// SPDX-License-Identifier: GPL-2.0-only
/*
 * Accelerated GHASH implementation with ARMv8 vmull.p64 instructions.
 *
 * Copyright (C) 2015 - 2018 Linaro Ltd. <ard.biesheuvel@linaro.org>
 */

#include <asm/hwcap.h>
#include <asm/neon.h>
#include <asm/simd.h>
#include <asm/unaligned.h>
#include <crypto/b128ops.h>
#include <crypto/cryptd.h>
#include <crypto/internal/hash.h>
#include <crypto/internal/simd.h>
#include <crypto/gf128mul.h>
#include <linux/cpufeature.h>
#include <linux/crypto.h>
#include <linux/jump_label.h>
#include <linux/module.h>

MODULE_DESCRIPTION("GHASH hash function using ARMv8 Crypto Extensions");
MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL v2");
MODULE_ALIAS_CRYPTO("ghash");

#define GHASH_BLOCK_SIZE	16
#define GHASH_DIGEST_SIZE	16

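/*
 * The raw key is retained for the scalar fallback; h[] holds the key in
 * the shifted form expected by the NEON code, with one entry for the
 * vmull.p8 path and four key powers when vmull.p64 is available.
 */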
struct ghash_key {
	be128	k;
	u64	h[][2];
};

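/* Per-request hash state: the running digest and a partial-block buffer. */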
struct ghash_desc_ctx {
	u64 digest[GHASH_DIGEST_SIZE/sizeof(u64)];
	u8 buf[GHASH_BLOCK_SIZE];
	u32 count;
};

struct ghash_async_ctx {
	struct cryptd_ahash *cryptd_tfm;
};

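/*
 * NEON assembly routines, defined in the accompanying ghash-ce-core.S:
 * the p64 variant uses the vmull.p64 instruction from the ARMv8 Crypto
 * Extensions, while the p8 variant synthesises the 64x64 carry-less
 * multiply from plain vmull.p8.
 */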
asmlinkage void pmull_ghash_update_p64(int blocks, u64 dg[], const char *src,
				       u64 const h[][2], const char *head);

asmlinkage void pmull_ghash_update_p8(int blocks, u64 dg[], const char *src,
				      u64 const h[][2], const char *head);

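/* Enabled at module init when the PMULL extension is present. */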
static __ro_after_init DEFINE_STATIC_KEY_FALSE(use_p64);

static int ghash_init(struct shash_desc *desc)
{
	struct ghash_desc_ctx *ctx = shash_desc_ctx(desc);

	*ctx = (struct ghash_desc_ctx){};
	return 0;
}

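/*
 * Hash @blocks full blocks through the NEON code when the SIMD unit may
 * currently be used, falling back to the generic gf128mul_lle() software
 * implementation otherwise. @head, if non-NULL, is one extra block that
 * is consumed before @src.
 */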
static void ghash_do_update(int blocks, u64 dg[], const char *src,
			    struct ghash_key *key, const char *head)
{
	if (likely(crypto_simd_usable())) {
		kernel_neon_begin();
		if (static_branch_likely(&use_p64))
			pmull_ghash_update_p64(blocks, dg, src, key->h, head);
		else
			pmull_ghash_update_p8(blocks, dg, src, key->h, head);
		kernel_neon_end();
	} else {
		be128 dst = { cpu_to_be64(dg[1]), cpu_to_be64(dg[0]) };

		do {
			const u8 *in = src;

			if (head) {
				in = head;
				blocks++;
				head = NULL;
			} else {
				src += GHASH_BLOCK_SIZE;
			}

			crypto_xor((u8 *)&dst, in, GHASH_BLOCK_SIZE);
			gf128mul_lle(&dst, &key->k);
		} while (--blocks);

		dg[0] = be64_to_cpu(dst.b);
		dg[1] = be64_to_cpu(dst.a);
	}
}

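/* Gather input into full blocks and pass them to ghash_do_update(). */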
static int ghash_update(struct shash_desc *desc, const u8 *src,
			unsigned int len)
{
	struct ghash_desc_ctx *ctx = shash_desc_ctx(desc);
	unsigned int partial = ctx->count % GHASH_BLOCK_SIZE;

	ctx->count += len;

	if ((partial + len) >= GHASH_BLOCK_SIZE) {
		struct ghash_key *key = crypto_shash_ctx(desc->tfm);
		int blocks;

		if (partial) {
			int p = GHASH_BLOCK_SIZE - partial;

			memcpy(ctx->buf + partial, src, p);
			src += p;
			len -= p;
		}

		blocks = len / GHASH_BLOCK_SIZE;
		len %= GHASH_BLOCK_SIZE;

		ghash_do_update(blocks, ctx->digest, src, key,
				partial ? ctx->buf : NULL);
		src += blocks * GHASH_BLOCK_SIZE;
		partial = 0;
	}
	if (len)
		memcpy(ctx->buf + partial, src, len);
	return 0;
}

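/* Zero-pad any trailing partial block and emit the big-endian digest. */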
static int ghash_final(struct shash_desc *desc, u8 *dst)
{
	struct ghash_desc_ctx *ctx = shash_desc_ctx(desc);
	unsigned int partial = ctx->count % GHASH_BLOCK_SIZE;

	if (partial) {
		struct ghash_key *key = crypto_shash_ctx(desc->tfm);

		memset(ctx->buf + partial, 0, GHASH_BLOCK_SIZE - partial);
		ghash_do_update(1, ctx->digest, ctx->buf, key, NULL);
	}
	put_unaligned_be64(ctx->digest[1], dst);
	put_unaligned_be64(ctx->digest[0], dst + 8);

	*ctx = (struct ghash_desc_ctx){};
	return 0;
}

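/*
 * Convert the key into the form used by the NEON code: the 128-bit
 * value is shifted left by one bit, with the carry folded back in via
 * the GHASH reduction constant, and stored as two native 64-bit words.
 * The pre-shift compensates for GHASH's reversed bit order, so the
 * assembly can use straight carry-less multiplies.
 */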
static void ghash_reflect(u64 h[], const be128 *k)
{
	u64 carry = be64_to_cpu(k->a) >> 63;

	h[0] = (be64_to_cpu(k->b) << 1) | carry;
	h[1] = (be64_to_cpu(k->a) << 1) | (be64_to_cpu(k->b) >> 63);

	if (carry)
		h[1] ^= 0xc200000000000000UL;
}

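/*
 * Store the raw key for the scalar fallback and precompute the shifted
 * key powers. The p64 code aggregates up to four blocks per pass, so it
 * also needs H^2, H^3 and H^4.
 */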
static int ghash_setkey(struct crypto_shash *tfm,
			const u8 *inkey, unsigned int keylen)
{
	struct ghash_key *key = crypto_shash_ctx(tfm);

	if (keylen != GHASH_BLOCK_SIZE)
		return -EINVAL;

	/* needed for the fallback */
	memcpy(&key->k, inkey, GHASH_BLOCK_SIZE);
	ghash_reflect(key->h[0], &key->k);

	if (static_branch_likely(&use_p64)) {
		be128 h = key->k;

		gf128mul_lle(&h, &key->k);
		ghash_reflect(key->h[1], &h);

		gf128mul_lle(&h, &key->k);
		ghash_reflect(key->h[2], &h);

		gf128mul_lle(&h, &key->k);
		ghash_reflect(key->h[3], &h);
	}
	return 0;
}

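/*
 * Synchronous shash, registered one priority point below the async
 * wrapper so that generic "ghash" lookups prefer "ghash-ce" instead.
 * cra_ctxsize covers a single key power here; module init grows it
 * when the p64 path, which needs four, is enabled.
 */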
static struct shash_alg ghash_alg = {
	.digestsize		= GHASH_DIGEST_SIZE,
	.init			= ghash_init,
	.update			= ghash_update,
	.final			= ghash_final,
	.setkey			= ghash_setkey,
	.descsize		= sizeof(struct ghash_desc_ctx),

	.base.cra_name		= "ghash",
	.base.cra_driver_name	= "ghash-ce-sync",
	.base.cra_priority	= 300 - 1,
	.base.cra_blocksize	= GHASH_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct ghash_key) + sizeof(u64[2]),
	.base.cra_module	= THIS_MODULE,
};

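/*
 * The ahash below wraps the sync shash via cryptd so that GHASH stays
 * usable in contexts where the NEON unit cannot be touched, e.g. in
 * interrupt context, by deferring the work to a kernel thread.
 */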
static int ghash_async_init(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
	struct ahash_request *cryptd_req = ahash_request_ctx(req);
	struct cryptd_ahash *cryptd_tfm = ctx->cryptd_tfm;
	struct shash_desc *desc = cryptd_shash_desc(cryptd_req);
	struct crypto_shash *child = cryptd_ahash_child(cryptd_tfm);

	desc->tfm = child;
	return crypto_shash_init(desc);
}

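/*
 * update/final/digest run the shash synchronously when the SIMD unit is
 * usable and no requests are already queued on cryptd (so that queued
 * requests are not overtaken); otherwise they defer to the cryptd queue.
 */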
static int ghash_async_update(struct ahash_request *req)
{
	struct ahash_request *cryptd_req = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
	struct cryptd_ahash *cryptd_tfm = ctx->cryptd_tfm;

	if (!crypto_simd_usable() ||
	    (in_atomic() && cryptd_ahash_queued(cryptd_tfm))) {
		memcpy(cryptd_req, req, sizeof(*req));
		ahash_request_set_tfm(cryptd_req, &cryptd_tfm->base);
		return crypto_ahash_update(cryptd_req);
	} else {
		struct shash_desc *desc = cryptd_shash_desc(cryptd_req);

		return shash_ahash_update(req, desc);
	}
}

static int ghash_async_final(struct ahash_request *req)
{
	struct ahash_request *cryptd_req = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
	struct cryptd_ahash *cryptd_tfm = ctx->cryptd_tfm;

	if (!crypto_simd_usable() ||
	    (in_atomic() && cryptd_ahash_queued(cryptd_tfm))) {
		memcpy(cryptd_req, req, sizeof(*req));
		ahash_request_set_tfm(cryptd_req, &cryptd_tfm->base);
		return crypto_ahash_final(cryptd_req);
	} else {
		struct shash_desc *desc = cryptd_shash_desc(cryptd_req);

		return crypto_shash_final(desc, req->result);
	}
}

static int ghash_async_digest(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
	struct ahash_request *cryptd_req = ahash_request_ctx(req);
	struct cryptd_ahash *cryptd_tfm = ctx->cryptd_tfm;

	if (!crypto_simd_usable() ||
	    (in_atomic() && cryptd_ahash_queued(cryptd_tfm))) {
		memcpy(cryptd_req, req, sizeof(*req));
		ahash_request_set_tfm(cryptd_req, &cryptd_tfm->base);
		return crypto_ahash_digest(cryptd_req);
	} else {
		struct shash_desc *desc = cryptd_shash_desc(cryptd_req);
		struct crypto_shash *child = cryptd_ahash_child(cryptd_tfm);

		desc->tfm = child;
		return shash_ahash_digest(req, desc);
	}
}

static int ghash_async_import(struct ahash_request *req, const void *in)
{
	struct ahash_request *cryptd_req = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
	struct shash_desc *desc = cryptd_shash_desc(cryptd_req);

	desc->tfm = cryptd_ahash_child(ctx->cryptd_tfm);

	return crypto_shash_import(desc, in);
}

static int ghash_async_export(struct ahash_request *req, void *out)
{
	struct ahash_request *cryptd_req = ahash_request_ctx(req);
	struct shash_desc *desc = cryptd_shash_desc(cryptd_req);

	return crypto_shash_export(desc, out);
}

static int ghash_async_setkey(struct crypto_ahash *tfm, const u8 *key,
			      unsigned int keylen)
{
	struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
	struct crypto_ahash *child = &ctx->cryptd_tfm->base;

	crypto_ahash_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_ahash_set_flags(child, crypto_ahash_get_flags(tfm)
			       & CRYPTO_TFM_REQ_MASK);
	return crypto_ahash_setkey(child, key, keylen);
}

static int ghash_async_init_tfm(struct crypto_tfm *tfm)
{
	struct cryptd_ahash *cryptd_tfm;
	struct ghash_async_ctx *ctx = crypto_tfm_ctx(tfm);

	cryptd_tfm = cryptd_alloc_ahash("ghash-ce-sync", 0, 0);
	if (IS_ERR(cryptd_tfm))
		return PTR_ERR(cryptd_tfm);
	ctx->cryptd_tfm = cryptd_tfm;
	crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
				 sizeof(struct ahash_request) +
				 crypto_ahash_reqsize(&cryptd_tfm->base));

	return 0;
}

static void ghash_async_exit_tfm(struct crypto_tfm *tfm)
{
	struct ghash_async_ctx *ctx = crypto_tfm_ctx(tfm);

	cryptd_free_ahash(ctx->cryptd_tfm);
}

static struct ahash_alg ghash_async_alg = {
	.init			= ghash_async_init,
	.update			= ghash_async_update,
	.final			= ghash_async_final,
	.setkey			= ghash_async_setkey,
	.digest			= ghash_async_digest,
	.import			= ghash_async_import,
	.export			= ghash_async_export,
	.halg.digestsize	= GHASH_DIGEST_SIZE,
	.halg.statesize		= sizeof(struct ghash_desc_ctx),
	.halg.base		= {
		.cra_name	= "ghash",
		.cra_driver_name = "ghash-ce",
		.cra_priority	= 300,
		.cra_flags	= CRYPTO_ALG_ASYNC,
		.cra_blocksize	= GHASH_BLOCK_SIZE,
		.cra_ctxsize	= sizeof(struct ghash_async_ctx),
		.cra_module	= THIS_MODULE,
		.cra_init	= ghash_async_init_tfm,
		.cra_exit	= ghash_async_exit_tfm,
	},
};

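/*
 * NEON is required even for the baseline vmull.p8 code. PMULL implies
 * the faster p64 path, which also needs room for three further key
 * powers in the key context.
 */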
static int __init ghash_ce_mod_init(void)
{
	int err;

	if (!(elf_hwcap & HWCAP_NEON))
		return -ENODEV;

	if (elf_hwcap2 & HWCAP2_PMULL) {
		ghash_alg.base.cra_ctxsize += 3 * sizeof(u64[2]);
		static_branch_enable(&use_p64);
	}

	err = crypto_register_shash(&ghash_alg);
	if (err)
		return err;
	err = crypto_register_ahash(&ghash_async_alg);
	if (err)
		goto err_shash;

	return 0;

err_shash:
	crypto_unregister_shash(&ghash_alg);
	return err;
}

static void __exit ghash_ce_mod_exit(void)
{
	crypto_unregister_ahash(&ghash_async_alg);
	crypto_unregister_shash(&ghash_alg);
}

module_init(ghash_ce_mod_init);
module_exit(ghash_ce_mod_exit);