// SPDX-License-Identifier: GPL-2.0-only
/*
 * Accelerated GHASH implementation with Intel PCLMULQDQ-NI
 * instructions. This file contains glue code.
 *
 * Copyright (c) 2009 Intel Corp.
 *   Author: Huang Ying <ying.huang@intel.com>
 */

#include <linux/err.h>
#include <linux/module.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/crypto.h>
#include <crypto/algapi.h>
#include <crypto/cryptd.h>
#include <crypto/gf128mul.h>
#include <crypto/internal/hash.h>
#include <crypto/internal/simd.h>
#include <asm/cpu_device_id.h>
#include <asm/simd.h>

#define GHASH_BLOCK_SIZE	16
#define GHASH_DIGEST_SIZE	16

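/*
 * Assembly routines (in the companion ghash-clmulni-intel_asm.S) that do the
 * actual PCLMULQDQ work.  Both operate in place on the 16-byte hash state
 * @dst and must be called between kernel_fpu_begin()/kernel_fpu_end().
 */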
void clmul_ghash_mul(char *dst, const u128 *shash);

void clmul_ghash_update(char *dst, const char *src, unsigned int srclen,
			const u128 *shash);

struct ghash_async_ctx {
	struct cryptd_ahash *cryptd_tfm;
};

struct ghash_ctx {
	u128 shash;
};

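/*
 * Per-request state: @buffer accumulates a partial block, and @bytes counts
 * how many more input bytes are needed to complete it (0 = block boundary).
 */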
struct ghash_desc_ctx {
	u8 buffer[GHASH_BLOCK_SIZE];
	u32 bytes;
};

static int ghash_init(struct shash_desc *desc)
{
	struct ghash_desc_ctx *dctx = shash_desc_ctx(desc);

	memset(dctx, 0, sizeof(*dctx));

	return 0;
}

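/*
 * GHASH maps bits to polynomial coefficients backwards relative to the bit
 * order PCLMULQDQ's carryless multiply works in.  Rather than bit-reflecting
 * every data block, setkey premultiplies the hash key H by x in the
 * bit-reflected representation used by the asm routines; the 0xc2 constant
 * below is the reduction term of the GHASH polynomial
 * x^128 + x^7 + x^2 + x + 1 in that representation.
 */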
static int ghash_setkey(struct crypto_shash *tfm,
			const u8 *key, unsigned int keylen)
{
	struct ghash_ctx *ctx = crypto_shash_ctx(tfm);
	be128 x;
	u64 a, b;

	if (keylen != GHASH_BLOCK_SIZE)
		return -EINVAL;

	/*
	 * perform multiplication by 'x' in GF(2^128); use memcpy rather than
	 * dereferencing @key directly, as the key buffer is not guaranteed
	 * to be suitably aligned for a be128 load
	 */
	memcpy(&x, key, GHASH_BLOCK_SIZE);
	a = be64_to_cpu(x.a);
	b = be64_to_cpu(x.b);

	ctx->shash.a = (b << 1) | (a >> 63);
	ctx->shash.b = (a << 1) | (b >> 63);

	if (a >> 63)
		ctx->shash.b ^= ((u64)0xc2) << 56;

	return 0;
}

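/*
 * Feed @srclen bytes of @src into the hash.  Any previously buffered partial
 * block is completed (and multiplied through) first, full blocks are handed
 * to the asm in one call, and a trailing partial block is XORed into the
 * buffer for later.
 */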
static int ghash_update(struct shash_desc *desc,
			 const u8 *src, unsigned int srclen)
{
	struct ghash_desc_ctx *dctx = shash_desc_ctx(desc);
	struct ghash_ctx *ctx = crypto_shash_ctx(desc->tfm);
	u8 *dst = dctx->buffer;

	kernel_fpu_begin();
	if (dctx->bytes) {
		int n = min(srclen, dctx->bytes);
		u8 *pos = dst + (GHASH_BLOCK_SIZE - dctx->bytes);

		dctx->bytes -= n;
		srclen -= n;

		while (n--)
			*pos++ ^= *src++;

		if (!dctx->bytes)
			clmul_ghash_mul(dst, &ctx->shash);
	}

	clmul_ghash_update(dst, src, srclen, &ctx->shash);
	kernel_fpu_end();

	if (srclen & 0xf) {
		src += srclen - (srclen & 0xf);
		srclen &= 0xf;
		dctx->bytes = GHASH_BLOCK_SIZE - srclen;
		while (srclen--)
			*dst++ ^= *src++;
	}

	return 0;
}

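/*
 * Multiply any remaining buffered bytes through the hash.  The unused tail
 * of the buffer still holds the prior state bytes, which is equivalent to
 * zero-padding the final partial block; the XOR-with-zero loop below is a
 * no-op on the data and merely consumes the @bytes counter.
 */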
static void ghash_flush(struct ghash_ctx *ctx, struct ghash_desc_ctx *dctx)
{
	u8 *dst = dctx->buffer;

	if (dctx->bytes) {
		u8 *tmp = dst + (GHASH_BLOCK_SIZE - dctx->bytes);

		while (dctx->bytes--)
			*tmp++ ^= 0;

		kernel_fpu_begin();
		clmul_ghash_mul(dst, &ctx->shash);
		kernel_fpu_end();
	}

	dctx->bytes = 0;
}

static int ghash_final(struct shash_desc *desc, u8 *dst)
{
	struct ghash_desc_ctx *dctx = shash_desc_ctx(desc);
	struct ghash_ctx *ctx = crypto_shash_ctx(desc->tfm);
	u8 *buf = dctx->buffer;

	ghash_flush(ctx, dctx);
	memcpy(dst, buf, GHASH_BLOCK_SIZE);

	return 0;
}

static struct shash_alg ghash_alg = {
	.digestsize	= GHASH_DIGEST_SIZE,
	.init		= ghash_init,
	.update		= ghash_update,
	.final		= ghash_final,
	.setkey		= ghash_setkey,
	.descsize	= sizeof(struct ghash_desc_ctx),
	.base		= {
		.cra_name		= "__ghash",
		.cra_driver_name	= "__ghash-pclmulqdqni",
		.cra_priority		= 0,
		.cra_flags		= CRYPTO_ALG_INTERNAL,
		.cra_blocksize		= GHASH_BLOCK_SIZE,
		.cra_ctxsize		= sizeof(struct ghash_ctx),
		.cra_module		= THIS_MODULE,
	},
};
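
/*
 * The ahash algorithm below wraps the CRYPTO_ALG_INTERNAL shash above.  When
 * the FPU is usable (and cryptd has no backlog whose ordering must be
 * preserved), requests are handled synchronously by calling into the shash;
 * otherwise they are deferred to cryptd, which runs them in process context
 * where kernel_fpu_begin() is safe.
 */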

static int ghash_async_init(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
	struct ahash_request *cryptd_req = ahash_request_ctx(req);
	struct cryptd_ahash *cryptd_tfm = ctx->cryptd_tfm;
	struct shash_desc *desc = cryptd_shash_desc(cryptd_req);
	struct crypto_shash *child = cryptd_ahash_child(cryptd_tfm);

	desc->tfm = child;
	return crypto_shash_init(desc);
}

static int ghash_async_update(struct ahash_request *req)
{
	struct ahash_request *cryptd_req = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
	struct cryptd_ahash *cryptd_tfm = ctx->cryptd_tfm;

	if (!crypto_simd_usable() ||
	    (in_atomic() && cryptd_ahash_queued(cryptd_tfm))) {
		memcpy(cryptd_req, req, sizeof(*req));
		ahash_request_set_tfm(cryptd_req, &cryptd_tfm->base);
		return crypto_ahash_update(cryptd_req);
	} else {
		struct shash_desc *desc = cryptd_shash_desc(cryptd_req);
		return shash_ahash_update(req, desc);
	}
}

static int ghash_async_final(struct ahash_request *req)
{
	struct ahash_request *cryptd_req = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
	struct cryptd_ahash *cryptd_tfm = ctx->cryptd_tfm;

	if (!crypto_simd_usable() ||
	    (in_atomic() && cryptd_ahash_queued(cryptd_tfm))) {
		memcpy(cryptd_req, req, sizeof(*req));
		ahash_request_set_tfm(cryptd_req, &cryptd_tfm->base);
		return crypto_ahash_final(cryptd_req);
	} else {
		struct shash_desc *desc = cryptd_shash_desc(cryptd_req);
		return crypto_shash_final(desc, req->result);
	}
}

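/*
 * The exported state is just the ghash_desc_ctx; the shash_desc tfm pointer
 * is not part of it, so import must first re-run ghash_async_init() to set
 * desc->tfm before copying the saved state in.
 */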
static int ghash_async_import(struct ahash_request *req, const void *in)
{
	struct ahash_request *cryptd_req = ahash_request_ctx(req);
	struct shash_desc *desc = cryptd_shash_desc(cryptd_req);
	struct ghash_desc_ctx *dctx = shash_desc_ctx(desc);

	ghash_async_init(req);
	memcpy(dctx, in, sizeof(*dctx));
	return 0;
}

static int ghash_async_export(struct ahash_request *req, void *out)
{
	struct ahash_request *cryptd_req = ahash_request_ctx(req);
	struct shash_desc *desc = cryptd_shash_desc(cryptd_req);
	struct ghash_desc_ctx *dctx = shash_desc_ctx(desc);

	memcpy(out, dctx, sizeof(*dctx));
	return 0;
}

static int ghash_async_digest(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
	struct ahash_request *cryptd_req = ahash_request_ctx(req);
	struct cryptd_ahash *cryptd_tfm = ctx->cryptd_tfm;

	if (!crypto_simd_usable() ||
	    (in_atomic() && cryptd_ahash_queued(cryptd_tfm))) {
		memcpy(cryptd_req, req, sizeof(*req));
		ahash_request_set_tfm(cryptd_req, &cryptd_tfm->base);
		return crypto_ahash_digest(cryptd_req);
	} else {
		struct shash_desc *desc = cryptd_shash_desc(cryptd_req);
		struct crypto_shash *child = cryptd_ahash_child(cryptd_tfm);

		desc->tfm = child;
		return shash_ahash_digest(req, desc);
	}
}

static int ghash_async_setkey(struct crypto_ahash *tfm, const u8 *key,
			      unsigned int keylen)
{
	struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
	struct crypto_ahash *child = &ctx->cryptd_tfm->base;

	crypto_ahash_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_ahash_set_flags(child, crypto_ahash_get_flags(tfm)
			       & CRYPTO_TFM_REQ_MASK);
	return crypto_ahash_setkey(child, key, keylen);
}

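/*
 * Bind the wrapper to a cryptd instance of the internal shash.  The request
 * size is enlarged so that each ahash_request can carry the nested cryptd
 * request (and thus the shash_desc) in its private context.
 */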
static int ghash_async_init_tfm(struct crypto_tfm *tfm)
{
	struct cryptd_ahash *cryptd_tfm;
	struct ghash_async_ctx *ctx = crypto_tfm_ctx(tfm);

	cryptd_tfm = cryptd_alloc_ahash("__ghash-pclmulqdqni",
					CRYPTO_ALG_INTERNAL,
					CRYPTO_ALG_INTERNAL);
	if (IS_ERR(cryptd_tfm))
		return PTR_ERR(cryptd_tfm);
	ctx->cryptd_tfm = cryptd_tfm;
	crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
				 sizeof(struct ahash_request) +
				 crypto_ahash_reqsize(&cryptd_tfm->base));

	return 0;
}

static void ghash_async_exit_tfm(struct crypto_tfm *tfm)
{
	struct ghash_async_ctx *ctx = crypto_tfm_ctx(tfm);

	cryptd_free_ahash(ctx->cryptd_tfm);
}

static struct ahash_alg ghash_async_alg = {
	.init		= ghash_async_init,
	.update		= ghash_async_update,
	.final		= ghash_async_final,
	.setkey		= ghash_async_setkey,
	.digest		= ghash_async_digest,
	.export		= ghash_async_export,
	.import		= ghash_async_import,
	.halg = {
		.digestsize	= GHASH_DIGEST_SIZE,
		.statesize	= sizeof(struct ghash_desc_ctx),
		.base = {
			.cra_name		= "ghash",
			.cra_driver_name	= "ghash-clmulni",
			.cra_priority		= 400,
			.cra_ctxsize		= sizeof(struct ghash_async_ctx),
			.cra_flags		= CRYPTO_ALG_ASYNC,
			.cra_blocksize		= GHASH_BLOCK_SIZE,
			.cra_module		= THIS_MODULE,
			.cra_init		= ghash_async_init_tfm,
			.cra_exit		= ghash_async_exit_tfm,
		},
	},
};

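/* Only load (and autoload) on CPUs that advertise PCLMULQDQ. */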
static const struct x86_cpu_id pcmul_cpu_id[] = {
	X86_MATCH_FEATURE(X86_FEATURE_PCLMULQDQ, NULL),
	{}
};
MODULE_DEVICE_TABLE(x86cpu, pcmul_cpu_id);

static int __init ghash_pclmulqdqni_mod_init(void)
{
	int err;

	if (!x86_match_cpu(pcmul_cpu_id))
		return -ENODEV;

	err = crypto_register_shash(&ghash_alg);
	if (err)
		goto err_out;
	err = crypto_register_ahash(&ghash_async_alg);
	if (err)
		goto err_shash;

	return 0;

err_shash:
	crypto_unregister_shash(&ghash_alg);
err_out:
	return err;
}

static void __exit ghash_pclmulqdqni_mod_exit(void)
{
	crypto_unregister_ahash(&ghash_async_alg);
	crypto_unregister_shash(&ghash_alg);
}

module_init(ghash_pclmulqdqni_mod_init);
module_exit(ghash_pclmulqdqni_mod_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("GHASH hash function, accelerated by PCLMULQDQ-NI");
MODULE_ALIAS_CRYPTO("ghash");
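
/*
 * Typical use is indirect, e.g. as the hash inside "gcm(aes)".  A minimal
 * direct use through the ahash API would look roughly like the sketch below
 * (error handling omitted; "ghash" resolves to this driver when it has the
 * highest registered priority):
 *
 *	struct crypto_ahash *tfm = crypto_alloc_ahash("ghash", 0, 0);
 *
 *	crypto_ahash_setkey(tfm, key, GHASH_BLOCK_SIZE);
 *	ahash_request_set_tfm(req, tfm);
 *	ahash_request_set_crypt(req, sgl, digest, datalen);
 *	crypto_ahash_digest(req);
 */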