/*
 * Software async crypto daemon.
 */
#include <crypto/internal/hash.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/skcipher.h>
#include <crypto/cryptd.h>
#include <linux/refcount.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/list.h>
#include <linux/module.h>
#include <linux/scatterlist.h>
#include <linux/sched.h>
#include <linux/slab.h>
#include <linux/workqueue.h>

static unsigned int cryptd_max_cpu_qlen = 1000;
module_param(cryptd_max_cpu_qlen, uint, 0);
MODULE_PARM_DESC(cryptd_max_cpu_qlen, "Set cryptd Max queue depth");
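/*
 * Illustrative note (not from this file): with permissions of 0 the
 * parameter is not exposed in sysfs, so the queue depth can only be set at
 * load time, e.g. "modprobe cryptd cryptd_max_cpu_qlen=500" or, when cryptd
 * is built in, "cryptd.cryptd_max_cpu_qlen=500" on the kernel command line.
 * The value is read once in cryptd_init() below.
 */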

static struct workqueue_struct *cryptd_wq;

struct cryptd_cpu_queue {
	struct crypto_queue queue;
	struct work_struct work;
};

struct cryptd_queue {
	/*
	 * Protected by disabling BH: requests are enqueued from process or
	 * softirq context and dequeued on the same CPU by the per-CPU worker
	 * (cryptd_queue_worker()).
	 */
	struct cryptd_cpu_queue __percpu *cpu_queue;
};

struct cryptd_instance_ctx {
	struct crypto_spawn spawn;
	struct cryptd_queue *queue;
};

struct skcipherd_instance_ctx {
	struct crypto_skcipher_spawn spawn;
	struct cryptd_queue *queue;
};

struct hashd_instance_ctx {
	struct crypto_shash_spawn spawn;
	struct cryptd_queue *queue;
};

struct aead_instance_ctx {
	struct crypto_aead_spawn aead_spawn;
	struct cryptd_queue *queue;
};

struct cryptd_skcipher_ctx {
	refcount_t refcnt;
	struct crypto_sync_skcipher *child;
};

struct cryptd_skcipher_request_ctx {
	crypto_completion_t complete;
};

struct cryptd_hash_ctx {
	refcount_t refcnt;
	struct crypto_shash *child;
};

struct cryptd_hash_request_ctx {
	crypto_completion_t complete;
	struct shash_desc desc;
};

struct cryptd_aead_ctx {
	refcount_t refcnt;
	struct crypto_aead *child;
};

struct cryptd_aead_request_ctx {
	crypto_completion_t complete;
};

static void cryptd_queue_worker(struct work_struct *work);

static int cryptd_init_queue(struct cryptd_queue *queue,
			     unsigned int max_cpu_qlen)
{
	int cpu;
	struct cryptd_cpu_queue *cpu_queue;

	queue->cpu_queue = alloc_percpu(struct cryptd_cpu_queue);
	if (!queue->cpu_queue)
		return -ENOMEM;
	for_each_possible_cpu(cpu) {
		cpu_queue = per_cpu_ptr(queue->cpu_queue, cpu);
		crypto_init_queue(&cpu_queue->queue, max_cpu_qlen);
		INIT_WORK(&cpu_queue->work, cryptd_queue_worker);
	}
	pr_info("cryptd: max_cpu_qlen set to %d\n", max_cpu_qlen);
	return 0;
}

static void cryptd_fini_queue(struct cryptd_queue *queue)
{
	int cpu;
	struct cryptd_cpu_queue *cpu_queue;

	for_each_possible_cpu(cpu) {
		cpu_queue = per_cpu_ptr(queue->cpu_queue, cpu);
		BUG_ON(cpu_queue->queue.qlen);
	}
	free_percpu(queue->cpu_queue);
}

static int cryptd_enqueue_request(struct cryptd_queue *queue,
				  struct crypto_async_request *request)
{
	int err;
	struct cryptd_cpu_queue *cpu_queue;
	refcount_t *refcnt;

	local_bh_disable();
	cpu_queue = this_cpu_ptr(queue->cpu_queue);
	err = crypto_enqueue_request(&cpu_queue->queue, request);

	refcnt = crypto_tfm_ctx(request->tfm);

	if (err == -ENOSPC)
		goto out;

	queue_work_on(smp_processor_id(), cryptd_wq, &cpu_queue->work);

	if (!refcount_read(refcnt))
		goto out;

	refcount_inc(refcnt);

out:
	local_bh_enable();

	return err;
}
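
/*
 * Request flow, for orientation (a summary of the code in this file, not
 * additional behaviour):
 *
 *   1. cryptd_*_enqueue() saves the caller's completion callback in the
 *      request context, points req->base.complete at a cryptd handler
 *      (e.g. cryptd_skcipher_encrypt) and calls cryptd_enqueue_request().
 *   2. cryptd_enqueue_request() places the request on this CPU's queue and
 *      schedules cryptd_queue_worker() on the same CPU.
 *   3. The worker dequeues one request and invokes req->complete(), i.e.
 *      the cryptd handler, which runs the synchronous child algorithm and
 *      then reports the result through the caller's original callback.
 */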

/*
 * Called in workqueue context: complete one queued request (via
 * req->complete, which runs the synchronous child algorithm) and reschedule
 * the worker if more requests remain on this CPU's queue.
 */
static void cryptd_queue_worker(struct work_struct *work)
{
	struct cryptd_cpu_queue *cpu_queue;
	struct crypto_async_request *req, *backlog;

	cpu_queue = container_of(work, struct cryptd_cpu_queue, work);
	/*
	 * Only handle one request at a time to avoid hogging the crypto
	 * workqueue.
	 */
	local_bh_disable();
	backlog = crypto_get_backlog(&cpu_queue->queue);
	req = crypto_dequeue_request(&cpu_queue->queue);
	local_bh_enable();

	if (!req)
		return;

	if (backlog)
		backlog->complete(backlog, -EINPROGRESS);
	req->complete(req, 0);

	if (cpu_queue->queue.qlen)
		queue_work(cryptd_wq, &cpu_queue->work);
}

static inline struct cryptd_queue *cryptd_get_queue(struct crypto_tfm *tfm)
{
	struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
	struct cryptd_instance_ctx *ictx = crypto_instance_ctx(inst);

	return ictx->queue;
}

static void cryptd_type_and_mask(struct crypto_attr_type *algt,
				 u32 *type, u32 *mask)
{
	/*
	 * cryptd is allowed to wrap internal algorithms, but in that case
	 * the resulting cryptd instance will be marked as internal as well.
	 */
	*type = algt->type & CRYPTO_ALG_INTERNAL;
	*mask = algt->mask & CRYPTO_ALG_INTERNAL;

	/* No point in cryptd wrapping an algorithm that is already async. */
	*mask |= CRYPTO_ALG_ASYNC;

	*mask |= crypto_algt_inherited_mask(algt);
}

static int cryptd_init_instance(struct crypto_instance *inst,
				struct crypto_alg *alg)
{
	if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "cryptd(%s)",
		     alg->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
		return -ENAMETOOLONG;

	memcpy(inst->alg.cra_name, alg->cra_name, CRYPTO_MAX_ALG_NAME);

	inst->alg.cra_priority = alg->cra_priority + 50;
	inst->alg.cra_blocksize = alg->cra_blocksize;
	inst->alg.cra_alignmask = alg->cra_alignmask;

	return 0;
}

static int cryptd_skcipher_setkey(struct crypto_skcipher *parent,
				  const u8 *key, unsigned int keylen)
{
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(parent);
	struct crypto_sync_skcipher *child = ctx->child;

	crypto_sync_skcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_sync_skcipher_set_flags(child,
				       crypto_skcipher_get_flags(parent) &
				       CRYPTO_TFM_REQ_MASK);
	return crypto_sync_skcipher_setkey(child, key, keylen);
}

static void cryptd_skcipher_complete(struct skcipher_request *req, int err)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct cryptd_skcipher_request_ctx *rctx = skcipher_request_ctx(req);
	int refcnt = refcount_read(&ctx->refcnt);

	local_bh_disable();
	rctx->complete(&req->base, err);
	local_bh_enable();

	if (err != -EINPROGRESS && refcnt && refcount_dec_and_test(&ctx->refcnt))
		crypto_free_skcipher(tfm);
}

static void cryptd_skcipher_encrypt(struct crypto_async_request *base,
				    int err)
{
	struct skcipher_request *req = skcipher_request_cast(base);
	struct cryptd_skcipher_request_ctx *rctx = skcipher_request_ctx(req);
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_sync_skcipher *child = ctx->child;
	SYNC_SKCIPHER_REQUEST_ON_STACK(subreq, child);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	skcipher_request_set_sync_tfm(subreq, child);
	skcipher_request_set_callback(subreq, CRYPTO_TFM_REQ_MAY_SLEEP,
				      NULL, NULL);
	skcipher_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
				   req->iv);

	err = crypto_skcipher_encrypt(subreq);
	skcipher_request_zero(subreq);

	req->base.complete = rctx->complete;

out:
	cryptd_skcipher_complete(req, err);
}

static void cryptd_skcipher_decrypt(struct crypto_async_request *base,
				    int err)
{
	struct skcipher_request *req = skcipher_request_cast(base);
	struct cryptd_skcipher_request_ctx *rctx = skcipher_request_ctx(req);
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_sync_skcipher *child = ctx->child;
	SYNC_SKCIPHER_REQUEST_ON_STACK(subreq, child);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	skcipher_request_set_sync_tfm(subreq, child);
	skcipher_request_set_callback(subreq, CRYPTO_TFM_REQ_MAY_SLEEP,
				      NULL, NULL);
	skcipher_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
				   req->iv);

	err = crypto_skcipher_decrypt(subreq);
	skcipher_request_zero(subreq);

	req->base.complete = rctx->complete;

out:
	cryptd_skcipher_complete(req, err);
}

static int cryptd_skcipher_enqueue(struct skcipher_request *req,
				   crypto_completion_t compl)
{
	struct cryptd_skcipher_request_ctx *rctx = skcipher_request_ctx(req);
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct cryptd_queue *queue;

	queue = cryptd_get_queue(crypto_skcipher_tfm(tfm));
	rctx->complete = req->base.complete;
	req->base.complete = compl;

	return cryptd_enqueue_request(queue, &req->base);
}

static int cryptd_skcipher_encrypt_enqueue(struct skcipher_request *req)
{
	return cryptd_skcipher_enqueue(req, cryptd_skcipher_encrypt);
}

static int cryptd_skcipher_decrypt_enqueue(struct skcipher_request *req)
{
	return cryptd_skcipher_enqueue(req, cryptd_skcipher_decrypt);
}

static int cryptd_skcipher_init_tfm(struct crypto_skcipher *tfm)
{
	struct skcipher_instance *inst = skcipher_alg_instance(tfm);
	struct skcipherd_instance_ctx *ictx = skcipher_instance_ctx(inst);
	struct crypto_skcipher_spawn *spawn = &ictx->spawn;
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_skcipher *cipher;

	cipher = crypto_spawn_skcipher(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = (struct crypto_sync_skcipher *)cipher;
	crypto_skcipher_set_reqsize(
		tfm, sizeof(struct cryptd_skcipher_request_ctx));
	return 0;
}

static void cryptd_skcipher_exit_tfm(struct crypto_skcipher *tfm)
{
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);

	crypto_free_sync_skcipher(ctx->child);
}

static void cryptd_skcipher_free(struct skcipher_instance *inst)
{
	struct skcipherd_instance_ctx *ctx = skcipher_instance_ctx(inst);

	crypto_drop_skcipher(&ctx->spawn);
	kfree(inst);
}

static int cryptd_create_skcipher(struct crypto_template *tmpl,
				  struct rtattr **tb,
				  struct crypto_attr_type *algt,
				  struct cryptd_queue *queue)
{
	struct skcipherd_instance_ctx *ctx;
	struct skcipher_instance *inst;
	struct skcipher_alg *alg;
	u32 type;
	u32 mask;
	int err;

	cryptd_type_and_mask(algt, &type, &mask);

	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;

	ctx = skcipher_instance_ctx(inst);
	ctx->queue = queue;

	err = crypto_grab_skcipher(&ctx->spawn, skcipher_crypto_instance(inst),
				   crypto_attr_alg_name(tb[1]), type, mask);
	if (err)
		goto err_free_inst;

	alg = crypto_spawn_skcipher_alg(&ctx->spawn);
	err = cryptd_init_instance(skcipher_crypto_instance(inst), &alg->base);
	if (err)
		goto err_free_inst;

	inst->alg.base.cra_flags |= CRYPTO_ALG_ASYNC |
		(alg->base.cra_flags & CRYPTO_ALG_INTERNAL);
	inst->alg.ivsize = crypto_skcipher_alg_ivsize(alg);
	inst->alg.chunksize = crypto_skcipher_alg_chunksize(alg);
	inst->alg.min_keysize = crypto_skcipher_alg_min_keysize(alg);
	inst->alg.max_keysize = crypto_skcipher_alg_max_keysize(alg);

	inst->alg.base.cra_ctxsize = sizeof(struct cryptd_skcipher_ctx);

	inst->alg.init = cryptd_skcipher_init_tfm;
	inst->alg.exit = cryptd_skcipher_exit_tfm;

	inst->alg.setkey = cryptd_skcipher_setkey;
	inst->alg.encrypt = cryptd_skcipher_encrypt_enqueue;
	inst->alg.decrypt = cryptd_skcipher_decrypt_enqueue;

	inst->free = cryptd_skcipher_free;

	err = skcipher_register_instance(tmpl, inst);
	if (err) {
err_free_inst:
		cryptd_skcipher_free(inst);
	}
	return err;
}

static int cryptd_hash_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
	struct hashd_instance_ctx *ictx = crypto_instance_ctx(inst);
	struct crypto_shash_spawn *spawn = &ictx->spawn;
	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(tfm);
	struct crypto_shash *hash;

	hash = crypto_spawn_shash(spawn);
	if (IS_ERR(hash))
		return PTR_ERR(hash);

	ctx->child = hash;
	crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
				 sizeof(struct cryptd_hash_request_ctx) +
				 crypto_shash_descsize(hash));
	return 0;
}

static void cryptd_hash_exit_tfm(struct crypto_tfm *tfm)
{
	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(tfm);

	crypto_free_shash(ctx->child);
}

static int cryptd_hash_setkey(struct crypto_ahash *parent,
			      const u8 *key, unsigned int keylen)
{
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(parent);
	struct crypto_shash *child = ctx->child;

	crypto_shash_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_shash_set_flags(child, crypto_ahash_get_flags(parent) &
				      CRYPTO_TFM_REQ_MASK);
	return crypto_shash_setkey(child, key, keylen);
}

static int cryptd_hash_enqueue(struct ahash_request *req,
			       crypto_completion_t compl)
{
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct cryptd_queue *queue =
		cryptd_get_queue(crypto_ahash_tfm(tfm));

	rctx->complete = req->base.complete;
	req->base.complete = compl;

	return cryptd_enqueue_request(queue, &req->base);
}

static void cryptd_hash_complete(struct ahash_request *req, int err)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
	int refcnt = refcount_read(&ctx->refcnt);

	local_bh_disable();
	rctx->complete(&req->base, err);
	local_bh_enable();

	if (err != -EINPROGRESS && refcnt && refcount_dec_and_test(&ctx->refcnt))
		crypto_free_ahash(tfm);
}

static void cryptd_hash_init(struct crypto_async_request *req_async, int err)
{
	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(req_async->tfm);
	struct crypto_shash *child = ctx->child;
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
	struct shash_desc *desc = &rctx->desc;

	if (unlikely(err == -EINPROGRESS))
		goto out;

	desc->tfm = child;

	err = crypto_shash_init(desc);

	req->base.complete = rctx->complete;

out:
	cryptd_hash_complete(req, err);
}

static int cryptd_hash_init_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_init);
}

static void cryptd_hash_update(struct crypto_async_request *req_async, int err)
{
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx;

	rctx = ahash_request_ctx(req);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	err = shash_ahash_update(req, &rctx->desc);

	req->base.complete = rctx->complete;

out:
	cryptd_hash_complete(req, err);
}

static int cryptd_hash_update_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_update);
}

static void cryptd_hash_final(struct crypto_async_request *req_async, int err)
{
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	err = crypto_shash_final(&rctx->desc, req->result);

	req->base.complete = rctx->complete;

out:
	cryptd_hash_complete(req, err);
}

static int cryptd_hash_final_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_final);
}

static void cryptd_hash_finup(struct crypto_async_request *req_async, int err)
{
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	err = shash_ahash_finup(req, &rctx->desc);

	req->base.complete = rctx->complete;

out:
	cryptd_hash_complete(req, err);
}

static int cryptd_hash_finup_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_finup);
}

static void cryptd_hash_digest(struct crypto_async_request *req_async, int err)
{
	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(req_async->tfm);
	struct crypto_shash *child = ctx->child;
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
	struct shash_desc *desc = &rctx->desc;

	if (unlikely(err == -EINPROGRESS))
		goto out;

	desc->tfm = child;

	err = shash_ahash_digest(req, desc);

	req->base.complete = rctx->complete;

out:
	cryptd_hash_complete(req, err);
}

static int cryptd_hash_digest_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_digest);
}

static int cryptd_hash_export(struct ahash_request *req, void *out)
{
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);

	return crypto_shash_export(&rctx->desc, out);
}

static int cryptd_hash_import(struct ahash_request *req, const void *in)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct shash_desc *desc = cryptd_shash_desc(req);

	desc->tfm = ctx->child;

	return crypto_shash_import(desc, in);
}

static void cryptd_hash_free(struct ahash_instance *inst)
{
	struct hashd_instance_ctx *ctx = ahash_instance_ctx(inst);

	crypto_drop_shash(&ctx->spawn);
	kfree(inst);
}

static int cryptd_create_hash(struct crypto_template *tmpl, struct rtattr **tb,
			      struct crypto_attr_type *algt,
			      struct cryptd_queue *queue)
{
	struct hashd_instance_ctx *ctx;
	struct ahash_instance *inst;
	struct shash_alg *alg;
	u32 type;
	u32 mask;
	int err;

	cryptd_type_and_mask(algt, &type, &mask);

	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;

	ctx = ahash_instance_ctx(inst);
	ctx->queue = queue;

	err = crypto_grab_shash(&ctx->spawn, ahash_crypto_instance(inst),
				crypto_attr_alg_name(tb[1]), type, mask);
	if (err)
		goto err_free_inst;
	alg = crypto_spawn_shash_alg(&ctx->spawn);

	err = cryptd_init_instance(ahash_crypto_instance(inst), &alg->base);
	if (err)
		goto err_free_inst;

	inst->alg.halg.base.cra_flags |= CRYPTO_ALG_ASYNC |
		(alg->base.cra_flags & (CRYPTO_ALG_INTERNAL |
					CRYPTO_ALG_OPTIONAL_KEY));
	inst->alg.halg.digestsize = alg->digestsize;
	inst->alg.halg.statesize = alg->statesize;
	inst->alg.halg.base.cra_ctxsize = sizeof(struct cryptd_hash_ctx);

	inst->alg.halg.base.cra_init = cryptd_hash_init_tfm;
	inst->alg.halg.base.cra_exit = cryptd_hash_exit_tfm;

	inst->alg.init = cryptd_hash_init_enqueue;
	inst->alg.update = cryptd_hash_update_enqueue;
	inst->alg.final = cryptd_hash_final_enqueue;
	inst->alg.finup = cryptd_hash_finup_enqueue;
	inst->alg.export = cryptd_hash_export;
	inst->alg.import = cryptd_hash_import;
	if (crypto_shash_alg_has_setkey(alg))
		inst->alg.setkey = cryptd_hash_setkey;
	inst->alg.digest = cryptd_hash_digest_enqueue;

	inst->free = cryptd_hash_free;

	err = ahash_register_instance(tmpl, inst);
	if (err) {
err_free_inst:
		cryptd_hash_free(inst);
	}
	return err;
}

static int cryptd_aead_setkey(struct crypto_aead *parent,
			      const u8 *key, unsigned int keylen)
{
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(parent);
	struct crypto_aead *child = ctx->child;

	return crypto_aead_setkey(child, key, keylen);
}

static int cryptd_aead_setauthsize(struct crypto_aead *parent,
				   unsigned int authsize)
{
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(parent);
	struct crypto_aead *child = ctx->child;

	return crypto_aead_setauthsize(child, authsize);
}

static void cryptd_aead_crypt(struct aead_request *req,
			      struct crypto_aead *child,
			      int err,
			      int (*crypt)(struct aead_request *req))
{
	struct cryptd_aead_request_ctx *rctx;
	struct cryptd_aead_ctx *ctx;
	crypto_completion_t compl;
	struct crypto_aead *tfm;
	int refcnt;

	rctx = aead_request_ctx(req);
	compl = rctx->complete;

	tfm = crypto_aead_reqtfm(req);

	if (unlikely(err == -EINPROGRESS))
		goto out;
	aead_request_set_tfm(req, child);
	err = crypt(req);

out:
	ctx = crypto_aead_ctx(tfm);
	refcnt = refcount_read(&ctx->refcnt);

	local_bh_disable();
	compl(&req->base, err);
	local_bh_enable();

	if (err != -EINPROGRESS && refcnt && refcount_dec_and_test(&ctx->refcnt))
		crypto_free_aead(tfm);
}

static void cryptd_aead_encrypt(struct crypto_async_request *areq, int err)
{
	struct cryptd_aead_ctx *ctx = crypto_tfm_ctx(areq->tfm);
	struct crypto_aead *child = ctx->child;
	struct aead_request *req;

	req = container_of(areq, struct aead_request, base);
	cryptd_aead_crypt(req, child, err, crypto_aead_alg(child)->encrypt);
}

static void cryptd_aead_decrypt(struct crypto_async_request *areq, int err)
{
	struct cryptd_aead_ctx *ctx = crypto_tfm_ctx(areq->tfm);
	struct crypto_aead *child = ctx->child;
	struct aead_request *req;

	req = container_of(areq, struct aead_request, base);
	cryptd_aead_crypt(req, child, err, crypto_aead_alg(child)->decrypt);
}

static int cryptd_aead_enqueue(struct aead_request *req,
			       crypto_completion_t compl)
{
	struct cryptd_aead_request_ctx *rctx = aead_request_ctx(req);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct cryptd_queue *queue = cryptd_get_queue(crypto_aead_tfm(tfm));

	rctx->complete = req->base.complete;
	req->base.complete = compl;
	return cryptd_enqueue_request(queue, &req->base);
}

static int cryptd_aead_encrypt_enqueue(struct aead_request *req)
{
	return cryptd_aead_enqueue(req, cryptd_aead_encrypt);
}

static int cryptd_aead_decrypt_enqueue(struct aead_request *req)
{
	return cryptd_aead_enqueue(req, cryptd_aead_decrypt);
}

static int cryptd_aead_init_tfm(struct crypto_aead *tfm)
{
	struct aead_instance *inst = aead_alg_instance(tfm);
	struct aead_instance_ctx *ictx = aead_instance_ctx(inst);
	struct crypto_aead_spawn *spawn = &ictx->aead_spawn;
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(tfm);
	struct crypto_aead *cipher;

	cipher = crypto_spawn_aead(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = cipher;
	crypto_aead_set_reqsize(
		tfm, max((unsigned)sizeof(struct cryptd_aead_request_ctx),
			 crypto_aead_reqsize(cipher)));
	return 0;
}

static void cryptd_aead_exit_tfm(struct crypto_aead *tfm)
{
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(tfm);

	crypto_free_aead(ctx->child);
}

static void cryptd_aead_free(struct aead_instance *inst)
{
	struct aead_instance_ctx *ctx = aead_instance_ctx(inst);

	crypto_drop_aead(&ctx->aead_spawn);
	kfree(inst);
}

static int cryptd_create_aead(struct crypto_template *tmpl,
			      struct rtattr **tb,
			      struct crypto_attr_type *algt,
			      struct cryptd_queue *queue)
{
	struct aead_instance_ctx *ctx;
	struct aead_instance *inst;
	struct aead_alg *alg;
	u32 type;
	u32 mask;
	int err;

	cryptd_type_and_mask(algt, &type, &mask);

	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;

	ctx = aead_instance_ctx(inst);
	ctx->queue = queue;

	err = crypto_grab_aead(&ctx->aead_spawn, aead_crypto_instance(inst),
			       crypto_attr_alg_name(tb[1]), type, mask);
	if (err)
		goto err_free_inst;

	alg = crypto_spawn_aead_alg(&ctx->aead_spawn);
	err = cryptd_init_instance(aead_crypto_instance(inst), &alg->base);
	if (err)
		goto err_free_inst;

	inst->alg.base.cra_flags |= CRYPTO_ALG_ASYNC |
		(alg->base.cra_flags & CRYPTO_ALG_INTERNAL);
	inst->alg.base.cra_ctxsize = sizeof(struct cryptd_aead_ctx);

	inst->alg.ivsize = crypto_aead_alg_ivsize(alg);
	inst->alg.maxauthsize = crypto_aead_alg_maxauthsize(alg);

	inst->alg.init = cryptd_aead_init_tfm;
	inst->alg.exit = cryptd_aead_exit_tfm;
	inst->alg.setkey = cryptd_aead_setkey;
	inst->alg.setauthsize = cryptd_aead_setauthsize;
	inst->alg.encrypt = cryptd_aead_encrypt_enqueue;
	inst->alg.decrypt = cryptd_aead_decrypt_enqueue;

	inst->free = cryptd_aead_free;

	err = aead_register_instance(tmpl, inst);
	if (err) {
err_free_inst:
		cryptd_aead_free(inst);
	}
	return err;
}

static struct cryptd_queue queue;

static int cryptd_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct crypto_attr_type *algt;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	switch (algt->type & algt->mask & CRYPTO_ALG_TYPE_MASK) {
	case CRYPTO_ALG_TYPE_SKCIPHER:
		return cryptd_create_skcipher(tmpl, tb, algt, &queue);
	case CRYPTO_ALG_TYPE_HASH:
		return cryptd_create_hash(tmpl, tb, algt, &queue);
	case CRYPTO_ALG_TYPE_AEAD:
		return cryptd_create_aead(tmpl, tb, algt, &queue);
	}

	return -EINVAL;
}

static struct crypto_template cryptd_tmpl = {
	.name = "cryptd",
	.create = cryptd_create,
	.module = THIS_MODULE,
};
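
/*
 * Illustrative use of the template (a sketch, not code from this file):
 * once "cryptd" is registered, an existing algorithm can be wrapped by
 * asking for "cryptd(<driver name>)", e.g.
 *
 *	struct crypto_ahash *tfm;
 *
 *	tfm = crypto_alloc_ahash("cryptd(sha256-generic)", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	...
 *	crypto_free_ahash(tfm);
 *
 * "sha256-generic" is only an example driver name. The helpers below
 * (cryptd_alloc_skcipher() and friends) build the same "cryptd(...)" name
 * internally and additionally initialise the refcount used by the
 * cryptd_*_queued()/cryptd_free_*() API.
 */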

struct cryptd_skcipher *cryptd_alloc_skcipher(const char *alg_name,
					      u32 type, u32 mask)
{
	char cryptd_alg_name[CRYPTO_MAX_ALG_NAME];
	struct cryptd_skcipher_ctx *ctx;
	struct crypto_skcipher *tfm;

	if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME,
		     "cryptd(%s)", alg_name) >= CRYPTO_MAX_ALG_NAME)
		return ERR_PTR(-EINVAL);

	tfm = crypto_alloc_skcipher(cryptd_alg_name, type, mask);
	if (IS_ERR(tfm))
		return ERR_CAST(tfm);

	if (tfm->base.__crt_alg->cra_module != THIS_MODULE) {
		crypto_free_skcipher(tfm);
		return ERR_PTR(-EINVAL);
	}

	ctx = crypto_skcipher_ctx(tfm);
	refcount_set(&ctx->refcnt, 1);

	return container_of(tfm, struct cryptd_skcipher, base);
}
EXPORT_SYMBOL_GPL(cryptd_alloc_skcipher);
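
/*
 * Hedged usage sketch for the skcipher helpers (the driver name below is
 * only an example and may not exist on a given system):
 *
 *	struct cryptd_skcipher *ctfm;
 *	struct crypto_skcipher *child;
 *
 *	ctfm = cryptd_alloc_skcipher("cbc-aes-aesni",
 *				     CRYPTO_ALG_INTERNAL, CRYPTO_ALG_INTERNAL);
 *	if (IS_ERR(ctfm))
 *		return PTR_ERR(ctfm);
 *
 *	child = cryptd_skcipher_child(ctfm);	(synchronous child tfm)
 *	...
 *	cryptd_free_skcipher(ctfm);
 *
 * This is the pattern typically used by SIMD wrappers: call the child
 * directly when the FPU is usable, fall back to the async cryptd instance
 * otherwise.
 */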

struct crypto_skcipher *cryptd_skcipher_child(struct cryptd_skcipher *tfm)
{
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(&tfm->base);

	return &ctx->child->base;
}
EXPORT_SYMBOL_GPL(cryptd_skcipher_child);

bool cryptd_skcipher_queued(struct cryptd_skcipher *tfm)
{
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(&tfm->base);

	return refcount_read(&ctx->refcnt) - 1;
}
EXPORT_SYMBOL_GPL(cryptd_skcipher_queued);

void cryptd_free_skcipher(struct cryptd_skcipher *tfm)
{
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(&tfm->base);

	if (refcount_dec_and_test(&ctx->refcnt))
		crypto_free_skcipher(&tfm->base);
}
EXPORT_SYMBOL_GPL(cryptd_free_skcipher);

struct cryptd_ahash *cryptd_alloc_ahash(const char *alg_name,
					u32 type, u32 mask)
{
	char cryptd_alg_name[CRYPTO_MAX_ALG_NAME];
	struct cryptd_hash_ctx *ctx;
	struct crypto_ahash *tfm;

	if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME,
		     "cryptd(%s)", alg_name) >= CRYPTO_MAX_ALG_NAME)
		return ERR_PTR(-EINVAL);
	tfm = crypto_alloc_ahash(cryptd_alg_name, type, mask);
	if (IS_ERR(tfm))
		return ERR_CAST(tfm);
	if (tfm->base.__crt_alg->cra_module != THIS_MODULE) {
		crypto_free_ahash(tfm);
		return ERR_PTR(-EINVAL);
	}

	ctx = crypto_ahash_ctx(tfm);
	refcount_set(&ctx->refcnt, 1);

	return __cryptd_ahash_cast(tfm);
}
EXPORT_SYMBOL_GPL(cryptd_alloc_ahash);
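
/*
 * Hedged usage sketch for the ahash helpers (the algorithm name is only an
 * example; note that cryptd_alloc_ahash() prepends "cryptd(...)" itself):
 *
 *	struct cryptd_ahash *chash;
 *	struct crypto_shash *child;
 *
 *	chash = cryptd_alloc_ahash("sha256-generic", 0, 0);
 *	if (IS_ERR(chash))
 *		return PTR_ERR(chash);
 *
 *	child = cryptd_ahash_child(chash);	(synchronous shash)
 *	...
 *	cryptd_free_ahash(chash);
 */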

struct crypto_shash *cryptd_ahash_child(struct cryptd_ahash *tfm)
{
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(&tfm->base);

	return ctx->child;
}
EXPORT_SYMBOL_GPL(cryptd_ahash_child);

struct shash_desc *cryptd_shash_desc(struct ahash_request *req)
{
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);

	return &rctx->desc;
}
EXPORT_SYMBOL_GPL(cryptd_shash_desc);

bool cryptd_ahash_queued(struct cryptd_ahash *tfm)
{
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(&tfm->base);

	return refcount_read(&ctx->refcnt) - 1;
}
EXPORT_SYMBOL_GPL(cryptd_ahash_queued);

void cryptd_free_ahash(struct cryptd_ahash *tfm)
{
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(&tfm->base);

	if (refcount_dec_and_test(&ctx->refcnt))
		crypto_free_ahash(&tfm->base);
}
EXPORT_SYMBOL_GPL(cryptd_free_ahash);

struct cryptd_aead *cryptd_alloc_aead(const char *alg_name,
				      u32 type, u32 mask)
{
	char cryptd_alg_name[CRYPTO_MAX_ALG_NAME];
	struct cryptd_aead_ctx *ctx;
	struct crypto_aead *tfm;

	if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME,
		     "cryptd(%s)", alg_name) >= CRYPTO_MAX_ALG_NAME)
		return ERR_PTR(-EINVAL);
	tfm = crypto_alloc_aead(cryptd_alg_name, type, mask);
	if (IS_ERR(tfm))
		return ERR_CAST(tfm);
	if (tfm->base.__crt_alg->cra_module != THIS_MODULE) {
		crypto_free_aead(tfm);
		return ERR_PTR(-EINVAL);
	}

	ctx = crypto_aead_ctx(tfm);
	refcount_set(&ctx->refcnt, 1);

	return __cryptd_aead_cast(tfm);
}
EXPORT_SYMBOL_GPL(cryptd_alloc_aead);
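
/*
 * Hedged usage sketch for the AEAD helpers, following the same pattern as
 * the skcipher and ahash helpers above ("gcm(aes)" is only an example):
 *
 *	struct cryptd_aead *caead;
 *
 *	caead = cryptd_alloc_aead("gcm(aes)", 0, 0);
 *	if (IS_ERR(caead))
 *		return PTR_ERR(caead);
 *
 *	(cryptd_aead_child(caead) returns the wrapped synchronous tfm)
 *	...
 *	cryptd_free_aead(caead);
 */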

struct crypto_aead *cryptd_aead_child(struct cryptd_aead *tfm)
{
	struct cryptd_aead_ctx *ctx;

	ctx = crypto_aead_ctx(&tfm->base);
	return ctx->child;
}
EXPORT_SYMBOL_GPL(cryptd_aead_child);

bool cryptd_aead_queued(struct cryptd_aead *tfm)
{
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(&tfm->base);

	return refcount_read(&ctx->refcnt) - 1;
}
EXPORT_SYMBOL_GPL(cryptd_aead_queued);

void cryptd_free_aead(struct cryptd_aead *tfm)
{
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(&tfm->base);

	if (refcount_dec_and_test(&ctx->refcnt))
		crypto_free_aead(&tfm->base);
}
EXPORT_SYMBOL_GPL(cryptd_free_aead);

static int __init cryptd_init(void)
{
	int err;

	cryptd_wq = alloc_workqueue("cryptd", WQ_MEM_RECLAIM | WQ_CPU_INTENSIVE,
				    1);
	if (!cryptd_wq)
		return -ENOMEM;

	err = cryptd_init_queue(&queue, cryptd_max_cpu_qlen);
	if (err)
		goto err_destroy_wq;

	err = crypto_register_template(&cryptd_tmpl);
	if (err)
		goto err_fini_queue;

	return 0;

err_fini_queue:
	cryptd_fini_queue(&queue);
err_destroy_wq:
	destroy_workqueue(cryptd_wq);
	return err;
}

static void __exit cryptd_exit(void)
{
	destroy_workqueue(cryptd_wq);
	cryptd_fini_queue(&queue);
	crypto_unregister_template(&cryptd_tmpl);
}

subsys_initcall(cryptd_init);
module_exit(cryptd_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Software async crypto daemon");
MODULE_ALIAS_CRYPTO("cryptd");