// SPDX-License-Identifier: GPL-2.0-or-later
/*
 *   Copyright (C) 2019 Samsung Electronics Co., Ltd.
 */

#include <linux/kernel.h>
#include <linux/string.h>
#include <linux/err.h>
#include <linux/slab.h>
#include <linux/wait.h>
#include <linux/sched.h>

#include "glob.h"
#include "crypto_ctx.h"

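/*
 * Pool of reusable crypto contexts. Idle contexts sit on idle_ctx,
 * avail_ctx counts every live context, and callers that find the pool
 * exhausted sleep on ctx_wait until a context is released.
 */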
struct crypto_ctx_list {
    spinlock_t      ctx_lock;
    int         avail_ctx;
    struct list_head    idle_ctx;
    wait_queue_head_t   ctx_wait;
};

static struct crypto_ctx_list ctx_list;

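/* Free an AEAD transform, tolerating a NULL pointer. */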
static inline void free_aead(struct crypto_aead *aead)
{
    if (aead)
        crypto_free_aead(aead);
}

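/* Free a shash descriptor along with the transform it wraps. */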
static void free_shash(struct shash_desc *shash)
{
    if (shash) {
        crypto_free_shash(shash->tfm);
        kfree(shash);
    }
}

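/*
 * Allocate an AEAD transform for the requested id (AES-GCM or AES-CCM).
 * Returns NULL for an unknown id or if the allocation fails.
 */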
static struct crypto_aead *alloc_aead(int id)
{
    struct crypto_aead *tfm = NULL;

    switch (id) {
    case CRYPTO_AEAD_AES_GCM:
        tfm = crypto_alloc_aead("gcm(aes)", 0, 0);
        break;
    case CRYPTO_AEAD_AES_CCM:
        tfm = crypto_alloc_aead("ccm(aes)", 0, 0);
        break;
    default:
        pr_err("Does not support encrypt aead (id : %d)\n", id);
        return NULL;
    }

    if (IS_ERR(tfm)) {
        pr_err("Failed to alloc encrypt aead : %ld\n", PTR_ERR(tfm));
        return NULL;
    }

    return tfm;
}

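/*
 * Allocate a shash transform for the requested id and wrap it in a
 * shash_desc sized for that algorithm. Returns NULL on any failure.
 */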
static struct shash_desc *alloc_shash_desc(int id)
{
    struct crypto_shash *tfm = NULL;
    struct shash_desc *shash;

    switch (id) {
    case CRYPTO_SHASH_HMACMD5:
        tfm = crypto_alloc_shash("hmac(md5)", 0, 0);
        break;
    case CRYPTO_SHASH_HMACSHA256:
        tfm = crypto_alloc_shash("hmac(sha256)", 0, 0);
        break;
    case CRYPTO_SHASH_CMACAES:
        tfm = crypto_alloc_shash("cmac(aes)", 0, 0);
        break;
    case CRYPTO_SHASH_SHA256:
        tfm = crypto_alloc_shash("sha256", 0, 0);
        break;
    case CRYPTO_SHASH_SHA512:
        tfm = crypto_alloc_shash("sha512", 0, 0);
        break;
    default:
        return NULL;
    }

    if (IS_ERR(tfm))
        return NULL;

    shash = kzalloc(sizeof(*shash) + crypto_shash_descsize(tfm),
            GFP_KERNEL);
    if (!shash)
        crypto_free_shash(tfm);
    else
        shash->tfm = tfm;
    return shash;
}

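/* Drop every cached transform held by a context, then the context itself. */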
static void ctx_free(struct ksmbd_crypto_ctx *ctx)
{
    int i;

    for (i = 0; i < CRYPTO_SHASH_MAX; i++)
        free_shash(ctx->desc[i]);
    for (i = 0; i < CRYPTO_AEAD_MAX; i++)
        free_aead(ctx->ccmaes[i]);
    kfree(ctx);
}

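/*
 * Grab an idle context from the pool, or allocate a fresh one while
 * fewer than num_online_cpus() contexts exist. Once that bound is
 * reached (or if allocation fails), sleep until another user releases
 * a context, so this never returns NULL.
 */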
static struct ksmbd_crypto_ctx *ksmbd_find_crypto_ctx(void)
{
    struct ksmbd_crypto_ctx *ctx;

    while (1) {
        spin_lock(&ctx_list.ctx_lock);
        if (!list_empty(&ctx_list.idle_ctx)) {
            ctx = list_entry(ctx_list.idle_ctx.next,
                     struct ksmbd_crypto_ctx,
                     list);
            list_del(&ctx->list);
            spin_unlock(&ctx_list.ctx_lock);
            return ctx;
        }

        if (ctx_list.avail_ctx > num_online_cpus()) {
            spin_unlock(&ctx_list.ctx_lock);
            wait_event(ctx_list.ctx_wait,
                   !list_empty(&ctx_list.idle_ctx));
            continue;
        }

        ctx_list.avail_ctx++;
        spin_unlock(&ctx_list.ctx_lock);

        ctx = kzalloc(sizeof(struct ksmbd_crypto_ctx), GFP_KERNEL);
        if (!ctx) {
            spin_lock(&ctx_list.ctx_lock);
            ctx_list.avail_ctx--;
            spin_unlock(&ctx_list.ctx_lock);
            wait_event(ctx_list.ctx_wait,
                   !list_empty(&ctx_list.idle_ctx));
            continue;
        }
        break;
    }
    return ctx;
}

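/*
 * Put a context back on the idle list and wake one waiter, or free it
 * outright if the pool has already grown past the number of online CPUs.
 */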
void ksmbd_release_crypto_ctx(struct ksmbd_crypto_ctx *ctx)
{
    if (!ctx)
        return;

    spin_lock(&ctx_list.ctx_lock);
    if (ctx_list.avail_ctx <= num_online_cpus()) {
        list_add(&ctx->list, &ctx_list.idle_ctx);
        spin_unlock(&ctx_list.ctx_lock);
        wake_up(&ctx_list.ctx_wait);
        return;
    }

    ctx_list.avail_ctx--;
    spin_unlock(&ctx_list.ctx_lock);
    ctx_free(ctx);
}

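/*
 * Get a context whose shash descriptor for @id is ready, allocating the
 * descriptor on first use. Returns NULL for an invalid id or on
 * allocation failure.
 */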
static struct ksmbd_crypto_ctx *____crypto_shash_ctx_find(int id)
{
    struct ksmbd_crypto_ctx *ctx;

    if (id >= CRYPTO_SHASH_MAX)
        return NULL;

    ctx = ksmbd_find_crypto_ctx();
    if (ctx->desc[id])
        return ctx;

    ctx->desc[id] = alloc_shash_desc(id);
    if (ctx->desc[id])
        return ctx;
    ksmbd_release_crypto_ctx(ctx);
    return NULL;
}

struct ksmbd_crypto_ctx *ksmbd_crypto_ctx_find_hmacmd5(void)
{
    return ____crypto_shash_ctx_find(CRYPTO_SHASH_HMACMD5);
}

struct ksmbd_crypto_ctx *ksmbd_crypto_ctx_find_hmacsha256(void)
{
    return ____crypto_shash_ctx_find(CRYPTO_SHASH_HMACSHA256);
}

struct ksmbd_crypto_ctx *ksmbd_crypto_ctx_find_cmacaes(void)
{
    return ____crypto_shash_ctx_find(CRYPTO_SHASH_CMACAES);
}

struct ksmbd_crypto_ctx *ksmbd_crypto_ctx_find_sha256(void)
{
    return ____crypto_shash_ctx_find(CRYPTO_SHASH_SHA256);
}

struct ksmbd_crypto_ctx *ksmbd_crypto_ctx_find_sha512(void)
{
    return ____crypto_shash_ctx_find(CRYPTO_SHASH_SHA512);
}

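/*
 * AEAD counterpart of ____crypto_shash_ctx_find(): get a context whose
 * AEAD transform for @id is ready, allocating it on first use.
 */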
static struct ksmbd_crypto_ctx *____crypto_aead_ctx_find(int id)
{
    struct ksmbd_crypto_ctx *ctx;

    if (id >= CRYPTO_AEAD_MAX)
        return NULL;

    ctx = ksmbd_find_crypto_ctx();
    if (ctx->ccmaes[id])
        return ctx;

    ctx->ccmaes[id] = alloc_aead(id);
    if (ctx->ccmaes[id])
        return ctx;
    ksmbd_release_crypto_ctx(ctx);
    return NULL;
}

struct ksmbd_crypto_ctx *ksmbd_crypto_ctx_find_gcm(void)
{
    return ____crypto_aead_ctx_find(CRYPTO_AEAD_AES_GCM);
}

struct ksmbd_crypto_ctx *ksmbd_crypto_ctx_find_ccm(void)
{
    return ____crypto_aead_ctx_find(CRYPTO_AEAD_AES_CCM);
}

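/*
 * Free every idle context. Runs at teardown with no remaining users,
 * hence no locking.
 */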
void ksmbd_crypto_destroy(void)
{
    struct ksmbd_crypto_ctx *ctx;

    while (!list_empty(&ctx_list.idle_ctx)) {
        ctx = list_entry(ctx_list.idle_ctx.next,
                 struct ksmbd_crypto_ctx,
                 list);
        list_del(&ctx->list);
        ctx_free(ctx);
    }
}

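/* Initialize the pool and seed it with a single idle context. */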
int ksmbd_crypto_create(void)
{
    struct ksmbd_crypto_ctx *ctx;

    spin_lock_init(&ctx_list.ctx_lock);
    INIT_LIST_HEAD(&ctx_list.idle_ctx);
    init_waitqueue_head(&ctx_list.ctx_wait);
    ctx_list.avail_ctx = 1;

    ctx = kzalloc(sizeof(struct ksmbd_crypto_ctx), GFP_KERNEL);
    if (!ctx)
        return -ENOMEM;
    list_add(&ctx->list, &ctx_list.idle_ctx);
    return 0;
}