Back to home page

OSCL-LXR

 
 

    


0001 // SPDX-License-Identifier: GPL-2.0-only
0002 /*
0003  * AMD Cryptographic Coprocessor (CCP) AES crypto API support
0004  *
0005  * Copyright (C) 2013-2019 Advanced Micro Devices, Inc.
0006  *
0007  * Author: Tom Lendacky <thomas.lendacky@amd.com>
0008  */
0009 
0010 #include <linux/module.h>
0011 #include <linux/sched.h>
0012 #include <linux/delay.h>
0013 #include <linux/scatterlist.h>
0014 #include <linux/crypto.h>
0015 #include <crypto/algapi.h>
0016 #include <crypto/aes.h>
0017 #include <crypto/ctr.h>
0018 #include <crypto/scatterwalk.h>
0019 
0020 #include "ccp-crypto.h"
0021 
0022 static int ccp_aes_complete(struct crypto_async_request *async_req, int ret)
0023 {
0024     struct skcipher_request *req = skcipher_request_cast(async_req);
0025     struct ccp_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
0026     struct ccp_aes_req_ctx *rctx = skcipher_request_ctx(req);
0027 
0028     if (ret)
0029         return ret;
0030 
0031     if (ctx->u.aes.mode != CCP_AES_MODE_ECB)
0032         memcpy(req->iv, rctx->iv, AES_BLOCK_SIZE);
0033 
0034     return 0;
0035 }
0036 
0037 static int ccp_aes_setkey(struct crypto_skcipher *tfm, const u8 *key,
0038               unsigned int key_len)
0039 {
0040     struct ccp_crypto_skcipher_alg *alg = ccp_crypto_skcipher_alg(tfm);
0041     struct ccp_ctx *ctx = crypto_skcipher_ctx(tfm);
0042 
0043     switch (key_len) {
0044     case AES_KEYSIZE_128:
0045         ctx->u.aes.type = CCP_AES_TYPE_128;
0046         break;
0047     case AES_KEYSIZE_192:
0048         ctx->u.aes.type = CCP_AES_TYPE_192;
0049         break;
0050     case AES_KEYSIZE_256:
0051         ctx->u.aes.type = CCP_AES_TYPE_256;
0052         break;
0053     default:
0054         return -EINVAL;
0055     }
0056     ctx->u.aes.mode = alg->mode;
0057     ctx->u.aes.key_len = key_len;
0058 
0059     memcpy(ctx->u.aes.key, key, key_len);
0060     sg_init_one(&ctx->u.aes.key_sg, ctx->u.aes.key, key_len);
0061 
0062     return 0;
0063 }
0064 
0065 static int ccp_aes_crypt(struct skcipher_request *req, bool encrypt)
0066 {
0067     struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
0068     struct ccp_ctx *ctx = crypto_skcipher_ctx(tfm);
0069     struct ccp_aes_req_ctx *rctx = skcipher_request_ctx(req);
0070     struct scatterlist *iv_sg = NULL;
0071     unsigned int iv_len = 0;
0072 
0073     if (!ctx->u.aes.key_len)
0074         return -EINVAL;
0075 
0076     if (((ctx->u.aes.mode == CCP_AES_MODE_ECB) ||
0077          (ctx->u.aes.mode == CCP_AES_MODE_CBC)) &&
0078         (req->cryptlen & (AES_BLOCK_SIZE - 1)))
0079         return -EINVAL;
0080 
0081     if (ctx->u.aes.mode != CCP_AES_MODE_ECB) {
0082         if (!req->iv)
0083             return -EINVAL;
0084 
0085         memcpy(rctx->iv, req->iv, AES_BLOCK_SIZE);
0086         iv_sg = &rctx->iv_sg;
0087         iv_len = AES_BLOCK_SIZE;
0088         sg_init_one(iv_sg, rctx->iv, iv_len);
0089     }
0090 
0091     memset(&rctx->cmd, 0, sizeof(rctx->cmd));
0092     INIT_LIST_HEAD(&rctx->cmd.entry);
0093     rctx->cmd.engine = CCP_ENGINE_AES;
0094     rctx->cmd.u.aes.type = ctx->u.aes.type;
0095     rctx->cmd.u.aes.mode = ctx->u.aes.mode;
0096     rctx->cmd.u.aes.action =
0097         (encrypt) ? CCP_AES_ACTION_ENCRYPT : CCP_AES_ACTION_DECRYPT;
0098     rctx->cmd.u.aes.key = &ctx->u.aes.key_sg;
0099     rctx->cmd.u.aes.key_len = ctx->u.aes.key_len;
0100     rctx->cmd.u.aes.iv = iv_sg;
0101     rctx->cmd.u.aes.iv_len = iv_len;
0102     rctx->cmd.u.aes.src = req->src;
0103     rctx->cmd.u.aes.src_len = req->cryptlen;
0104     rctx->cmd.u.aes.dst = req->dst;
0105 
0106     return ccp_crypto_enqueue_request(&req->base, &rctx->cmd);
0107 }
0108 
/* skcipher .encrypt entry point: submit an encryption to the CCP */
static int ccp_aes_encrypt(struct skcipher_request *req)
{
	return ccp_aes_crypt(req, true);
}
0113 
/* skcipher .decrypt entry point: submit a decryption to the CCP */
static int ccp_aes_decrypt(struct skcipher_request *req)
{
	return ccp_aes_crypt(req, false);
}
0118 
0119 static int ccp_aes_init_tfm(struct crypto_skcipher *tfm)
0120 {
0121     struct ccp_ctx *ctx = crypto_skcipher_ctx(tfm);
0122 
0123     ctx->complete = ccp_aes_complete;
0124     ctx->u.aes.key_len = 0;
0125 
0126     crypto_skcipher_set_reqsize(tfm, sizeof(struct ccp_aes_req_ctx));
0127 
0128     return 0;
0129 }
0130 
0131 static int ccp_aes_rfc3686_complete(struct crypto_async_request *async_req,
0132                     int ret)
0133 {
0134     struct skcipher_request *req = skcipher_request_cast(async_req);
0135     struct ccp_aes_req_ctx *rctx = skcipher_request_ctx(req);
0136 
0137     /* Restore the original pointer */
0138     req->iv = rctx->rfc3686_info;
0139 
0140     return ccp_aes_complete(async_req, ret);
0141 }
0142 
0143 static int ccp_aes_rfc3686_setkey(struct crypto_skcipher *tfm, const u8 *key,
0144                   unsigned int key_len)
0145 {
0146     struct ccp_ctx *ctx = crypto_skcipher_ctx(tfm);
0147 
0148     if (key_len < CTR_RFC3686_NONCE_SIZE)
0149         return -EINVAL;
0150 
0151     key_len -= CTR_RFC3686_NONCE_SIZE;
0152     memcpy(ctx->u.aes.nonce, key + key_len, CTR_RFC3686_NONCE_SIZE);
0153 
0154     return ccp_aes_setkey(tfm, key, key_len);
0155 }
0156 
0157 static int ccp_aes_rfc3686_crypt(struct skcipher_request *req, bool encrypt)
0158 {
0159     struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
0160     struct ccp_ctx *ctx = crypto_skcipher_ctx(tfm);
0161     struct ccp_aes_req_ctx *rctx = skcipher_request_ctx(req);
0162     u8 *iv;
0163 
0164     /* Initialize the CTR block */
0165     iv = rctx->rfc3686_iv;
0166     memcpy(iv, ctx->u.aes.nonce, CTR_RFC3686_NONCE_SIZE);
0167 
0168     iv += CTR_RFC3686_NONCE_SIZE;
0169     memcpy(iv, req->iv, CTR_RFC3686_IV_SIZE);
0170 
0171     iv += CTR_RFC3686_IV_SIZE;
0172     *(__be32 *)iv = cpu_to_be32(1);
0173 
0174     /* Point to the new IV */
0175     rctx->rfc3686_info = req->iv;
0176     req->iv = rctx->rfc3686_iv;
0177 
0178     return ccp_aes_crypt(req, encrypt);
0179 }
0180 
/* skcipher .encrypt entry point for rfc3686(ctr(aes)) */
static int ccp_aes_rfc3686_encrypt(struct skcipher_request *req)
{
	return ccp_aes_rfc3686_crypt(req, true);
}
0185 
/* skcipher .decrypt entry point for rfc3686(ctr(aes)) */
static int ccp_aes_rfc3686_decrypt(struct skcipher_request *req)
{
	return ccp_aes_rfc3686_crypt(req, false);
}
0190 
0191 static int ccp_aes_rfc3686_init_tfm(struct crypto_skcipher *tfm)
0192 {
0193     struct ccp_ctx *ctx = crypto_skcipher_ctx(tfm);
0194 
0195     ctx->complete = ccp_aes_rfc3686_complete;
0196     ctx->u.aes.key_len = 0;
0197 
0198     crypto_skcipher_set_reqsize(tfm, sizeof(struct ccp_aes_req_ctx));
0199 
0200     return 0;
0201 }
0202 
/*
 * Template skcipher_alg for the plain AES modes; ccp_register_aes_alg()
 * copies this and overrides name, driver name, blocksize and ivsize from
 * the per-mode ccp_aes_def entry.
 */
static const struct skcipher_alg ccp_aes_defaults = {
	.setkey			= ccp_aes_setkey,
	.encrypt		= ccp_aes_encrypt,
	.decrypt		= ccp_aes_decrypt,
	.min_keysize		= AES_MIN_KEY_SIZE,
	.max_keysize		= AES_MAX_KEY_SIZE,
	.init			= ccp_aes_init_tfm,

	.base.cra_flags		= CRYPTO_ALG_ASYNC |
				  CRYPTO_ALG_ALLOCATES_MEMORY |
				  CRYPTO_ALG_KERN_DRIVER_ONLY |
				  CRYPTO_ALG_NEED_FALLBACK,
	.base.cra_blocksize	= AES_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct ccp_ctx),
	.base.cra_priority	= CCP_CRA_PRIORITY,
	.base.cra_module	= THIS_MODULE,
};
0220 
/*
 * Template skcipher_alg for rfc3686(ctr(aes)); key sizes include the
 * appended 4-byte nonce.  Copied and specialized by ccp_register_aes_alg().
 */
static const struct skcipher_alg ccp_aes_rfc3686_defaults = {
	.setkey			= ccp_aes_rfc3686_setkey,
	.encrypt		= ccp_aes_rfc3686_encrypt,
	.decrypt		= ccp_aes_rfc3686_decrypt,
	.min_keysize		= AES_MIN_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
	.max_keysize		= AES_MAX_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
	.init			= ccp_aes_rfc3686_init_tfm,

	.base.cra_flags		= CRYPTO_ALG_ASYNC |
				  CRYPTO_ALG_ALLOCATES_MEMORY |
				  CRYPTO_ALG_KERN_DRIVER_ONLY |
				  CRYPTO_ALG_NEED_FALLBACK,
	.base.cra_blocksize	= CTR_RFC3686_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct ccp_ctx),
	.base.cra_priority	= CCP_CRA_PRIORITY,
	.base.cra_module	= THIS_MODULE,
};
0238 
/* Per-mode registration data used to specialize a template skcipher_alg */
struct ccp_aes_def {
	enum ccp_aes_mode mode;		/* CCP AES engine mode */
	unsigned int version;		/* minimum CCP hardware version */
	const char *name;		/* crypto API algorithm name */
	const char *driver_name;	/* driver-specific algorithm name */
	unsigned int blocksize;		/* cra_blocksize override */
	unsigned int ivsize;		/* ivsize override */
	const struct skcipher_alg *alg_defaults;	/* template to copy */
};
0248 
/*
 * Table of AES algorithms to register.  All entries require at least CCP
 * version 3.0 hardware.  Stream-like modes (CFB/OFB/CTR) advertise a
 * blocksize of 1; ECB has no IV.
 */
static struct ccp_aes_def aes_algs[] = {
	{
		.mode		= CCP_AES_MODE_ECB,
		.version	= CCP_VERSION(3, 0),
		.name		= "ecb(aes)",
		.driver_name	= "ecb-aes-ccp",
		.blocksize	= AES_BLOCK_SIZE,
		.ivsize		= 0,
		.alg_defaults	= &ccp_aes_defaults,
	},
	{
		.mode		= CCP_AES_MODE_CBC,
		.version	= CCP_VERSION(3, 0),
		.name		= "cbc(aes)",
		.driver_name	= "cbc-aes-ccp",
		.blocksize	= AES_BLOCK_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.alg_defaults	= &ccp_aes_defaults,
	},
	{
		.mode		= CCP_AES_MODE_CFB,
		.version	= CCP_VERSION(3, 0),
		.name		= "cfb(aes)",
		.driver_name	= "cfb-aes-ccp",
		.blocksize	= 1,
		.ivsize		= AES_BLOCK_SIZE,
		.alg_defaults	= &ccp_aes_defaults,
	},
	{
		.mode		= CCP_AES_MODE_OFB,
		.version	= CCP_VERSION(3, 0),
		.name		= "ofb(aes)",
		.driver_name	= "ofb-aes-ccp",
		.blocksize	= 1,
		.ivsize		= AES_BLOCK_SIZE,
		.alg_defaults	= &ccp_aes_defaults,
	},
	{
		.mode		= CCP_AES_MODE_CTR,
		.version	= CCP_VERSION(3, 0),
		.name		= "ctr(aes)",
		.driver_name	= "ctr-aes-ccp",
		.blocksize	= 1,
		.ivsize		= AES_BLOCK_SIZE,
		.alg_defaults	= &ccp_aes_defaults,
	},
	{
		.mode		= CCP_AES_MODE_CTR,
		.version	= CCP_VERSION(3, 0),
		.name		= "rfc3686(ctr(aes))",
		.driver_name	= "rfc3686-ctr-aes-ccp",
		.blocksize	= 1,
		.ivsize		= CTR_RFC3686_IV_SIZE,
		.alg_defaults	= &ccp_aes_rfc3686_defaults,
	},
};
0305 
0306 static int ccp_register_aes_alg(struct list_head *head,
0307                 const struct ccp_aes_def *def)
0308 {
0309     struct ccp_crypto_skcipher_alg *ccp_alg;
0310     struct skcipher_alg *alg;
0311     int ret;
0312 
0313     ccp_alg = kzalloc(sizeof(*ccp_alg), GFP_KERNEL);
0314     if (!ccp_alg)
0315         return -ENOMEM;
0316 
0317     INIT_LIST_HEAD(&ccp_alg->entry);
0318 
0319     ccp_alg->mode = def->mode;
0320 
0321     /* Copy the defaults and override as necessary */
0322     alg = &ccp_alg->alg;
0323     *alg = *def->alg_defaults;
0324     snprintf(alg->base.cra_name, CRYPTO_MAX_ALG_NAME, "%s", def->name);
0325     snprintf(alg->base.cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s",
0326          def->driver_name);
0327     alg->base.cra_blocksize = def->blocksize;
0328     alg->ivsize = def->ivsize;
0329 
0330     ret = crypto_register_skcipher(alg);
0331     if (ret) {
0332         pr_err("%s skcipher algorithm registration error (%d)\n",
0333                alg->base.cra_name, ret);
0334         kfree(ccp_alg);
0335         return ret;
0336     }
0337 
0338     list_add(&ccp_alg->entry, head);
0339 
0340     return 0;
0341 }
0342 
0343 int ccp_register_aes_algs(struct list_head *head)
0344 {
0345     int i, ret;
0346     unsigned int ccpversion = ccp_version();
0347 
0348     for (i = 0; i < ARRAY_SIZE(aes_algs); i++) {
0349         if (aes_algs[i].version > ccpversion)
0350             continue;
0351         ret = ccp_register_aes_alg(head, &aes_algs[i]);
0352         if (ret)
0353             return ret;
0354     }
0355 
0356     return 0;
0357 }