0001
0002
0003
0004
0005
0006
0007 #include <crypto/aes.h>
0008 #include <crypto/engine.h>
0009 #include <crypto/gcm.h>
0010 #include <crypto/internal/aead.h>
0011 #include <crypto/scatterwalk.h>
0012
0013 #include <linux/dma-mapping.h>
0014 #include <linux/module.h>
0015 #include <linux/of_device.h>
0016 #include <linux/platform_device.h>
0017
0018 #include <linux/firmware/xlnx-zynqmp.h>
0019
#define ZYNQMP_DMA_BIT_MASK 32U /* engine is limited to 32-bit DMA addresses */

#define ZYNQMP_AES_KEY_SIZE AES_KEYSIZE_256 /* hardware does AES-256 only */
#define ZYNQMP_AES_AUTH_SIZE 16U /* full-length GCM authentication tag */
#define ZYNQMP_KEY_SRC_SEL_KEY_LEN 1U /* a 1-byte "key" selects a key source */
#define ZYNQMP_AES_BLK_SIZE 1U /* GCM behaves as a stream cipher */
#define ZYNQMP_AES_MIN_INPUT_BLK_SIZE 4U /* smallest payload the HW accepts */
#define ZYNQMP_AES_WORD_LEN 4U /* payload length must be word-aligned */

/* Status codes returned by the PMU firmware in zynqmp_pm_aes_engine(). */
#define ZYNQMP_AES_GCM_TAG_MISMATCH_ERR 0x01
#define ZYNQMP_AES_WRONG_KEY_SRC_ERR 0x13
#define ZYNQMP_AES_PUF_NOT_PROGRAMMED 0xE300
0032
/* Operation direction, passed verbatim to the firmware in hw_req->op. */
enum zynqmp_aead_op {
	ZYNQMP_AES_DECRYPT = 0,
	ZYNQMP_AES_ENCRYPT
};
0037
/*
 * Key source selector, passed to the firmware in hw_req->keysrc.
 * KUP uses the key material supplied via setkey(); DEV and PUF select
 * keys held in hardware, for which no key material is passed (the
 * cipher path sets hw_req->key = 0 in those cases).
 */
enum zynqmp_aead_keysrc {
	ZYNQMP_AES_KUP_KEY = 0,
	ZYNQMP_AES_DEV_KEY,
	ZYNQMP_AES_PUF_KEY
};
0043
/*
 * Driver-wide context: the registered algorithm, the platform device used
 * for DMA allocations, and the crypto engine that serializes requests to
 * the single hardware AES unit.  One static instance exists (aes_drv_ctx).
 */
struct zynqmp_aead_drv_ctx {
	union {
		struct aead_alg aead;	/* only AEAD (gcm(aes)) is implemented */
	} alg;
	struct device *dev;		/* set once in probe; NULL until then */
	struct crypto_engine *engine;
};
0051
/*
 * Request descriptor handed to the PMU firmware by DMA address (see
 * zynqmp_pm_aes_engine()).  Field order and widths are part of the
 * firmware interface -- do not reorder or resize.
 * All address fields are DMA addresses into the coherent bounce buffer.
 */
struct zynqmp_aead_hw_req {
	u64 src;	/* payload; also the in-place destination */
	u64 iv;		/* 96-bit GCM IV, stored right after the payload */
	u64 key;	/* user (KUP) key address, or 0 for HW-held keys */
	u64 dst;	/* same as src: the transform is in place */
	u64 size;	/* plaintext length (decrypt input minus the tag) */
	u64 op;		/* enum zynqmp_aead_op */
	u64 keysrc;	/* enum zynqmp_aead_keysrc */
};
0061
/* Per-transform (tfm) context. */
struct zynqmp_aead_tfm_ctx {
	struct crypto_engine_ctx engine_ctx;	/* engine callbacks, wired in init() */
	struct device *dev;			/* copied from aes_drv_ctx in init() */
	u8 key[ZYNQMP_AES_KEY_SIZE];		/* KUP (user) key material */
	u8 *iv;		/* NOTE(review): not referenced anywhere in this file -- candidate for removal, confirm */
	u32 keylen;	/* length passed to the last setkey() with real key material */
	u32 authsize;	/* requested tag size; HW only supports 16 (see fallback check) */
	enum zynqmp_aead_keysrc keysrc;
	struct crypto_aead *fbk_cipher;		/* software gcm(aes) fallback */
};
0072
/* Per-request context: only the operation direction needs to be carried. */
struct zynqmp_aead_req_ctx {
	enum zynqmp_aead_op op;
};
0076
0077 static int zynqmp_aes_aead_cipher(struct aead_request *req)
0078 {
0079 struct crypto_aead *aead = crypto_aead_reqtfm(req);
0080 struct zynqmp_aead_tfm_ctx *tfm_ctx = crypto_aead_ctx(aead);
0081 struct zynqmp_aead_req_ctx *rq_ctx = aead_request_ctx(req);
0082 struct device *dev = tfm_ctx->dev;
0083 struct zynqmp_aead_hw_req *hwreq;
0084 dma_addr_t dma_addr_data, dma_addr_hw_req;
0085 unsigned int data_size;
0086 unsigned int status;
0087 int ret;
0088 size_t dma_size;
0089 char *kbuf;
0090 int err;
0091
0092 if (tfm_ctx->keysrc == ZYNQMP_AES_KUP_KEY)
0093 dma_size = req->cryptlen + ZYNQMP_AES_KEY_SIZE
0094 + GCM_AES_IV_SIZE;
0095 else
0096 dma_size = req->cryptlen + GCM_AES_IV_SIZE;
0097
0098 kbuf = dma_alloc_coherent(dev, dma_size, &dma_addr_data, GFP_KERNEL);
0099 if (!kbuf)
0100 return -ENOMEM;
0101
0102 hwreq = dma_alloc_coherent(dev, sizeof(struct zynqmp_aead_hw_req),
0103 &dma_addr_hw_req, GFP_KERNEL);
0104 if (!hwreq) {
0105 dma_free_coherent(dev, dma_size, kbuf, dma_addr_data);
0106 return -ENOMEM;
0107 }
0108
0109 data_size = req->cryptlen;
0110 scatterwalk_map_and_copy(kbuf, req->src, 0, req->cryptlen, 0);
0111 memcpy(kbuf + data_size, req->iv, GCM_AES_IV_SIZE);
0112
0113 hwreq->src = dma_addr_data;
0114 hwreq->dst = dma_addr_data;
0115 hwreq->iv = hwreq->src + data_size;
0116 hwreq->keysrc = tfm_ctx->keysrc;
0117 hwreq->op = rq_ctx->op;
0118
0119 if (hwreq->op == ZYNQMP_AES_ENCRYPT)
0120 hwreq->size = data_size;
0121 else
0122 hwreq->size = data_size - ZYNQMP_AES_AUTH_SIZE;
0123
0124 if (hwreq->keysrc == ZYNQMP_AES_KUP_KEY) {
0125 memcpy(kbuf + data_size + GCM_AES_IV_SIZE,
0126 tfm_ctx->key, ZYNQMP_AES_KEY_SIZE);
0127
0128 hwreq->key = hwreq->src + data_size + GCM_AES_IV_SIZE;
0129 } else {
0130 hwreq->key = 0;
0131 }
0132
0133 ret = zynqmp_pm_aes_engine(dma_addr_hw_req, &status);
0134
0135 if (ret) {
0136 dev_err(dev, "ERROR: AES PM API failed\n");
0137 err = ret;
0138 } else if (status) {
0139 switch (status) {
0140 case ZYNQMP_AES_GCM_TAG_MISMATCH_ERR:
0141 dev_err(dev, "ERROR: Gcm Tag mismatch\n");
0142 break;
0143 case ZYNQMP_AES_WRONG_KEY_SRC_ERR:
0144 dev_err(dev, "ERROR: Wrong KeySrc, enable secure mode\n");
0145 break;
0146 case ZYNQMP_AES_PUF_NOT_PROGRAMMED:
0147 dev_err(dev, "ERROR: PUF is not registered\n");
0148 break;
0149 default:
0150 dev_err(dev, "ERROR: Unknown error\n");
0151 break;
0152 }
0153 err = -status;
0154 } else {
0155 if (hwreq->op == ZYNQMP_AES_ENCRYPT)
0156 data_size = data_size + ZYNQMP_AES_AUTH_SIZE;
0157 else
0158 data_size = data_size - ZYNQMP_AES_AUTH_SIZE;
0159
0160 sg_copy_from_buffer(req->dst, sg_nents(req->dst),
0161 kbuf, data_size);
0162 err = 0;
0163 }
0164
0165 if (kbuf) {
0166 memzero_explicit(kbuf, dma_size);
0167 dma_free_coherent(dev, dma_size, kbuf, dma_addr_data);
0168 }
0169 if (hwreq) {
0170 memzero_explicit(hwreq, sizeof(struct zynqmp_aead_hw_req));
0171 dma_free_coherent(dev, sizeof(struct zynqmp_aead_hw_req),
0172 hwreq, dma_addr_hw_req);
0173 }
0174 return err;
0175 }
0176
0177 static int zynqmp_fallback_check(struct zynqmp_aead_tfm_ctx *tfm_ctx,
0178 struct aead_request *req)
0179 {
0180 int need_fallback = 0;
0181 struct zynqmp_aead_req_ctx *rq_ctx = aead_request_ctx(req);
0182
0183 if (tfm_ctx->authsize != ZYNQMP_AES_AUTH_SIZE)
0184 need_fallback = 1;
0185
0186 if (tfm_ctx->keysrc == ZYNQMP_AES_KUP_KEY &&
0187 tfm_ctx->keylen != ZYNQMP_AES_KEY_SIZE) {
0188 need_fallback = 1;
0189 }
0190 if (req->assoclen != 0 ||
0191 req->cryptlen < ZYNQMP_AES_MIN_INPUT_BLK_SIZE) {
0192 need_fallback = 1;
0193 }
0194 if ((req->cryptlen % ZYNQMP_AES_WORD_LEN) != 0)
0195 need_fallback = 1;
0196
0197 if (rq_ctx->op == ZYNQMP_AES_DECRYPT &&
0198 req->cryptlen <= ZYNQMP_AES_AUTH_SIZE) {
0199 need_fallback = 1;
0200 }
0201 return need_fallback;
0202 }
0203
/*
 * crypto-engine callback: process one dequeued AEAD request.
 *
 * Requests the hardware cannot service (per zynqmp_fallback_check()) are
 * forwarded to the software fallback tfm; everything else goes to
 * zynqmp_aes_aead_cipher().  Always returns 0 to the engine; the
 * per-request status is delivered via crypto_finalize_aead_request().
 */
static int zynqmp_handle_aes_req(struct crypto_engine *engine,
				 void *req)
{
	struct aead_request *areq =
				container_of(req, struct aead_request, base);
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct zynqmp_aead_tfm_ctx *tfm_ctx = crypto_aead_ctx(aead);
	struct zynqmp_aead_req_ctx *rq_ctx = aead_request_ctx(areq);
	/*
	 * NOTE(review): subreq and rq_ctx alias the SAME request-context
	 * memory (reqsize in init() is the max of the two layouts).
	 * rq_ctx->op is read below after the subreq fields are filled in,
	 * which appears to work only because none of the aead_request
	 * setters touch the leading bytes where op lives -- this is
	 * order-sensitive; verify against struct crypto_async_request
	 * before reordering anything here.
	 */
	struct aead_request *subreq = aead_request_ctx(req);
	int need_fallback;
	int err;

	need_fallback = zynqmp_fallback_check(tfm_ctx, areq);

	if (need_fallback) {
		aead_request_set_tfm(subreq, tfm_ctx->fbk_cipher);

		/* Completion is handled synchronously here, not by callback. */
		aead_request_set_callback(subreq, areq->base.flags,
					  NULL, NULL);
		aead_request_set_crypt(subreq, areq->src, areq->dst,
				       areq->cryptlen, areq->iv);
		aead_request_set_ad(subreq, areq->assoclen);
		if (rq_ctx->op == ZYNQMP_AES_ENCRYPT)
			err = crypto_aead_encrypt(subreq);
		else
			err = crypto_aead_decrypt(subreq);
	} else {
		err = zynqmp_aes_aead_cipher(areq);
	}

	crypto_finalize_aead_request(engine, areq, err);
	return 0;
}
0237
/*
 * setkey() handler.  Two forms of "key" are accepted:
 *
 *  - a single byte (ZYNQMP_KEY_SRC_SEL_KEY_LEN): selects a hardware key
 *    source (KUP / device / PUF) instead of providing key material;
 *  - a 32-byte AES-256 key: stored in the context and used as the KUP
 *    (user) key.
 *
 * The key is always forwarded to the software fallback as well, and the
 * fallback's verdict is what this function returns.
 *
 * NOTE(review): for a 1-byte selector the fallback's setkey() will reject
 * the length, so this returns an error even though keysrc was recorded --
 * confirm callers rely on the recorded keysrc despite the error.
 */
static int zynqmp_aes_aead_setkey(struct crypto_aead *aead, const u8 *key,
				  unsigned int keylen)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(aead);
	struct zynqmp_aead_tfm_ctx *tfm_ctx =
			(struct zynqmp_aead_tfm_ctx *)crypto_tfm_ctx(tfm);
	unsigned char keysrc;

	if (keylen == ZYNQMP_KEY_SRC_SEL_KEY_LEN) {
		keysrc = *key;
		if (keysrc == ZYNQMP_AES_KUP_KEY ||
		    keysrc == ZYNQMP_AES_DEV_KEY ||
		    keysrc == ZYNQMP_AES_PUF_KEY) {
			tfm_ctx->keysrc = (enum zynqmp_aead_keysrc)keysrc;
		} else {
			/*
			 * Unknown selector: record the 1-byte length so
			 * zynqmp_fallback_check() routes to the fallback.
			 */
			tfm_ctx->keylen = keylen;
		}
	} else {
		tfm_ctx->keylen = keylen;
		/* Only a full AES-256 key is usable as the KUP key. */
		if (keylen == ZYNQMP_AES_KEY_SIZE) {
			tfm_ctx->keysrc = ZYNQMP_AES_KUP_KEY;
			memcpy(tfm_ctx->key, key, keylen);
		}
	}

	/* Mirror the caller's request flags onto the fallback tfm. */
	tfm_ctx->fbk_cipher->base.crt_flags &= ~CRYPTO_TFM_REQ_MASK;
	tfm_ctx->fbk_cipher->base.crt_flags |= (aead->base.crt_flags &
						CRYPTO_TFM_REQ_MASK);

	return crypto_aead_setkey(tfm_ctx->fbk_cipher, key, keylen);
}
0269
0270 static int zynqmp_aes_aead_setauthsize(struct crypto_aead *aead,
0271 unsigned int authsize)
0272 {
0273 struct crypto_tfm *tfm = crypto_aead_tfm(aead);
0274 struct zynqmp_aead_tfm_ctx *tfm_ctx =
0275 (struct zynqmp_aead_tfm_ctx *)crypto_tfm_ctx(tfm);
0276
0277 tfm_ctx->authsize = authsize;
0278 return crypto_aead_setauthsize(tfm_ctx->fbk_cipher, authsize);
0279 }
0280
0281 static int zynqmp_aes_aead_encrypt(struct aead_request *req)
0282 {
0283 struct zynqmp_aead_drv_ctx *drv_ctx;
0284 struct crypto_aead *aead = crypto_aead_reqtfm(req);
0285 struct aead_alg *alg = crypto_aead_alg(aead);
0286 struct zynqmp_aead_req_ctx *rq_ctx = aead_request_ctx(req);
0287
0288 rq_ctx->op = ZYNQMP_AES_ENCRYPT;
0289 drv_ctx = container_of(alg, struct zynqmp_aead_drv_ctx, alg.aead);
0290
0291 return crypto_transfer_aead_request_to_engine(drv_ctx->engine, req);
0292 }
0293
0294 static int zynqmp_aes_aead_decrypt(struct aead_request *req)
0295 {
0296 struct zynqmp_aead_drv_ctx *drv_ctx;
0297 struct crypto_aead *aead = crypto_aead_reqtfm(req);
0298 struct aead_alg *alg = crypto_aead_alg(aead);
0299 struct zynqmp_aead_req_ctx *rq_ctx = aead_request_ctx(req);
0300
0301 rq_ctx->op = ZYNQMP_AES_DECRYPT;
0302 drv_ctx = container_of(alg, struct zynqmp_aead_drv_ctx, alg.aead);
0303
0304 return crypto_transfer_aead_request_to_engine(drv_ctx->engine, req);
0305 }
0306
/*
 * Transform init: bind the tfm to the probed device, install the
 * crypto-engine request handler, and allocate the software gcm(aes)
 * fallback used for requests the hardware cannot service.
 */
static int zynqmp_aes_aead_init(struct crypto_aead *aead)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(aead);
	struct zynqmp_aead_tfm_ctx *tfm_ctx =
		(struct zynqmp_aead_tfm_ctx *)crypto_tfm_ctx(tfm);
	struct zynqmp_aead_drv_ctx *drv_ctx;
	struct aead_alg *alg = crypto_aead_alg(aead);

	drv_ctx = container_of(alg, struct zynqmp_aead_drv_ctx, alg.aead);
	tfm_ctx->dev = drv_ctx->dev;

	tfm_ctx->engine_ctx.op.do_one_request = zynqmp_handle_aes_req;
	tfm_ctx->engine_ctx.op.prepare_request = NULL;
	tfm_ctx->engine_ctx.op.unprepare_request = NULL;

	/* Same algorithm name, but resolved to a software implementation. */
	tfm_ctx->fbk_cipher = crypto_alloc_aead(drv_ctx->alg.aead.base.cra_name,
						0,
						CRYPTO_ALG_NEED_FALLBACK);

	if (IS_ERR(tfm_ctx->fbk_cipher)) {
		pr_err("%s() Error: failed to allocate fallback for %s\n",
		       __func__, drv_ctx->alg.aead.base.cra_name);
		return PTR_ERR(tfm_ctx->fbk_cipher);
	}

	/*
	 * The request context is used either as a zynqmp_aead_req_ctx or,
	 * on the fallback path, as the fallback's aead_request (see
	 * zynqmp_handle_aes_req()), so size it for the larger of the two.
	 */
	crypto_aead_set_reqsize(aead,
				max(sizeof(struct zynqmp_aead_req_ctx),
				    sizeof(struct aead_request) +
				    crypto_aead_reqsize(tfm_ctx->fbk_cipher)));
	return 0;
}
0338
0339 static void zynqmp_aes_aead_exit(struct crypto_aead *aead)
0340 {
0341 struct crypto_tfm *tfm = crypto_aead_tfm(aead);
0342 struct zynqmp_aead_tfm_ctx *tfm_ctx =
0343 (struct zynqmp_aead_tfm_ctx *)crypto_tfm_ctx(tfm);
0344
0345 if (tfm_ctx->fbk_cipher) {
0346 crypto_free_aead(tfm_ctx->fbk_cipher);
0347 tfm_ctx->fbk_cipher = NULL;
0348 }
0349 memzero_explicit(tfm_ctx, sizeof(struct zynqmp_aead_tfm_ctx));
0350 }
0351
/* The single driver-wide instance; dev and engine are filled in at probe. */
static struct zynqmp_aead_drv_ctx aes_drv_ctx = {
	.alg.aead = {
		.setkey = zynqmp_aes_aead_setkey,
		.setauthsize = zynqmp_aes_aead_setauthsize,
		.encrypt = zynqmp_aes_aead_encrypt,
		.decrypt = zynqmp_aes_aead_decrypt,
		.init = zynqmp_aes_aead_init,
		.exit = zynqmp_aes_aead_exit,
		.ivsize = GCM_AES_IV_SIZE,
		.maxauthsize = ZYNQMP_AES_AUTH_SIZE,
		.base = {
		.cra_name = "gcm(aes)",
		.cra_driver_name = "xilinx-zynqmp-aes-gcm",
		.cra_priority = 200,
		.cra_flags = CRYPTO_ALG_TYPE_AEAD |
			     CRYPTO_ALG_ASYNC |
			     CRYPTO_ALG_ALLOCATES_MEMORY |
			     CRYPTO_ALG_KERN_DRIVER_ONLY |
			     CRYPTO_ALG_NEED_FALLBACK,
		.cra_blocksize = ZYNQMP_AES_BLK_SIZE,	/* 1: GCM is stream-like */
		.cra_ctxsize = sizeof(struct zynqmp_aead_tfm_ctx),
		.cra_module = THIS_MODULE,
		}
	}
};
0377
0378 static int zynqmp_aes_aead_probe(struct platform_device *pdev)
0379 {
0380 struct device *dev = &pdev->dev;
0381 int err;
0382
0383
0384 if (!aes_drv_ctx.dev)
0385 aes_drv_ctx.dev = dev;
0386 else
0387 return -ENODEV;
0388
0389 err = dma_set_mask_and_coherent(dev, DMA_BIT_MASK(ZYNQMP_DMA_BIT_MASK));
0390 if (err < 0) {
0391 dev_err(dev, "No usable DMA configuration\n");
0392 return err;
0393 }
0394
0395 aes_drv_ctx.engine = crypto_engine_alloc_init(dev, 1);
0396 if (!aes_drv_ctx.engine) {
0397 dev_err(dev, "Cannot alloc AES engine\n");
0398 err = -ENOMEM;
0399 goto err_engine;
0400 }
0401
0402 err = crypto_engine_start(aes_drv_ctx.engine);
0403 if (err) {
0404 dev_err(dev, "Cannot start AES engine\n");
0405 goto err_engine;
0406 }
0407
0408 err = crypto_register_aead(&aes_drv_ctx.alg.aead);
0409 if (err < 0) {
0410 dev_err(dev, "Failed to register AEAD alg.\n");
0411 goto err_aead;
0412 }
0413 return 0;
0414
0415 err_aead:
0416 crypto_unregister_aead(&aes_drv_ctx.alg.aead);
0417
0418 err_engine:
0419 if (aes_drv_ctx.engine)
0420 crypto_engine_exit(aes_drv_ctx.engine);
0421
0422 return err;
0423 }
0424
/*
 * Remove: shut down the crypto engine, then unregister the algorithm.
 *
 * NOTE(review): aes_drv_ctx.dev is left set here, so a subsequent probe
 * (e.g. after unbind/bind) returns -ENODEV -- confirm whether rebinding
 * is meant to be supported.
 */
static int zynqmp_aes_aead_remove(struct platform_device *pdev)
{
	crypto_engine_exit(aes_drv_ctx.engine);
	crypto_unregister_aead(&aes_drv_ctx.alg.aead);

	return 0;
}
0432
/* Device-tree match table. */
static const struct of_device_id zynqmp_aes_dt_ids[] = {
	{ .compatible = "xlnx,zynqmp-aes" },
	{ /* sentinel */ }
};
MODULE_DEVICE_TABLE(of, zynqmp_aes_dt_ids);
0438
/* Platform driver glue. */
static struct platform_driver zynqmp_aes_driver = {
	.probe = zynqmp_aes_aead_probe,
	.remove = zynqmp_aes_aead_remove,
	.driver = {
		.name = "zynqmp-aes",
		.of_match_table = zynqmp_aes_dt_ids,
	},
};

module_platform_driver(zynqmp_aes_driver);
MODULE_LICENSE("GPL");