// SPDX-License-Identifier: GPL-2.0-only
/*
 * Intel Keem Bay OCS (Offload and Crypto Subsystem) AES/SM4 crypto driver.
 */

#include <linux/clk.h>
#include <linux/completion.h>
#include <linux/crypto.h>
#include <linux/dma-mapping.h>
#include <linux/interrupt.h>
#include <linux/io.h>
#include <linux/module.h>
#include <linux/of.h>
#include <linux/platform_device.h>
#include <linux/types.h>

#include <crypto/aes.h>
#include <crypto/engine.h>
#include <crypto/gcm.h>
#include <crypto/scatterwalk.h>

#include <crypto/internal/aead.h>
#include <crypto/internal/skcipher.h>

#include "ocs-aes.h"
#define KMB_OCS_PRIORITY	350
#define DRV_NAME		"keembay-ocs-aes"

#define OCS_AES_MIN_KEY_SIZE	16
#define OCS_AES_MAX_KEY_SIZE	32
#define OCS_AES_KEYSIZE_128	16
#define OCS_AES_KEYSIZE_192	24
#define OCS_AES_KEYSIZE_256	32
#define OCS_SM4_KEY_SIZE	16

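/**
 * struct ocs_aes_tctx - OCS AES Transform context.
 * @engine_ctx:		Crypto engine context.
 * @aes_dev:		The OCS AES device.
 * @key:		AES/SM4 key.
 * @key_len:		The length (in bytes) of @key.
 * @cipher:		OCS cipher to use (either AES or SM4).
 * @sw_cipher:		The cipher to use as fallback.
 * @use_fallback:	Whether or not fallback cipher should be used.
 */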
struct ocs_aes_tctx {
	struct crypto_engine_ctx engine_ctx;
	struct ocs_aes_dev *aes_dev;
	u8 key[OCS_AES_KEYSIZE_256];
	unsigned int key_len;
	enum ocs_cipher cipher;
	union {
		struct crypto_sync_skcipher *sk;
		struct crypto_aead *aead;
	} sw_cipher;
	bool use_fallback;
};
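/**
 * struct ocs_aes_rctx - OCS AES Request context.
 * @instruction:	Instruction to be executed (encrypt / decrypt).
 * @mode:		Mode to use (ECB, CBC, CTR, CCM, GCM, CTS).
 * @src_nents:		Number of source SG entries.
 * @dst_nents:		Number of destination SG entries.
 * @src_dma_count:	The number of DMA-mapped entries of the source SG.
 * @dst_dma_count:	The number of DMA-mapped entries of the destination SG.
 * @in_place:		Whether or not this is an in place request, i.e.,
 *			src_sg == dst_sg.
 * @src_dll:		OCS DMA linked list for input data.
 * @dst_dll:		OCS DMA linked list for output data.
 * @last_ct_blk:	Buffer to hold last cipher text block (only used in CBC
 *			mode).
 * @cts_swap:		Whether or not CTS swap must be performed.
 * @aad_src_dll:	OCS DMA linked list for input AAD data.
 * @aad_dst_dll:	OCS DMA linked list for output AAD data.
 * @in_tag:		Buffer to hold input encrypted tag (only used for
 *			CCM/GCM decrypt).
 * @out_tag:		Buffer to hold output encrypted tag (only used for
 *			CCM/GCM encrypt).
 */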
struct ocs_aes_rctx {
	/* Fields common across all modes. */
	enum ocs_instruction instruction;
	enum ocs_mode mode;
	int src_nents;
	int dst_nents;
	int src_dma_count;
	int dst_dma_count;
	bool in_place;
	struct ocs_dll_desc src_dll;
	struct ocs_dll_desc dst_dll;

	/* CBC specific. */
	u8 last_ct_blk[AES_BLOCK_SIZE];

	/* CTS specific. */
	int cts_swap;

	/* CCM/GCM specific. */
	struct ocs_dll_desc aad_src_dll;
	struct ocs_dll_desc aad_dst_dll;
	u8 in_tag[AES_BLOCK_SIZE];

	/* GCM specific. */
	u8 out_tag[AES_BLOCK_SIZE];
};

/* Driver data: list of probed OCS AES devices, protected by @lock. */
struct ocs_aes_drv {
	struct list_head dev_list;
	spinlock_t lock;
};

static struct ocs_aes_drv ocs_aes = {
	.dev_list = LIST_HEAD_INIT(ocs_aes.dev_list),
	.lock = __SPIN_LOCK_UNLOCKED(ocs_aes.lock),
};

static struct ocs_aes_dev *kmb_ocs_aes_find_dev(struct ocs_aes_tctx *tctx)
{
	struct ocs_aes_dev *aes_dev;

	spin_lock(&ocs_aes.lock);

	if (tctx->aes_dev) {
		aes_dev = tctx->aes_dev;
		goto exit;
	}

	/* Only a single OCS device available. */
	aes_dev = list_first_entry(&ocs_aes.dev_list, struct ocs_aes_dev, list);
	tctx->aes_dev = aes_dev;

exit:
	spin_unlock(&ocs_aes.lock);

	return aes_dev;
}
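/*
 * Ensure key is 128-bit or 256-bit for AES or 128-bit for SM4 and an actual
 * key is being passed in.
 *
 * Return: 0 if key is valid, -EINVAL otherwise.
 */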
static int check_key(const u8 *in_key, size_t key_len, enum ocs_cipher cipher)
{
	if (!in_key)
		return -EINVAL;

	/* For AES, only 128-bit and 256-bit keys are supported. */
	if (cipher == OCS_AES && (key_len == OCS_AES_KEYSIZE_128 ||
				  key_len == OCS_AES_KEYSIZE_256))
		return 0;

	/* For SM4, only 128-bit keys are supported. */
	if (cipher == OCS_SM4 && key_len == OCS_AES_KEYSIZE_128)
		return 0;

	/* Everything else is unsupported. */
	return -EINVAL;
}

/* Save key into transformation context. */
static int save_key(struct ocs_aes_tctx *tctx, const u8 *in_key, size_t key_len,
		    enum ocs_cipher cipher)
{
	int ret;

	ret = check_key(in_key, key_len, cipher);
	if (ret)
		return ret;

	memcpy(tctx->key, in_key, key_len);
	tctx->key_len = key_len;
	tctx->cipher = cipher;

	return 0;
}

/* Set skcipher key. */
static int kmb_ocs_sk_set_key(struct crypto_skcipher *tfm, const u8 *in_key,
			      size_t key_len, enum ocs_cipher cipher)
{
	struct ocs_aes_tctx *tctx = crypto_skcipher_ctx(tfm);

	/* Fallback is used for AES with 192-bit key. */
	tctx->use_fallback = (cipher == OCS_AES &&
			      key_len == OCS_AES_KEYSIZE_192);

	if (!tctx->use_fallback)
		return save_key(tctx, in_key, key_len, cipher);

	crypto_sync_skcipher_clear_flags(tctx->sw_cipher.sk,
					 CRYPTO_TFM_REQ_MASK);
	crypto_sync_skcipher_set_flags(tctx->sw_cipher.sk,
				       tfm->base.crt_flags &
				       CRYPTO_TFM_REQ_MASK);

	return crypto_sync_skcipher_setkey(tctx->sw_cipher.sk, in_key, key_len);
}

/* Set AEAD cipher key. */
static int kmb_ocs_aead_set_key(struct crypto_aead *tfm, const u8 *in_key,
				size_t key_len, enum ocs_cipher cipher)
{
	struct ocs_aes_tctx *tctx = crypto_aead_ctx(tfm);

	/* Fallback is used for AES with 192-bit key. */
	tctx->use_fallback = (cipher == OCS_AES &&
			      key_len == OCS_AES_KEYSIZE_192);

	if (!tctx->use_fallback)
		return save_key(tctx, in_key, key_len, cipher);

	crypto_aead_clear_flags(tctx->sw_cipher.aead, CRYPTO_TFM_REQ_MASK);
	crypto_aead_set_flags(tctx->sw_cipher.aead,
			      crypto_aead_get_flags(tfm) & CRYPTO_TFM_REQ_MASK);

	return crypto_aead_setkey(tctx->sw_cipher.aead, in_key, key_len);
}

/* Swap two AES blocks in SG lists. */
static void sg_swap_blocks(struct scatterlist *sgl, unsigned int nents,
			   off_t blk1_offset, off_t blk2_offset)
{
	u8 tmp_buf1[AES_BLOCK_SIZE], tmp_buf2[AES_BLOCK_SIZE];

	/*
	 * No easy way to copy within sg list, so copy both blocks to temporary
	 * buffers first.
	 */
	sg_pcopy_to_buffer(sgl, nents, tmp_buf1, AES_BLOCK_SIZE, blk1_offset);
	sg_pcopy_to_buffer(sgl, nents, tmp_buf2, AES_BLOCK_SIZE, blk2_offset);
	sg_pcopy_from_buffer(sgl, nents, tmp_buf1, AES_BLOCK_SIZE, blk2_offset);
	sg_pcopy_from_buffer(sgl, nents, tmp_buf2, AES_BLOCK_SIZE, blk1_offset);
}

/* Initialize request context to default values. */
static void ocs_aes_init_rctx(struct ocs_aes_rctx *rctx)
{
	/* Zero everything. */
	memset(rctx, 0, sizeof(*rctx));

	/* Set initial value for DMA addresses. */
	rctx->src_dll.dma_addr = DMA_MAPPING_ERROR;
	rctx->dst_dll.dma_addr = DMA_MAPPING_ERROR;
	rctx->aad_src_dll.dma_addr = DMA_MAPPING_ERROR;
	rctx->aad_dst_dll.dma_addr = DMA_MAPPING_ERROR;
}

static int kmb_ocs_sk_validate_input(struct skcipher_request *req,
				     enum ocs_mode mode)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	int iv_size = crypto_skcipher_ivsize(tfm);

	switch (mode) {
	case OCS_MODE_ECB:
		/* Ensure input length is multiple of block size. */
		if (req->cryptlen % AES_BLOCK_SIZE != 0)
			return -EINVAL;

		return 0;

	case OCS_MODE_CBC:
		/* Ensure input length is multiple of block size. */
		if (req->cryptlen % AES_BLOCK_SIZE != 0)
			return -EINVAL;

		/* Ensure IV is present and block size in length. */
		if (!req->iv || iv_size != AES_BLOCK_SIZE)
			return -EINVAL;
		/*
		 * NOTE: Since req->cryptlen == 0 case was already handled in
		 * kmb_ocs_sk_common(), the above two conditions also guarantee
		 * that: cryptlen >= iv_size.
		 */
		return 0;

	case OCS_MODE_CTR:
		/* Ensure IV is present and block size in length. */
		if (!req->iv || iv_size != AES_BLOCK_SIZE)
			return -EINVAL;
		return 0;

	case OCS_MODE_CTS:
		/* Ensure input length >= block size. */
		if (req->cryptlen < AES_BLOCK_SIZE)
			return -EINVAL;

		/* Ensure IV is present and block size in length. */
		if (!req->iv || iv_size != AES_BLOCK_SIZE)
			return -EINVAL;

		return 0;
	default:
		return -EINVAL;
	}
}
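/*
 * Called by encrypt() / decrypt() skcipher functions.
 *
 * Use fallback if needed, otherwise initialize context and enqueue request
 * into engine.
 */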
static int kmb_ocs_sk_common(struct skcipher_request *req,
			     enum ocs_cipher cipher,
			     enum ocs_instruction instruction,
			     enum ocs_mode mode)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct ocs_aes_rctx *rctx = skcipher_request_ctx(req);
	struct ocs_aes_tctx *tctx = crypto_skcipher_ctx(tfm);
	struct ocs_aes_dev *aes_dev;
	int rc;

	if (tctx->use_fallback) {
		SYNC_SKCIPHER_REQUEST_ON_STACK(subreq, tctx->sw_cipher.sk);

		skcipher_request_set_sync_tfm(subreq, tctx->sw_cipher.sk);
		skcipher_request_set_callback(subreq, req->base.flags, NULL,
					      NULL);
		skcipher_request_set_crypt(subreq, req->src, req->dst,
					   req->cryptlen, req->iv);

		if (instruction == OCS_ENCRYPT)
			rc = crypto_skcipher_encrypt(subreq);
		else
			rc = crypto_skcipher_decrypt(subreq);

		skcipher_request_zero(subreq);

		return rc;
	}

	/*
	 * If cryptlen == 0, no processing needed for ECB, CBC and CTR.
	 *
	 * For CTS continue: kmb_ocs_sk_validate_input() will return -EINVAL.
	 */
	if (!req->cryptlen && mode != OCS_MODE_CTS)
		return 0;

	rc = kmb_ocs_sk_validate_input(req, mode);
	if (rc)
		return rc;

	aes_dev = kmb_ocs_aes_find_dev(tctx);
	if (!aes_dev)
		return -ENODEV;

	if (cipher != tctx->cipher)
		return -EINVAL;

	ocs_aes_init_rctx(rctx);
	rctx->instruction = instruction;
	rctx->mode = mode;

	return crypto_transfer_skcipher_request_to_engine(aes_dev->engine, req);
}

static void cleanup_ocs_dma_linked_list(struct device *dev,
					struct ocs_dll_desc *dll)
{
	if (dll->vaddr)
		dma_free_coherent(dev, dll->size, dll->vaddr, dll->dma_addr);
	dll->vaddr = NULL;
	dll->size = 0;
	dll->dma_addr = DMA_MAPPING_ERROR;
}

static void kmb_ocs_sk_dma_cleanup(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct ocs_aes_rctx *rctx = skcipher_request_ctx(req);
	struct ocs_aes_tctx *tctx = crypto_skcipher_ctx(tfm);
	struct device *dev = tctx->aes_dev->dev;

	if (rctx->src_dma_count) {
		dma_unmap_sg(dev, req->src, rctx->src_nents, DMA_TO_DEVICE);
		rctx->src_dma_count = 0;
	}

	if (rctx->dst_dma_count) {
		dma_unmap_sg(dev, req->dst, rctx->dst_nents, rctx->in_place ?
							     DMA_BIDIRECTIONAL :
							     DMA_FROM_DEVICE);
		rctx->dst_dma_count = 0;
	}

	/* Clean up OCS DMA linked lists. */
	cleanup_ocs_dma_linked_list(dev, &rctx->src_dll);
	cleanup_ocs_dma_linked_list(dev, &rctx->dst_dll);
}

static int kmb_ocs_sk_prepare_inplace(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct ocs_aes_rctx *rctx = skcipher_request_ctx(req);
	struct ocs_aes_tctx *tctx = crypto_skcipher_ctx(tfm);
	int iv_size = crypto_skcipher_ivsize(tfm);
	int rc;

	/*
	 * For CBC decrypt, save last block (iv) to last_ct_blk buffer.
	 *
	 * Note: if we are here, we already checked that cryptlen >= iv_size
	 * and iv_size == AES_BLOCK_SIZE (i.e., the size of last_ct_blk); see
	 * kmb_ocs_sk_validate_input().
	 */
	if (rctx->mode == OCS_MODE_CBC && rctx->instruction == OCS_DECRYPT)
		scatterwalk_map_and_copy(rctx->last_ct_blk, req->src,
					 req->cryptlen - iv_size, iv_size, 0);

	/* For CTS decrypt, swap last two blocks if needed. */
	if (rctx->cts_swap && rctx->instruction == OCS_DECRYPT)
		sg_swap_blocks(req->dst, rctx->dst_nents,
			       req->cryptlen - AES_BLOCK_SIZE,
			       req->cryptlen - (2 * AES_BLOCK_SIZE));

	/* src and dst buffers are the same, use bidirectional DMA mapping. */
	rctx->dst_dma_count = dma_map_sg(tctx->aes_dev->dev, req->dst,
					 rctx->dst_nents, DMA_BIDIRECTIONAL);
	if (rctx->dst_dma_count == 0) {
		dev_err(tctx->aes_dev->dev, "Failed to map destination sg\n");
		return -ENOMEM;
	}

	/* Create DST linked list. */
	rc = ocs_create_linked_list_from_sg(tctx->aes_dev, req->dst,
					    rctx->dst_dma_count, &rctx->dst_dll,
					    req->cryptlen, 0);
	if (rc)
		return rc;

	/*
	 * If descriptor creation was successful, set the src_dll.dma_addr to
	 * the value of dst_dll.dma_addr, as we do in-place AES operation on
	 * the src.
	 */
	rctx->src_dll.dma_addr = rctx->dst_dll.dma_addr;

	return 0;
}

static int kmb_ocs_sk_prepare_notinplace(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct ocs_aes_rctx *rctx = skcipher_request_ctx(req);
	struct ocs_aes_tctx *tctx = crypto_skcipher_ctx(tfm);
	int rc;

	rctx->src_nents = sg_nents_for_len(req->src, req->cryptlen);
	if (rctx->src_nents < 0)
		return -EBADMSG;

	/* Map SRC SG. */
	rctx->src_dma_count = dma_map_sg(tctx->aes_dev->dev, req->src,
					 rctx->src_nents, DMA_TO_DEVICE);
	if (rctx->src_dma_count == 0) {
		dev_err(tctx->aes_dev->dev, "Failed to map source sg\n");
		return -ENOMEM;
	}

	/* Create SRC linked list. */
	rc = ocs_create_linked_list_from_sg(tctx->aes_dev, req->src,
					    rctx->src_dma_count, &rctx->src_dll,
					    req->cryptlen, 0);
	if (rc)
		return rc;

	/* Map DST SG. */
	rctx->dst_dma_count = dma_map_sg(tctx->aes_dev->dev, req->dst,
					 rctx->dst_nents, DMA_FROM_DEVICE);
	if (rctx->dst_dma_count == 0) {
		dev_err(tctx->aes_dev->dev, "Failed to map destination sg\n");
		return -ENOMEM;
	}

	/* Create DST linked list. */
	rc = ocs_create_linked_list_from_sg(tctx->aes_dev, req->dst,
					    rctx->dst_dma_count, &rctx->dst_dll,
					    req->cryptlen, 0);
	if (rc)
		return rc;

	/* If this is not a CTS decrypt operation with swapping, we are done. */
	if (!(rctx->cts_swap && rctx->instruction == OCS_DECRYPT))
		return 0;

	/*
	 * Otherwise, we have to copy src to dst (as we cannot modify src).
	 * Use OCS AES bypass mode to copy src to dst via DMA.
	 *
	 * NOTE: for anything other than small data sizes this is rather
	 * inefficient.
	 */
	rc = ocs_aes_bypass_op(tctx->aes_dev, rctx->dst_dll.dma_addr,
			       rctx->src_dll.dma_addr, req->cryptlen);
	if (rc)
		return rc;

	/*
	 * Now dst == src, so clean up what we did so far and use in_place
	 * logic.
	 */
	kmb_ocs_sk_dma_cleanup(req);
	rctx->in_place = true;

	return kmb_ocs_sk_prepare_inplace(req);
}

static int kmb_ocs_sk_run(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct ocs_aes_rctx *rctx = skcipher_request_ctx(req);
	struct ocs_aes_tctx *tctx = crypto_skcipher_ctx(tfm);
	struct ocs_aes_dev *aes_dev = tctx->aes_dev;
	int iv_size = crypto_skcipher_ivsize(tfm);
	int rc;

	rctx->dst_nents = sg_nents_for_len(req->dst, req->cryptlen);
	if (rctx->dst_nents < 0)
		return -EBADMSG;

	/*
	 * If 2 blocks or greater, and multiple of block size, swap last two
	 * blocks to be compatible with other crypto API CTS implementations:
	 * OCS mode uses CBC-CS2, whereas other crypto API implementations use
	 * CBC-CS3.
	 * CBC-CS2 and CBC-CS3 are defined by:
	 * https://nvlpubs.nist.gov/nistpubs/Legacy/SP/nistspecialpublication800-38a-add.pdf
	 */
	rctx->cts_swap = (rctx->mode == OCS_MODE_CTS &&
			  req->cryptlen > AES_BLOCK_SIZE &&
			  req->cryptlen % AES_BLOCK_SIZE == 0);

	rctx->in_place = (req->src == req->dst);

	if (rctx->in_place)
		rc = kmb_ocs_sk_prepare_inplace(req);
	else
		rc = kmb_ocs_sk_prepare_notinplace(req);

	if (rc)
		goto error;

	rc = ocs_aes_op(aes_dev, rctx->mode, tctx->cipher, rctx->instruction,
			rctx->dst_dll.dma_addr, rctx->src_dll.dma_addr,
			req->cryptlen, req->iv, iv_size);
	if (rc)
		goto error;

	/* Clean-up DMA before further processing output. */
	kmb_ocs_sk_dma_cleanup(req);

	/* For CTS Encrypt, swap last two blocks, if needed. */
	if (rctx->cts_swap && rctx->instruction == OCS_ENCRYPT) {
		sg_swap_blocks(req->dst, rctx->dst_nents,
			       req->cryptlen - AES_BLOCK_SIZE,
			       req->cryptlen - (2 * AES_BLOCK_SIZE));
		return 0;
	}

	/* For CBC copy IV to req->iv. */
	if (rctx->mode == OCS_MODE_CBC) {
		/* CBC encrypt case. */
		if (rctx->instruction == OCS_ENCRYPT) {
			scatterwalk_map_and_copy(req->iv, req->dst,
						 req->cryptlen - iv_size,
						 iv_size, 0);
			return 0;
		}
		/* CBC decrypt case. */
		if (rctx->in_place)
			memcpy(req->iv, rctx->last_ct_blk, iv_size);
		else
			scatterwalk_map_and_copy(req->iv, req->src,
						 req->cryptlen - iv_size,
						 iv_size, 0);
		return 0;
	}

	/* For all other modes there's nothing to do. */
	return 0;

error:
	kmb_ocs_sk_dma_cleanup(req);

	return rc;
}

static int kmb_ocs_aead_validate_input(struct aead_request *req,
				       enum ocs_instruction instruction,
				       enum ocs_mode mode)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	int tag_size = crypto_aead_authsize(tfm);
	int iv_size = crypto_aead_ivsize(tfm);

	/* For decrypt, cryptlen == len(PT) + len(tag). */
	if (instruction == OCS_DECRYPT && req->cryptlen < tag_size)
		return -EINVAL;

	/* IV is mandatory. */
	if (!req->iv)
		return -EINVAL;

	switch (mode) {
	case OCS_MODE_GCM:
		if (iv_size != GCM_AES_IV_SIZE)
			return -EINVAL;

		return 0;

	case OCS_MODE_CCM:
		/* Ensure IV is block size in length. */
		if (iv_size != AES_BLOCK_SIZE)
			return -EINVAL;

		return 0;

	default:
		return -EINVAL;
	}
}
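/*
 * Called by encrypt() / decrypt() aead functions.
 *
 * Use fallback if needed, otherwise initialize context and enqueue request
 * into engine.
 */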
static int kmb_ocs_aead_common(struct aead_request *req,
			       enum ocs_cipher cipher,
			       enum ocs_instruction instruction,
			       enum ocs_mode mode)
{
	struct ocs_aes_tctx *tctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
	struct ocs_aes_rctx *rctx = aead_request_ctx(req);
	struct ocs_aes_dev *dd;
	int rc;

	if (tctx->use_fallback) {
		/* The fallback request is stored in the request context. */
		struct aead_request *subreq = aead_request_ctx(req);

		aead_request_set_tfm(subreq, tctx->sw_cipher.aead);
		aead_request_set_callback(subreq, req->base.flags,
					  req->base.complete, req->base.data);
		aead_request_set_crypt(subreq, req->src, req->dst,
				       req->cryptlen, req->iv);
		aead_request_set_ad(subreq, req->assoclen);
		rc = crypto_aead_setauthsize(tctx->sw_cipher.aead,
					     crypto_aead_authsize(crypto_aead_reqtfm(req)));
		if (rc)
			return rc;

		return (instruction == OCS_ENCRYPT) ?
		       crypto_aead_encrypt(subreq) :
		       crypto_aead_decrypt(subreq);
	}

	rc = kmb_ocs_aead_validate_input(req, instruction, mode);
	if (rc)
		return rc;

	dd = kmb_ocs_aes_find_dev(tctx);
	if (!dd)
		return -ENODEV;

	if (cipher != tctx->cipher)
		return -EINVAL;

	ocs_aes_init_rctx(rctx);
	rctx->instruction = instruction;
	rctx->mode = mode;

	return crypto_transfer_aead_request_to_engine(dd->engine, req);
}

static void kmb_ocs_aead_dma_cleanup(struct aead_request *req)
{
	struct ocs_aes_tctx *tctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
	struct ocs_aes_rctx *rctx = aead_request_ctx(req);
	struct device *dev = tctx->aes_dev->dev;

	if (rctx->src_dma_count) {
		dma_unmap_sg(dev, req->src, rctx->src_nents, DMA_TO_DEVICE);
		rctx->src_dma_count = 0;
	}

	if (rctx->dst_dma_count) {
		dma_unmap_sg(dev, req->dst, rctx->dst_nents, rctx->in_place ?
							     DMA_BIDIRECTIONAL :
							     DMA_FROM_DEVICE);
		rctx->dst_dma_count = 0;
	}

	cleanup_ocs_dma_linked_list(dev, &rctx->src_dll);
	cleanup_ocs_dma_linked_list(dev, &rctx->dst_dll);
	cleanup_ocs_dma_linked_list(dev, &rctx->aad_src_dll);
	cleanup_ocs_dma_linked_list(dev, &rctx->aad_dst_dll);
}
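/**
 * kmb_ocs_aead_dma_prepare() - Do DMA mapping for AEAD processing.
 * @req:		The AEAD request being processed.
 * @src_dll_size:	Where to store the length of the data mapped by src_dll.
 *
 * Do the following:
 * - DMA map req->src and req->dst;
 * - Initialize the following OCS DMA linked lists: rctx->src_dll,
 *   rctx->dst_dll, rctx->aad_src_dll and rctx->aad_dst_dll.
 *
 * Return: 0 on success, negative error code otherwise.
 */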
static int kmb_ocs_aead_dma_prepare(struct aead_request *req, u32 *src_dll_size)
{
	struct ocs_aes_tctx *tctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
	const int tag_size = crypto_aead_authsize(crypto_aead_reqtfm(req));
	struct ocs_aes_rctx *rctx = aead_request_ctx(req);
	u32 in_size;	/* The length of the data to be mapped by src_dll. */
	u32 out_size;	/* The length of the data mapped by dst_dll. */
	u32 dst_size;	/* The length of the data in dst_sg. */
	int rc;

	/* Get number of entries in input data SG list. */
	rctx->src_nents = sg_nents_for_len(req->src,
					   req->assoclen + req->cryptlen);
	if (rctx->src_nents < 0)
		return -EBADMSG;

	if (rctx->instruction == OCS_DECRYPT) {
		/*
		 * For decrypt:
		 * - src sg list is:		AAD|CT|tag
		 * - dst sg list expects:	AAD|PT
		 *
		 * in_size == len(CT); out_size == len(PT)
		 */

		/* req->cryptlen includes both CT and tag. */
		in_size = req->cryptlen - tag_size;

		/* out_size = PT size == CT size */
		out_size = in_size;

		/* len(dst) == len(AAD) + len(PT) */
		dst_size = req->assoclen + out_size;

		/*
		 * Copy tag from source SG list to 'in_tag' buffer.
		 *
		 * Note: this needs to be done here, before DMA mapping src_sg.
		 */
		sg_pcopy_to_buffer(req->src, rctx->src_nents, rctx->in_tag,
				   tag_size, req->assoclen + in_size);

	} else { /* OCS_ENCRYPT */
		/*
		 * For encrypt:
		 * - src sg list is:		AAD|PT
		 * - dst sg list expects:	AAD|CT|tag
		 */
		/* in_size == len(PT) */
		in_size = req->cryptlen;

		/*
		 * In CCM mode the OCS engine appends the tag to the CT, but in
		 * GCM mode the tag must be read from the tag registers and
		 * appended manually below.
		 */
		out_size = (rctx->mode == OCS_MODE_CCM) ? in_size + tag_size :
							  in_size;
		/* len(dst) == len(AAD) + len(CT) + len(tag) */
		dst_size = req->assoclen + in_size + tag_size;
	}
	*src_dll_size = in_size;

	/* Get number of entries in output data SG list. */
	rctx->dst_nents = sg_nents_for_len(req->dst, dst_size);
	if (rctx->dst_nents < 0)
		return -EBADMSG;

	rctx->in_place = (req->src == req->dst) ? 1 : 0;

	/* Map destination; use bidirectional mapping for dst if src == dst. */
	rctx->dst_dma_count = dma_map_sg(tctx->aes_dev->dev, req->dst,
					 rctx->dst_nents,
					 rctx->in_place ? DMA_BIDIRECTIONAL :
							  DMA_FROM_DEVICE);
	if (rctx->dst_dma_count == 0 && rctx->dst_nents != 0) {
		dev_err(tctx->aes_dev->dev, "Failed to map destination sg\n");
		return -ENOMEM;
	}

	/* Create AAD DST list: maps dst[0:AAD_SIZE-1]. */
	rc = ocs_create_linked_list_from_sg(tctx->aes_dev, req->dst,
					    rctx->dst_dma_count,
					    &rctx->aad_dst_dll, req->assoclen,
					    0);
	if (rc)
		return rc;

	/* Create DST list: maps dst[AAD_SIZE:out_size]. */
	rc = ocs_create_linked_list_from_sg(tctx->aes_dev, req->dst,
					    rctx->dst_dma_count, &rctx->dst_dll,
					    out_size, req->assoclen);
	if (rc)
		return rc;

	if (rctx->in_place) {
		/*
		 * If this is not CCM encrypt, then use the same DMA lists for
		 * source as for destination.
		 */
		if (!(rctx->mode == OCS_MODE_CCM &&
		      rctx->instruction == OCS_ENCRYPT)) {
			/*
			 * SRC and DST are the same, so re-use the same DMA
			 * addresses (to avoid allocating new DMA lists
			 * identical to the dst ones).
			 */
			rctx->src_dll.dma_addr = rctx->dst_dll.dma_addr;
			rctx->aad_src_dll.dma_addr = rctx->aad_dst_dll.dma_addr;

			return 0;
		}

		/*
		 * For CCM encrypt the input and output linked lists contain
		 * different amounts of data, so we need to create different
		 * SRC and AAD SRC lists, even for the in-place case.
		 */
		rc = ocs_create_linked_list_from_sg(tctx->aes_dev, req->dst,
						    rctx->dst_dma_count,
						    &rctx->aad_src_dll,
						    req->assoclen, 0);
		if (rc)
			return rc;
		rc = ocs_create_linked_list_from_sg(tctx->aes_dev, req->dst,
						    rctx->dst_dma_count,
						    &rctx->src_dll, in_size,
						    req->assoclen);
		if (rc)
			return rc;

		return 0;
	}

	/* Not in-place case: map source SG. */
	rctx->src_dma_count = dma_map_sg(tctx->aes_dev->dev, req->src,
					 rctx->src_nents, DMA_TO_DEVICE);
	if (rctx->src_dma_count == 0 && rctx->src_nents != 0) {
		dev_err(tctx->aes_dev->dev, "Failed to map source sg\n");
		return -ENOMEM;
	}

	/* Create AAD SRC list. */
	rc = ocs_create_linked_list_from_sg(tctx->aes_dev, req->src,
					    rctx->src_dma_count,
					    &rctx->aad_src_dll,
					    req->assoclen, 0);
	if (rc)
		return rc;

	/* Create SRC list. */
	rc = ocs_create_linked_list_from_sg(tctx->aes_dev, req->src,
					    rctx->src_dma_count,
					    &rctx->src_dll, in_size,
					    req->assoclen);
	if (rc)
		return rc;

	if (req->assoclen == 0)
		return 0;

	/* Copy AAD from src sg to dst sg using OCS DMA. */
	rc = ocs_aes_bypass_op(tctx->aes_dev, rctx->aad_dst_dll.dma_addr,
			       rctx->aad_src_dll.dma_addr, req->cryptlen);
	if (rc)
		dev_err(tctx->aes_dev->dev,
			"Failed to copy source AAD to destination AAD\n");

	return rc;
}

static int kmb_ocs_aead_run(struct aead_request *req)
{
	struct ocs_aes_tctx *tctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
	const int tag_size = crypto_aead_authsize(crypto_aead_reqtfm(req));
	struct ocs_aes_rctx *rctx = aead_request_ctx(req);
	u32 in_size;	/* The length of the data mapped by src_dll. */
	int rc;

	rc = kmb_ocs_aead_dma_prepare(req, &in_size);
	if (rc)
		goto exit;

	/* For CCM, we just call the OCS processing and we are done. */
	if (rctx->mode == OCS_MODE_CCM) {
		rc = ocs_aes_ccm_op(tctx->aes_dev, tctx->cipher,
				    rctx->instruction, rctx->dst_dll.dma_addr,
				    rctx->src_dll.dma_addr, in_size,
				    req->iv,
				    rctx->aad_src_dll.dma_addr, req->assoclen,
				    rctx->in_tag, tag_size);
		goto exit;
	}

	/* GCM case; invoke OCS processing. */
	rc = ocs_aes_gcm_op(tctx->aes_dev, tctx->cipher,
			    rctx->instruction,
			    rctx->dst_dll.dma_addr,
			    rctx->src_dll.dma_addr, in_size,
			    req->iv,
			    rctx->aad_src_dll.dma_addr, req->assoclen,
			    rctx->out_tag, tag_size);
	if (rc)
		goto exit;

	/* For GCM decrypt, we have to compare in_tag with out_tag. */
	if (rctx->instruction == OCS_DECRYPT) {
		rc = memcmp(rctx->in_tag, rctx->out_tag, tag_size) ?
		     -EBADMSG : 0;
		goto exit;
	}

	/* For GCM encrypt, we must manually copy out_tag to DST sg. */

	/* Clean-up must be called before the sg_pcopy_from_buffer() below. */
	kmb_ocs_aead_dma_cleanup(req);

	/* Copy tag to destination sg after AAD and CT. */
	sg_pcopy_from_buffer(req->dst, rctx->dst_nents, rctx->out_tag,
			     tag_size, req->assoclen + req->cryptlen);

	/* Done. */
	return 0;

exit:
	kmb_ocs_aead_dma_cleanup(req);

	return rc;
}

static int kmb_ocs_aes_sk_do_one_request(struct crypto_engine *engine,
					 void *areq)
{
	struct skcipher_request *req =
			container_of(areq, struct skcipher_request, base);
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct ocs_aes_tctx *tctx = crypto_skcipher_ctx(tfm);
	int err;

	if (!tctx->aes_dev) {
		err = -ENODEV;
		goto exit;
	}

	err = ocs_aes_set_key(tctx->aes_dev, tctx->key_len, tctx->key,
			      tctx->cipher);
	if (err)
		goto exit;

	err = kmb_ocs_sk_run(req);

exit:
	crypto_finalize_skcipher_request(engine, req, err);

	return 0;
}

static int kmb_ocs_aes_aead_do_one_request(struct crypto_engine *engine,
					   void *areq)
{
	struct aead_request *req = container_of(areq,
						struct aead_request, base);
	struct ocs_aes_tctx *tctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
	int err;

	if (!tctx->aes_dev)
		return -ENODEV;

	err = ocs_aes_set_key(tctx->aes_dev, tctx->key_len, tctx->key,
			      tctx->cipher);
	if (err)
		goto exit;

	err = kmb_ocs_aead_run(req);

exit:
	crypto_finalize_aead_request(tctx->aes_dev->engine, req, err);

	return 0;
}

static int kmb_ocs_aes_set_key(struct crypto_skcipher *tfm, const u8 *in_key,
			       unsigned int key_len)
{
	return kmb_ocs_sk_set_key(tfm, in_key, key_len, OCS_AES);
}

static int kmb_ocs_aes_aead_set_key(struct crypto_aead *tfm, const u8 *in_key,
				    unsigned int key_len)
{
	return kmb_ocs_aead_set_key(tfm, in_key, key_len, OCS_AES);
}

#ifdef CONFIG_CRYPTO_DEV_KEEMBAY_OCS_AES_SM4_ECB
static int kmb_ocs_aes_ecb_encrypt(struct skcipher_request *req)
{
	return kmb_ocs_sk_common(req, OCS_AES, OCS_ENCRYPT, OCS_MODE_ECB);
}

static int kmb_ocs_aes_ecb_decrypt(struct skcipher_request *req)
{
	return kmb_ocs_sk_common(req, OCS_AES, OCS_DECRYPT, OCS_MODE_ECB);
}
#endif

static int kmb_ocs_aes_cbc_encrypt(struct skcipher_request *req)
{
	return kmb_ocs_sk_common(req, OCS_AES, OCS_ENCRYPT, OCS_MODE_CBC);
}

static int kmb_ocs_aes_cbc_decrypt(struct skcipher_request *req)
{
	return kmb_ocs_sk_common(req, OCS_AES, OCS_DECRYPT, OCS_MODE_CBC);
}

static int kmb_ocs_aes_ctr_encrypt(struct skcipher_request *req)
{
	return kmb_ocs_sk_common(req, OCS_AES, OCS_ENCRYPT, OCS_MODE_CTR);
}

static int kmb_ocs_aes_ctr_decrypt(struct skcipher_request *req)
{
	return kmb_ocs_sk_common(req, OCS_AES, OCS_DECRYPT, OCS_MODE_CTR);
}

#ifdef CONFIG_CRYPTO_DEV_KEEMBAY_OCS_AES_SM4_CTS
static int kmb_ocs_aes_cts_encrypt(struct skcipher_request *req)
{
	return kmb_ocs_sk_common(req, OCS_AES, OCS_ENCRYPT, OCS_MODE_CTS);
}

static int kmb_ocs_aes_cts_decrypt(struct skcipher_request *req)
{
	return kmb_ocs_sk_common(req, OCS_AES, OCS_DECRYPT, OCS_MODE_CTS);
}
#endif

static int kmb_ocs_aes_gcm_encrypt(struct aead_request *req)
{
	return kmb_ocs_aead_common(req, OCS_AES, OCS_ENCRYPT, OCS_MODE_GCM);
}

static int kmb_ocs_aes_gcm_decrypt(struct aead_request *req)
{
	return kmb_ocs_aead_common(req, OCS_AES, OCS_DECRYPT, OCS_MODE_GCM);
}

static int kmb_ocs_aes_ccm_encrypt(struct aead_request *req)
{
	return kmb_ocs_aead_common(req, OCS_AES, OCS_ENCRYPT, OCS_MODE_CCM);
}

static int kmb_ocs_aes_ccm_decrypt(struct aead_request *req)
{
	return kmb_ocs_aead_common(req, OCS_AES, OCS_DECRYPT, OCS_MODE_CCM);
}

static int kmb_ocs_sm4_set_key(struct crypto_skcipher *tfm, const u8 *in_key,
			       unsigned int key_len)
{
	return kmb_ocs_sk_set_key(tfm, in_key, key_len, OCS_SM4);
}

static int kmb_ocs_sm4_aead_set_key(struct crypto_aead *tfm, const u8 *in_key,
				    unsigned int key_len)
{
	return kmb_ocs_aead_set_key(tfm, in_key, key_len, OCS_SM4);
}

#ifdef CONFIG_CRYPTO_DEV_KEEMBAY_OCS_AES_SM4_ECB
static int kmb_ocs_sm4_ecb_encrypt(struct skcipher_request *req)
{
	return kmb_ocs_sk_common(req, OCS_SM4, OCS_ENCRYPT, OCS_MODE_ECB);
}

static int kmb_ocs_sm4_ecb_decrypt(struct skcipher_request *req)
{
	return kmb_ocs_sk_common(req, OCS_SM4, OCS_DECRYPT, OCS_MODE_ECB);
}
#endif

static int kmb_ocs_sm4_cbc_encrypt(struct skcipher_request *req)
{
	return kmb_ocs_sk_common(req, OCS_SM4, OCS_ENCRYPT, OCS_MODE_CBC);
}

static int kmb_ocs_sm4_cbc_decrypt(struct skcipher_request *req)
{
	return kmb_ocs_sk_common(req, OCS_SM4, OCS_DECRYPT, OCS_MODE_CBC);
}

static int kmb_ocs_sm4_ctr_encrypt(struct skcipher_request *req)
{
	return kmb_ocs_sk_common(req, OCS_SM4, OCS_ENCRYPT, OCS_MODE_CTR);
}

static int kmb_ocs_sm4_ctr_decrypt(struct skcipher_request *req)
{
	return kmb_ocs_sk_common(req, OCS_SM4, OCS_DECRYPT, OCS_MODE_CTR);
}

#ifdef CONFIG_CRYPTO_DEV_KEEMBAY_OCS_AES_SM4_CTS
static int kmb_ocs_sm4_cts_encrypt(struct skcipher_request *req)
{
	return kmb_ocs_sk_common(req, OCS_SM4, OCS_ENCRYPT, OCS_MODE_CTS);
}

static int kmb_ocs_sm4_cts_decrypt(struct skcipher_request *req)
{
	return kmb_ocs_sk_common(req, OCS_SM4, OCS_DECRYPT, OCS_MODE_CTS);
}
#endif

static int kmb_ocs_sm4_gcm_encrypt(struct aead_request *req)
{
	return kmb_ocs_aead_common(req, OCS_SM4, OCS_ENCRYPT, OCS_MODE_GCM);
}

static int kmb_ocs_sm4_gcm_decrypt(struct aead_request *req)
{
	return kmb_ocs_aead_common(req, OCS_SM4, OCS_DECRYPT, OCS_MODE_GCM);
}

static int kmb_ocs_sm4_ccm_encrypt(struct aead_request *req)
{
	return kmb_ocs_aead_common(req, OCS_SM4, OCS_ENCRYPT, OCS_MODE_CCM);
}

static int kmb_ocs_sm4_ccm_decrypt(struct aead_request *req)
{
	return kmb_ocs_aead_common(req, OCS_SM4, OCS_DECRYPT, OCS_MODE_CCM);
}

static inline int ocs_common_init(struct ocs_aes_tctx *tctx)
{
	tctx->engine_ctx.op.prepare_request = NULL;
	tctx->engine_ctx.op.do_one_request = kmb_ocs_aes_sk_do_one_request;
	tctx->engine_ctx.op.unprepare_request = NULL;

	return 0;
}

static int ocs_aes_init_tfm(struct crypto_skcipher *tfm)
{
	const char *alg_name = crypto_tfm_alg_name(&tfm->base);
	struct ocs_aes_tctx *tctx = crypto_skcipher_ctx(tfm);
	struct crypto_sync_skcipher *blk;

	/* Set fallback cipher in case it will be needed. */
	blk = crypto_alloc_sync_skcipher(alg_name, 0, CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(blk))
		return PTR_ERR(blk);

	tctx->sw_cipher.sk = blk;

	crypto_skcipher_set_reqsize(tfm, sizeof(struct ocs_aes_rctx));

	return ocs_common_init(tctx);
}

static int ocs_sm4_init_tfm(struct crypto_skcipher *tfm)
{
	struct ocs_aes_tctx *tctx = crypto_skcipher_ctx(tfm);

	crypto_skcipher_set_reqsize(tfm, sizeof(struct ocs_aes_rctx));

	return ocs_common_init(tctx);
}

static inline void clear_key(struct ocs_aes_tctx *tctx)
{
	memzero_explicit(tctx->key, OCS_AES_KEYSIZE_256);

	/* Zero key registers if set. */
	if (tctx->aes_dev)
		ocs_aes_set_key(tctx->aes_dev, OCS_AES_KEYSIZE_256,
				tctx->key, OCS_AES);
}

static void ocs_exit_tfm(struct crypto_skcipher *tfm)
{
	struct ocs_aes_tctx *tctx = crypto_skcipher_ctx(tfm);

	clear_key(tctx);

	if (tctx->sw_cipher.sk) {
		crypto_free_sync_skcipher(tctx->sw_cipher.sk);
		tctx->sw_cipher.sk = NULL;
	}
}

static inline int ocs_common_aead_init(struct ocs_aes_tctx *tctx)
{
	tctx->engine_ctx.op.prepare_request = NULL;
	tctx->engine_ctx.op.do_one_request = kmb_ocs_aes_aead_do_one_request;
	tctx->engine_ctx.op.unprepare_request = NULL;

	return 0;
}

static int ocs_aes_aead_cra_init(struct crypto_aead *tfm)
{
	const char *alg_name = crypto_tfm_alg_name(&tfm->base);
	struct ocs_aes_tctx *tctx = crypto_aead_ctx(tfm);
	struct crypto_aead *blk;

	/* Set fallback cipher in case it will be needed. */
	blk = crypto_alloc_aead(alg_name, 0, CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(blk))
		return PTR_ERR(blk);

	tctx->sw_cipher.aead = blk;

	crypto_aead_set_reqsize(tfm,
				max(sizeof(struct ocs_aes_rctx),
				    (sizeof(struct aead_request) +
				     crypto_aead_reqsize(tctx->sw_cipher.aead))));

	return ocs_common_aead_init(tctx);
}

static int kmb_ocs_aead_ccm_setauthsize(struct crypto_aead *tfm,
					unsigned int authsize)
{
	switch (authsize) {
	case 4:
	case 6:
	case 8:
	case 10:
	case 12:
	case 14:
	case 16:
		return 0;
	default:
		return -EINVAL;
	}
}

static int kmb_ocs_aead_gcm_setauthsize(struct crypto_aead *tfm,
					unsigned int authsize)
{
	return crypto_gcm_check_authsize(authsize);
}

static int ocs_sm4_aead_cra_init(struct crypto_aead *tfm)
{
	struct ocs_aes_tctx *tctx = crypto_aead_ctx(tfm);

	crypto_aead_set_reqsize(tfm, sizeof(struct ocs_aes_rctx));

	return ocs_common_aead_init(tctx);
}

static void ocs_aead_cra_exit(struct crypto_aead *tfm)
{
	struct ocs_aes_tctx *tctx = crypto_aead_ctx(tfm);

	clear_key(tctx);

	if (tctx->sw_cipher.aead) {
		crypto_free_aead(tctx->sw_cipher.aead);
		tctx->sw_cipher.aead = NULL;
	}
}

static struct skcipher_alg algs[] = {
#ifdef CONFIG_CRYPTO_DEV_KEEMBAY_OCS_AES_SM4_ECB
	{
		.base.cra_name = "ecb(aes)",
		.base.cra_driver_name = "ecb-aes-keembay-ocs",
		.base.cra_priority = KMB_OCS_PRIORITY,
		.base.cra_flags = CRYPTO_ALG_ASYNC |
				  CRYPTO_ALG_KERN_DRIVER_ONLY |
				  CRYPTO_ALG_NEED_FALLBACK,
		.base.cra_blocksize = AES_BLOCK_SIZE,
		.base.cra_ctxsize = sizeof(struct ocs_aes_tctx),
		.base.cra_module = THIS_MODULE,
		.base.cra_alignmask = 0,

		.min_keysize = OCS_AES_MIN_KEY_SIZE,
		.max_keysize = OCS_AES_MAX_KEY_SIZE,
		.setkey = kmb_ocs_aes_set_key,
		.encrypt = kmb_ocs_aes_ecb_encrypt,
		.decrypt = kmb_ocs_aes_ecb_decrypt,
		.init = ocs_aes_init_tfm,
		.exit = ocs_exit_tfm,
	},
#endif
	{
		.base.cra_name = "cbc(aes)",
		.base.cra_driver_name = "cbc-aes-keembay-ocs",
		.base.cra_priority = KMB_OCS_PRIORITY,
		.base.cra_flags = CRYPTO_ALG_ASYNC |
				  CRYPTO_ALG_KERN_DRIVER_ONLY |
				  CRYPTO_ALG_NEED_FALLBACK,
		.base.cra_blocksize = AES_BLOCK_SIZE,
		.base.cra_ctxsize = sizeof(struct ocs_aes_tctx),
		.base.cra_module = THIS_MODULE,
		.base.cra_alignmask = 0,

		.min_keysize = OCS_AES_MIN_KEY_SIZE,
		.max_keysize = OCS_AES_MAX_KEY_SIZE,
		.ivsize = AES_BLOCK_SIZE,
		.setkey = kmb_ocs_aes_set_key,
		.encrypt = kmb_ocs_aes_cbc_encrypt,
		.decrypt = kmb_ocs_aes_cbc_decrypt,
		.init = ocs_aes_init_tfm,
		.exit = ocs_exit_tfm,
	},
	{
		.base.cra_name = "ctr(aes)",
		.base.cra_driver_name = "ctr-aes-keembay-ocs",
		.base.cra_priority = KMB_OCS_PRIORITY,
		.base.cra_flags = CRYPTO_ALG_ASYNC |
				  CRYPTO_ALG_KERN_DRIVER_ONLY |
				  CRYPTO_ALG_NEED_FALLBACK,
		.base.cra_blocksize = 1,
		.base.cra_ctxsize = sizeof(struct ocs_aes_tctx),
		.base.cra_module = THIS_MODULE,
		.base.cra_alignmask = 0,

		.min_keysize = OCS_AES_MIN_KEY_SIZE,
		.max_keysize = OCS_AES_MAX_KEY_SIZE,
		.ivsize = AES_BLOCK_SIZE,
		.setkey = kmb_ocs_aes_set_key,
		.encrypt = kmb_ocs_aes_ctr_encrypt,
		.decrypt = kmb_ocs_aes_ctr_decrypt,
		.init = ocs_aes_init_tfm,
		.exit = ocs_exit_tfm,
	},
#ifdef CONFIG_CRYPTO_DEV_KEEMBAY_OCS_AES_SM4_CTS
	{
		.base.cra_name = "cts(cbc(aes))",
		.base.cra_driver_name = "cts-aes-keembay-ocs",
		.base.cra_priority = KMB_OCS_PRIORITY,
		.base.cra_flags = CRYPTO_ALG_ASYNC |
				  CRYPTO_ALG_KERN_DRIVER_ONLY |
				  CRYPTO_ALG_NEED_FALLBACK,
		.base.cra_blocksize = AES_BLOCK_SIZE,
		.base.cra_ctxsize = sizeof(struct ocs_aes_tctx),
		.base.cra_module = THIS_MODULE,
		.base.cra_alignmask = 0,

		.min_keysize = OCS_AES_MIN_KEY_SIZE,
		.max_keysize = OCS_AES_MAX_KEY_SIZE,
		.ivsize = AES_BLOCK_SIZE,
		.setkey = kmb_ocs_aes_set_key,
		.encrypt = kmb_ocs_aes_cts_encrypt,
		.decrypt = kmb_ocs_aes_cts_decrypt,
		.init = ocs_aes_init_tfm,
		.exit = ocs_exit_tfm,
	},
#endif
#ifdef CONFIG_CRYPTO_DEV_KEEMBAY_OCS_AES_SM4_ECB
	{
		.base.cra_name = "ecb(sm4)",
		.base.cra_driver_name = "ecb-sm4-keembay-ocs",
		.base.cra_priority = KMB_OCS_PRIORITY,
		.base.cra_flags = CRYPTO_ALG_ASYNC |
				  CRYPTO_ALG_KERN_DRIVER_ONLY,
		.base.cra_blocksize = AES_BLOCK_SIZE,
		.base.cra_ctxsize = sizeof(struct ocs_aes_tctx),
		.base.cra_module = THIS_MODULE,
		.base.cra_alignmask = 0,

		.min_keysize = OCS_SM4_KEY_SIZE,
		.max_keysize = OCS_SM4_KEY_SIZE,
		.setkey = kmb_ocs_sm4_set_key,
		.encrypt = kmb_ocs_sm4_ecb_encrypt,
		.decrypt = kmb_ocs_sm4_ecb_decrypt,
		.init = ocs_sm4_init_tfm,
		.exit = ocs_exit_tfm,
	},
#endif
	{
		.base.cra_name = "cbc(sm4)",
		.base.cra_driver_name = "cbc-sm4-keembay-ocs",
		.base.cra_priority = KMB_OCS_PRIORITY,
		.base.cra_flags = CRYPTO_ALG_ASYNC |
				  CRYPTO_ALG_KERN_DRIVER_ONLY,
		.base.cra_blocksize = AES_BLOCK_SIZE,
		.base.cra_ctxsize = sizeof(struct ocs_aes_tctx),
		.base.cra_module = THIS_MODULE,
		.base.cra_alignmask = 0,

		.min_keysize = OCS_SM4_KEY_SIZE,
		.max_keysize = OCS_SM4_KEY_SIZE,
		.ivsize = AES_BLOCK_SIZE,
		.setkey = kmb_ocs_sm4_set_key,
		.encrypt = kmb_ocs_sm4_cbc_encrypt,
		.decrypt = kmb_ocs_sm4_cbc_decrypt,
		.init = ocs_sm4_init_tfm,
		.exit = ocs_exit_tfm,
	},
	{
		.base.cra_name = "ctr(sm4)",
		.base.cra_driver_name = "ctr-sm4-keembay-ocs",
		.base.cra_priority = KMB_OCS_PRIORITY,
		.base.cra_flags = CRYPTO_ALG_ASYNC |
				  CRYPTO_ALG_KERN_DRIVER_ONLY,
		.base.cra_blocksize = 1,
		.base.cra_ctxsize = sizeof(struct ocs_aes_tctx),
		.base.cra_module = THIS_MODULE,
		.base.cra_alignmask = 0,

		.min_keysize = OCS_SM4_KEY_SIZE,
		.max_keysize = OCS_SM4_KEY_SIZE,
		.ivsize = AES_BLOCK_SIZE,
		.setkey = kmb_ocs_sm4_set_key,
		.encrypt = kmb_ocs_sm4_ctr_encrypt,
		.decrypt = kmb_ocs_sm4_ctr_decrypt,
		.init = ocs_sm4_init_tfm,
		.exit = ocs_exit_tfm,
	},
#ifdef CONFIG_CRYPTO_DEV_KEEMBAY_OCS_AES_SM4_CTS
	{
		.base.cra_name = "cts(cbc(sm4))",
		.base.cra_driver_name = "cts-sm4-keembay-ocs",
		.base.cra_priority = KMB_OCS_PRIORITY,
		.base.cra_flags = CRYPTO_ALG_ASYNC |
				  CRYPTO_ALG_KERN_DRIVER_ONLY,
		.base.cra_blocksize = AES_BLOCK_SIZE,
		.base.cra_ctxsize = sizeof(struct ocs_aes_tctx),
		.base.cra_module = THIS_MODULE,
		.base.cra_alignmask = 0,

		.min_keysize = OCS_SM4_KEY_SIZE,
		.max_keysize = OCS_SM4_KEY_SIZE,
		.ivsize = AES_BLOCK_SIZE,
		.setkey = kmb_ocs_sm4_set_key,
		.encrypt = kmb_ocs_sm4_cts_encrypt,
		.decrypt = kmb_ocs_sm4_cts_decrypt,
		.init = ocs_sm4_init_tfm,
		.exit = ocs_exit_tfm,
	}
#endif
};

static struct aead_alg algs_aead[] = {
	{
		.base = {
			.cra_name = "gcm(aes)",
			.cra_driver_name = "gcm-aes-keembay-ocs",
			.cra_priority = KMB_OCS_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY |
				     CRYPTO_ALG_NEED_FALLBACK,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct ocs_aes_tctx),
			.cra_alignmask = 0,
			.cra_module = THIS_MODULE,
		},
		.init = ocs_aes_aead_cra_init,
		.exit = ocs_aead_cra_exit,
		.ivsize = GCM_AES_IV_SIZE,
		.maxauthsize = AES_BLOCK_SIZE,
		.setauthsize = kmb_ocs_aead_gcm_setauthsize,
		.setkey = kmb_ocs_aes_aead_set_key,
		.encrypt = kmb_ocs_aes_gcm_encrypt,
		.decrypt = kmb_ocs_aes_gcm_decrypt,
	},
	{
		.base = {
			.cra_name = "ccm(aes)",
			.cra_driver_name = "ccm-aes-keembay-ocs",
			.cra_priority = KMB_OCS_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY |
				     CRYPTO_ALG_NEED_FALLBACK,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct ocs_aes_tctx),
			.cra_alignmask = 0,
			.cra_module = THIS_MODULE,
		},
		.init = ocs_aes_aead_cra_init,
		.exit = ocs_aead_cra_exit,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = AES_BLOCK_SIZE,
		.setauthsize = kmb_ocs_aead_ccm_setauthsize,
		.setkey = kmb_ocs_aes_aead_set_key,
		.encrypt = kmb_ocs_aes_ccm_encrypt,
		.decrypt = kmb_ocs_aes_ccm_decrypt,
	},
	{
		.base = {
			.cra_name = "gcm(sm4)",
			.cra_driver_name = "gcm-sm4-keembay-ocs",
			.cra_priority = KMB_OCS_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct ocs_aes_tctx),
			.cra_alignmask = 0,
			.cra_module = THIS_MODULE,
		},
		.init = ocs_sm4_aead_cra_init,
		.exit = ocs_aead_cra_exit,
		.ivsize = GCM_AES_IV_SIZE,
		.maxauthsize = AES_BLOCK_SIZE,
		.setauthsize = kmb_ocs_aead_gcm_setauthsize,
		.setkey = kmb_ocs_sm4_aead_set_key,
		.encrypt = kmb_ocs_sm4_gcm_encrypt,
		.decrypt = kmb_ocs_sm4_gcm_decrypt,
	},
	{
		.base = {
			.cra_name = "ccm(sm4)",
			.cra_driver_name = "ccm-sm4-keembay-ocs",
			.cra_priority = KMB_OCS_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct ocs_aes_tctx),
			.cra_alignmask = 0,
			.cra_module = THIS_MODULE,
		},
		.init = ocs_sm4_aead_cra_init,
		.exit = ocs_aead_cra_exit,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = AES_BLOCK_SIZE,
		.setauthsize = kmb_ocs_aead_ccm_setauthsize,
		.setkey = kmb_ocs_sm4_aead_set_key,
		.encrypt = kmb_ocs_sm4_ccm_encrypt,
		.decrypt = kmb_ocs_sm4_ccm_decrypt,
	}
};

static void unregister_aes_algs(struct ocs_aes_dev *aes_dev)
{
	crypto_unregister_aeads(algs_aead, ARRAY_SIZE(algs_aead));
	crypto_unregister_skciphers(algs, ARRAY_SIZE(algs));
}

static int register_aes_algs(struct ocs_aes_dev *aes_dev)
{
	int ret;

	/*
	 * Register AEAD algorithms first, so they can be unregistered if
	 * skcipher registration fails below.
	 */
	ret = crypto_register_aeads(algs_aead, ARRAY_SIZE(algs_aead));
	if (ret)
		return ret;

	ret = crypto_register_skciphers(algs, ARRAY_SIZE(algs));
	if (ret)
		crypto_unregister_aeads(algs_aead, ARRAY_SIZE(algs_aead));

	return ret;
}

/* Device tree driver match. */
static const struct of_device_id kmb_ocs_aes_of_match[] = {
	{
		.compatible = "intel,keembay-ocs-aes",
	},
	{}
};

static int kmb_ocs_aes_remove(struct platform_device *pdev)
{
	struct ocs_aes_dev *aes_dev;

	aes_dev = platform_get_drvdata(pdev);
	if (!aes_dev)
		return -ENODEV;

	unregister_aes_algs(aes_dev);

	spin_lock(&ocs_aes.lock);
	list_del(&aes_dev->list);
	spin_unlock(&ocs_aes.lock);

	crypto_engine_exit(aes_dev->engine);

	return 0;
}

static int kmb_ocs_aes_probe(struct platform_device *pdev)
{
	struct device *dev = &pdev->dev;
	struct ocs_aes_dev *aes_dev;
	int rc;

	aes_dev = devm_kzalloc(dev, sizeof(*aes_dev), GFP_KERNEL);
	if (!aes_dev)
		return -ENOMEM;

	aes_dev->dev = dev;

	platform_set_drvdata(pdev, aes_dev);

	rc = dma_set_mask_and_coherent(dev, DMA_BIT_MASK(32));
	if (rc) {
		dev_err(dev, "Failed to set 32 bit dma mask %d\n", rc);
		return rc;
	}

	/* Get base register address. */
	aes_dev->base_reg = devm_platform_ioremap_resource(pdev, 0);
	if (IS_ERR(aes_dev->base_reg))
		return PTR_ERR(aes_dev->base_reg);

	/* Get and request IRQ. */
	aes_dev->irq = platform_get_irq(pdev, 0);
	if (aes_dev->irq < 0)
		return aes_dev->irq;

	rc = devm_request_threaded_irq(dev, aes_dev->irq, ocs_aes_irq_handler,
				       NULL, 0, "keembay-ocs-aes", aes_dev);
	if (rc < 0) {
		dev_err(dev, "Could not request IRQ\n");
		return rc;
	}

	INIT_LIST_HEAD(&aes_dev->list);
	spin_lock(&ocs_aes.lock);
	list_add_tail(&aes_dev->list, &ocs_aes.dev_list);
	spin_unlock(&ocs_aes.lock);

	init_completion(&aes_dev->irq_completion);

	/* Initialize crypto engine. */
	aes_dev->engine = crypto_engine_alloc_init(dev, true);
	if (!aes_dev->engine) {
		rc = -ENOMEM;
		goto list_del;
	}

	rc = crypto_engine_start(aes_dev->engine);
	if (rc) {
		dev_err(dev, "Could not start crypto engine\n");
		goto cleanup;
	}

	rc = register_aes_algs(aes_dev);
	if (rc) {
		dev_err(dev,
			"Could not register OCS algorithms with Crypto API\n");
		goto cleanup;
	}

	return 0;

cleanup:
	crypto_engine_exit(aes_dev->engine);
list_del:
	spin_lock(&ocs_aes.lock);
	list_del(&aes_dev->list);
	spin_unlock(&ocs_aes.lock);

	return rc;
}

/* The OCS driver is a platform device. */
static struct platform_driver kmb_ocs_aes_driver = {
	.probe = kmb_ocs_aes_probe,
	.remove = kmb_ocs_aes_remove,
	.driver = {
		.name = DRV_NAME,
		.of_match_table = kmb_ocs_aes_of_match,
	},
};

module_platform_driver(kmb_ocs_aes_driver);

MODULE_DESCRIPTION("Intel Keem Bay Offload and Crypto Subsystem (OCS) AES/SM4 Driver");
MODULE_LICENSE("GPL");

MODULE_ALIAS_CRYPTO("cbc-aes-keembay-ocs");
MODULE_ALIAS_CRYPTO("ctr-aes-keembay-ocs");
MODULE_ALIAS_CRYPTO("gcm-aes-keembay-ocs");
MODULE_ALIAS_CRYPTO("ccm-aes-keembay-ocs");

MODULE_ALIAS_CRYPTO("cbc-sm4-keembay-ocs");
MODULE_ALIAS_CRYPTO("ctr-sm4-keembay-ocs");
MODULE_ALIAS_CRYPTO("gcm-sm4-keembay-ocs");
MODULE_ALIAS_CRYPTO("ccm-sm4-keembay-ocs");

#ifdef CONFIG_CRYPTO_DEV_KEEMBAY_OCS_AES_SM4_ECB
MODULE_ALIAS_CRYPTO("ecb-aes-keembay-ocs");
MODULE_ALIAS_CRYPTO("ecb-sm4-keembay-ocs");
#endif

#ifdef CONFIG_CRYPTO_DEV_KEEMBAY_OCS_AES_SM4_CTS
MODULE_ALIAS_CRYPTO("cts-aes-keembay-ocs");
MODULE_ALIAS_CRYPTO("cts-sm4-keembay-ocs");
#endif