0001
0002
0003
0004
0005
0006
0007
0008
0009
0010 #include "compat.h"
0011 #include "ctrl.h"
0012 #include "regs.h"
0013 #include "intern.h"
0014 #include "desc_constr.h"
0015 #include "error.h"
0016 #include "sg_sw_qm.h"
0017 #include "key_gen.h"
0018 #include "qi.h"
0019 #include "jr.h"
0020 #include "caamalg_desc.h"
0021 #include <crypto/xts.h>
0022 #include <asm/unaligned.h>
0023
0024
0025
0026
0027 #define CAAM_CRA_PRIORITY 2000
0028
0029 #define CAAM_MAX_KEY_SIZE (AES_MAX_KEY_SIZE + \
0030 SHA512_DIGEST_SIZE * 2)
0031
0032 #define DESC_MAX_USED_BYTES (DESC_QI_AEAD_GIVENC_LEN + \
0033 CAAM_MAX_KEY_SIZE)
0034 #define DESC_MAX_USED_LEN (DESC_MAX_USED_BYTES / CAAM_CMD_SZ)
0035
0036 struct caam_alg_entry {
0037 int class1_alg_type;
0038 int class2_alg_type;
0039 bool rfc3686;
0040 bool geniv;
0041 bool nodkp;
0042 };
0043
0044 struct caam_aead_alg {
0045 struct aead_alg aead;
0046 struct caam_alg_entry caam;
0047 bool registered;
0048 };
0049
0050 struct caam_skcipher_alg {
0051 struct skcipher_alg skcipher;
0052 struct caam_alg_entry caam;
0053 bool registered;
0054 };
0055
0056
0057
0058
0059 struct caam_ctx {
0060 struct device *jrdev;
0061 u32 sh_desc_enc[DESC_MAX_USED_LEN];
0062 u32 sh_desc_dec[DESC_MAX_USED_LEN];
0063 u8 key[CAAM_MAX_KEY_SIZE];
0064 dma_addr_t key_dma;
0065 enum dma_data_direction dir;
0066 struct alginfo adata;
0067 struct alginfo cdata;
0068 unsigned int authsize;
0069 struct device *qidev;
0070 spinlock_t lock;
0071 struct caam_drv_ctx *drv_ctx[NUM_OP];
0072 bool xts_key_fallback;
0073 struct crypto_skcipher *fallback;
0074 };
0075
0076 struct caam_skcipher_req_ctx {
0077 struct skcipher_request fallback_req;
0078 };
0079
0080 static int aead_set_sh_desc(struct crypto_aead *aead)
0081 {
0082 struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead),
0083 typeof(*alg), aead);
0084 struct caam_ctx *ctx = crypto_aead_ctx(aead);
0085 unsigned int ivsize = crypto_aead_ivsize(aead);
0086 u32 ctx1_iv_off = 0;
0087 u32 *nonce = NULL;
0088 unsigned int data_len[2];
0089 u32 inl_mask;
0090 const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
0091 OP_ALG_AAI_CTR_MOD128);
0092 const bool is_rfc3686 = alg->caam.rfc3686;
0093 struct caam_drv_private *ctrlpriv = dev_get_drvdata(ctx->jrdev->parent);
0094
0095 if (!ctx->cdata.keylen || !ctx->authsize)
0096 return 0;
0097
0098
0099
0100
0101
0102
0103 if (ctr_mode)
0104 ctx1_iv_off = 16;
0105
0106
0107
0108
0109
0110 if (is_rfc3686) {
0111 ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE;
0112 nonce = (u32 *)((void *)ctx->key + ctx->adata.keylen_pad +
0113 ctx->cdata.keylen - CTR_RFC3686_NONCE_SIZE);
0114 }
0115
0116
0117
0118
0119
0120
0121
0122 ctx->adata.key_virt = ctx->key;
0123 ctx->adata.key_dma = ctx->key_dma;
0124
0125 ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad;
0126 ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad;
0127
0128 data_len[0] = ctx->adata.keylen_pad;
0129 data_len[1] = ctx->cdata.keylen;
0130
0131 if (alg->caam.geniv)
0132 goto skip_enc;
0133
0134
0135 if (desc_inline_query(DESC_QI_AEAD_ENC_LEN +
0136 (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
0137 DESC_JOB_IO_LEN, data_len, &inl_mask,
0138 ARRAY_SIZE(data_len)) < 0)
0139 return -EINVAL;
0140
0141 ctx->adata.key_inline = !!(inl_mask & 1);
0142 ctx->cdata.key_inline = !!(inl_mask & 2);
0143
0144 cnstr_shdsc_aead_encap(ctx->sh_desc_enc, &ctx->cdata, &ctx->adata,
0145 ivsize, ctx->authsize, is_rfc3686, nonce,
0146 ctx1_iv_off, true, ctrlpriv->era);
0147
0148 skip_enc:
0149
0150 if (desc_inline_query(DESC_QI_AEAD_DEC_LEN +
0151 (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
0152 DESC_JOB_IO_LEN, data_len, &inl_mask,
0153 ARRAY_SIZE(data_len)) < 0)
0154 return -EINVAL;
0155
0156 ctx->adata.key_inline = !!(inl_mask & 1);
0157 ctx->cdata.key_inline = !!(inl_mask & 2);
0158
0159 cnstr_shdsc_aead_decap(ctx->sh_desc_dec, &ctx->cdata, &ctx->adata,
0160 ivsize, ctx->authsize, alg->caam.geniv,
0161 is_rfc3686, nonce, ctx1_iv_off, true,
0162 ctrlpriv->era);
0163
0164 if (!alg->caam.geniv)
0165 goto skip_givenc;
0166
0167
0168 if (desc_inline_query(DESC_QI_AEAD_GIVENC_LEN +
0169 (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
0170 DESC_JOB_IO_LEN, data_len, &inl_mask,
0171 ARRAY_SIZE(data_len)) < 0)
0172 return -EINVAL;
0173
0174 ctx->adata.key_inline = !!(inl_mask & 1);
0175 ctx->cdata.key_inline = !!(inl_mask & 2);
0176
0177 cnstr_shdsc_aead_givencap(ctx->sh_desc_enc, &ctx->cdata, &ctx->adata,
0178 ivsize, ctx->authsize, is_rfc3686, nonce,
0179 ctx1_iv_off, true, ctrlpriv->era);
0180
0181 skip_givenc:
0182 return 0;
0183 }
0184
0185 static int aead_setauthsize(struct crypto_aead *authenc, unsigned int authsize)
0186 {
0187 struct caam_ctx *ctx = crypto_aead_ctx(authenc);
0188
0189 ctx->authsize = authsize;
0190 aead_set_sh_desc(authenc);
0191
0192 return 0;
0193 }
0194
0195 static int aead_setkey(struct crypto_aead *aead, const u8 *key,
0196 unsigned int keylen)
0197 {
0198 struct caam_ctx *ctx = crypto_aead_ctx(aead);
0199 struct device *jrdev = ctx->jrdev;
0200 struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
0201 struct crypto_authenc_keys keys;
0202 int ret = 0;
0203
0204 if (crypto_authenc_extractkeys(&keys, key, keylen) != 0)
0205 goto badkey;
0206
0207 dev_dbg(jrdev, "keylen %d enckeylen %d authkeylen %d\n",
0208 keys.authkeylen + keys.enckeylen, keys.enckeylen,
0209 keys.authkeylen);
0210 print_hex_dump_debug("key in @" __stringify(__LINE__)": ",
0211 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
0212
0213
0214
0215
0216
0217 if (ctrlpriv->era >= 6) {
0218 ctx->adata.keylen = keys.authkeylen;
0219 ctx->adata.keylen_pad = split_key_len(ctx->adata.algtype &
0220 OP_ALG_ALGSEL_MASK);
0221
0222 if (ctx->adata.keylen_pad + keys.enckeylen > CAAM_MAX_KEY_SIZE)
0223 goto badkey;
0224
0225 memcpy(ctx->key, keys.authkey, keys.authkeylen);
0226 memcpy(ctx->key + ctx->adata.keylen_pad, keys.enckey,
0227 keys.enckeylen);
0228 dma_sync_single_for_device(jrdev->parent, ctx->key_dma,
0229 ctx->adata.keylen_pad +
0230 keys.enckeylen, ctx->dir);
0231 goto skip_split_key;
0232 }
0233
0234 ret = gen_split_key(jrdev, ctx->key, &ctx->adata, keys.authkey,
0235 keys.authkeylen, CAAM_MAX_KEY_SIZE -
0236 keys.enckeylen);
0237 if (ret)
0238 goto badkey;
0239
0240
0241 memcpy(ctx->key + ctx->adata.keylen_pad, keys.enckey, keys.enckeylen);
0242 dma_sync_single_for_device(jrdev->parent, ctx->key_dma,
0243 ctx->adata.keylen_pad + keys.enckeylen,
0244 ctx->dir);
0245
0246 print_hex_dump_debug("ctx.key@" __stringify(__LINE__)": ",
0247 DUMP_PREFIX_ADDRESS, 16, 4, ctx->key,
0248 ctx->adata.keylen_pad + keys.enckeylen, 1);
0249
0250 skip_split_key:
0251 ctx->cdata.keylen = keys.enckeylen;
0252
0253 ret = aead_set_sh_desc(aead);
0254 if (ret)
0255 goto badkey;
0256
0257
0258 if (ctx->drv_ctx[ENCRYPT]) {
0259 ret = caam_drv_ctx_update(ctx->drv_ctx[ENCRYPT],
0260 ctx->sh_desc_enc);
0261 if (ret) {
0262 dev_err(jrdev, "driver enc context update failed\n");
0263 goto badkey;
0264 }
0265 }
0266
0267 if (ctx->drv_ctx[DECRYPT]) {
0268 ret = caam_drv_ctx_update(ctx->drv_ctx[DECRYPT],
0269 ctx->sh_desc_dec);
0270 if (ret) {
0271 dev_err(jrdev, "driver dec context update failed\n");
0272 goto badkey;
0273 }
0274 }
0275
0276 memzero_explicit(&keys, sizeof(keys));
0277 return ret;
0278 badkey:
0279 memzero_explicit(&keys, sizeof(keys));
0280 return -EINVAL;
0281 }
0282
0283 static int des3_aead_setkey(struct crypto_aead *aead, const u8 *key,
0284 unsigned int keylen)
0285 {
0286 struct crypto_authenc_keys keys;
0287 int err;
0288
0289 err = crypto_authenc_extractkeys(&keys, key, keylen);
0290 if (unlikely(err))
0291 return err;
0292
0293 err = verify_aead_des3_key(aead, keys.enckey, keys.enckeylen) ?:
0294 aead_setkey(aead, key, keylen);
0295
0296 memzero_explicit(&keys, sizeof(keys));
0297 return err;
0298 }
0299
0300 static int gcm_set_sh_desc(struct crypto_aead *aead)
0301 {
0302 struct caam_ctx *ctx = crypto_aead_ctx(aead);
0303 unsigned int ivsize = crypto_aead_ivsize(aead);
0304 int rem_bytes = CAAM_DESC_BYTES_MAX - DESC_JOB_IO_LEN -
0305 ctx->cdata.keylen;
0306
0307 if (!ctx->cdata.keylen || !ctx->authsize)
0308 return 0;
0309
0310
0311
0312
0313
0314 if (rem_bytes >= DESC_QI_GCM_ENC_LEN) {
0315 ctx->cdata.key_inline = true;
0316 ctx->cdata.key_virt = ctx->key;
0317 } else {
0318 ctx->cdata.key_inline = false;
0319 ctx->cdata.key_dma = ctx->key_dma;
0320 }
0321
0322 cnstr_shdsc_gcm_encap(ctx->sh_desc_enc, &ctx->cdata, ivsize,
0323 ctx->authsize, true);
0324
0325
0326
0327
0328
0329 if (rem_bytes >= DESC_QI_GCM_DEC_LEN) {
0330 ctx->cdata.key_inline = true;
0331 ctx->cdata.key_virt = ctx->key;
0332 } else {
0333 ctx->cdata.key_inline = false;
0334 ctx->cdata.key_dma = ctx->key_dma;
0335 }
0336
0337 cnstr_shdsc_gcm_decap(ctx->sh_desc_dec, &ctx->cdata, ivsize,
0338 ctx->authsize, true);
0339
0340 return 0;
0341 }
0342
0343 static int gcm_setauthsize(struct crypto_aead *authenc, unsigned int authsize)
0344 {
0345 struct caam_ctx *ctx = crypto_aead_ctx(authenc);
0346 int err;
0347
0348 err = crypto_gcm_check_authsize(authsize);
0349 if (err)
0350 return err;
0351
0352 ctx->authsize = authsize;
0353 gcm_set_sh_desc(authenc);
0354
0355 return 0;
0356 }
0357
0358 static int gcm_setkey(struct crypto_aead *aead,
0359 const u8 *key, unsigned int keylen)
0360 {
0361 struct caam_ctx *ctx = crypto_aead_ctx(aead);
0362 struct device *jrdev = ctx->jrdev;
0363 int ret;
0364
0365 ret = aes_check_keylen(keylen);
0366 if (ret)
0367 return ret;
0368
0369 print_hex_dump_debug("key in @" __stringify(__LINE__)": ",
0370 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
0371
0372 memcpy(ctx->key, key, keylen);
0373 dma_sync_single_for_device(jrdev->parent, ctx->key_dma, keylen,
0374 ctx->dir);
0375 ctx->cdata.keylen = keylen;
0376
0377 ret = gcm_set_sh_desc(aead);
0378 if (ret)
0379 return ret;
0380
0381
0382 if (ctx->drv_ctx[ENCRYPT]) {
0383 ret = caam_drv_ctx_update(ctx->drv_ctx[ENCRYPT],
0384 ctx->sh_desc_enc);
0385 if (ret) {
0386 dev_err(jrdev, "driver enc context update failed\n");
0387 return ret;
0388 }
0389 }
0390
0391 if (ctx->drv_ctx[DECRYPT]) {
0392 ret = caam_drv_ctx_update(ctx->drv_ctx[DECRYPT],
0393 ctx->sh_desc_dec);
0394 if (ret) {
0395 dev_err(jrdev, "driver dec context update failed\n");
0396 return ret;
0397 }
0398 }
0399
0400 return 0;
0401 }
0402
0403 static int rfc4106_set_sh_desc(struct crypto_aead *aead)
0404 {
0405 struct caam_ctx *ctx = crypto_aead_ctx(aead);
0406 unsigned int ivsize = crypto_aead_ivsize(aead);
0407 int rem_bytes = CAAM_DESC_BYTES_MAX - DESC_JOB_IO_LEN -
0408 ctx->cdata.keylen;
0409
0410 if (!ctx->cdata.keylen || !ctx->authsize)
0411 return 0;
0412
0413 ctx->cdata.key_virt = ctx->key;
0414
0415
0416
0417
0418
0419 if (rem_bytes >= DESC_QI_RFC4106_ENC_LEN) {
0420 ctx->cdata.key_inline = true;
0421 } else {
0422 ctx->cdata.key_inline = false;
0423 ctx->cdata.key_dma = ctx->key_dma;
0424 }
0425
0426 cnstr_shdsc_rfc4106_encap(ctx->sh_desc_enc, &ctx->cdata, ivsize,
0427 ctx->authsize, true);
0428
0429
0430
0431
0432
0433 if (rem_bytes >= DESC_QI_RFC4106_DEC_LEN) {
0434 ctx->cdata.key_inline = true;
0435 } else {
0436 ctx->cdata.key_inline = false;
0437 ctx->cdata.key_dma = ctx->key_dma;
0438 }
0439
0440 cnstr_shdsc_rfc4106_decap(ctx->sh_desc_dec, &ctx->cdata, ivsize,
0441 ctx->authsize, true);
0442
0443 return 0;
0444 }
0445
0446 static int rfc4106_setauthsize(struct crypto_aead *authenc,
0447 unsigned int authsize)
0448 {
0449 struct caam_ctx *ctx = crypto_aead_ctx(authenc);
0450 int err;
0451
0452 err = crypto_rfc4106_check_authsize(authsize);
0453 if (err)
0454 return err;
0455
0456 ctx->authsize = authsize;
0457 rfc4106_set_sh_desc(authenc);
0458
0459 return 0;
0460 }
0461
0462 static int rfc4106_setkey(struct crypto_aead *aead,
0463 const u8 *key, unsigned int keylen)
0464 {
0465 struct caam_ctx *ctx = crypto_aead_ctx(aead);
0466 struct device *jrdev = ctx->jrdev;
0467 int ret;
0468
0469 ret = aes_check_keylen(keylen - 4);
0470 if (ret)
0471 return ret;
0472
0473 print_hex_dump_debug("key in @" __stringify(__LINE__)": ",
0474 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
0475
0476 memcpy(ctx->key, key, keylen);
0477
0478
0479
0480
0481 ctx->cdata.keylen = keylen - 4;
0482 dma_sync_single_for_device(jrdev->parent, ctx->key_dma,
0483 ctx->cdata.keylen, ctx->dir);
0484
0485 ret = rfc4106_set_sh_desc(aead);
0486 if (ret)
0487 return ret;
0488
0489
0490 if (ctx->drv_ctx[ENCRYPT]) {
0491 ret = caam_drv_ctx_update(ctx->drv_ctx[ENCRYPT],
0492 ctx->sh_desc_enc);
0493 if (ret) {
0494 dev_err(jrdev, "driver enc context update failed\n");
0495 return ret;
0496 }
0497 }
0498
0499 if (ctx->drv_ctx[DECRYPT]) {
0500 ret = caam_drv_ctx_update(ctx->drv_ctx[DECRYPT],
0501 ctx->sh_desc_dec);
0502 if (ret) {
0503 dev_err(jrdev, "driver dec context update failed\n");
0504 return ret;
0505 }
0506 }
0507
0508 return 0;
0509 }
0510
0511 static int rfc4543_set_sh_desc(struct crypto_aead *aead)
0512 {
0513 struct caam_ctx *ctx = crypto_aead_ctx(aead);
0514 unsigned int ivsize = crypto_aead_ivsize(aead);
0515 int rem_bytes = CAAM_DESC_BYTES_MAX - DESC_JOB_IO_LEN -
0516 ctx->cdata.keylen;
0517
0518 if (!ctx->cdata.keylen || !ctx->authsize)
0519 return 0;
0520
0521 ctx->cdata.key_virt = ctx->key;
0522
0523
0524
0525
0526
0527 if (rem_bytes >= DESC_QI_RFC4543_ENC_LEN) {
0528 ctx->cdata.key_inline = true;
0529 } else {
0530 ctx->cdata.key_inline = false;
0531 ctx->cdata.key_dma = ctx->key_dma;
0532 }
0533
0534 cnstr_shdsc_rfc4543_encap(ctx->sh_desc_enc, &ctx->cdata, ivsize,
0535 ctx->authsize, true);
0536
0537
0538
0539
0540
0541 if (rem_bytes >= DESC_QI_RFC4543_DEC_LEN) {
0542 ctx->cdata.key_inline = true;
0543 } else {
0544 ctx->cdata.key_inline = false;
0545 ctx->cdata.key_dma = ctx->key_dma;
0546 }
0547
0548 cnstr_shdsc_rfc4543_decap(ctx->sh_desc_dec, &ctx->cdata, ivsize,
0549 ctx->authsize, true);
0550
0551 return 0;
0552 }
0553
0554 static int rfc4543_setauthsize(struct crypto_aead *authenc,
0555 unsigned int authsize)
0556 {
0557 struct caam_ctx *ctx = crypto_aead_ctx(authenc);
0558
0559 if (authsize != 16)
0560 return -EINVAL;
0561
0562 ctx->authsize = authsize;
0563 rfc4543_set_sh_desc(authenc);
0564
0565 return 0;
0566 }
0567
0568 static int rfc4543_setkey(struct crypto_aead *aead,
0569 const u8 *key, unsigned int keylen)
0570 {
0571 struct caam_ctx *ctx = crypto_aead_ctx(aead);
0572 struct device *jrdev = ctx->jrdev;
0573 int ret;
0574
0575 ret = aes_check_keylen(keylen - 4);
0576 if (ret)
0577 return ret;
0578
0579 print_hex_dump_debug("key in @" __stringify(__LINE__)": ",
0580 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
0581
0582 memcpy(ctx->key, key, keylen);
0583
0584
0585
0586
0587 ctx->cdata.keylen = keylen - 4;
0588 dma_sync_single_for_device(jrdev->parent, ctx->key_dma,
0589 ctx->cdata.keylen, ctx->dir);
0590
0591 ret = rfc4543_set_sh_desc(aead);
0592 if (ret)
0593 return ret;
0594
0595
0596 if (ctx->drv_ctx[ENCRYPT]) {
0597 ret = caam_drv_ctx_update(ctx->drv_ctx[ENCRYPT],
0598 ctx->sh_desc_enc);
0599 if (ret) {
0600 dev_err(jrdev, "driver enc context update failed\n");
0601 return ret;
0602 }
0603 }
0604
0605 if (ctx->drv_ctx[DECRYPT]) {
0606 ret = caam_drv_ctx_update(ctx->drv_ctx[DECRYPT],
0607 ctx->sh_desc_dec);
0608 if (ret) {
0609 dev_err(jrdev, "driver dec context update failed\n");
0610 return ret;
0611 }
0612 }
0613
0614 return 0;
0615 }
0616
0617 static int skcipher_setkey(struct crypto_skcipher *skcipher, const u8 *key,
0618 unsigned int keylen, const u32 ctx1_iv_off)
0619 {
0620 struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
0621 struct caam_skcipher_alg *alg =
0622 container_of(crypto_skcipher_alg(skcipher), typeof(*alg),
0623 skcipher);
0624 struct device *jrdev = ctx->jrdev;
0625 unsigned int ivsize = crypto_skcipher_ivsize(skcipher);
0626 const bool is_rfc3686 = alg->caam.rfc3686;
0627 int ret = 0;
0628
0629 print_hex_dump_debug("key in @" __stringify(__LINE__)": ",
0630 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
0631
0632 ctx->cdata.keylen = keylen;
0633 ctx->cdata.key_virt = key;
0634 ctx->cdata.key_inline = true;
0635
0636
0637 cnstr_shdsc_skcipher_encap(ctx->sh_desc_enc, &ctx->cdata, ivsize,
0638 is_rfc3686, ctx1_iv_off);
0639 cnstr_shdsc_skcipher_decap(ctx->sh_desc_dec, &ctx->cdata, ivsize,
0640 is_rfc3686, ctx1_iv_off);
0641
0642
0643 if (ctx->drv_ctx[ENCRYPT]) {
0644 ret = caam_drv_ctx_update(ctx->drv_ctx[ENCRYPT],
0645 ctx->sh_desc_enc);
0646 if (ret) {
0647 dev_err(jrdev, "driver enc context update failed\n");
0648 return -EINVAL;
0649 }
0650 }
0651
0652 if (ctx->drv_ctx[DECRYPT]) {
0653 ret = caam_drv_ctx_update(ctx->drv_ctx[DECRYPT],
0654 ctx->sh_desc_dec);
0655 if (ret) {
0656 dev_err(jrdev, "driver dec context update failed\n");
0657 return -EINVAL;
0658 }
0659 }
0660
0661 return ret;
0662 }
0663
0664 static int aes_skcipher_setkey(struct crypto_skcipher *skcipher,
0665 const u8 *key, unsigned int keylen)
0666 {
0667 int err;
0668
0669 err = aes_check_keylen(keylen);
0670 if (err)
0671 return err;
0672
0673 return skcipher_setkey(skcipher, key, keylen, 0);
0674 }
0675
0676 static int rfc3686_skcipher_setkey(struct crypto_skcipher *skcipher,
0677 const u8 *key, unsigned int keylen)
0678 {
0679 u32 ctx1_iv_off;
0680 int err;
0681
0682
0683
0684
0685
0686
0687 ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE;
0688 keylen -= CTR_RFC3686_NONCE_SIZE;
0689
0690 err = aes_check_keylen(keylen);
0691 if (err)
0692 return err;
0693
0694 return skcipher_setkey(skcipher, key, keylen, ctx1_iv_off);
0695 }
0696
0697 static int ctr_skcipher_setkey(struct crypto_skcipher *skcipher,
0698 const u8 *key, unsigned int keylen)
0699 {
0700 u32 ctx1_iv_off;
0701 int err;
0702
0703
0704
0705
0706
0707
0708 ctx1_iv_off = 16;
0709
0710 err = aes_check_keylen(keylen);
0711 if (err)
0712 return err;
0713
0714 return skcipher_setkey(skcipher, key, keylen, ctx1_iv_off);
0715 }
0716
0717 static int des3_skcipher_setkey(struct crypto_skcipher *skcipher,
0718 const u8 *key, unsigned int keylen)
0719 {
0720 return verify_skcipher_des3_key(skcipher, key) ?:
0721 skcipher_setkey(skcipher, key, keylen, 0);
0722 }
0723
0724 static int des_skcipher_setkey(struct crypto_skcipher *skcipher,
0725 const u8 *key, unsigned int keylen)
0726 {
0727 return verify_skcipher_des_key(skcipher, key) ?:
0728 skcipher_setkey(skcipher, key, keylen, 0);
0729 }
0730
0731 static int xts_skcipher_setkey(struct crypto_skcipher *skcipher, const u8 *key,
0732 unsigned int keylen)
0733 {
0734 struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
0735 struct device *jrdev = ctx->jrdev;
0736 struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
0737 int ret = 0;
0738 int err;
0739
0740 err = xts_verify_key(skcipher, key, keylen);
0741 if (err) {
0742 dev_dbg(jrdev, "key size mismatch\n");
0743 return err;
0744 }
0745
0746 if (keylen != 2 * AES_KEYSIZE_128 && keylen != 2 * AES_KEYSIZE_256)
0747 ctx->xts_key_fallback = true;
0748
0749 if (ctrlpriv->era <= 8 || ctx->xts_key_fallback) {
0750 err = crypto_skcipher_setkey(ctx->fallback, key, keylen);
0751 if (err)
0752 return err;
0753 }
0754
0755 ctx->cdata.keylen = keylen;
0756 ctx->cdata.key_virt = key;
0757 ctx->cdata.key_inline = true;
0758
0759
0760 cnstr_shdsc_xts_skcipher_encap(ctx->sh_desc_enc, &ctx->cdata);
0761 cnstr_shdsc_xts_skcipher_decap(ctx->sh_desc_dec, &ctx->cdata);
0762
0763
0764 if (ctx->drv_ctx[ENCRYPT]) {
0765 ret = caam_drv_ctx_update(ctx->drv_ctx[ENCRYPT],
0766 ctx->sh_desc_enc);
0767 if (ret) {
0768 dev_err(jrdev, "driver enc context update failed\n");
0769 return -EINVAL;
0770 }
0771 }
0772
0773 if (ctx->drv_ctx[DECRYPT]) {
0774 ret = caam_drv_ctx_update(ctx->drv_ctx[DECRYPT],
0775 ctx->sh_desc_dec);
0776 if (ret) {
0777 dev_err(jrdev, "driver dec context update failed\n");
0778 return -EINVAL;
0779 }
0780 }
0781
0782 return ret;
0783 }
0784
0785
0786
0787
0788
0789
0790
0791
0792
0793
0794
0795
0796
0797 struct aead_edesc {
0798 int src_nents;
0799 int dst_nents;
0800 dma_addr_t iv_dma;
0801 int qm_sg_bytes;
0802 dma_addr_t qm_sg_dma;
0803 unsigned int assoclen;
0804 dma_addr_t assoclen_dma;
0805 struct caam_drv_req drv_req;
0806 struct qm_sg_entry sgt[];
0807 };
0808
0809
0810
0811
0812
0813
0814
0815
0816
0817
0818
0819 struct skcipher_edesc {
0820 int src_nents;
0821 int dst_nents;
0822 dma_addr_t iv_dma;
0823 int qm_sg_bytes;
0824 dma_addr_t qm_sg_dma;
0825 struct caam_drv_req drv_req;
0826 struct qm_sg_entry sgt[];
0827 };
0828
0829 static struct caam_drv_ctx *get_drv_ctx(struct caam_ctx *ctx,
0830 enum optype type)
0831 {
0832
0833
0834
0835
0836
0837 struct caam_drv_ctx *drv_ctx = ctx->drv_ctx[type];
0838 u32 *desc;
0839
0840 if (unlikely(!drv_ctx)) {
0841 spin_lock(&ctx->lock);
0842
0843
0844 drv_ctx = ctx->drv_ctx[type];
0845 if (!drv_ctx) {
0846 int cpu;
0847
0848 if (type == ENCRYPT)
0849 desc = ctx->sh_desc_enc;
0850 else
0851 desc = ctx->sh_desc_dec;
0852
0853 cpu = smp_processor_id();
0854 drv_ctx = caam_drv_ctx_init(ctx->qidev, &cpu, desc);
0855 if (!IS_ERR(drv_ctx))
0856 drv_ctx->op_type = type;
0857
0858 ctx->drv_ctx[type] = drv_ctx;
0859 }
0860
0861 spin_unlock(&ctx->lock);
0862 }
0863
0864 return drv_ctx;
0865 }
0866
0867 static void caam_unmap(struct device *dev, struct scatterlist *src,
0868 struct scatterlist *dst, int src_nents,
0869 int dst_nents, dma_addr_t iv_dma, int ivsize,
0870 enum dma_data_direction iv_dir, dma_addr_t qm_sg_dma,
0871 int qm_sg_bytes)
0872 {
0873 if (dst != src) {
0874 if (src_nents)
0875 dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE);
0876 if (dst_nents)
0877 dma_unmap_sg(dev, dst, dst_nents, DMA_FROM_DEVICE);
0878 } else {
0879 dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL);
0880 }
0881
0882 if (iv_dma)
0883 dma_unmap_single(dev, iv_dma, ivsize, iv_dir);
0884 if (qm_sg_bytes)
0885 dma_unmap_single(dev, qm_sg_dma, qm_sg_bytes, DMA_TO_DEVICE);
0886 }
0887
0888 static void aead_unmap(struct device *dev,
0889 struct aead_edesc *edesc,
0890 struct aead_request *req)
0891 {
0892 struct crypto_aead *aead = crypto_aead_reqtfm(req);
0893 int ivsize = crypto_aead_ivsize(aead);
0894
0895 caam_unmap(dev, req->src, req->dst, edesc->src_nents, edesc->dst_nents,
0896 edesc->iv_dma, ivsize, DMA_TO_DEVICE, edesc->qm_sg_dma,
0897 edesc->qm_sg_bytes);
0898 dma_unmap_single(dev, edesc->assoclen_dma, 4, DMA_TO_DEVICE);
0899 }
0900
0901 static void skcipher_unmap(struct device *dev, struct skcipher_edesc *edesc,
0902 struct skcipher_request *req)
0903 {
0904 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
0905 int ivsize = crypto_skcipher_ivsize(skcipher);
0906
0907 caam_unmap(dev, req->src, req->dst, edesc->src_nents, edesc->dst_nents,
0908 edesc->iv_dma, ivsize, DMA_BIDIRECTIONAL, edesc->qm_sg_dma,
0909 edesc->qm_sg_bytes);
0910 }
0911
0912 static void aead_done(struct caam_drv_req *drv_req, u32 status)
0913 {
0914 struct device *qidev;
0915 struct aead_edesc *edesc;
0916 struct aead_request *aead_req = drv_req->app_ctx;
0917 struct crypto_aead *aead = crypto_aead_reqtfm(aead_req);
0918 struct caam_ctx *caam_ctx = crypto_aead_ctx(aead);
0919 int ecode = 0;
0920
0921 qidev = caam_ctx->qidev;
0922
0923 if (unlikely(status))
0924 ecode = caam_jr_strstatus(qidev, status);
0925
0926 edesc = container_of(drv_req, typeof(*edesc), drv_req);
0927 aead_unmap(qidev, edesc, aead_req);
0928
0929 aead_request_complete(aead_req, ecode);
0930 qi_cache_free(edesc);
0931 }
0932
0933
0934
0935
0936 static struct aead_edesc *aead_edesc_alloc(struct aead_request *req,
0937 bool encrypt)
0938 {
0939 struct crypto_aead *aead = crypto_aead_reqtfm(req);
0940 struct caam_ctx *ctx = crypto_aead_ctx(aead);
0941 struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead),
0942 typeof(*alg), aead);
0943 struct device *qidev = ctx->qidev;
0944 gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
0945 GFP_KERNEL : GFP_ATOMIC;
0946 int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0;
0947 int src_len, dst_len = 0;
0948 struct aead_edesc *edesc;
0949 dma_addr_t qm_sg_dma, iv_dma = 0;
0950 int ivsize = 0;
0951 unsigned int authsize = ctx->authsize;
0952 int qm_sg_index = 0, qm_sg_ents = 0, qm_sg_bytes;
0953 int in_len, out_len;
0954 struct qm_sg_entry *sg_table, *fd_sgt;
0955 struct caam_drv_ctx *drv_ctx;
0956
0957 drv_ctx = get_drv_ctx(ctx, encrypt ? ENCRYPT : DECRYPT);
0958 if (IS_ERR(drv_ctx))
0959 return (struct aead_edesc *)drv_ctx;
0960
0961
0962 edesc = qi_cache_alloc(GFP_DMA | flags);
0963 if (unlikely(!edesc)) {
0964 dev_err(qidev, "could not allocate extended descriptor\n");
0965 return ERR_PTR(-ENOMEM);
0966 }
0967
0968 if (likely(req->src == req->dst)) {
0969 src_len = req->assoclen + req->cryptlen +
0970 (encrypt ? authsize : 0);
0971
0972 src_nents = sg_nents_for_len(req->src, src_len);
0973 if (unlikely(src_nents < 0)) {
0974 dev_err(qidev, "Insufficient bytes (%d) in src S/G\n",
0975 src_len);
0976 qi_cache_free(edesc);
0977 return ERR_PTR(src_nents);
0978 }
0979
0980 mapped_src_nents = dma_map_sg(qidev, req->src, src_nents,
0981 DMA_BIDIRECTIONAL);
0982 if (unlikely(!mapped_src_nents)) {
0983 dev_err(qidev, "unable to map source\n");
0984 qi_cache_free(edesc);
0985 return ERR_PTR(-ENOMEM);
0986 }
0987 } else {
0988 src_len = req->assoclen + req->cryptlen;
0989 dst_len = src_len + (encrypt ? authsize : (-authsize));
0990
0991 src_nents = sg_nents_for_len(req->src, src_len);
0992 if (unlikely(src_nents < 0)) {
0993 dev_err(qidev, "Insufficient bytes (%d) in src S/G\n",
0994 src_len);
0995 qi_cache_free(edesc);
0996 return ERR_PTR(src_nents);
0997 }
0998
0999 dst_nents = sg_nents_for_len(req->dst, dst_len);
1000 if (unlikely(dst_nents < 0)) {
1001 dev_err(qidev, "Insufficient bytes (%d) in dst S/G\n",
1002 dst_len);
1003 qi_cache_free(edesc);
1004 return ERR_PTR(dst_nents);
1005 }
1006
1007 if (src_nents) {
1008 mapped_src_nents = dma_map_sg(qidev, req->src,
1009 src_nents, DMA_TO_DEVICE);
1010 if (unlikely(!mapped_src_nents)) {
1011 dev_err(qidev, "unable to map source\n");
1012 qi_cache_free(edesc);
1013 return ERR_PTR(-ENOMEM);
1014 }
1015 } else {
1016 mapped_src_nents = 0;
1017 }
1018
1019 if (dst_nents) {
1020 mapped_dst_nents = dma_map_sg(qidev, req->dst,
1021 dst_nents,
1022 DMA_FROM_DEVICE);
1023 if (unlikely(!mapped_dst_nents)) {
1024 dev_err(qidev, "unable to map destination\n");
1025 dma_unmap_sg(qidev, req->src, src_nents,
1026 DMA_TO_DEVICE);
1027 qi_cache_free(edesc);
1028 return ERR_PTR(-ENOMEM);
1029 }
1030 } else {
1031 mapped_dst_nents = 0;
1032 }
1033 }
1034
1035 if ((alg->caam.rfc3686 && encrypt) || !alg->caam.geniv)
1036 ivsize = crypto_aead_ivsize(aead);
1037
1038
1039
1040
1041
1042
1043
1044
1045
1046
1047
1048
1049
1050 qm_sg_ents = 1 + !!ivsize + mapped_src_nents;
1051 if (mapped_dst_nents > 1)
1052 qm_sg_ents += pad_sg_nents(mapped_dst_nents);
1053 else if ((req->src == req->dst) && (mapped_src_nents > 1))
1054 qm_sg_ents = max(pad_sg_nents(qm_sg_ents),
1055 1 + !!ivsize + pad_sg_nents(mapped_src_nents));
1056 else
1057 qm_sg_ents = pad_sg_nents(qm_sg_ents);
1058
1059 sg_table = &edesc->sgt[0];
1060 qm_sg_bytes = qm_sg_ents * sizeof(*sg_table);
1061 if (unlikely(offsetof(struct aead_edesc, sgt) + qm_sg_bytes + ivsize >
1062 CAAM_QI_MEMCACHE_SIZE)) {
1063 dev_err(qidev, "No space for %d S/G entries and/or %dB IV\n",
1064 qm_sg_ents, ivsize);
1065 caam_unmap(qidev, req->src, req->dst, src_nents, dst_nents, 0,
1066 0, DMA_NONE, 0, 0);
1067 qi_cache_free(edesc);
1068 return ERR_PTR(-ENOMEM);
1069 }
1070
1071 if (ivsize) {
1072 u8 *iv = (u8 *)(sg_table + qm_sg_ents);
1073
1074
1075 memcpy(iv, req->iv, ivsize);
1076
1077 iv_dma = dma_map_single(qidev, iv, ivsize, DMA_TO_DEVICE);
1078 if (dma_mapping_error(qidev, iv_dma)) {
1079 dev_err(qidev, "unable to map IV\n");
1080 caam_unmap(qidev, req->src, req->dst, src_nents,
1081 dst_nents, 0, 0, DMA_NONE, 0, 0);
1082 qi_cache_free(edesc);
1083 return ERR_PTR(-ENOMEM);
1084 }
1085 }
1086
1087 edesc->src_nents = src_nents;
1088 edesc->dst_nents = dst_nents;
1089 edesc->iv_dma = iv_dma;
1090 edesc->drv_req.app_ctx = req;
1091 edesc->drv_req.cbk = aead_done;
1092 edesc->drv_req.drv_ctx = drv_ctx;
1093
1094 edesc->assoclen = cpu_to_caam32(req->assoclen);
1095 edesc->assoclen_dma = dma_map_single(qidev, &edesc->assoclen, 4,
1096 DMA_TO_DEVICE);
1097 if (dma_mapping_error(qidev, edesc->assoclen_dma)) {
1098 dev_err(qidev, "unable to map assoclen\n");
1099 caam_unmap(qidev, req->src, req->dst, src_nents, dst_nents,
1100 iv_dma, ivsize, DMA_TO_DEVICE, 0, 0);
1101 qi_cache_free(edesc);
1102 return ERR_PTR(-ENOMEM);
1103 }
1104
1105 dma_to_qm_sg_one(sg_table, edesc->assoclen_dma, 4, 0);
1106 qm_sg_index++;
1107 if (ivsize) {
1108 dma_to_qm_sg_one(sg_table + qm_sg_index, iv_dma, ivsize, 0);
1109 qm_sg_index++;
1110 }
1111 sg_to_qm_sg_last(req->src, src_len, sg_table + qm_sg_index, 0);
1112 qm_sg_index += mapped_src_nents;
1113
1114 if (mapped_dst_nents > 1)
1115 sg_to_qm_sg_last(req->dst, dst_len, sg_table + qm_sg_index, 0);
1116
1117 qm_sg_dma = dma_map_single(qidev, sg_table, qm_sg_bytes, DMA_TO_DEVICE);
1118 if (dma_mapping_error(qidev, qm_sg_dma)) {
1119 dev_err(qidev, "unable to map S/G table\n");
1120 dma_unmap_single(qidev, edesc->assoclen_dma, 4, DMA_TO_DEVICE);
1121 caam_unmap(qidev, req->src, req->dst, src_nents, dst_nents,
1122 iv_dma, ivsize, DMA_TO_DEVICE, 0, 0);
1123 qi_cache_free(edesc);
1124 return ERR_PTR(-ENOMEM);
1125 }
1126
1127 edesc->qm_sg_dma = qm_sg_dma;
1128 edesc->qm_sg_bytes = qm_sg_bytes;
1129
1130 out_len = req->assoclen + req->cryptlen +
1131 (encrypt ? ctx->authsize : (-ctx->authsize));
1132 in_len = 4 + ivsize + req->assoclen + req->cryptlen;
1133
1134 fd_sgt = &edesc->drv_req.fd_sgt[0];
1135 dma_to_qm_sg_one_last_ext(&fd_sgt[1], qm_sg_dma, in_len, 0);
1136
1137 if (req->dst == req->src) {
1138 if (mapped_src_nents == 1)
1139 dma_to_qm_sg_one(&fd_sgt[0], sg_dma_address(req->src),
1140 out_len, 0);
1141 else
1142 dma_to_qm_sg_one_ext(&fd_sgt[0], qm_sg_dma +
1143 (1 + !!ivsize) * sizeof(*sg_table),
1144 out_len, 0);
1145 } else if (mapped_dst_nents <= 1) {
1146 dma_to_qm_sg_one(&fd_sgt[0], sg_dma_address(req->dst), out_len,
1147 0);
1148 } else {
1149 dma_to_qm_sg_one_ext(&fd_sgt[0], qm_sg_dma + sizeof(*sg_table) *
1150 qm_sg_index, out_len, 0);
1151 }
1152
1153 return edesc;
1154 }
1155
1156 static inline int aead_crypt(struct aead_request *req, bool encrypt)
1157 {
1158 struct aead_edesc *edesc;
1159 struct crypto_aead *aead = crypto_aead_reqtfm(req);
1160 struct caam_ctx *ctx = crypto_aead_ctx(aead);
1161 int ret;
1162
1163 if (unlikely(caam_congested))
1164 return -EAGAIN;
1165
1166
1167 edesc = aead_edesc_alloc(req, encrypt);
1168 if (IS_ERR(edesc))
1169 return PTR_ERR(edesc);
1170
1171
1172 ret = caam_qi_enqueue(ctx->qidev, &edesc->drv_req);
1173 if (!ret) {
1174 ret = -EINPROGRESS;
1175 } else {
1176 aead_unmap(ctx->qidev, edesc, req);
1177 qi_cache_free(edesc);
1178 }
1179
1180 return ret;
1181 }
1182
1183 static int aead_encrypt(struct aead_request *req)
1184 {
1185 return aead_crypt(req, true);
1186 }
1187
1188 static int aead_decrypt(struct aead_request *req)
1189 {
1190 return aead_crypt(req, false);
1191 }
1192
1193 static int ipsec_gcm_encrypt(struct aead_request *req)
1194 {
1195 return crypto_ipsec_check_assoclen(req->assoclen) ? : aead_crypt(req,
1196 true);
1197 }
1198
1199 static int ipsec_gcm_decrypt(struct aead_request *req)
1200 {
1201 return crypto_ipsec_check_assoclen(req->assoclen) ? : aead_crypt(req,
1202 false);
1203 }
1204
1205 static void skcipher_done(struct caam_drv_req *drv_req, u32 status)
1206 {
1207 struct skcipher_edesc *edesc;
1208 struct skcipher_request *req = drv_req->app_ctx;
1209 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
1210 struct caam_ctx *caam_ctx = crypto_skcipher_ctx(skcipher);
1211 struct device *qidev = caam_ctx->qidev;
1212 int ivsize = crypto_skcipher_ivsize(skcipher);
1213 int ecode = 0;
1214
1215 dev_dbg(qidev, "%s %d: status 0x%x\n", __func__, __LINE__, status);
1216
1217 edesc = container_of(drv_req, typeof(*edesc), drv_req);
1218
1219 if (status)
1220 ecode = caam_jr_strstatus(qidev, status);
1221
1222 print_hex_dump_debug("dstiv @" __stringify(__LINE__)": ",
1223 DUMP_PREFIX_ADDRESS, 16, 4, req->iv,
1224 edesc->src_nents > 1 ? 100 : ivsize, 1);
1225 caam_dump_sg("dst @" __stringify(__LINE__)": ",
1226 DUMP_PREFIX_ADDRESS, 16, 4, req->dst,
1227 edesc->dst_nents > 1 ? 100 : req->cryptlen, 1);
1228
1229 skcipher_unmap(qidev, edesc, req);
1230
1231
1232
1233
1234
1235
1236 if (!ecode)
1237 memcpy(req->iv, (u8 *)&edesc->sgt[0] + edesc->qm_sg_bytes,
1238 ivsize);
1239
1240 qi_cache_free(edesc);
1241 skcipher_request_complete(req, ecode);
1242 }
1243
1244 static struct skcipher_edesc *skcipher_edesc_alloc(struct skcipher_request *req,
1245 bool encrypt)
1246 {
1247 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
1248 struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
1249 struct device *qidev = ctx->qidev;
1250 gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
1251 GFP_KERNEL : GFP_ATOMIC;
1252 int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0;
1253 struct skcipher_edesc *edesc;
1254 dma_addr_t iv_dma;
1255 u8 *iv;
1256 int ivsize = crypto_skcipher_ivsize(skcipher);
1257 int dst_sg_idx, qm_sg_ents, qm_sg_bytes;
1258 struct qm_sg_entry *sg_table, *fd_sgt;
1259 struct caam_drv_ctx *drv_ctx;
1260
1261 drv_ctx = get_drv_ctx(ctx, encrypt ? ENCRYPT : DECRYPT);
1262 if (IS_ERR(drv_ctx))
1263 return (struct skcipher_edesc *)drv_ctx;
1264
1265 src_nents = sg_nents_for_len(req->src, req->cryptlen);
1266 if (unlikely(src_nents < 0)) {
1267 dev_err(qidev, "Insufficient bytes (%d) in src S/G\n",
1268 req->cryptlen);
1269 return ERR_PTR(src_nents);
1270 }
1271
1272 if (unlikely(req->src != req->dst)) {
1273 dst_nents = sg_nents_for_len(req->dst, req->cryptlen);
1274 if (unlikely(dst_nents < 0)) {
1275 dev_err(qidev, "Insufficient bytes (%d) in dst S/G\n",
1276 req->cryptlen);
1277 return ERR_PTR(dst_nents);
1278 }
1279
1280 mapped_src_nents = dma_map_sg(qidev, req->src, src_nents,
1281 DMA_TO_DEVICE);
1282 if (unlikely(!mapped_src_nents)) {
1283 dev_err(qidev, "unable to map source\n");
1284 return ERR_PTR(-ENOMEM);
1285 }
1286
1287 mapped_dst_nents = dma_map_sg(qidev, req->dst, dst_nents,
1288 DMA_FROM_DEVICE);
1289 if (unlikely(!mapped_dst_nents)) {
1290 dev_err(qidev, "unable to map destination\n");
1291 dma_unmap_sg(qidev, req->src, src_nents, DMA_TO_DEVICE);
1292 return ERR_PTR(-ENOMEM);
1293 }
1294 } else {
1295 mapped_src_nents = dma_map_sg(qidev, req->src, src_nents,
1296 DMA_BIDIRECTIONAL);
1297 if (unlikely(!mapped_src_nents)) {
1298 dev_err(qidev, "unable to map source\n");
1299 return ERR_PTR(-ENOMEM);
1300 }
1301 }
1302
1303 qm_sg_ents = 1 + mapped_src_nents;
1304 dst_sg_idx = qm_sg_ents;
1305
1306
1307
1308
1309
1310
1311
1312
1313
1314 if (req->src != req->dst)
1315 qm_sg_ents += pad_sg_nents(mapped_dst_nents + 1);
1316 else
1317 qm_sg_ents = 1 + pad_sg_nents(qm_sg_ents);
1318
1319 qm_sg_bytes = qm_sg_ents * sizeof(struct qm_sg_entry);
1320 if (unlikely(offsetof(struct skcipher_edesc, sgt) + qm_sg_bytes +
1321 ivsize > CAAM_QI_MEMCACHE_SIZE)) {
1322 dev_err(qidev, "No space for %d S/G entries and/or %dB IV\n",
1323 qm_sg_ents, ivsize);
1324 caam_unmap(qidev, req->src, req->dst, src_nents, dst_nents, 0,
1325 0, DMA_NONE, 0, 0);
1326 return ERR_PTR(-ENOMEM);
1327 }
1328
1329
1330 edesc = qi_cache_alloc(GFP_DMA | flags);
1331 if (unlikely(!edesc)) {
1332 dev_err(qidev, "could not allocate extended descriptor\n");
1333 caam_unmap(qidev, req->src, req->dst, src_nents, dst_nents, 0,
1334 0, DMA_NONE, 0, 0);
1335 return ERR_PTR(-ENOMEM);
1336 }
1337
1338
1339 sg_table = &edesc->sgt[0];
1340 iv = (u8 *)(sg_table + qm_sg_ents);
1341 memcpy(iv, req->iv, ivsize);
1342
1343 iv_dma = dma_map_single(qidev, iv, ivsize, DMA_BIDIRECTIONAL);
1344 if (dma_mapping_error(qidev, iv_dma)) {
1345 dev_err(qidev, "unable to map IV\n");
1346 caam_unmap(qidev, req->src, req->dst, src_nents, dst_nents, 0,
1347 0, DMA_NONE, 0, 0);
1348 qi_cache_free(edesc);
1349 return ERR_PTR(-ENOMEM);
1350 }
1351
1352 edesc->src_nents = src_nents;
1353 edesc->dst_nents = dst_nents;
1354 edesc->iv_dma = iv_dma;
1355 edesc->qm_sg_bytes = qm_sg_bytes;
1356 edesc->drv_req.app_ctx = req;
1357 edesc->drv_req.cbk = skcipher_done;
1358 edesc->drv_req.drv_ctx = drv_ctx;
1359
1360 dma_to_qm_sg_one(sg_table, iv_dma, ivsize, 0);
1361 sg_to_qm_sg(req->src, req->cryptlen, sg_table + 1, 0);
1362
1363 if (req->src != req->dst)
1364 sg_to_qm_sg(req->dst, req->cryptlen, sg_table + dst_sg_idx, 0);
1365
1366 dma_to_qm_sg_one(sg_table + dst_sg_idx + mapped_dst_nents, iv_dma,
1367 ivsize, 0);
1368
1369 edesc->qm_sg_dma = dma_map_single(qidev, sg_table, edesc->qm_sg_bytes,
1370 DMA_TO_DEVICE);
1371 if (dma_mapping_error(qidev, edesc->qm_sg_dma)) {
1372 dev_err(qidev, "unable to map S/G table\n");
1373 caam_unmap(qidev, req->src, req->dst, src_nents, dst_nents,
1374 iv_dma, ivsize, DMA_BIDIRECTIONAL, 0, 0);
1375 qi_cache_free(edesc);
1376 return ERR_PTR(-ENOMEM);
1377 }
1378
1379 fd_sgt = &edesc->drv_req.fd_sgt[0];
1380
1381 dma_to_qm_sg_one_last_ext(&fd_sgt[1], edesc->qm_sg_dma,
1382 ivsize + req->cryptlen, 0);
1383
1384 if (req->src == req->dst)
1385 dma_to_qm_sg_one_ext(&fd_sgt[0], edesc->qm_sg_dma +
1386 sizeof(*sg_table), req->cryptlen + ivsize,
1387 0);
1388 else
1389 dma_to_qm_sg_one_ext(&fd_sgt[0], edesc->qm_sg_dma + dst_sg_idx *
1390 sizeof(*sg_table), req->cryptlen + ivsize,
1391 0);
1392
1393 return edesc;
1394 }
1395
1396 static inline bool xts_skcipher_ivsize(struct skcipher_request *req)
1397 {
1398 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
1399 unsigned int ivsize = crypto_skcipher_ivsize(skcipher);
1400
1401 return !!get_unaligned((u64 *)(req->iv + (ivsize / 2)));
1402 }
1403
1404 static inline int skcipher_crypt(struct skcipher_request *req, bool encrypt)
1405 {
1406 struct skcipher_edesc *edesc;
1407 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
1408 struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
1409 struct caam_drv_private *ctrlpriv = dev_get_drvdata(ctx->jrdev->parent);
1410 int ret;
1411
1412
1413
1414
1415
1416
1417 if (!req->cryptlen && !ctx->fallback)
1418 return 0;
1419
1420 if (ctx->fallback && ((ctrlpriv->era <= 8 && xts_skcipher_ivsize(req)) ||
1421 ctx->xts_key_fallback)) {
1422 struct caam_skcipher_req_ctx *rctx = skcipher_request_ctx(req);
1423
1424 skcipher_request_set_tfm(&rctx->fallback_req, ctx->fallback);
1425 skcipher_request_set_callback(&rctx->fallback_req,
1426 req->base.flags,
1427 req->base.complete,
1428 req->base.data);
1429 skcipher_request_set_crypt(&rctx->fallback_req, req->src,
1430 req->dst, req->cryptlen, req->iv);
1431
1432 return encrypt ? crypto_skcipher_encrypt(&rctx->fallback_req) :
1433 crypto_skcipher_decrypt(&rctx->fallback_req);
1434 }
1435
1436 if (unlikely(caam_congested))
1437 return -EAGAIN;
1438
1439
1440 edesc = skcipher_edesc_alloc(req, encrypt);
1441 if (IS_ERR(edesc))
1442 return PTR_ERR(edesc);
1443
1444 ret = caam_qi_enqueue(ctx->qidev, &edesc->drv_req);
1445 if (!ret) {
1446 ret = -EINPROGRESS;
1447 } else {
1448 skcipher_unmap(ctx->qidev, edesc, req);
1449 qi_cache_free(edesc);
1450 }
1451
1452 return ret;
1453 }
1454
1455 static int skcipher_encrypt(struct skcipher_request *req)
1456 {
1457 return skcipher_crypt(req, true);
1458 }
1459
1460 static int skcipher_decrypt(struct skcipher_request *req)
1461 {
1462 return skcipher_crypt(req, false);
1463 }
1464
1465 static struct caam_skcipher_alg driver_algs[] = {
1466 {
1467 .skcipher = {
1468 .base = {
1469 .cra_name = "cbc(aes)",
1470 .cra_driver_name = "cbc-aes-caam-qi",
1471 .cra_blocksize = AES_BLOCK_SIZE,
1472 },
1473 .setkey = aes_skcipher_setkey,
1474 .encrypt = skcipher_encrypt,
1475 .decrypt = skcipher_decrypt,
1476 .min_keysize = AES_MIN_KEY_SIZE,
1477 .max_keysize = AES_MAX_KEY_SIZE,
1478 .ivsize = AES_BLOCK_SIZE,
1479 },
1480 .caam.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
1481 },
1482 {
1483 .skcipher = {
1484 .base = {
1485 .cra_name = "cbc(des3_ede)",
1486 .cra_driver_name = "cbc-3des-caam-qi",
1487 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
1488 },
1489 .setkey = des3_skcipher_setkey,
1490 .encrypt = skcipher_encrypt,
1491 .decrypt = skcipher_decrypt,
1492 .min_keysize = DES3_EDE_KEY_SIZE,
1493 .max_keysize = DES3_EDE_KEY_SIZE,
1494 .ivsize = DES3_EDE_BLOCK_SIZE,
1495 },
1496 .caam.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
1497 },
1498 {
1499 .skcipher = {
1500 .base = {
1501 .cra_name = "cbc(des)",
1502 .cra_driver_name = "cbc-des-caam-qi",
1503 .cra_blocksize = DES_BLOCK_SIZE,
1504 },
1505 .setkey = des_skcipher_setkey,
1506 .encrypt = skcipher_encrypt,
1507 .decrypt = skcipher_decrypt,
1508 .min_keysize = DES_KEY_SIZE,
1509 .max_keysize = DES_KEY_SIZE,
1510 .ivsize = DES_BLOCK_SIZE,
1511 },
1512 .caam.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
1513 },
1514 {
1515 .skcipher = {
1516 .base = {
1517 .cra_name = "ctr(aes)",
1518 .cra_driver_name = "ctr-aes-caam-qi",
1519 .cra_blocksize = 1,
1520 },
1521 .setkey = ctr_skcipher_setkey,
1522 .encrypt = skcipher_encrypt,
1523 .decrypt = skcipher_decrypt,
1524 .min_keysize = AES_MIN_KEY_SIZE,
1525 .max_keysize = AES_MAX_KEY_SIZE,
1526 .ivsize = AES_BLOCK_SIZE,
1527 .chunksize = AES_BLOCK_SIZE,
1528 },
1529 .caam.class1_alg_type = OP_ALG_ALGSEL_AES |
1530 OP_ALG_AAI_CTR_MOD128,
1531 },
1532 {
1533 .skcipher = {
1534 .base = {
1535 .cra_name = "rfc3686(ctr(aes))",
1536 .cra_driver_name = "rfc3686-ctr-aes-caam-qi",
1537 .cra_blocksize = 1,
1538 },
1539 .setkey = rfc3686_skcipher_setkey,
1540 .encrypt = skcipher_encrypt,
1541 .decrypt = skcipher_decrypt,
1542 .min_keysize = AES_MIN_KEY_SIZE +
1543 CTR_RFC3686_NONCE_SIZE,
1544 .max_keysize = AES_MAX_KEY_SIZE +
1545 CTR_RFC3686_NONCE_SIZE,
1546 .ivsize = CTR_RFC3686_IV_SIZE,
1547 .chunksize = AES_BLOCK_SIZE,
1548 },
1549 .caam = {
1550 .class1_alg_type = OP_ALG_ALGSEL_AES |
1551 OP_ALG_AAI_CTR_MOD128,
1552 .rfc3686 = true,
1553 },
1554 },
1555 {
1556 .skcipher = {
1557 .base = {
1558 .cra_name = "xts(aes)",
1559 .cra_driver_name = "xts-aes-caam-qi",
1560 .cra_flags = CRYPTO_ALG_NEED_FALLBACK,
1561 .cra_blocksize = AES_BLOCK_SIZE,
1562 },
1563 .setkey = xts_skcipher_setkey,
1564 .encrypt = skcipher_encrypt,
1565 .decrypt = skcipher_decrypt,
1566 .min_keysize = 2 * AES_MIN_KEY_SIZE,
1567 .max_keysize = 2 * AES_MAX_KEY_SIZE,
1568 .ivsize = AES_BLOCK_SIZE,
1569 },
1570 .caam.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_XTS,
1571 },
1572 };
1573
1574 static struct caam_aead_alg driver_aeads[] = {
1575 {
1576 .aead = {
1577 .base = {
1578 .cra_name = "rfc4106(gcm(aes))",
1579 .cra_driver_name = "rfc4106-gcm-aes-caam-qi",
1580 .cra_blocksize = 1,
1581 },
1582 .setkey = rfc4106_setkey,
1583 .setauthsize = rfc4106_setauthsize,
1584 .encrypt = ipsec_gcm_encrypt,
1585 .decrypt = ipsec_gcm_decrypt,
1586 .ivsize = 8,
1587 .maxauthsize = AES_BLOCK_SIZE,
1588 },
1589 .caam = {
1590 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
1591 .nodkp = true,
1592 },
1593 },
1594 {
1595 .aead = {
1596 .base = {
1597 .cra_name = "rfc4543(gcm(aes))",
1598 .cra_driver_name = "rfc4543-gcm-aes-caam-qi",
1599 .cra_blocksize = 1,
1600 },
1601 .setkey = rfc4543_setkey,
1602 .setauthsize = rfc4543_setauthsize,
1603 .encrypt = ipsec_gcm_encrypt,
1604 .decrypt = ipsec_gcm_decrypt,
1605 .ivsize = 8,
1606 .maxauthsize = AES_BLOCK_SIZE,
1607 },
1608 .caam = {
1609 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
1610 .nodkp = true,
1611 },
1612 },
1613
1614 {
1615 .aead = {
1616 .base = {
1617 .cra_name = "gcm(aes)",
1618 .cra_driver_name = "gcm-aes-caam-qi",
1619 .cra_blocksize = 1,
1620 },
1621 .setkey = gcm_setkey,
1622 .setauthsize = gcm_setauthsize,
1623 .encrypt = aead_encrypt,
1624 .decrypt = aead_decrypt,
1625 .ivsize = 12,
1626 .maxauthsize = AES_BLOCK_SIZE,
1627 },
1628 .caam = {
1629 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
1630 .nodkp = true,
1631 }
1632 },
1633
1634 {
1635 .aead = {
1636 .base = {
1637 .cra_name = "authenc(hmac(md5),cbc(aes))",
1638 .cra_driver_name = "authenc-hmac-md5-"
1639 "cbc-aes-caam-qi",
1640 .cra_blocksize = AES_BLOCK_SIZE,
1641 },
1642 .setkey = aead_setkey,
1643 .setauthsize = aead_setauthsize,
1644 .encrypt = aead_encrypt,
1645 .decrypt = aead_decrypt,
1646 .ivsize = AES_BLOCK_SIZE,
1647 .maxauthsize = MD5_DIGEST_SIZE,
1648 },
1649 .caam = {
1650 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
1651 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
1652 OP_ALG_AAI_HMAC_PRECOMP,
1653 }
1654 },
1655 {
1656 .aead = {
1657 .base = {
1658 .cra_name = "echainiv(authenc(hmac(md5),"
1659 "cbc(aes)))",
1660 .cra_driver_name = "echainiv-authenc-hmac-md5-"
1661 "cbc-aes-caam-qi",
1662 .cra_blocksize = AES_BLOCK_SIZE,
1663 },
1664 .setkey = aead_setkey,
1665 .setauthsize = aead_setauthsize,
1666 .encrypt = aead_encrypt,
1667 .decrypt = aead_decrypt,
1668 .ivsize = AES_BLOCK_SIZE,
1669 .maxauthsize = MD5_DIGEST_SIZE,
1670 },
1671 .caam = {
1672 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
1673 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
1674 OP_ALG_AAI_HMAC_PRECOMP,
1675 .geniv = true,
1676 }
1677 },
1678 {
1679 .aead = {
1680 .base = {
1681 .cra_name = "authenc(hmac(sha1),cbc(aes))",
1682 .cra_driver_name = "authenc-hmac-sha1-"
1683 "cbc-aes-caam-qi",
1684 .cra_blocksize = AES_BLOCK_SIZE,
1685 },
1686 .setkey = aead_setkey,
1687 .setauthsize = aead_setauthsize,
1688 .encrypt = aead_encrypt,
1689 .decrypt = aead_decrypt,
1690 .ivsize = AES_BLOCK_SIZE,
1691 .maxauthsize = SHA1_DIGEST_SIZE,
1692 },
1693 .caam = {
1694 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
1695 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
1696 OP_ALG_AAI_HMAC_PRECOMP,
1697 }
1698 },
1699 {
1700 .aead = {
1701 .base = {
1702 .cra_name = "echainiv(authenc(hmac(sha1),"
1703 "cbc(aes)))",
1704 .cra_driver_name = "echainiv-authenc-"
1705 "hmac-sha1-cbc-aes-caam-qi",
1706 .cra_blocksize = AES_BLOCK_SIZE,
1707 },
1708 .setkey = aead_setkey,
1709 .setauthsize = aead_setauthsize,
1710 .encrypt = aead_encrypt,
1711 .decrypt = aead_decrypt,
1712 .ivsize = AES_BLOCK_SIZE,
1713 .maxauthsize = SHA1_DIGEST_SIZE,
1714 },
1715 .caam = {
1716 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
1717 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
1718 OP_ALG_AAI_HMAC_PRECOMP,
1719 .geniv = true,
1720 },
1721 },
1722 {
1723 .aead = {
1724 .base = {
1725 .cra_name = "authenc(hmac(sha224),cbc(aes))",
1726 .cra_driver_name = "authenc-hmac-sha224-"
1727 "cbc-aes-caam-qi",
1728 .cra_blocksize = AES_BLOCK_SIZE,
1729 },
1730 .setkey = aead_setkey,
1731 .setauthsize = aead_setauthsize,
1732 .encrypt = aead_encrypt,
1733 .decrypt = aead_decrypt,
1734 .ivsize = AES_BLOCK_SIZE,
1735 .maxauthsize = SHA224_DIGEST_SIZE,
1736 },
1737 .caam = {
1738 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
1739 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
1740 OP_ALG_AAI_HMAC_PRECOMP,
1741 }
1742 },
1743 {
1744 .aead = {
1745 .base = {
1746 .cra_name = "echainiv(authenc(hmac(sha224),"
1747 "cbc(aes)))",
1748 .cra_driver_name = "echainiv-authenc-"
1749 "hmac-sha224-cbc-aes-caam-qi",
1750 .cra_blocksize = AES_BLOCK_SIZE,
1751 },
1752 .setkey = aead_setkey,
1753 .setauthsize = aead_setauthsize,
1754 .encrypt = aead_encrypt,
1755 .decrypt = aead_decrypt,
1756 .ivsize = AES_BLOCK_SIZE,
1757 .maxauthsize = SHA224_DIGEST_SIZE,
1758 },
1759 .caam = {
1760 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
1761 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
1762 OP_ALG_AAI_HMAC_PRECOMP,
1763 .geniv = true,
1764 }
1765 },
1766 {
1767 .aead = {
1768 .base = {
1769 .cra_name = "authenc(hmac(sha256),cbc(aes))",
1770 .cra_driver_name = "authenc-hmac-sha256-"
1771 "cbc-aes-caam-qi",
1772 .cra_blocksize = AES_BLOCK_SIZE,
1773 },
1774 .setkey = aead_setkey,
1775 .setauthsize = aead_setauthsize,
1776 .encrypt = aead_encrypt,
1777 .decrypt = aead_decrypt,
1778 .ivsize = AES_BLOCK_SIZE,
1779 .maxauthsize = SHA256_DIGEST_SIZE,
1780 },
1781 .caam = {
1782 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
1783 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
1784 OP_ALG_AAI_HMAC_PRECOMP,
1785 }
1786 },
1787 {
1788 .aead = {
1789 .base = {
1790 .cra_name = "echainiv(authenc(hmac(sha256),"
1791 "cbc(aes)))",
1792 .cra_driver_name = "echainiv-authenc-"
1793 "hmac-sha256-cbc-aes-"
1794 "caam-qi",
1795 .cra_blocksize = AES_BLOCK_SIZE,
1796 },
1797 .setkey = aead_setkey,
1798 .setauthsize = aead_setauthsize,
1799 .encrypt = aead_encrypt,
1800 .decrypt = aead_decrypt,
1801 .ivsize = AES_BLOCK_SIZE,
1802 .maxauthsize = SHA256_DIGEST_SIZE,
1803 },
1804 .caam = {
1805 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
1806 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
1807 OP_ALG_AAI_HMAC_PRECOMP,
1808 .geniv = true,
1809 }
1810 },
1811 {
1812 .aead = {
1813 .base = {
1814 .cra_name = "authenc(hmac(sha384),cbc(aes))",
1815 .cra_driver_name = "authenc-hmac-sha384-"
1816 "cbc-aes-caam-qi",
1817 .cra_blocksize = AES_BLOCK_SIZE,
1818 },
1819 .setkey = aead_setkey,
1820 .setauthsize = aead_setauthsize,
1821 .encrypt = aead_encrypt,
1822 .decrypt = aead_decrypt,
1823 .ivsize = AES_BLOCK_SIZE,
1824 .maxauthsize = SHA384_DIGEST_SIZE,
1825 },
1826 .caam = {
1827 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
1828 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
1829 OP_ALG_AAI_HMAC_PRECOMP,
1830 }
1831 },
1832 {
1833 .aead = {
1834 .base = {
1835 .cra_name = "echainiv(authenc(hmac(sha384),"
1836 "cbc(aes)))",
1837 .cra_driver_name = "echainiv-authenc-"
1838 "hmac-sha384-cbc-aes-"
1839 "caam-qi",
1840 .cra_blocksize = AES_BLOCK_SIZE,
1841 },
1842 .setkey = aead_setkey,
1843 .setauthsize = aead_setauthsize,
1844 .encrypt = aead_encrypt,
1845 .decrypt = aead_decrypt,
1846 .ivsize = AES_BLOCK_SIZE,
1847 .maxauthsize = SHA384_DIGEST_SIZE,
1848 },
1849 .caam = {
1850 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
1851 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
1852 OP_ALG_AAI_HMAC_PRECOMP,
1853 .geniv = true,
1854 }
1855 },
1856 {
1857 .aead = {
1858 .base = {
1859 .cra_name = "authenc(hmac(sha512),cbc(aes))",
1860 .cra_driver_name = "authenc-hmac-sha512-"
1861 "cbc-aes-caam-qi",
1862 .cra_blocksize = AES_BLOCK_SIZE,
1863 },
1864 .setkey = aead_setkey,
1865 .setauthsize = aead_setauthsize,
1866 .encrypt = aead_encrypt,
1867 .decrypt = aead_decrypt,
1868 .ivsize = AES_BLOCK_SIZE,
1869 .maxauthsize = SHA512_DIGEST_SIZE,
1870 },
1871 .caam = {
1872 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
1873 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
1874 OP_ALG_AAI_HMAC_PRECOMP,
1875 }
1876 },
1877 {
1878 .aead = {
1879 .base = {
1880 .cra_name = "echainiv(authenc(hmac(sha512),"
1881 "cbc(aes)))",
1882 .cra_driver_name = "echainiv-authenc-"
1883 "hmac-sha512-cbc-aes-"
1884 "caam-qi",
1885 .cra_blocksize = AES_BLOCK_SIZE,
1886 },
1887 .setkey = aead_setkey,
1888 .setauthsize = aead_setauthsize,
1889 .encrypt = aead_encrypt,
1890 .decrypt = aead_decrypt,
1891 .ivsize = AES_BLOCK_SIZE,
1892 .maxauthsize = SHA512_DIGEST_SIZE,
1893 },
1894 .caam = {
1895 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
1896 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
1897 OP_ALG_AAI_HMAC_PRECOMP,
1898 .geniv = true,
1899 }
1900 },
1901 {
1902 .aead = {
1903 .base = {
1904 .cra_name = "authenc(hmac(md5),cbc(des3_ede))",
1905 .cra_driver_name = "authenc-hmac-md5-"
1906 "cbc-des3_ede-caam-qi",
1907 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
1908 },
1909 .setkey = des3_aead_setkey,
1910 .setauthsize = aead_setauthsize,
1911 .encrypt = aead_encrypt,
1912 .decrypt = aead_decrypt,
1913 .ivsize = DES3_EDE_BLOCK_SIZE,
1914 .maxauthsize = MD5_DIGEST_SIZE,
1915 },
1916 .caam = {
1917 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
1918 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
1919 OP_ALG_AAI_HMAC_PRECOMP,
1920 }
1921 },
1922 {
1923 .aead = {
1924 .base = {
1925 .cra_name = "echainiv(authenc(hmac(md5),"
1926 "cbc(des3_ede)))",
1927 .cra_driver_name = "echainiv-authenc-hmac-md5-"
1928 "cbc-des3_ede-caam-qi",
1929 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
1930 },
1931 .setkey = des3_aead_setkey,
1932 .setauthsize = aead_setauthsize,
1933 .encrypt = aead_encrypt,
1934 .decrypt = aead_decrypt,
1935 .ivsize = DES3_EDE_BLOCK_SIZE,
1936 .maxauthsize = MD5_DIGEST_SIZE,
1937 },
1938 .caam = {
1939 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
1940 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
1941 OP_ALG_AAI_HMAC_PRECOMP,
1942 .geniv = true,
1943 }
1944 },
1945 {
1946 .aead = {
1947 .base = {
1948 .cra_name = "authenc(hmac(sha1),"
1949 "cbc(des3_ede))",
1950 .cra_driver_name = "authenc-hmac-sha1-"
1951 "cbc-des3_ede-caam-qi",
1952 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
1953 },
1954 .setkey = des3_aead_setkey,
1955 .setauthsize = aead_setauthsize,
1956 .encrypt = aead_encrypt,
1957 .decrypt = aead_decrypt,
1958 .ivsize = DES3_EDE_BLOCK_SIZE,
1959 .maxauthsize = SHA1_DIGEST_SIZE,
1960 },
1961 .caam = {
1962 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
1963 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
1964 OP_ALG_AAI_HMAC_PRECOMP,
1965 },
1966 },
1967 {
1968 .aead = {
1969 .base = {
1970 .cra_name = "echainiv(authenc(hmac(sha1),"
1971 "cbc(des3_ede)))",
1972 .cra_driver_name = "echainiv-authenc-"
1973 "hmac-sha1-"
1974 "cbc-des3_ede-caam-qi",
1975 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
1976 },
1977 .setkey = des3_aead_setkey,
1978 .setauthsize = aead_setauthsize,
1979 .encrypt = aead_encrypt,
1980 .decrypt = aead_decrypt,
1981 .ivsize = DES3_EDE_BLOCK_SIZE,
1982 .maxauthsize = SHA1_DIGEST_SIZE,
1983 },
1984 .caam = {
1985 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
1986 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
1987 OP_ALG_AAI_HMAC_PRECOMP,
1988 .geniv = true,
1989 }
1990 },
1991 {
1992 .aead = {
1993 .base = {
1994 .cra_name = "authenc(hmac(sha224),"
1995 "cbc(des3_ede))",
1996 .cra_driver_name = "authenc-hmac-sha224-"
1997 "cbc-des3_ede-caam-qi",
1998 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
1999 },
2000 .setkey = des3_aead_setkey,
2001 .setauthsize = aead_setauthsize,
2002 .encrypt = aead_encrypt,
2003 .decrypt = aead_decrypt,
2004 .ivsize = DES3_EDE_BLOCK_SIZE,
2005 .maxauthsize = SHA224_DIGEST_SIZE,
2006 },
2007 .caam = {
2008 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2009 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2010 OP_ALG_AAI_HMAC_PRECOMP,
2011 },
2012 },
2013 {
2014 .aead = {
2015 .base = {
2016 .cra_name = "echainiv(authenc(hmac(sha224),"
2017 "cbc(des3_ede)))",
2018 .cra_driver_name = "echainiv-authenc-"
2019 "hmac-sha224-"
2020 "cbc-des3_ede-caam-qi",
2021 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2022 },
2023 .setkey = des3_aead_setkey,
2024 .setauthsize = aead_setauthsize,
2025 .encrypt = aead_encrypt,
2026 .decrypt = aead_decrypt,
2027 .ivsize = DES3_EDE_BLOCK_SIZE,
2028 .maxauthsize = SHA224_DIGEST_SIZE,
2029 },
2030 .caam = {
2031 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2032 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2033 OP_ALG_AAI_HMAC_PRECOMP,
2034 .geniv = true,
2035 }
2036 },
2037 {
2038 .aead = {
2039 .base = {
2040 .cra_name = "authenc(hmac(sha256),"
2041 "cbc(des3_ede))",
2042 .cra_driver_name = "authenc-hmac-sha256-"
2043 "cbc-des3_ede-caam-qi",
2044 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2045 },
2046 .setkey = des3_aead_setkey,
2047 .setauthsize = aead_setauthsize,
2048 .encrypt = aead_encrypt,
2049 .decrypt = aead_decrypt,
2050 .ivsize = DES3_EDE_BLOCK_SIZE,
2051 .maxauthsize = SHA256_DIGEST_SIZE,
2052 },
2053 .caam = {
2054 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2055 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2056 OP_ALG_AAI_HMAC_PRECOMP,
2057 },
2058 },
2059 {
2060 .aead = {
2061 .base = {
2062 .cra_name = "echainiv(authenc(hmac(sha256),"
2063 "cbc(des3_ede)))",
2064 .cra_driver_name = "echainiv-authenc-"
2065 "hmac-sha256-"
2066 "cbc-des3_ede-caam-qi",
2067 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2068 },
2069 .setkey = des3_aead_setkey,
2070 .setauthsize = aead_setauthsize,
2071 .encrypt = aead_encrypt,
2072 .decrypt = aead_decrypt,
2073 .ivsize = DES3_EDE_BLOCK_SIZE,
2074 .maxauthsize = SHA256_DIGEST_SIZE,
2075 },
2076 .caam = {
2077 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2078 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2079 OP_ALG_AAI_HMAC_PRECOMP,
2080 .geniv = true,
2081 }
2082 },
2083 {
2084 .aead = {
2085 .base = {
2086 .cra_name = "authenc(hmac(sha384),"
2087 "cbc(des3_ede))",
2088 .cra_driver_name = "authenc-hmac-sha384-"
2089 "cbc-des3_ede-caam-qi",
2090 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2091 },
2092 .setkey = des3_aead_setkey,
2093 .setauthsize = aead_setauthsize,
2094 .encrypt = aead_encrypt,
2095 .decrypt = aead_decrypt,
2096 .ivsize = DES3_EDE_BLOCK_SIZE,
2097 .maxauthsize = SHA384_DIGEST_SIZE,
2098 },
2099 .caam = {
2100 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2101 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2102 OP_ALG_AAI_HMAC_PRECOMP,
2103 },
2104 },
2105 {
2106 .aead = {
2107 .base = {
2108 .cra_name = "echainiv(authenc(hmac(sha384),"
2109 "cbc(des3_ede)))",
2110 .cra_driver_name = "echainiv-authenc-"
2111 "hmac-sha384-"
2112 "cbc-des3_ede-caam-qi",
2113 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2114 },
2115 .setkey = des3_aead_setkey,
2116 .setauthsize = aead_setauthsize,
2117 .encrypt = aead_encrypt,
2118 .decrypt = aead_decrypt,
2119 .ivsize = DES3_EDE_BLOCK_SIZE,
2120 .maxauthsize = SHA384_DIGEST_SIZE,
2121 },
2122 .caam = {
2123 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2124 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2125 OP_ALG_AAI_HMAC_PRECOMP,
2126 .geniv = true,
2127 }
2128 },
2129 {
2130 .aead = {
2131 .base = {
2132 .cra_name = "authenc(hmac(sha512),"
2133 "cbc(des3_ede))",
2134 .cra_driver_name = "authenc-hmac-sha512-"
2135 "cbc-des3_ede-caam-qi",
2136 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2137 },
2138 .setkey = des3_aead_setkey,
2139 .setauthsize = aead_setauthsize,
2140 .encrypt = aead_encrypt,
2141 .decrypt = aead_decrypt,
2142 .ivsize = DES3_EDE_BLOCK_SIZE,
2143 .maxauthsize = SHA512_DIGEST_SIZE,
2144 },
2145 .caam = {
2146 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2147 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2148 OP_ALG_AAI_HMAC_PRECOMP,
2149 },
2150 },
2151 {
2152 .aead = {
2153 .base = {
2154 .cra_name = "echainiv(authenc(hmac(sha512),"
2155 "cbc(des3_ede)))",
2156 .cra_driver_name = "echainiv-authenc-"
2157 "hmac-sha512-"
2158 "cbc-des3_ede-caam-qi",
2159 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2160 },
2161 .setkey = des3_aead_setkey,
2162 .setauthsize = aead_setauthsize,
2163 .encrypt = aead_encrypt,
2164 .decrypt = aead_decrypt,
2165 .ivsize = DES3_EDE_BLOCK_SIZE,
2166 .maxauthsize = SHA512_DIGEST_SIZE,
2167 },
2168 .caam = {
2169 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2170 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2171 OP_ALG_AAI_HMAC_PRECOMP,
2172 .geniv = true,
2173 }
2174 },
2175 {
2176 .aead = {
2177 .base = {
2178 .cra_name = "authenc(hmac(md5),cbc(des))",
2179 .cra_driver_name = "authenc-hmac-md5-"
2180 "cbc-des-caam-qi",
2181 .cra_blocksize = DES_BLOCK_SIZE,
2182 },
2183 .setkey = aead_setkey,
2184 .setauthsize = aead_setauthsize,
2185 .encrypt = aead_encrypt,
2186 .decrypt = aead_decrypt,
2187 .ivsize = DES_BLOCK_SIZE,
2188 .maxauthsize = MD5_DIGEST_SIZE,
2189 },
2190 .caam = {
2191 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2192 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
2193 OP_ALG_AAI_HMAC_PRECOMP,
2194 },
2195 },
2196 {
2197 .aead = {
2198 .base = {
2199 .cra_name = "echainiv(authenc(hmac(md5),"
2200 "cbc(des)))",
2201 .cra_driver_name = "echainiv-authenc-hmac-md5-"
2202 "cbc-des-caam-qi",
2203 .cra_blocksize = DES_BLOCK_SIZE,
2204 },
2205 .setkey = aead_setkey,
2206 .setauthsize = aead_setauthsize,
2207 .encrypt = aead_encrypt,
2208 .decrypt = aead_decrypt,
2209 .ivsize = DES_BLOCK_SIZE,
2210 .maxauthsize = MD5_DIGEST_SIZE,
2211 },
2212 .caam = {
2213 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2214 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
2215 OP_ALG_AAI_HMAC_PRECOMP,
2216 .geniv = true,
2217 }
2218 },
2219 {
2220 .aead = {
2221 .base = {
2222 .cra_name = "authenc(hmac(sha1),cbc(des))",
2223 .cra_driver_name = "authenc-hmac-sha1-"
2224 "cbc-des-caam-qi",
2225 .cra_blocksize = DES_BLOCK_SIZE,
2226 },
2227 .setkey = aead_setkey,
2228 .setauthsize = aead_setauthsize,
2229 .encrypt = aead_encrypt,
2230 .decrypt = aead_decrypt,
2231 .ivsize = DES_BLOCK_SIZE,
2232 .maxauthsize = SHA1_DIGEST_SIZE,
2233 },
2234 .caam = {
2235 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2236 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2237 OP_ALG_AAI_HMAC_PRECOMP,
2238 },
2239 },
2240 {
2241 .aead = {
2242 .base = {
2243 .cra_name = "echainiv(authenc(hmac(sha1),"
2244 "cbc(des)))",
2245 .cra_driver_name = "echainiv-authenc-"
2246 "hmac-sha1-cbc-des-caam-qi",
2247 .cra_blocksize = DES_BLOCK_SIZE,
2248 },
2249 .setkey = aead_setkey,
2250 .setauthsize = aead_setauthsize,
2251 .encrypt = aead_encrypt,
2252 .decrypt = aead_decrypt,
2253 .ivsize = DES_BLOCK_SIZE,
2254 .maxauthsize = SHA1_DIGEST_SIZE,
2255 },
2256 .caam = {
2257 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2258 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2259 OP_ALG_AAI_HMAC_PRECOMP,
2260 .geniv = true,
2261 }
2262 },
2263 {
2264 .aead = {
2265 .base = {
2266 .cra_name = "authenc(hmac(sha224),cbc(des))",
2267 .cra_driver_name = "authenc-hmac-sha224-"
2268 "cbc-des-caam-qi",
2269 .cra_blocksize = DES_BLOCK_SIZE,
2270 },
2271 .setkey = aead_setkey,
2272 .setauthsize = aead_setauthsize,
2273 .encrypt = aead_encrypt,
2274 .decrypt = aead_decrypt,
2275 .ivsize = DES_BLOCK_SIZE,
2276 .maxauthsize = SHA224_DIGEST_SIZE,
2277 },
2278 .caam = {
2279 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2280 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2281 OP_ALG_AAI_HMAC_PRECOMP,
2282 },
2283 },
2284 {
2285 .aead = {
2286 .base = {
2287 .cra_name = "echainiv(authenc(hmac(sha224),"
2288 "cbc(des)))",
2289 .cra_driver_name = "echainiv-authenc-"
2290 "hmac-sha224-cbc-des-"
2291 "caam-qi",
2292 .cra_blocksize = DES_BLOCK_SIZE,
2293 },
2294 .setkey = aead_setkey,
2295 .setauthsize = aead_setauthsize,
2296 .encrypt = aead_encrypt,
2297 .decrypt = aead_decrypt,
2298 .ivsize = DES_BLOCK_SIZE,
2299 .maxauthsize = SHA224_DIGEST_SIZE,
2300 },
2301 .caam = {
2302 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2303 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2304 OP_ALG_AAI_HMAC_PRECOMP,
2305 .geniv = true,
2306 }
2307 },
2308 {
2309 .aead = {
2310 .base = {
2311 .cra_name = "authenc(hmac(sha256),cbc(des))",
2312 .cra_driver_name = "authenc-hmac-sha256-"
2313 "cbc-des-caam-qi",
2314 .cra_blocksize = DES_BLOCK_SIZE,
2315 },
2316 .setkey = aead_setkey,
2317 .setauthsize = aead_setauthsize,
2318 .encrypt = aead_encrypt,
2319 .decrypt = aead_decrypt,
2320 .ivsize = DES_BLOCK_SIZE,
2321 .maxauthsize = SHA256_DIGEST_SIZE,
2322 },
2323 .caam = {
2324 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2325 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2326 OP_ALG_AAI_HMAC_PRECOMP,
2327 },
2328 },
2329 {
2330 .aead = {
2331 .base = {
2332 .cra_name = "echainiv(authenc(hmac(sha256),"
2333 "cbc(des)))",
2334 .cra_driver_name = "echainiv-authenc-"
2335 "hmac-sha256-cbc-des-"
2336 "caam-qi",
2337 .cra_blocksize = DES_BLOCK_SIZE,
2338 },
2339 .setkey = aead_setkey,
2340 .setauthsize = aead_setauthsize,
2341 .encrypt = aead_encrypt,
2342 .decrypt = aead_decrypt,
2343 .ivsize = DES_BLOCK_SIZE,
2344 .maxauthsize = SHA256_DIGEST_SIZE,
2345 },
2346 .caam = {
2347 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2348 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2349 OP_ALG_AAI_HMAC_PRECOMP,
2350 .geniv = true,
2351 },
2352 },
2353 {
2354 .aead = {
2355 .base = {
2356 .cra_name = "authenc(hmac(sha384),cbc(des))",
2357 .cra_driver_name = "authenc-hmac-sha384-"
2358 "cbc-des-caam-qi",
2359 .cra_blocksize = DES_BLOCK_SIZE,
2360 },
2361 .setkey = aead_setkey,
2362 .setauthsize = aead_setauthsize,
2363 .encrypt = aead_encrypt,
2364 .decrypt = aead_decrypt,
2365 .ivsize = DES_BLOCK_SIZE,
2366 .maxauthsize = SHA384_DIGEST_SIZE,
2367 },
2368 .caam = {
2369 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2370 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2371 OP_ALG_AAI_HMAC_PRECOMP,
2372 },
2373 },
2374 {
2375 .aead = {
2376 .base = {
2377 .cra_name = "echainiv(authenc(hmac(sha384),"
2378 "cbc(des)))",
2379 .cra_driver_name = "echainiv-authenc-"
2380 "hmac-sha384-cbc-des-"
2381 "caam-qi",
2382 .cra_blocksize = DES_BLOCK_SIZE,
2383 },
2384 .setkey = aead_setkey,
2385 .setauthsize = aead_setauthsize,
2386 .encrypt = aead_encrypt,
2387 .decrypt = aead_decrypt,
2388 .ivsize = DES_BLOCK_SIZE,
2389 .maxauthsize = SHA384_DIGEST_SIZE,
2390 },
2391 .caam = {
2392 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2393 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2394 OP_ALG_AAI_HMAC_PRECOMP,
2395 .geniv = true,
2396 }
2397 },
2398 {
2399 .aead = {
2400 .base = {
2401 .cra_name = "authenc(hmac(sha512),cbc(des))",
2402 .cra_driver_name = "authenc-hmac-sha512-"
2403 "cbc-des-caam-qi",
2404 .cra_blocksize = DES_BLOCK_SIZE,
2405 },
2406 .setkey = aead_setkey,
2407 .setauthsize = aead_setauthsize,
2408 .encrypt = aead_encrypt,
2409 .decrypt = aead_decrypt,
2410 .ivsize = DES_BLOCK_SIZE,
2411 .maxauthsize = SHA512_DIGEST_SIZE,
2412 },
2413 .caam = {
2414 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2415 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2416 OP_ALG_AAI_HMAC_PRECOMP,
2417 }
2418 },
2419 {
2420 .aead = {
2421 .base = {
2422 .cra_name = "echainiv(authenc(hmac(sha512),"
2423 "cbc(des)))",
2424 .cra_driver_name = "echainiv-authenc-"
2425 "hmac-sha512-cbc-des-"
2426 "caam-qi",
2427 .cra_blocksize = DES_BLOCK_SIZE,
2428 },
2429 .setkey = aead_setkey,
2430 .setauthsize = aead_setauthsize,
2431 .encrypt = aead_encrypt,
2432 .decrypt = aead_decrypt,
2433 .ivsize = DES_BLOCK_SIZE,
2434 .maxauthsize = SHA512_DIGEST_SIZE,
2435 },
2436 .caam = {
2437 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2438 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2439 OP_ALG_AAI_HMAC_PRECOMP,
2440 .geniv = true,
2441 }
2442 },
2443 };
2444
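/*
 * Each template above surfaces as a regular kernel crypto algorithm once
 * registered below. A minimal usage sketch (illustrative only, not part of
 * this driver) for reaching one of these AEADs through the generic API:
 *
 *	struct crypto_aead *tfm;
 *	int err;
 *
 *	tfm = crypto_alloc_aead("authenc(hmac(sha256),cbc(des3_ede))", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	err = crypto_aead_setkey(tfm, keyblob, keyblob_len);
 *
 * where keyblob is an authenc-formatted blob packing the HMAC and cipher
 * keys (see crypto_authenc_extractkeys()). With CAAM_CRA_PRIORITY at 2000,
 * the crypto core prefers these "-caam-qi" variants over lower-priority
 * software implementations whenever the hardware is available.
 */
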
2445 static int caam_init_common(struct caam_ctx *ctx, struct caam_alg_entry *caam,
2446 bool uses_dkp)
2447 {
2448 struct caam_drv_private *priv;
2449 struct device *dev;
2450
2451 /*
2452  * distribute tfms across job rings to ensure in-order
2453  * crypto request processing per tfm
2454  */
2455 ctx->jrdev = caam_jr_alloc();
2456 if (IS_ERR(ctx->jrdev)) {
2457 pr_err("Job Ring Device allocation for transform failed\n");
2458 return PTR_ERR(ctx->jrdev);
2459 }
2460
2461 dev = ctx->jrdev->parent;
2462 priv = dev_get_drvdata(dev);
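/*
 * On Era 6+ parts, split keys are generated in place by the DKP
 * (Derived Key Protocol) descriptor command, which writes its result
 * back into ctx->key; the buffer must therefore be mapped
 * DMA_BIDIRECTIONAL. Older parts only ever read the key, so
 * DMA_TO_DEVICE suffices.
 */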
2463 if (priv->era >= 6 && uses_dkp)
2464 ctx->dir = DMA_BIDIRECTIONAL;
2465 else
2466 ctx->dir = DMA_TO_DEVICE;
2467
2468 ctx->key_dma = dma_map_single(dev, ctx->key, sizeof(ctx->key),
2469 ctx->dir);
2470 if (dma_mapping_error(dev, ctx->key_dma)) {
2471 dev_err(dev, "unable to map key\n");
2472 caam_jr_free(ctx->jrdev);
2473 return -ENOMEM;
2474 }
2475
2476 /* copy descriptor header template value */
2477 ctx->cdata.algtype = OP_TYPE_CLASS1_ALG | caam->class1_alg_type;
2478 ctx->adata.algtype = OP_TYPE_CLASS2_ALG | caam->class2_alg_type;
2479
2480 ctx->qidev = dev;
2481
2482 spin_lock_init(&ctx->lock);
2483 ctx->drv_ctx[ENCRYPT] = NULL;
2484 ctx->drv_ctx[DECRYPT] = NULL;
2485
2486 return 0;
2487 }
2488
2489 static int caam_cra_init(struct crypto_skcipher *tfm)
2490 {
2491 struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
2492 struct caam_skcipher_alg *caam_alg =
2493 container_of(alg, typeof(*caam_alg), skcipher);
2494 struct caam_ctx *ctx = crypto_skcipher_ctx(tfm);
2495 u32 alg_aai = caam_alg->caam.class1_alg_type & OP_ALG_AAI_MASK;
2496 int ret = 0;
2497
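/*
 * XTS needs a software fallback for requests the engine cannot
 * handle (e.g. key sizes, such as 2 x 192 bits, that the hardware
 * does not support). Passing CRYPTO_ALG_NEED_FALLBACK as the mask
 * guarantees the allocated fallback is itself fallback-free, so
 * this driver cannot end up selecting itself.
 */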
2498 if (alg_aai == OP_ALG_AAI_XTS) {
2499 const char *tfm_name = crypto_tfm_alg_name(&tfm->base);
2500 struct crypto_skcipher *fallback;
2501
2502 fallback = crypto_alloc_skcipher(tfm_name, 0,
2503 CRYPTO_ALG_NEED_FALLBACK);
2504 if (IS_ERR(fallback)) {
2505 pr_err("Failed to allocate %s fallback: %ld\n",
2506 tfm_name, PTR_ERR(fallback));
2507 return PTR_ERR(fallback);
2508 }
2509
2510 ctx->fallback = fallback;
2511 crypto_skcipher_set_reqsize(tfm, sizeof(struct caam_skcipher_req_ctx) +
2512 crypto_skcipher_reqsize(fallback));
2513 }
2514
2515 ret = caam_init_common(ctx, &caam_alg->caam, false);
2516 if (ret && ctx->fallback)
2517 crypto_free_skcipher(ctx->fallback);
2518
2519 return ret;
2520 }
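/*
 * For reference, the per-request delegation enabled by the reqsize
 * bump above typically follows this pattern (a simplified sketch of
 * the fallback path in the request handlers earlier in this file):
 *
 *	struct caam_skcipher_req_ctx *rctx = skcipher_request_ctx(req);
 *
 *	skcipher_request_set_tfm(&rctx->fallback_req, ctx->fallback);
 *	skcipher_request_set_callback(&rctx->fallback_req, req->base.flags,
 *				      req->base.complete, req->base.data);
 *	skcipher_request_set_crypt(&rctx->fallback_req, req->src, req->dst,
 *				   req->cryptlen, req->iv);
 *	return crypto_skcipher_encrypt(&rctx->fallback_req);
 */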
2521
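/*
 * AEADs use DKP-generated split keys unless the template sets .nodkp
 * (single-key algorithms such as the GCM family); this flag tells
 * caam_init_common() whether the device will write the key buffer
 * back, and hence which DMA direction to map it with.
 */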
2522 static int caam_aead_init(struct crypto_aead *tfm)
2523 {
2524 struct aead_alg *alg = crypto_aead_alg(tfm);
2525 struct caam_aead_alg *caam_alg = container_of(alg, typeof(*caam_alg),
2526 aead);
2527 struct caam_ctx *ctx = crypto_aead_ctx(tfm);
2528
2529 return caam_init_common(ctx, &caam_alg->caam, !caam_alg->caam.nodkp);
2530 }
2531
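/*
 * Common teardown for both transform types: the QI driver contexts
 * (which carry their own DMA mappings of the shared descriptors) are
 * released before the key buffer is unmapped and the job ring
 * reference is dropped.
 */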
2532 static void caam_exit_common(struct caam_ctx *ctx)
2533 {
2534 caam_drv_ctx_rel(ctx->drv_ctx[ENCRYPT]);
2535 caam_drv_ctx_rel(ctx->drv_ctx[DECRYPT]);
2536
2537 dma_unmap_single(ctx->jrdev->parent, ctx->key_dma, sizeof(ctx->key),
2538 ctx->dir);
2539
2540 caam_jr_free(ctx->jrdev);
2541 }
2542
2543 static void caam_cra_exit(struct crypto_skcipher *tfm)
2544 {
2545 struct caam_ctx *ctx = crypto_skcipher_ctx(tfm);
2546
2547 if (ctx->fallback)
2548 crypto_free_skcipher(ctx->fallback);
2549 caam_exit_common(ctx);
2550 }
2551
2552 static void caam_aead_exit(struct crypto_aead *tfm)
2553 {
2554 caam_exit_common(crypto_aead_ctx(tfm));
2555 }
2556
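/*
 * Only transforms that actually passed crypto_register_*() are
 * unregistered here; the per-template 'registered' flag exists
 * because init may skip or fail individual algorithms without
 * unwinding the ones already registered.
 */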
2557 void caam_qi_algapi_exit(void)
2558 {
2559 int i;
2560
2561 for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) {
2562 struct caam_aead_alg *t_alg = driver_aeads + i;
2563
2564 if (t_alg->registered)
2565 crypto_unregister_aead(&t_alg->aead);
2566 }
2567
2568 for (i = 0; i < ARRAY_SIZE(driver_algs); i++) {
2569 struct caam_skcipher_alg *t_alg = driver_algs + i;
2570
2571 if (t_alg->registered)
2572 crypto_unregister_skcipher(&t_alg->skcipher);
2573 }
2574 }
2575
2576 static void caam_skcipher_alg_init(struct caam_skcipher_alg *t_alg)
2577 {
2578 struct skcipher_alg *alg = &t_alg->skcipher;
2579
2580 alg->base.cra_module = THIS_MODULE;
2581 alg->base.cra_priority = CAAM_CRA_PRIORITY;
2582 alg->base.cra_ctxsize = sizeof(struct caam_ctx);
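/*
 * Using |= rather than = preserves any flags already present in the
 * template (notably CRYPTO_ALG_NEED_FALLBACK on the xts(aes) entry).
 * The AEAD templates carry no extra flags, so the aead variant below
 * can assign outright.
 */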
2583 alg->base.cra_flags |= (CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY |
2584 CRYPTO_ALG_KERN_DRIVER_ONLY);
2585
2586 alg->init = caam_cra_init;
2587 alg->exit = caam_cra_exit;
2588 }
2589
2590 static void caam_aead_alg_init(struct caam_aead_alg *t_alg)
2591 {
2592 struct aead_alg *alg = &t_alg->aead;
2593
2594 alg->base.cra_module = THIS_MODULE;
2595 alg->base.cra_priority = CAAM_CRA_PRIORITY;
2596 alg->base.cra_ctxsize = sizeof(struct caam_ctx);
2597 alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY |
2598 CRYPTO_ALG_KERN_DRIVER_ONLY;
2599
2600 alg->init = caam_aead_init;
2601 alg->exit = caam_aead_exit;
2602 }
2603
2604 int caam_qi_algapi_init(struct device *ctrldev)
2605 {
2606 struct caam_drv_private *priv = dev_get_drvdata(ctrldev);
2607 int i = 0, err = 0;
2608 u32 aes_vid, aes_inst, des_inst, md_vid, md_inst;
2609 unsigned int md_limit = SHA512_DIGEST_SIZE;
2610 bool registered = false;
2611
2612 /* Make sure this runs only on (DPAA 1.x) QI */
2613 if (!priv->qi_present || caam_dpaa2)
2614 return 0;
2615
2616 /*
2617  * Register crypto algorithms the device supports.
2618  * First, detect presence and attributes of DES, AES, and MD blocks.
2619  */
2620 if (priv->era < 10) {
2621 u32 cha_vid, cha_inst;
2622
2623 cha_vid = rd_reg32(&priv->ctrl->perfmon.cha_id_ls);
2624 aes_vid = cha_vid & CHA_ID_LS_AES_MASK;
2625 md_vid = (cha_vid & CHA_ID_LS_MD_MASK) >> CHA_ID_LS_MD_SHIFT;
2626
2627 cha_inst = rd_reg32(&priv->ctrl->perfmon.cha_num_ls);
2628 des_inst = (cha_inst & CHA_ID_LS_DES_MASK) >>
2629 CHA_ID_LS_DES_SHIFT;
2630 aes_inst = cha_inst & CHA_ID_LS_AES_MASK;
2631 md_inst = (cha_inst & CHA_ID_LS_MD_MASK) >> CHA_ID_LS_MD_SHIFT;
2632 } else {
2633 u32 aesa, mdha;
2634
2635 aesa = rd_reg32(&priv->ctrl->vreg.aesa);
2636 mdha = rd_reg32(&priv->ctrl->vreg.mdha);
2637
2638 aes_vid = (aesa & CHA_VER_VID_MASK) >> CHA_VER_VID_SHIFT;
2639 md_vid = (mdha & CHA_VER_VID_MASK) >> CHA_VER_VID_SHIFT;
2640
2641 des_inst = rd_reg32(&priv->ctrl->vreg.desa) & CHA_VER_NUM_MASK;
2642 aes_inst = aesa & CHA_VER_NUM_MASK;
2643 md_inst = mdha & CHA_VER_NUM_MASK;
2644 }
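/*
 * Either path yields the same facts per accelerator block: a version
 * ID (used below to detect low-power variants with reduced feature
 * sets) and an instantiation count (zero meaning the block is absent).
 * Era 10+ parts expose per-CHA version registers; older parts pack the
 * same information into the shared perfmon cha_id/cha_num words.
 */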
2645
2646 /* If MD is present, limit digest size based on LP256 */
2647 if (md_inst && md_vid == CHA_VER_VID_MD_LP256)
2648 md_limit = SHA256_DIGEST_SIZE;
2649
2650 for (i = 0; i < ARRAY_SIZE(driver_algs); i++) {
2651 struct caam_skcipher_alg *t_alg = driver_algs + i;
2652 u32 alg_sel = t_alg->caam.class1_alg_type & OP_ALG_ALGSEL_MASK;
2653
2654 /* Skip DES algorithms if not supported by device */
2655 if (!des_inst &&
2656 ((alg_sel == OP_ALG_ALGSEL_3DES) ||
2657 (alg_sel == OP_ALG_ALGSEL_DES)))
2658 continue;
2659
2660 /* Skip AES algorithms if not supported by device */
2661 if (!aes_inst && (alg_sel == OP_ALG_ALGSEL_AES))
2662 continue;
2663
2664 caam_skcipher_alg_init(t_alg);
2665
2666 err = crypto_register_skcipher(&t_alg->skcipher);
2667 if (err) {
2668 dev_warn(ctrldev, "%s alg registration failed\n",
2669 t_alg->skcipher.base.cra_driver_name);
2670 continue;
2671 }
2672
2673 t_alg->registered = true;
2674 registered = true;
2675 }
2676
2677 for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) {
2678 struct caam_aead_alg *t_alg = driver_aeads + i;
2679 u32 c1_alg_sel = t_alg->caam.class1_alg_type &
2680 OP_ALG_ALGSEL_MASK;
2681 u32 c2_alg_sel = t_alg->caam.class2_alg_type &
2682 OP_ALG_ALGSEL_MASK;
2683 u32 alg_aai = t_alg->caam.class1_alg_type & OP_ALG_AAI_MASK;
2684
2685 /* Skip DES algorithms if not supported by device */
2686 if (!des_inst &&
2687 ((c1_alg_sel == OP_ALG_ALGSEL_3DES) ||
2688 (c1_alg_sel == OP_ALG_ALGSEL_DES)))
2689 continue;
2690
2691 /* Skip AES algorithms if not supported by device */
2692 if (!aes_inst && (c1_alg_sel == OP_ALG_ALGSEL_AES))
2693 continue;
2694
2695 /*
2696  * Check support for AES algorithms not available
2697  * on LP devices.
2698  */
2699 if (aes_vid == CHA_VER_VID_AES_LP && alg_aai == OP_ALG_AAI_GCM)
2700 continue;
2701
2702 /*
2703  * Skip algorithms requiring message digests
2704  * if MD or MD size is not supported by device.
2705  */
2706 if (c2_alg_sel &&
2707 (!md_inst || (t_alg->aead.maxauthsize > md_limit)))
2708 continue;
2709
2710 caam_aead_alg_init(t_alg);
2711
2712 err = crypto_register_aead(&t_alg->aead);
2713 if (err) {
2714 pr_warn("%s alg registration failed\n",
2715 t_alg->aead.base.cra_driver_name);
2716 continue;
2717 }
2718
2719 t_alg->registered = true;
2720 registered = true;
2721 }
2722
2723 if (registered)
2724 dev_info(ctrldev, "algorithms registered in /proc/crypto\n");
2725
2726 return err;
2727 }
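
/*
 * caam_qi_algapi_init() is expected to run once from the CAAM
 * controller probe path once the QI backend is up; returning 0 when
 * QI is absent (or on DPAA2, which is served by a separate driver)
 * treats "nothing to register" as success. caam_qi_algapi_exit()
 * reverses the registrations on teardown.
 */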