0001
0002 #include <linux/kernel.h>
0003 #include <linux/printk.h>
0004 #include <linux/crypto.h>
0005 #include <linux/rtnetlink.h>
0006
0007 #include <crypto/aead.h>
0008 #include <crypto/authenc.h>
0009 #include <crypto/des.h>
0010 #include <crypto/internal/aead.h>
0011 #include <crypto/scatterwalk.h>
0012 #include <crypto/gcm.h>
0013
0014 #include "nitrox_dev.h"
0015 #include "nitrox_common.h"
0016 #include "nitrox_req.h"
0017
0018 #define GCM_AES_SALT_SIZE 4
0019
/*
 * General-purpose header parameter word 3: packs the IV offset and the
 * auth-data offset into one 16-bit value.  The bitfield order flips with
 * CPU bit-endianness so that the packed 'param' value has the same layout
 * regardless of host endianness.
 */
union gph_p3 {
	struct {
#ifdef __BIG_ENDIAN_BITFIELD
		u16 iv_offset : 8;
		u16 auth_offset : 8;
#else
		u16 auth_offset : 8;
		u16 iv_offset : 8;
#endif
	};
	u16 param;	/* packed representation written into the GPH */
};
0032
0033 static int nitrox_aes_gcm_setkey(struct crypto_aead *aead, const u8 *key,
0034 unsigned int keylen)
0035 {
0036 int aes_keylen;
0037 struct nitrox_crypto_ctx *nctx = crypto_aead_ctx(aead);
0038 struct flexi_crypto_context *fctx;
0039 union fc_ctx_flags flags;
0040
0041 aes_keylen = flexi_aes_keylen(keylen);
0042 if (aes_keylen < 0)
0043 return -EINVAL;
0044
0045
0046 fctx = nctx->u.fctx;
0047 flags.fu = be64_to_cpu(fctx->flags.f);
0048 flags.w0.aes_keylen = aes_keylen;
0049 fctx->flags.f = cpu_to_be64(flags.fu);
0050
0051
0052 memset(&fctx->crypto, 0, sizeof(fctx->crypto));
0053 memcpy(fctx->crypto.u.key, key, keylen);
0054
0055 return 0;
0056 }
0057
0058 static int nitrox_aead_setauthsize(struct crypto_aead *aead,
0059 unsigned int authsize)
0060 {
0061 struct nitrox_crypto_ctx *nctx = crypto_aead_ctx(aead);
0062 struct flexi_crypto_context *fctx = nctx->u.fctx;
0063 union fc_ctx_flags flags;
0064
0065 flags.fu = be64_to_cpu(fctx->flags.f);
0066 flags.w0.mac_len = authsize;
0067 fctx->flags.f = cpu_to_be64(flags.fu);
0068
0069 aead->authsize = authsize;
0070
0071 return 0;
0072 }
0073
0074 static int nitrox_aes_gcm_setauthsize(struct crypto_aead *aead,
0075 unsigned int authsize)
0076 {
0077 switch (authsize) {
0078 case 4:
0079 case 8:
0080 case 12:
0081 case 13:
0082 case 14:
0083 case 15:
0084 case 16:
0085 break;
0086 default:
0087 return -EINVAL;
0088 }
0089
0090 return nitrox_aead_setauthsize(aead, authsize);
0091 }
0092
/*
 * Build the source side of a SE request: allocate the request buffer,
 * copy the IV in front of the payload and wire up the scatterlist.
 *
 * @nkreq:  request being prepared
 * @src:    caller's source scatterlist
 * @iv:     IV bytes to prepend
 * @ivsize: length of @iv in bytes
 * @buflen: total payload length covered by @src
 *
 * Returns 0 on success or a negative errno (bad SG length or -ENOMEM).
 */
static int alloc_src_sglist(struct nitrox_kcrypt_request *nkreq,
			    struct scatterlist *src, char *iv, int ivsize,
			    int buflen)
{
	int nents = sg_nents_for_len(src, buflen);
	int ret;

	if (nents < 0)
		return nents;

	/* one extra entry for the IV */
	nents += 1;

	ret = alloc_src_req_buf(nkreq, nents, ivsize);
	if (ret)
		return ret;

	nitrox_creq_copy_iv(nkreq->src, iv, ivsize);
	nitrox_creq_set_src_sg(nkreq, nents, ivsize, src, buflen);

	return 0;
}
0115
/*
 * Build the destination side of a SE request: allocate the request
 * buffer and wire up the ORH, completion and output scatterlist entries.
 *
 * @nkreq:  request being prepared
 * @dst:    caller's destination scatterlist
 * @ivsize: IV length (offsets the output data)
 * @buflen: total output length covered by @dst
 *
 * Returns 0 on success or a negative errno.
 */
static int alloc_dst_sglist(struct nitrox_kcrypt_request *nkreq,
			    struct scatterlist *dst, int ivsize, int buflen)
{
	int nents = sg_nents_for_len(dst, buflen);
	int ret;

	if (nents < 0)
		return nents;

	/* extra entries for ORH, completion and IV — matches the
	 * set_orh/set_comp/set_dst_sg calls below */
	nents += 3;

	ret = alloc_dst_req_buf(nkreq, nents);
	if (ret)
		return ret;

	nitrox_creq_set_orh(nkreq);
	nitrox_creq_set_comp(nkreq);
	nitrox_creq_set_dst_sg(nkreq, nents, ivsize, dst, buflen);

	return 0;
}
0140
/* Release the source request buffer allocated by alloc_src_sglist(). */
static void free_src_sglist(struct nitrox_kcrypt_request *nkreq)
{
	kfree(nkreq->src);
}
0145
/* Release the destination request buffer allocated by alloc_dst_sglist(). */
static void free_dst_sglist(struct nitrox_kcrypt_request *nkreq)
{
	kfree(nkreq->dst);
}
0150
/*
 * Fill in the SE crypto request (opcode, GP header parameters, context
 * handle) from the per-request context and allocate both SG lists.
 *
 * Returns 0 on success or a negative errno; on failure nothing is left
 * allocated (the src list is freed if the dst allocation fails).
 */
static int nitrox_set_creq(struct nitrox_aead_rctx *rctx)
{
	struct se_crypto_request *creq = &rctx->nkreq.creq;
	union gph_p3 param3;
	int ret;

	creq->flags = rctx->flags;
	creq->gfp = (rctx->flags & CRYPTO_TFM_REQ_MAY_SLEEP) ? GFP_KERNEL :
							       GFP_ATOMIC;

	creq->ctrl.value = 0;
	creq->opcode = FLEXI_CRYPTO_ENCRYPT_HMAC;
	creq->ctrl.s.arg = rctx->ctrl_arg;	/* ENCRYPT or DECRYPT */

	/* GP header: param0 = crypt length, param1 = crypt + AAD length,
	 * param2 = IV + AAD length, param3 = packed IV/auth offsets.
	 * All values are big-endian for the device. */
	creq->gph.param0 = cpu_to_be16(rctx->cryptlen);
	creq->gph.param1 = cpu_to_be16(rctx->cryptlen + rctx->assoclen);
	creq->gph.param2 = cpu_to_be16(rctx->ivsize + rctx->assoclen);
	param3.iv_offset = 0;
	param3.auth_offset = rctx->ivsize;
	creq->gph.param3 = cpu_to_be16(param3.param);

	creq->ctx_handle = rctx->ctx_handle;
	creq->ctrl.s.ctxl = sizeof(struct flexi_crypto_context);

	ret = alloc_src_sglist(&rctx->nkreq, rctx->src, rctx->iv, rctx->ivsize,
			       rctx->srclen);
	if (ret)
		return ret;

	ret = alloc_dst_sglist(&rctx->nkreq, rctx->dst, rctx->ivsize,
			       rctx->dstlen);
	if (ret) {
		/* undo the src allocation so the caller can just bail */
		free_src_sglist(&rctx->nkreq);
		return ret;
	}

	return 0;
}
0189
0190 static void nitrox_aead_callback(void *arg, int err)
0191 {
0192 struct aead_request *areq = arg;
0193 struct nitrox_aead_rctx *rctx = aead_request_ctx(areq);
0194
0195 free_src_sglist(&rctx->nkreq);
0196 free_dst_sglist(&rctx->nkreq);
0197 if (err) {
0198 pr_err_ratelimited("request failed status 0x%0x\n", err);
0199 err = -EINVAL;
0200 }
0201
0202 areq->base.complete(&areq->base, err);
0203 }
0204
/* The SE request path supports at most 512 bytes of associated data. */
static inline bool nitrox_aes_gcm_assoclen_supported(unsigned int assoclen)
{
	return assoclen <= 512;
}
0212
/*
 * gcm(aes) encrypt entry point: stash request parameters in the
 * per-request context and submit the SE request asynchronously.
 */
static int nitrox_aes_gcm_enc(struct aead_request *areq)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(areq);
	struct nitrox_crypto_ctx *nctx = crypto_aead_ctx(aead);
	struct nitrox_aead_rctx *rctx = aead_request_ctx(areq);
	struct se_crypto_request *creq = &rctx->nkreq.creq;
	struct flexi_crypto_context *fctx = nctx->u.fctx;
	int ret;

	if (!nitrox_aes_gcm_assoclen_supported(areq->assoclen))
		return -EINVAL;

	/* first 4 IV bytes act as the salt and live in the HW context */
	memcpy(fctx->crypto.iv, areq->iv, GCM_AES_SALT_SIZE);

	rctx->cryptlen = areq->cryptlen;
	rctx->assoclen = areq->assoclen;
	/* input: AAD + plaintext; output additionally carries the tag */
	rctx->srclen = areq->assoclen + areq->cryptlen;
	rctx->dstlen = rctx->srclen + aead->authsize;
	/* remaining IV bytes go in-line via DPTR */
	rctx->iv = &areq->iv[GCM_AES_SALT_SIZE];
	rctx->ivsize = GCM_AES_IV_SIZE - GCM_AES_SALT_SIZE;
	rctx->flags = areq->base.flags;
	rctx->ctx_handle = nctx->u.ctx_handle;
	rctx->src = areq->src;
	rctx->dst = areq->dst;
	rctx->ctrl_arg = ENCRYPT;
	ret = nitrox_set_creq(rctx);
	if (ret)
		return ret;

	/* send the crypto request; completion runs nitrox_aead_callback */
	return nitrox_process_se_request(nctx->ndev, creq, nitrox_aead_callback,
					 areq);
}
0246
/*
 * gcm(aes) decrypt entry point: mirror of nitrox_aes_gcm_enc() with the
 * tag accounted on the input side instead of the output side.
 */
static int nitrox_aes_gcm_dec(struct aead_request *areq)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(areq);
	struct nitrox_crypto_ctx *nctx = crypto_aead_ctx(aead);
	struct nitrox_aead_rctx *rctx = aead_request_ctx(areq);
	struct se_crypto_request *creq = &rctx->nkreq.creq;
	struct flexi_crypto_context *fctx = nctx->u.fctx;
	int ret;

	if (!nitrox_aes_gcm_assoclen_supported(areq->assoclen))
		return -EINVAL;

	/* first 4 IV bytes act as the salt and live in the HW context */
	memcpy(fctx->crypto.iv, areq->iv, GCM_AES_SALT_SIZE);

	/* on decrypt, cryptlen includes the tag; strip it for the device */
	rctx->cryptlen = areq->cryptlen - aead->authsize;
	rctx->assoclen = areq->assoclen;
	/* input: AAD + ciphertext + tag; output drops the tag */
	rctx->srclen = areq->cryptlen + areq->assoclen;
	rctx->dstlen = rctx->srclen - aead->authsize;
	rctx->iv = &areq->iv[GCM_AES_SALT_SIZE];
	rctx->ivsize = GCM_AES_IV_SIZE - GCM_AES_SALT_SIZE;
	rctx->flags = areq->base.flags;
	rctx->ctx_handle = nctx->u.ctx_handle;
	rctx->src = areq->src;
	rctx->dst = areq->dst;
	rctx->ctrl_arg = DECRYPT;
	ret = nitrox_set_creq(rctx);
	if (ret)
		return ret;

	/* send the crypto request; completion runs nitrox_aead_callback */
	return nitrox_process_se_request(nctx->ndev, creq, nitrox_aead_callback,
					 areq);
}
0280
/*
 * Common aead tfm init: grab a device reference and allocate the
 * per-tfm hardware crypto context.  The flexi context sits right after
 * the context header in the allocation.
 */
static int nitrox_aead_init(struct crypto_aead *aead)
{
	struct nitrox_crypto_ctx *nctx = crypto_aead_ctx(aead);
	struct crypto_ctx_hdr *chdr;

	/* get the first device in the list */
	nctx->ndev = nitrox_get_first_device();
	if (!nctx->ndev)
		return -ENODEV;

	/* allocate nitrox crypto context */
	chdr = crypto_alloc_context(nctx->ndev);
	if (!chdr) {
		nitrox_put_device(nctx->ndev);
		return -ENOMEM;
	}
	nctx->chdr = chdr;
	/* context handle points past the header, at the flexi context */
	nctx->u.ctx_handle = (uintptr_t)((u8 *)chdr->vaddr +
					 sizeof(struct ctx_hdr));
	nctx->u.fctx->flags.f = 0;

	return 0;
}
0304
/*
 * Shared init for both GCM variants: do the generic aead init, then
 * program the flexi context flags for AES-GCM with the IV taken from
 * the data pointer.
 */
static int nitrox_gcm_common_init(struct crypto_aead *aead)
{
	int ret;
	struct nitrox_crypto_ctx *nctx = crypto_aead_ctx(aead);
	union fc_ctx_flags *flags;

	ret = nitrox_aead_init(aead);
	if (ret)
		return ret;

	flags = &nctx->u.fctx->flags;
	flags->w0.cipher_type = CIPHER_AES_GCM;
	flags->w0.hash_type = AUTH_NULL;	/* GCM provides its own MAC */
	flags->w0.iv_source = IV_FROM_DPTR;

	/* auth_input_type = 1: firmware-specific input selector —
	 * NOTE(review): exact semantics not visible here, confirm against
	 * the SE firmware spec */
	flags->w0.auth_input_type = 1;
	/* store the flags big-endian for the device */
	flags->f = cpu_to_be64(flags->fu);

	return 0;
}
0325
0326 static int nitrox_aes_gcm_init(struct crypto_aead *aead)
0327 {
0328 int ret;
0329
0330 ret = nitrox_gcm_common_init(aead);
0331 if (ret)
0332 return ret;
0333
0334 crypto_aead_set_reqsize(aead,
0335 sizeof(struct aead_request) +
0336 sizeof(struct nitrox_aead_rctx));
0337
0338 return 0;
0339 }
0340
/*
 * Common aead tfm teardown: scrub key material from the hardware
 * context, free it, and drop the device reference.
 */
static void nitrox_aead_exit(struct crypto_aead *aead)
{
	struct nitrox_crypto_ctx *nctx = crypto_aead_ctx(aead);

	/* free the nitrox crypto context */
	if (nctx->u.ctx_handle) {
		struct flexi_crypto_context *fctx = nctx->u.fctx;

		/* memzero_explicit so the scrub can't be optimized away */
		memzero_explicit(&fctx->crypto, sizeof(struct crypto_keys));
		memzero_explicit(&fctx->auth, sizeof(struct auth_keys));
		crypto_free_context((void *)nctx->chdr);
	}
	nitrox_put_device(nctx->ndev);

	nctx->u.ctx_handle = 0;
	nctx->ndev = NULL;
}
0358
0359 static int nitrox_rfc4106_setkey(struct crypto_aead *aead, const u8 *key,
0360 unsigned int keylen)
0361 {
0362 struct nitrox_crypto_ctx *nctx = crypto_aead_ctx(aead);
0363 struct flexi_crypto_context *fctx = nctx->u.fctx;
0364 int ret;
0365
0366 if (keylen < GCM_AES_SALT_SIZE)
0367 return -EINVAL;
0368
0369 keylen -= GCM_AES_SALT_SIZE;
0370 ret = nitrox_aes_gcm_setkey(aead, key, keylen);
0371 if (ret)
0372 return ret;
0373
0374 memcpy(fctx->crypto.iv, key + keylen, GCM_AES_SALT_SIZE);
0375 return 0;
0376 }
0377
0378 static int nitrox_rfc4106_setauthsize(struct crypto_aead *aead,
0379 unsigned int authsize)
0380 {
0381 switch (authsize) {
0382 case 8:
0383 case 12:
0384 case 16:
0385 break;
0386 default:
0387 return -EINVAL;
0388 }
0389
0390 return nitrox_aead_setauthsize(aead, authsize);
0391 }
0392
/*
 * rfc4106 prepends an 8-byte IV to the AAD inside the associated data
 * region.  Build local scatterlists that expose only the real AAD
 * (copied into rctx->assoc) chained to the payload past the assoclen
 * boundary, and point the base request context at them.
 *
 * Only assoclen of 16 (8-byte AAD) or 20 (12-byte AAD) is accepted.
 */
static int nitrox_rfc4106_set_aead_rctx_sglist(struct aead_request *areq)
{
	struct nitrox_rfc4106_rctx *rctx = aead_request_ctx(areq);
	struct nitrox_aead_rctx *aead_rctx = &rctx->base;
	unsigned int assoclen = areq->assoclen - GCM_RFC4106_IV_SIZE;
	struct scatterlist *sg;

	if (areq->assoclen != 16 && areq->assoclen != 20)
		return -EINVAL;

	/* pull the AAD (minus IV) out of the source into a flat buffer */
	scatterwalk_map_and_copy(rctx->assoc, areq->src, 0, assoclen, 0);
	sg_init_table(rctx->src, 3);
	sg_set_buf(rctx->src, rctx->assoc, assoclen);
	/* fast-forward past IV+AAD and chain the payload in */
	sg = scatterwalk_ffwd(rctx->src + 1, areq->src, areq->assoclen);
	if (sg != rctx->src + 1)
		sg_chain(rctx->src, 2, sg);

	if (areq->src != areq->dst) {
		/* out-of-place: build the same shape for the destination */
		sg_init_table(rctx->dst, 3);
		sg_set_buf(rctx->dst, rctx->assoc, assoclen);
		sg = scatterwalk_ffwd(rctx->dst + 1, areq->dst, areq->assoclen);
		if (sg != rctx->dst + 1)
			sg_chain(rctx->dst, 2, sg);
	}

	aead_rctx->src = rctx->src;
	aead_rctx->dst = (areq->src == areq->dst) ? rctx->src : rctx->dst;

	return 0;
}
0423
0424 static void nitrox_rfc4106_callback(void *arg, int err)
0425 {
0426 struct aead_request *areq = arg;
0427 struct nitrox_rfc4106_rctx *rctx = aead_request_ctx(areq);
0428 struct nitrox_kcrypt_request *nkreq = &rctx->base.nkreq;
0429
0430 free_src_sglist(nkreq);
0431 free_dst_sglist(nkreq);
0432 if (err) {
0433 pr_err_ratelimited("request failed status 0x%0x\n", err);
0434 err = -EINVAL;
0435 }
0436
0437 areq->base.complete(&areq->base, err);
0438 }
0439
/*
 * rfc4106(gcm(aes)) encrypt entry point: the 8-byte IV is carved out of
 * the associated data, local SG lists are built, and the SE request is
 * submitted asynchronously.
 */
static int nitrox_rfc4106_enc(struct aead_request *areq)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(areq);
	struct nitrox_crypto_ctx *nctx = crypto_aead_ctx(aead);
	struct nitrox_rfc4106_rctx *rctx = aead_request_ctx(areq);
	struct nitrox_aead_rctx *aead_rctx = &rctx->base;
	struct se_crypto_request *creq = &aead_rctx->nkreq.creq;
	int ret;

	aead_rctx->cryptlen = areq->cryptlen;
	/* AAD excludes the 8-byte IV embedded in assoclen */
	aead_rctx->assoclen = areq->assoclen - GCM_RFC4106_IV_SIZE;
	aead_rctx->srclen = aead_rctx->assoclen + aead_rctx->cryptlen;
	/* output additionally carries the tag */
	aead_rctx->dstlen = aead_rctx->srclen + aead->authsize;
	aead_rctx->iv = areq->iv;
	aead_rctx->ivsize = GCM_RFC4106_IV_SIZE;
	aead_rctx->flags = areq->base.flags;
	aead_rctx->ctx_handle = nctx->u.ctx_handle;
	aead_rctx->ctrl_arg = ENCRYPT;

	ret = nitrox_rfc4106_set_aead_rctx_sglist(areq);
	if (ret)
		return ret;

	ret = nitrox_set_creq(aead_rctx);
	if (ret)
		return ret;

	/* send the crypto request; completion runs nitrox_rfc4106_callback */
	return nitrox_process_se_request(nctx->ndev, creq,
					 nitrox_rfc4106_callback, areq);
}
0471
/*
 * rfc4106(gcm(aes)) decrypt entry point: mirror of nitrox_rfc4106_enc()
 * with the tag accounted on the input side instead of the output side.
 */
static int nitrox_rfc4106_dec(struct aead_request *areq)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(areq);
	struct nitrox_crypto_ctx *nctx = crypto_aead_ctx(aead);
	struct nitrox_rfc4106_rctx *rctx = aead_request_ctx(areq);
	struct nitrox_aead_rctx *aead_rctx = &rctx->base;
	struct se_crypto_request *creq = &aead_rctx->nkreq.creq;
	int ret;

	/* on decrypt, cryptlen includes the tag; strip it for the device */
	aead_rctx->cryptlen = areq->cryptlen - aead->authsize;
	/* AAD excludes the 8-byte IV embedded in assoclen */
	aead_rctx->assoclen = areq->assoclen - GCM_RFC4106_IV_SIZE;
	aead_rctx->srclen =
		areq->cryptlen - GCM_RFC4106_IV_SIZE + areq->assoclen;
	/* output drops the tag */
	aead_rctx->dstlen = aead_rctx->srclen - aead->authsize;
	aead_rctx->iv = areq->iv;
	aead_rctx->ivsize = GCM_RFC4106_IV_SIZE;
	aead_rctx->flags = areq->base.flags;
	aead_rctx->ctx_handle = nctx->u.ctx_handle;
	aead_rctx->ctrl_arg = DECRYPT;

	ret = nitrox_rfc4106_set_aead_rctx_sglist(areq);
	if (ret)
		return ret;

	ret = nitrox_set_creq(aead_rctx);
	if (ret)
		return ret;

	/* send the crypto request; completion runs nitrox_rfc4106_callback */
	return nitrox_process_se_request(nctx->ndev, creq,
					 nitrox_rfc4106_callback, areq);
}
0504
0505 static int nitrox_rfc4106_init(struct crypto_aead *aead)
0506 {
0507 int ret;
0508
0509 ret = nitrox_gcm_common_init(aead);
0510 if (ret)
0511 return ret;
0512
0513 crypto_aead_set_reqsize(aead, sizeof(struct aead_request) +
0514 sizeof(struct nitrox_rfc4106_rctx));
0515
0516 return 0;
0517 }
0518
/* AEAD algorithms exposed by this driver: plain gcm(aes) and rfc4106. */
static struct aead_alg nitrox_aeads[] = { {
	.base = {
		.cra_name = "gcm(aes)",
		.cra_driver_name = "n5_aes_gcm",
		.cra_priority = PRIO,
		.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY,
		.cra_blocksize = 1,	/* stream-like: no block alignment */
		.cra_ctxsize = sizeof(struct nitrox_crypto_ctx),
		.cra_alignmask = 0,
		.cra_module = THIS_MODULE,
	},
	.setkey = nitrox_aes_gcm_setkey,
	.setauthsize = nitrox_aes_gcm_setauthsize,
	.encrypt = nitrox_aes_gcm_enc,
	.decrypt = nitrox_aes_gcm_dec,
	.init = nitrox_aes_gcm_init,
	.exit = nitrox_aead_exit,
	.ivsize = GCM_AES_IV_SIZE,
	.maxauthsize = AES_BLOCK_SIZE,
}, {
	.base = {
		.cra_name = "rfc4106(gcm(aes))",
		.cra_driver_name = "n5_rfc4106",
		.cra_priority = PRIO,
		.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY,
		.cra_blocksize = 1,
		.cra_ctxsize = sizeof(struct nitrox_crypto_ctx),
		.cra_alignmask = 0,
		.cra_module = THIS_MODULE,
	},
	.setkey = nitrox_rfc4106_setkey,
	.setauthsize = nitrox_rfc4106_setauthsize,
	.encrypt = nitrox_rfc4106_enc,
	.decrypt = nitrox_rfc4106_dec,
	.init = nitrox_rfc4106_init,
	.exit = nitrox_aead_exit,
	.ivsize = GCM_RFC4106_IV_SIZE,
	.maxauthsize = AES_BLOCK_SIZE,
} };
0558
/* Register all AEAD algorithms with the crypto core. */
int nitrox_register_aeads(void)
{
	return crypto_register_aeads(nitrox_aeads, ARRAY_SIZE(nitrox_aeads));
}
0563
/* Unregister all AEAD algorithms from the crypto core. */
void nitrox_unregister_aeads(void)
{
	crypto_unregister_aeads(nitrox_aeads, ARRAY_SIZE(nitrox_aeads));
}