0001
0002 #include <linux/crypto.h>
0003 #include <linux/kernel.h>
0004 #include <linux/module.h>
0005 #include <linux/printk.h>
0006
0007 #include <crypto/aes.h>
0008 #include <crypto/skcipher.h>
0009 #include <crypto/scatterwalk.h>
0010 #include <crypto/ctr.h>
0011 #include <crypto/internal/des.h>
0012 #include <crypto/xts.h>
0013
0014 #include "nitrox_dev.h"
0015 #include "nitrox_common.h"
0016 #include "nitrox_req.h"
0017
/* Maps a crypto API algorithm name to the hardware flexi cipher id. */
struct nitrox_cipher {
	const char *name;		/* cra_name as registered with the crypto API */
	enum flexi_cipher value;	/* hardware cipher identifier */
};
0022
0023
0024
0025
/*
 * Name -> hardware cipher id lookup table, terminated by a
 * { NULL, CIPHER_INVALID } sentinel (relied on by flexi_cipher_type()).
 */
static const struct nitrox_cipher flexi_cipher_table[] = {
	{ "null", CIPHER_NULL },
	{ "cbc(des3_ede)", CIPHER_3DES_CBC },
	{ "ecb(des3_ede)", CIPHER_3DES_ECB },
	{ "cbc(aes)", CIPHER_AES_CBC },
	{ "ecb(aes)", CIPHER_AES_ECB },
	{ "cfb(aes)", CIPHER_AES_CFB },
	{ "rfc3686(ctr(aes))", CIPHER_AES_CTR },
	{ "xts(aes)", CIPHER_AES_XTS },
	{ "cts(cbc(aes))", CIPHER_AES_CBC_CTS },
	{ NULL, CIPHER_INVALID }
};
0038
0039 static enum flexi_cipher flexi_cipher_type(const char *name)
0040 {
0041 const struct nitrox_cipher *cipher = flexi_cipher_table;
0042
0043 while (cipher->name) {
0044 if (!strcmp(cipher->name, name))
0045 break;
0046 cipher++;
0047 }
0048 return cipher->value;
0049 }
0050
0051 static void free_src_sglist(struct skcipher_request *skreq)
0052 {
0053 struct nitrox_kcrypt_request *nkreq = skcipher_request_ctx(skreq);
0054
0055 kfree(nkreq->src);
0056 }
0057
0058 static void free_dst_sglist(struct skcipher_request *skreq)
0059 {
0060 struct nitrox_kcrypt_request *nkreq = skcipher_request_ctx(skreq);
0061
0062 kfree(nkreq->dst);
0063 }
0064
0065 static void nitrox_skcipher_callback(void *arg, int err)
0066 {
0067 struct skcipher_request *skreq = arg;
0068
0069 free_src_sglist(skreq);
0070 free_dst_sglist(skreq);
0071 if (err) {
0072 pr_err_ratelimited("request failed status 0x%0x\n", err);
0073 err = -EINVAL;
0074 }
0075
0076 skcipher_request_complete(skreq, err);
0077 }
0078
0079 static void nitrox_cbc_cipher_callback(void *arg, int err)
0080 {
0081 struct skcipher_request *skreq = arg;
0082 struct nitrox_kcrypt_request *nkreq = skcipher_request_ctx(skreq);
0083 struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(skreq);
0084 int ivsize = crypto_skcipher_ivsize(cipher);
0085 unsigned int start = skreq->cryptlen - ivsize;
0086
0087 if (err) {
0088 nitrox_skcipher_callback(arg, err);
0089 return;
0090 }
0091
0092 if (nkreq->creq.ctrl.s.arg == ENCRYPT) {
0093 scatterwalk_map_and_copy(skreq->iv, skreq->dst, start, ivsize,
0094 0);
0095 } else {
0096 if (skreq->src != skreq->dst) {
0097 scatterwalk_map_and_copy(skreq->iv, skreq->src, start,
0098 ivsize, 0);
0099 } else {
0100 memcpy(skreq->iv, nkreq->iv_out, ivsize);
0101 kfree(nkreq->iv_out);
0102 }
0103 }
0104
0105 nitrox_skcipher_callback(arg, err);
0106 }
0107
0108 static int nitrox_skcipher_init(struct crypto_skcipher *tfm)
0109 {
0110 struct nitrox_crypto_ctx *nctx = crypto_skcipher_ctx(tfm);
0111 struct crypto_ctx_hdr *chdr;
0112
0113
0114 nctx->ndev = nitrox_get_first_device();
0115 if (!nctx->ndev)
0116 return -ENODEV;
0117
0118
0119 chdr = crypto_alloc_context(nctx->ndev);
0120 if (!chdr) {
0121 nitrox_put_device(nctx->ndev);
0122 return -ENOMEM;
0123 }
0124
0125 nctx->callback = nitrox_skcipher_callback;
0126 nctx->chdr = chdr;
0127 nctx->u.ctx_handle = (uintptr_t)((u8 *)chdr->vaddr +
0128 sizeof(struct ctx_hdr));
0129 crypto_skcipher_set_reqsize(tfm, crypto_skcipher_reqsize(tfm) +
0130 sizeof(struct nitrox_kcrypt_request));
0131 return 0;
0132 }
0133
0134 static int nitrox_cbc_init(struct crypto_skcipher *tfm)
0135 {
0136 int err;
0137 struct nitrox_crypto_ctx *nctx = crypto_skcipher_ctx(tfm);
0138
0139 err = nitrox_skcipher_init(tfm);
0140 if (err)
0141 return err;
0142
0143 nctx->callback = nitrox_cbc_cipher_callback;
0144 return 0;
0145 }
0146
0147 static void nitrox_skcipher_exit(struct crypto_skcipher *tfm)
0148 {
0149 struct nitrox_crypto_ctx *nctx = crypto_skcipher_ctx(tfm);
0150
0151
0152 if (nctx->u.ctx_handle) {
0153 struct flexi_crypto_context *fctx = nctx->u.fctx;
0154
0155 memzero_explicit(&fctx->crypto, sizeof(struct crypto_keys));
0156 memzero_explicit(&fctx->auth, sizeof(struct auth_keys));
0157 crypto_free_context((void *)nctx->chdr);
0158 }
0159 nitrox_put_device(nctx->ndev);
0160
0161 nctx->u.ctx_handle = 0;
0162 nctx->ndev = NULL;
0163 }
0164
/*
 * Program cipher type, key-length encoding and key material into the
 * hardware flexi crypto context. @aes_keylen is the hardware key-length
 * encoding (callers pass 0 for non-AES ciphers, see nitrox_3des_setkey).
 * Returns 0 on success, -EINVAL for an unrecognized algorithm name.
 */
static inline int nitrox_skcipher_setkey(struct crypto_skcipher *cipher,
					 int aes_keylen, const u8 *key,
					 unsigned int keylen)
{
	struct crypto_tfm *tfm = crypto_skcipher_tfm(cipher);
	struct nitrox_crypto_ctx *nctx = crypto_tfm_ctx(tfm);
	struct flexi_crypto_context *fctx;
	union fc_ctx_flags *flags;
	enum flexi_cipher cipher_type;
	const char *name;

	/* hardware cipher id is derived from the algorithm's cra_name */
	name = crypto_tfm_alg_name(tfm);
	cipher_type = flexi_cipher_type(name);
	if (unlikely(cipher_type == CIPHER_INVALID)) {
		pr_err("unsupported cipher: %s\n", name);
		return -EINVAL;
	}

	/* fill crypto context flags */
	fctx = nctx->u.fctx;
	flags = &fctx->flags;
	flags->f = 0;
	flags->w0.cipher_type = cipher_type;
	flags->w0.aes_keylen = aes_keylen;
	flags->w0.iv_source = IV_FROM_DPTR;	/* IV is carried in the data pointer */
	/* flag word is stored big-endian; w0 is type-punned to u64 here */
	flags->f = cpu_to_be64(*(u64 *)&flags->w0);

	/* copy the key into the hardware context */
	memcpy(fctx->crypto.u.key, key, keylen);

	return 0;
}
0196
0197 static int nitrox_aes_setkey(struct crypto_skcipher *cipher, const u8 *key,
0198 unsigned int keylen)
0199 {
0200 int aes_keylen;
0201
0202 aes_keylen = flexi_aes_keylen(keylen);
0203 if (aes_keylen < 0)
0204 return -EINVAL;
0205 return nitrox_skcipher_setkey(cipher, aes_keylen, key, keylen);
0206 }
0207
/*
 * Allocate the input buffer and build the source SG list for the
 * hardware: the IV is placed in front of the caller's src scatterlist
 * (hence the extra nent).
 */
static int alloc_src_sglist(struct skcipher_request *skreq, int ivsize)
{
	struct nitrox_kcrypt_request *nkreq = skcipher_request_ctx(skreq);
	int nents = sg_nents(skreq->src) + 1;	/* +1 for the IV entry */
	int ret;

	/* Allocate buffer */
	ret = alloc_src_req_buf(nkreq, nents, ivsize);
	if (ret)
		return ret;

	/* prepend the IV, then chain the caller's data */
	nitrox_creq_copy_iv(nkreq->src, skreq->iv, ivsize);
	nitrox_creq_set_src_sg(nkreq, nents, ivsize, skreq->src,
			       skreq->cryptlen);

	return 0;
}
0225
/*
 * Allocate the output buffer and build the destination SG list.
 * Three extra nents beyond the caller's dst: ORH and completion entries
 * are set explicitly below; the third presumably accounts for the IV —
 * TODO confirm against nitrox_creq_set_dst_sg().
 */
static int alloc_dst_sglist(struct skcipher_request *skreq, int ivsize)
{
	struct nitrox_kcrypt_request *nkreq = skcipher_request_ctx(skreq);
	int nents = sg_nents(skreq->dst) + 3;
	int ret;

	/* Allocate buffer */
	ret = alloc_dst_req_buf(nkreq, nents);
	if (ret)
		return ret;

	nitrox_creq_set_orh(nkreq);	/* ORH (response header) slot */
	nitrox_creq_set_comp(nkreq);	/* completion word slot */
	nitrox_creq_set_dst_sg(nkreq, nents, ivsize, skreq->dst,
			       skreq->cryptlen);

	return 0;
}
0246
/*
 * Build and submit a flexi-crypto SE request for @skreq.
 * @enc selects ENCRYPT or DECRYPT. Completion is reported through
 * nctx->callback; a negative errno is returned on setup failure.
 */
static int nitrox_skcipher_crypt(struct skcipher_request *skreq, bool enc)
{
	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(skreq);
	struct nitrox_crypto_ctx *nctx = crypto_skcipher_ctx(cipher);
	struct nitrox_kcrypt_request *nkreq = skcipher_request_ctx(skreq);
	int ivsize = crypto_skcipher_ivsize(cipher);
	struct se_crypto_request *creq;
	int ret;

	creq = &nkreq->creq;
	creq->flags = skreq->base.flags;
	/* may only sleep if the caller allows it */
	creq->gfp = (skreq->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
		     GFP_KERNEL : GFP_ATOMIC;

	/* fill the request control fields */
	creq->ctrl.value = 0;
	creq->opcode = FLEXI_CRYPTO_ENCRYPT_HMAC;
	creq->ctrl.s.arg = (enc ? ENCRYPT : DECRYPT);
	/* param0: length of the payload to be processed */
	creq->gph.param0 = cpu_to_be16(skreq->cryptlen);
	creq->gph.param1 = 0;
	/* param2: IV length; data follows the IV in the src SG list */
	creq->gph.param2 = cpu_to_be16(ivsize);
	creq->gph.param3 = 0;

	creq->ctx_handle = nctx->u.ctx_handle;
	creq->ctrl.s.ctxl = sizeof(struct flexi_crypto_context);

	ret = alloc_src_sglist(skreq, ivsize);
	if (ret)
		return ret;

	ret = alloc_dst_sglist(skreq, ivsize);
	if (ret) {
		free_src_sglist(skreq);
		return ret;
	}

	/* send the crypto request; completion runs nctx->callback(skreq) */
	return nitrox_process_se_request(nctx->ndev, creq, nctx->callback,
					 skreq);
}
0289
/*
 * CBC decrypt. For in-place requests the last ciphertext block (the
 * chaining IV for the next request) is saved to nkreq->iv_out before
 * the hardware overwrites it; nitrox_cbc_cipher_callback() copies it
 * back into skreq->iv and frees it on completion.
 */
static int nitrox_cbc_decrypt(struct skcipher_request *skreq)
{
	struct nitrox_kcrypt_request *nkreq = skcipher_request_ctx(skreq);
	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(skreq);
	int ivsize = crypto_skcipher_ivsize(cipher);
	gfp_t flags = (skreq->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
			GFP_KERNEL : GFP_ATOMIC;
	unsigned int start = skreq->cryptlen - ivsize;

	/* out-of-place: src keeps the ciphertext, no copy needed */
	if (skreq->src != skreq->dst)
		return nitrox_skcipher_crypt(skreq, false);

	nkreq->iv_out = kmalloc(ivsize, flags);
	if (!nkreq->iv_out)
		return -ENOMEM;

	scatterwalk_map_and_copy(nkreq->iv_out, skreq->src, start, ivsize, 0);
	/* NOTE(review): if submission fails outright (callback never runs),
	 * iv_out appears to leak — verify nitrox_process_se_request's
	 * failure semantics.
	 */
	return nitrox_skcipher_crypt(skreq, false);
}
0309
/* AES encrypt entry point: submit the request in ENCRYPT direction. */
static int nitrox_aes_encrypt(struct skcipher_request *skreq)
{
	return nitrox_skcipher_crypt(skreq, true);
}
0314
/* AES decrypt entry point: submit the request in DECRYPT direction. */
static int nitrox_aes_decrypt(struct skcipher_request *skreq)
{
	return nitrox_skcipher_crypt(skreq, false);
}
0319
0320 static int nitrox_3des_setkey(struct crypto_skcipher *cipher,
0321 const u8 *key, unsigned int keylen)
0322 {
0323 return verify_skcipher_des3_key(cipher, key) ?:
0324 nitrox_skcipher_setkey(cipher, 0, key, keylen);
0325 }
0326
/* 3DES encrypt entry point: submit the request in ENCRYPT direction. */
static int nitrox_3des_encrypt(struct skcipher_request *skreq)
{
	return nitrox_skcipher_crypt(skreq, true);
}
0331
/* 3DES decrypt entry point: submit the request in DECRYPT direction. */
static int nitrox_3des_decrypt(struct skcipher_request *skreq)
{
	return nitrox_skcipher_crypt(skreq, false);
}
0336
/*
 * XTS setkey: the supplied key is two concatenated AES keys. key1 (the
 * data-encryption key) goes through the common setkey path; key2 (the
 * tweak key, second half) is stored in the auth-key slot of the
 * hardware context.
 */
static int nitrox_aes_xts_setkey(struct crypto_skcipher *cipher,
				 const u8 *key, unsigned int keylen)
{
	struct crypto_tfm *tfm = crypto_skcipher_tfm(cipher);
	struct nitrox_crypto_ctx *nctx = crypto_tfm_ctx(tfm);
	struct flexi_crypto_context *fctx;
	int aes_keylen, ret;

	/* standard XTS sanity checks (equal halves, FIPS rules) */
	ret = xts_check_key(tfm, key, keylen);
	if (ret)
		return ret;

	keylen /= 2;	/* per-key length: key1 | key2 */

	aes_keylen = flexi_aes_keylen(keylen);
	if (aes_keylen < 0)
		return -EINVAL;

	fctx = nctx->u.fctx;
	/* copy KEY2 (tweak key) */
	memcpy(fctx->auth.u.key2, (key + keylen), keylen);

	/* copy KEY1 via the common path */
	return nitrox_skcipher_setkey(cipher, aes_keylen, key, keylen);
}
0361
/*
 * RFC3686 CTR setkey: the key material is the AES key followed by a
 * 4-byte nonce. The nonce is stored in the context IV field; the
 * remaining bytes are programmed as the AES key.
 */
static int nitrox_aes_ctr_rfc3686_setkey(struct crypto_skcipher *cipher,
					 const u8 *key, unsigned int keylen)
{
	struct crypto_tfm *tfm = crypto_skcipher_tfm(cipher);
	struct nitrox_crypto_ctx *nctx = crypto_tfm_ctx(tfm);
	struct flexi_crypto_context *fctx;
	int aes_keylen;

	if (keylen < CTR_RFC3686_NONCE_SIZE)
		return -EINVAL;

	fctx = nctx->u.fctx;

	/* nonce is the trailing CTR_RFC3686_NONCE_SIZE bytes of the key */
	memcpy(fctx->crypto.iv, key + (keylen - CTR_RFC3686_NONCE_SIZE),
	       CTR_RFC3686_NONCE_SIZE);

	keylen -= CTR_RFC3686_NONCE_SIZE;

	aes_keylen = flexi_aes_keylen(keylen);
	if (aes_keylen < 0)
		return -EINVAL;
	return nitrox_skcipher_setkey(cipher, aes_keylen, key, keylen);
}
0385
/*
 * Symmetric ciphers registered with the crypto API. CBC entries use
 * nitrox_cbc_init/nitrox_cbc_decrypt for IV chain-back; all others use
 * the common init and crypt paths.
 */
static struct skcipher_alg nitrox_skciphers[] = { {
	.base = {
		.cra_name = "cbc(aes)",
		.cra_driver_name = "n5_cbc(aes)",
		.cra_priority = PRIO,
		.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY,
		.cra_blocksize = AES_BLOCK_SIZE,
		.cra_ctxsize = sizeof(struct nitrox_crypto_ctx),
		.cra_alignmask = 0,
		.cra_module = THIS_MODULE,
	},
	.min_keysize = AES_MIN_KEY_SIZE,
	.max_keysize = AES_MAX_KEY_SIZE,
	.ivsize = AES_BLOCK_SIZE,
	.setkey = nitrox_aes_setkey,
	.encrypt = nitrox_aes_encrypt,
	.decrypt = nitrox_cbc_decrypt,	/* saves IV for in-place decrypt */
	.init = nitrox_cbc_init,	/* installs the CBC callback */
	.exit = nitrox_skcipher_exit,
}, {
	.base = {
		.cra_name = "ecb(aes)",
		.cra_driver_name = "n5_ecb(aes)",
		.cra_priority = PRIO,
		.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY,
		.cra_blocksize = AES_BLOCK_SIZE,
		.cra_ctxsize = sizeof(struct nitrox_crypto_ctx),
		.cra_alignmask = 0,
		.cra_module = THIS_MODULE,
	},
	.min_keysize = AES_MIN_KEY_SIZE,
	.max_keysize = AES_MAX_KEY_SIZE,
	.ivsize = AES_BLOCK_SIZE,
	.setkey = nitrox_aes_setkey,
	.encrypt = nitrox_aes_encrypt,
	.decrypt = nitrox_aes_decrypt,
	.init = nitrox_skcipher_init,
	.exit = nitrox_skcipher_exit,
}, {
	.base = {
		.cra_name = "cfb(aes)",
		.cra_driver_name = "n5_cfb(aes)",
		.cra_priority = PRIO,
		.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY,
		.cra_blocksize = AES_BLOCK_SIZE,
		.cra_ctxsize = sizeof(struct nitrox_crypto_ctx),
		.cra_alignmask = 0,
		.cra_module = THIS_MODULE,
	},
	.min_keysize = AES_MIN_KEY_SIZE,
	.max_keysize = AES_MAX_KEY_SIZE,
	.ivsize = AES_BLOCK_SIZE,
	.setkey = nitrox_aes_setkey,
	.encrypt = nitrox_aes_encrypt,
	.decrypt = nitrox_aes_decrypt,
	.init = nitrox_skcipher_init,
	.exit = nitrox_skcipher_exit,
}, {
	.base = {
		.cra_name = "xts(aes)",
		.cra_driver_name = "n5_xts(aes)",
		.cra_priority = PRIO,
		.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY,
		.cra_blocksize = AES_BLOCK_SIZE,
		.cra_ctxsize = sizeof(struct nitrox_crypto_ctx),
		.cra_alignmask = 0,
		.cra_module = THIS_MODULE,
	},
	/* XTS keys are two concatenated AES keys */
	.min_keysize = 2 * AES_MIN_KEY_SIZE,
	.max_keysize = 2 * AES_MAX_KEY_SIZE,
	.ivsize = AES_BLOCK_SIZE,
	.setkey = nitrox_aes_xts_setkey,
	.encrypt = nitrox_aes_encrypt,
	.decrypt = nitrox_aes_decrypt,
	.init = nitrox_skcipher_init,
	.exit = nitrox_skcipher_exit,
}, {
	.base = {
		.cra_name = "rfc3686(ctr(aes))",
		.cra_driver_name = "n5_rfc3686(ctr(aes))",
		.cra_priority = PRIO,
		.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY,
		.cra_blocksize = 1,	/* CTR is a stream cipher */
		.cra_ctxsize = sizeof(struct nitrox_crypto_ctx),
		.cra_alignmask = 0,
		.cra_module = THIS_MODULE,
	},
	/* key carries a trailing 4-byte RFC3686 nonce */
	.min_keysize = AES_MIN_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
	.max_keysize = AES_MAX_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
	.ivsize = CTR_RFC3686_IV_SIZE,
	.init = nitrox_skcipher_init,
	.exit = nitrox_skcipher_exit,
	.setkey = nitrox_aes_ctr_rfc3686_setkey,
	.encrypt = nitrox_aes_encrypt,
	.decrypt = nitrox_aes_decrypt,
}, {
	.base = {
		.cra_name = "cts(cbc(aes))",
		.cra_driver_name = "n5_cts(cbc(aes))",
		.cra_priority = PRIO,
		.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY,
		.cra_blocksize = AES_BLOCK_SIZE,
		.cra_ctxsize = sizeof(struct nitrox_crypto_ctx),
		.cra_alignmask = 0,
		.cra_module = THIS_MODULE,
	},
	.min_keysize = AES_MIN_KEY_SIZE,
	.max_keysize = AES_MAX_KEY_SIZE,
	.ivsize = AES_BLOCK_SIZE,
	.setkey = nitrox_aes_setkey,
	.encrypt = nitrox_aes_encrypt,
	.decrypt = nitrox_aes_decrypt,
	.init = nitrox_skcipher_init,
	.exit = nitrox_skcipher_exit,
}, {
	.base = {
		.cra_name = "cbc(des3_ede)",
		.cra_driver_name = "n5_cbc(des3_ede)",
		.cra_priority = PRIO,
		.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY,
		.cra_blocksize = DES3_EDE_BLOCK_SIZE,
		.cra_ctxsize = sizeof(struct nitrox_crypto_ctx),
		.cra_alignmask = 0,
		.cra_module = THIS_MODULE,
	},
	.min_keysize = DES3_EDE_KEY_SIZE,
	.max_keysize = DES3_EDE_KEY_SIZE,
	.ivsize = DES3_EDE_BLOCK_SIZE,
	.setkey = nitrox_3des_setkey,
	.encrypt = nitrox_3des_encrypt,
	.decrypt = nitrox_cbc_decrypt,	/* saves IV for in-place decrypt */
	.init = nitrox_cbc_init,	/* installs the CBC callback */
	.exit = nitrox_skcipher_exit,
}, {
	.base = {
		.cra_name = "ecb(des3_ede)",
		.cra_driver_name = "n5_ecb(des3_ede)",
		.cra_priority = PRIO,
		.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY,
		.cra_blocksize = DES3_EDE_BLOCK_SIZE,
		.cra_ctxsize = sizeof(struct nitrox_crypto_ctx),
		.cra_alignmask = 0,
		.cra_module = THIS_MODULE,
	},
	.min_keysize = DES3_EDE_KEY_SIZE,
	.max_keysize = DES3_EDE_KEY_SIZE,
	.ivsize = DES3_EDE_BLOCK_SIZE,
	.setkey = nitrox_3des_setkey,
	.encrypt = nitrox_3des_encrypt,
	.decrypt = nitrox_3des_decrypt,
	.init = nitrox_skcipher_init,
	.exit = nitrox_skcipher_exit,
}

};
0541
/* Register all NITROX skcipher algorithms with the crypto API. */
int nitrox_register_skciphers(void)
{
	return crypto_register_skciphers(nitrox_skciphers,
					 ARRAY_SIZE(nitrox_skciphers));
}
0547
/* Unregister all NITROX skcipher algorithms from the crypto API. */
void nitrox_unregister_skciphers(void)
{
	crypto_unregister_skciphers(nitrox_skciphers,
				    ARRAY_SIZE(nitrox_skciphers));
}