// SPDX-License-Identifier: GPL-2.0-only

/*
 * Copyright (C) 2016 Cavium, Inc.
 */

#include <crypto/aes.h>
#include <crypto/algapi.h>
#include <crypto/authenc.h>
#include <crypto/internal/des.h>
#include <crypto/xts.h>
#include <linux/crypto.h>
#include <linux/err.h>
#include <linux/list.h>
#include <linux/scatterlist.h>

#include "cptvf.h"
#include "cptvf_algs.h"

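/*
 * Book-keeping for the CPT virtual functions probed so far.
 * cvm_crypto_init() fills one slot per VF; cvm_enc_dec() later picks the
 * slot matching the submitting CPU.
 */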
struct cpt_device_handle {
    void *cdev[MAX_DEVICES];
    u32 dev_count;
};

static struct cpt_device_handle dev_handle;

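/*
 * Completion handler registered with the CPT request layer. @arg is the
 * crypto_async_request stashed by store_cb_info(); hand the result back
 * to the crypto API.
 */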
static void cvm_callback(u32 status, void *arg)
{
    struct crypto_async_request *req = (struct crypto_async_request *)arg;

    req->complete(req, !status);
}

static inline void update_input_iv(struct cpt_request_info *req_info,
                   u8 *iv, u32 enc_iv_len,
                   u32 *argcnt)
{
    /* Setting the iv information */
    req_info->in[*argcnt].vptr = (void *)iv;
    req_info->in[*argcnt].size = enc_iv_len;
    req_info->req.dlen += enc_iv_len;

    ++(*argcnt);
}

static inline void update_output_iv(struct cpt_request_info *req_info,
                    u8 *iv, u32 enc_iv_len,
                    u32 *argcnt)
{
    /* Setting the iv information */
    req_info->out[*argcnt].vptr = (void *)iv;
    req_info->out[*argcnt].size = enc_iv_len;
    req_info->rlen += enc_iv_len;

    ++(*argcnt);
}

static inline void update_input_data(struct cpt_request_info *req_info,
                     struct scatterlist *inp_sg,
                     u32 nbytes, u32 *argcnt)
{
    req_info->req.dlen += nbytes;

    while (nbytes) {
        u32 len = min(nbytes, inp_sg->length);
        u8 *ptr = sg_virt(inp_sg);

        req_info->in[*argcnt].vptr = (void *)ptr;
        req_info->in[*argcnt].size = len;
        nbytes -= len;

        ++(*argcnt);
        ++inp_sg;
    }
}

static inline void update_output_data(struct cpt_request_info *req_info,
                      struct scatterlist *outp_sg,
                      u32 nbytes, u32 *argcnt)
{
    req_info->rlen += nbytes;

    while (nbytes) {
        u32 len = min(nbytes, outp_sg->length);
        u8 *ptr = sg_virt(outp_sg);

        req_info->out[*argcnt].vptr = (void *)ptr;
        req_info->out[*argcnt].size = len;
        nbytes -= len;
        ++(*argcnt);
        ++outp_sg;
    }
}

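/*
 * Build the common request header: fill in the FC context (cipher type,
 * key material, IV source) and queue the offset control word plus the
 * context itself as the first two gather entries of the request.
 */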
static inline u32 create_ctx_hdr(struct skcipher_request *req, u32 enc,
                 u32 *argcnt)
{
    struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
    struct cvm_enc_ctx *ctx = crypto_skcipher_ctx(tfm);
    struct cvm_req_ctx *rctx = skcipher_request_ctx(req);
    struct fc_context *fctx = &rctx->fctx;
    u32 enc_iv_len = crypto_skcipher_ivsize(tfm);
    struct cpt_request_info *req_info = &rctx->cpt_req;
    __be64 *ctrl_flags = NULL;
    __be64 *offset_control;

    req_info->ctrl.s.grp = 0;
    req_info->ctrl.s.dma_mode = DMA_GATHER_SCATTER;
    req_info->ctrl.s.se_req = SE_CORE_REQ;

    req_info->req.opcode.s.major = MAJOR_OP_FC |
                    DMA_MODE_FLAG(DMA_GATHER_SCATTER);
    if (enc)
        req_info->req.opcode.s.minor = 2;
    else
        req_info->req.opcode.s.minor = 3;

    req_info->req.param1 = req->cryptlen; /* Encryption data length */
    req_info->req.param2 = 0; /* Auth data length */

    fctx->enc.enc_ctrl.e.enc_cipher = ctx->cipher_type;
    fctx->enc.enc_ctrl.e.aes_key = ctx->key_type;
    fctx->enc.enc_ctrl.e.iv_source = FROM_DPTR;

    if (ctx->cipher_type == AES_XTS)
        memcpy(fctx->enc.encr_key, ctx->enc_key, ctx->key_len * 2);
    else
        memcpy(fctx->enc.encr_key, ctx->enc_key, ctx->key_len);
    ctrl_flags = (__be64 *)&fctx->enc.enc_ctrl.flags;
    *ctrl_flags = cpu_to_be64(fctx->enc.enc_ctrl.flags);

    offset_control = (__be64 *)&rctx->control_word;
    *offset_control = cpu_to_be64(((u64)(enc_iv_len) << 16));
    /* Store the packet data information in the first 8 bytes of the
     * offset control word.
     */
    req_info->in[*argcnt].vptr = (u8 *)offset_control;
    req_info->in[*argcnt].size = CONTROL_WORD_LEN;
    req_info->req.dlen += CONTROL_WORD_LEN;
    ++(*argcnt);

    req_info->in[*argcnt].vptr = (u8 *)fctx;
    req_info->in[*argcnt].size = sizeof(struct fc_context);
    req_info->req.dlen += sizeof(struct fc_context);

    ++(*argcnt);

    return 0;
}

static inline u32 create_input_list(struct skcipher_request *req, u32 enc,
                    u32 enc_iv_len)
{
    struct cvm_req_ctx *rctx = skcipher_request_ctx(req);
    struct cpt_request_info *req_info = &rctx->cpt_req;
    u32 argcnt = 0;

    create_ctx_hdr(req, enc, &argcnt);
    update_input_iv(req_info, req->iv, enc_iv_len, &argcnt);
    update_input_data(req_info, req->src, req->cryptlen, &argcnt);
    req_info->incnt = argcnt;

    return 0;
}

static inline void store_cb_info(struct skcipher_request *req,
                 struct cpt_request_info *req_info)
{
    req_info->callback = (void *)cvm_callback;
    req_info->callback_arg = (void *)&req->base;
}

static inline void create_output_list(struct skcipher_request *req,
                      u32 enc_iv_len)
{
    struct cvm_req_ctx *rctx = skcipher_request_ctx(req);
    struct cpt_request_info *req_info = &rctx->cpt_req;
    u32 argcnt = 0;

    /* OUTPUT buffer processing:
     * The encryption/decryption output is received in the
     * following format:
     *
     * |---------IV---------|----ENCRYPTED/DECRYPTED DATA----|
     * |  enc_iv_len bytes  |       req->cryptlen bytes      |
     */
    /* Reading IV information */
    update_output_iv(req_info, req->iv, enc_iv_len, &argcnt);
    update_output_data(req_info, req->dst, req->cryptlen, &argcnt);
    req_info->outcnt = argcnt;
}

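/*
 * Common encrypt/decrypt path: build the gather (input) and scatter
 * (output) lists, pick the CPT VF associated with the current CPU and
 * submit the request. Completion is reported asynchronously through
 * cvm_callback().
 */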
static inline int cvm_enc_dec(struct skcipher_request *req, u32 enc)
{
    struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
    struct cvm_req_ctx *rctx = skcipher_request_ctx(req);
    u32 enc_iv_len = crypto_skcipher_ivsize(tfm);
    struct fc_context *fctx = &rctx->fctx;
    struct cpt_request_info *req_info = &rctx->cpt_req;
    void *cdev = NULL;
    int status;

    memset(req_info, 0, sizeof(struct cpt_request_info));
    req_info->may_sleep = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) != 0;
    memset(fctx, 0, sizeof(struct fc_context));
    create_input_list(req, enc, enc_iv_len);
    create_output_list(req, enc_iv_len);
    store_cb_info(req, req_info);
    cdev = dev_handle.cdev[smp_processor_id()];
    status = cptvf_do_request(cdev, req_info);
    /* The send is asynchronous; once the request completes, the driver
     * notifies us through the registered callback.
     */
    if (status)
        return status;
    else
        return -EINPROGRESS;
}

static int cvm_encrypt(struct skcipher_request *req)
{
    return cvm_enc_dec(req, true);
}

static int cvm_decrypt(struct skcipher_request *req)
{
    return cvm_enc_dec(req, false);
}

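/*
 * XTS takes a double-length key: the first half is the data encryption
 * key, the second half the tweak key. Both halves are copied into the
 * context and the AES key size is derived from the total length.
 */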
static int cvm_xts_setkey(struct crypto_skcipher *cipher, const u8 *key,
           u32 keylen)
{
    struct crypto_tfm *tfm = crypto_skcipher_tfm(cipher);
    struct cvm_enc_ctx *ctx = crypto_tfm_ctx(tfm);
    int err;
    const u8 *key1 = key;
    const u8 *key2 = key + (keylen / 2);

    err = xts_check_key(tfm, key, keylen);
    if (err)
        return err;
    ctx->key_len = keylen;
    memcpy(ctx->enc_key, key1, keylen / 2);
    memcpy(ctx->enc_key + KEY2_OFFSET, key2, keylen / 2);
    ctx->cipher_type = AES_XTS;
    switch (ctx->key_len) {
    case 32:
        ctx->key_type = AES_128_BIT;
        break;
    case 64:
        ctx->key_type = AES_256_BIT;
        break;
    default:
        return -EINVAL;
    }

    return 0;
}

static int cvm_validate_keylen(struct cvm_enc_ctx *ctx, u32 keylen)
{
    if ((keylen == 16) || (keylen == 24) || (keylen == 32)) {
        ctx->key_len = keylen;
        switch (ctx->key_len) {
        case 16:
            ctx->key_type = AES_128_BIT;
            break;
        case 24:
            ctx->key_type = AES_192_BIT;
            break;
        case 32:
            ctx->key_type = AES_256_BIT;
            break;
        default:
            return -EINVAL;
        }

        if (ctx->cipher_type == DES3_CBC)
            ctx->key_type = 0;

        return 0;
    }

    return -EINVAL;
}

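/*
 * Common setkey helper: record the cipher mode, validate the key length
 * and keep a copy of the key for create_ctx_hdr().
 */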
static int cvm_setkey(struct crypto_skcipher *cipher, const u8 *key,
              u32 keylen, u8 cipher_type)
{
    struct crypto_tfm *tfm = crypto_skcipher_tfm(cipher);
    struct cvm_enc_ctx *ctx = crypto_tfm_ctx(tfm);

    ctx->cipher_type = cipher_type;
    if (!cvm_validate_keylen(ctx, keylen)) {
        memcpy(ctx->enc_key, key, keylen);
        return 0;
    } else {
        return -EINVAL;
    }
}

static int cvm_cbc_aes_setkey(struct crypto_skcipher *cipher, const u8 *key,
                  u32 keylen)
{
    return cvm_setkey(cipher, key, keylen, AES_CBC);
}

static int cvm_ecb_aes_setkey(struct crypto_skcipher *cipher, const u8 *key,
                  u32 keylen)
{
    return cvm_setkey(cipher, key, keylen, AES_ECB);
}

static int cvm_cfb_aes_setkey(struct crypto_skcipher *cipher, const u8 *key,
                  u32 keylen)
{
    return cvm_setkey(cipher, key, keylen, AES_CFB);
}

static int cvm_cbc_des3_setkey(struct crypto_skcipher *cipher, const u8 *key,
                   u32 keylen)
{
    return verify_skcipher_des3_key(cipher, key) ?:
           cvm_setkey(cipher, key, keylen, DES3_CBC);
}

static int cvm_ecb_des3_setkey(struct crypto_skcipher *cipher, const u8 *key,
                   u32 keylen)
{
    return verify_skcipher_des3_key(cipher, key) ?:
           cvm_setkey(cipher, key, keylen, DES3_ECB);
}

static int cvm_enc_dec_init(struct crypto_skcipher *tfm)
{
    crypto_skcipher_set_reqsize(tfm, sizeof(struct cvm_req_ctx));

    return 0;
}

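/*
 * skcipher algorithms exposed to the kernel crypto API. All entries are
 * marked CRYPTO_ALG_ASYNC because completion is signalled from
 * cvm_callback() once the hardware has finished the request.
 *
 * A typical in-kernel user reaches this driver indirectly, e.g. (sketch,
 * error handling omitted):
 *
 *     tfm = crypto_alloc_skcipher("cbc(aes)", 0, 0);
 *     crypto_skcipher_setkey(tfm, key, keylen);
 *     ... set up an skcipher_request, then crypto_skcipher_encrypt() ...
 */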
static struct skcipher_alg algs[] = { {
    .base.cra_flags     = CRYPTO_ALG_ASYNC |
                  CRYPTO_ALG_ALLOCATES_MEMORY,
    .base.cra_blocksize = AES_BLOCK_SIZE,
    .base.cra_ctxsize   = sizeof(struct cvm_enc_ctx),
    .base.cra_alignmask = 7,
    .base.cra_priority  = 4001,
    .base.cra_name      = "xts(aes)",
    .base.cra_driver_name   = "cavium-xts-aes",
    .base.cra_module    = THIS_MODULE,

    .ivsize         = AES_BLOCK_SIZE,
    .min_keysize        = 2 * AES_MIN_KEY_SIZE,
    .max_keysize        = 2 * AES_MAX_KEY_SIZE,
    .setkey         = cvm_xts_setkey,
    .encrypt        = cvm_encrypt,
    .decrypt        = cvm_decrypt,
    .init           = cvm_enc_dec_init,
}, {
    .base.cra_flags     = CRYPTO_ALG_ASYNC |
                  CRYPTO_ALG_ALLOCATES_MEMORY,
    .base.cra_blocksize = AES_BLOCK_SIZE,
    .base.cra_ctxsize   = sizeof(struct cvm_enc_ctx),
    .base.cra_alignmask = 7,
    .base.cra_priority  = 4001,
    .base.cra_name      = "cbc(aes)",
    .base.cra_driver_name   = "cavium-cbc-aes",
    .base.cra_module    = THIS_MODULE,

    .ivsize         = AES_BLOCK_SIZE,
    .min_keysize        = AES_MIN_KEY_SIZE,
    .max_keysize        = AES_MAX_KEY_SIZE,
    .setkey         = cvm_cbc_aes_setkey,
    .encrypt        = cvm_encrypt,
    .decrypt        = cvm_decrypt,
    .init           = cvm_enc_dec_init,
}, {
    .base.cra_flags     = CRYPTO_ALG_ASYNC |
                  CRYPTO_ALG_ALLOCATES_MEMORY,
    .base.cra_blocksize = AES_BLOCK_SIZE,
    .base.cra_ctxsize   = sizeof(struct cvm_enc_ctx),
    .base.cra_alignmask = 7,
    .base.cra_priority  = 4001,
    .base.cra_name      = "ecb(aes)",
    .base.cra_driver_name   = "cavium-ecb-aes",
    .base.cra_module    = THIS_MODULE,

    .min_keysize        = AES_MIN_KEY_SIZE,
    .max_keysize        = AES_MAX_KEY_SIZE,
    .setkey         = cvm_ecb_aes_setkey,
    .encrypt        = cvm_encrypt,
    .decrypt        = cvm_decrypt,
    .init           = cvm_enc_dec_init,
}, {
    .base.cra_flags     = CRYPTO_ALG_ASYNC |
                  CRYPTO_ALG_ALLOCATES_MEMORY,
    .base.cra_blocksize = AES_BLOCK_SIZE,
    .base.cra_ctxsize   = sizeof(struct cvm_enc_ctx),
    .base.cra_alignmask = 7,
    .base.cra_priority  = 4001,
    .base.cra_name      = "cfb(aes)",
    .base.cra_driver_name   = "cavium-cfb-aes",
    .base.cra_module    = THIS_MODULE,

    .ivsize         = AES_BLOCK_SIZE,
    .min_keysize        = AES_MIN_KEY_SIZE,
    .max_keysize        = AES_MAX_KEY_SIZE,
    .setkey         = cvm_cfb_aes_setkey,
    .encrypt        = cvm_encrypt,
    .decrypt        = cvm_decrypt,
    .init           = cvm_enc_dec_init,
}, {
    .base.cra_flags     = CRYPTO_ALG_ASYNC |
                  CRYPTO_ALG_ALLOCATES_MEMORY,
    .base.cra_blocksize = DES3_EDE_BLOCK_SIZE,
    .base.cra_ctxsize   = sizeof(struct cvm_des3_ctx),
    .base.cra_alignmask = 7,
    .base.cra_priority  = 4001,
    .base.cra_name      = "cbc(des3_ede)",
    .base.cra_driver_name   = "cavium-cbc-des3_ede",
    .base.cra_module    = THIS_MODULE,

    .min_keysize        = DES3_EDE_KEY_SIZE,
    .max_keysize        = DES3_EDE_KEY_SIZE,
    .ivsize         = DES_BLOCK_SIZE,
    .setkey         = cvm_cbc_des3_setkey,
    .encrypt        = cvm_encrypt,
    .decrypt        = cvm_decrypt,
    .init           = cvm_enc_dec_init,
}, {
    .base.cra_flags     = CRYPTO_ALG_ASYNC |
                  CRYPTO_ALG_ALLOCATES_MEMORY,
    .base.cra_blocksize = DES3_EDE_BLOCK_SIZE,
    .base.cra_ctxsize   = sizeof(struct cvm_des3_ctx),
    .base.cra_alignmask = 7,
    .base.cra_priority  = 4001,
    .base.cra_name      = "ecb(des3_ede)",
    .base.cra_driver_name   = "cavium-ecb-des3_ede",
    .base.cra_module    = THIS_MODULE,

    .min_keysize        = DES3_EDE_KEY_SIZE,
    .max_keysize        = DES3_EDE_KEY_SIZE,
    .ivsize         = DES_BLOCK_SIZE,
    .setkey         = cvm_ecb_des3_setkey,
    .encrypt        = cvm_encrypt,
    .decrypt        = cvm_decrypt,
    .init           = cvm_enc_dec_init,
} };

static inline int cav_register_algs(void)
{
    return crypto_register_skciphers(algs, ARRAY_SIZE(algs));
}

static inline void cav_unregister_algs(void)
{
    crypto_unregister_skciphers(algs, ARRAY_SIZE(algs));
}

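/*
 * Called from the VF probe path. Each VF is recorded in dev_handle; the
 * algorithms are registered only once the fourth VF (dev_count == 3) has
 * been added.
 */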
int cvm_crypto_init(struct cpt_vf *cptvf)
{
    struct pci_dev *pdev = cptvf->pdev;
    u32 dev_count;

    dev_count = dev_handle.dev_count;
    dev_handle.cdev[dev_count] = cptvf;
    dev_handle.dev_count++;

    if (dev_count == 3) {
        if (cav_register_algs()) {
            dev_err(&pdev->dev, "Error in registering crypto algorithms\n");
            return -EINVAL;
        }
    }

    return 0;
}

void cvm_crypto_exit(void)
{
    u32 dev_count;

    dev_count = --dev_handle.dev_count;
    if (!dev_count)
        cav_unregister_algs();
}