// SPDX-License-Identifier: GPL-2.0
/* Copyright (C) 2012-2019 ARM Limited (or its affiliates). */

#include <linux/kernel.h>
#include <linux/module.h>
#include <crypto/algapi.h>
#include <crypto/internal/skcipher.h>
#include <crypto/internal/des.h>
#include <crypto/xts.h>
#include <crypto/sm4.h>
#include <crypto/scatterwalk.h>

#include "cc_driver.h"
#include "cc_lli_defs.h"
#include "cc_buffer_mgr.h"
#include "cc_cipher.h"
#include "cc_request_mgr.h"

#define MAX_SKCIPHER_SEQ_LEN 6

#define template_skcipher   template_u.skcipher

struct cc_user_key_info {
    u8 *key;
    dma_addr_t key_dma_addr;
};

struct cc_hw_key_info {
    enum cc_hw_crypto_key key1_slot;
    enum cc_hw_crypto_key key2_slot;
};

struct cc_cpp_key_info {
    u8 slot;
    enum cc_cpp_alg alg;
};

enum cc_key_type {
    CC_UNPROTECTED_KEY,     /* User key */
    CC_HW_PROTECTED_KEY,        /* HW (FDE) key */
    CC_POLICY_PROTECTED_KEY,    /* CPP key */
    CC_INVALID_PROTECTED_KEY    /* Invalid key */
};

struct cc_cipher_ctx {
    struct cc_drvdata *drvdata;
    int keylen;
    int cipher_mode;
    int flow_mode;
    unsigned int flags;
    enum cc_key_type key_type;
    struct cc_user_key_info user;
    union {
        struct cc_hw_key_info hw;
        struct cc_cpp_key_info cpp;
    };
    struct crypto_shash *shash_tfm;
    struct crypto_skcipher *fallback_tfm;
    bool fallback_on;
};

static void cc_cipher_complete(struct device *dev, void *cc_req, int err);

static inline enum cc_key_type cc_key_type(struct crypto_tfm *tfm)
{
    struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);

    return ctx_p->key_type;
}

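/*
 * validate_keys_sizes() - Check that @size is a legal key length for the
 * configured flow and cipher mode. For AES, XTS and ESSIV consume two
 * concatenated keys, so the double-length sizes are only accepted in
 * those modes, while 128/192 bit keys are rejected for XTS.
 *
 * Return: 0 if the size is valid, -EINVAL otherwise.
 */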
static int validate_keys_sizes(struct cc_cipher_ctx *ctx_p, u32 size)
{
    switch (ctx_p->flow_mode) {
    case S_DIN_to_AES:
        switch (size) {
        case CC_AES_128_BIT_KEY_SIZE:
        case CC_AES_192_BIT_KEY_SIZE:
            if (ctx_p->cipher_mode != DRV_CIPHER_XTS)
                return 0;
            break;
        case CC_AES_256_BIT_KEY_SIZE:
            return 0;
        case (CC_AES_192_BIT_KEY_SIZE * 2):
        case (CC_AES_256_BIT_KEY_SIZE * 2):
            if (ctx_p->cipher_mode == DRV_CIPHER_XTS ||
                ctx_p->cipher_mode == DRV_CIPHER_ESSIV)
                return 0;
            break;
        default:
            break;
        }
        break;
    case S_DIN_to_DES:
        if (size == DES3_EDE_KEY_SIZE || size == DES_KEY_SIZE)
            return 0;
        break;
    case S_DIN_to_SM4:
        if (size == SM4_KEY_SIZE)
            return 0;
        break;
    default:
        break;
    }
    return -EINVAL;
}

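/*
 * validate_data_size() - Check that the request length is legal for the
 * configured mode: stream-like modes (CTR, OFB) accept any length,
 * XTS and CBC-CTS need at least one full block, and the block modes
 * (ECB, CBC, ESSIV) require block-aligned input.
 *
 * Return: 0 if the size is valid, -EINVAL otherwise.
 */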
static int validate_data_size(struct cc_cipher_ctx *ctx_p,
                  unsigned int size)
{
    switch (ctx_p->flow_mode) {
    case S_DIN_to_AES:
        switch (ctx_p->cipher_mode) {
        case DRV_CIPHER_XTS:
        case DRV_CIPHER_CBC_CTS:
            if (size >= AES_BLOCK_SIZE)
                return 0;
            break;
        case DRV_CIPHER_OFB:
        case DRV_CIPHER_CTR:
            return 0;
        case DRV_CIPHER_ECB:
        case DRV_CIPHER_CBC:
        case DRV_CIPHER_ESSIV:
            if (IS_ALIGNED(size, AES_BLOCK_SIZE))
                return 0;
            break;
        default:
            break;
        }
        break;
    case S_DIN_to_DES:
        if (IS_ALIGNED(size, DES_BLOCK_SIZE))
            return 0;
        break;
    case S_DIN_to_SM4:
        switch (ctx_p->cipher_mode) {
        case DRV_CIPHER_CTR:
            return 0;
        case DRV_CIPHER_ECB:
        case DRV_CIPHER_CBC:
            if (IS_ALIGNED(size, SM4_BLOCK_SIZE))
                return 0;
            break;
        default:
            break;
        }
        break;
    default:
        break;
    }
    return -EINVAL;
}

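/*
 * cc_cipher_init() - tfm init: cache the cipher/flow mode from the alg
 * template and allocate a DMA-mapped key buffer in the context. For
 * ESSIV the key buffer is doubled to hold the derived second key, a
 * sha256 shash is allocated for the key derivation and, if possible,
 * an async fallback skcipher is allocated to service key sizes the HW
 * cannot handle.
 */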
static int cc_cipher_init(struct crypto_tfm *tfm)
{
    struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
    struct cc_crypto_alg *cc_alg =
            container_of(tfm->__crt_alg, struct cc_crypto_alg,
                     skcipher_alg.base);
    struct device *dev = drvdata_to_dev(cc_alg->drvdata);
    unsigned int max_key_buf_size = cc_alg->skcipher_alg.max_keysize;
    unsigned int fallback_req_size = 0;

    dev_dbg(dev, "Initializing context @%p for %s\n", ctx_p,
        crypto_tfm_alg_name(tfm));

    ctx_p->cipher_mode = cc_alg->cipher_mode;
    ctx_p->flow_mode = cc_alg->flow_mode;
    ctx_p->drvdata = cc_alg->drvdata;

    if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {
        const char *name = crypto_tfm_alg_name(tfm);

        /* Alloc hash tfm for essiv */
        ctx_p->shash_tfm = crypto_alloc_shash("sha256", 0, 0);
        if (IS_ERR(ctx_p->shash_tfm)) {
            dev_err(dev, "Error allocating hash tfm for ESSIV.\n");
            return PTR_ERR(ctx_p->shash_tfm);
        }
        max_key_buf_size <<= 1;

        /* Alloc fallback tfm for essiv when key size != 256 bit */
        ctx_p->fallback_tfm =
            crypto_alloc_skcipher(name, 0, CRYPTO_ALG_NEED_FALLBACK | CRYPTO_ALG_ASYNC);

        if (IS_ERR(ctx_p->fallback_tfm)) {
            /* Note we're still allowing registration with no fallback since it's
             * better to have most modes supported than none at all.
             */
            dev_warn(dev, "Error allocating fallback algo %s. Some modes may be available.\n",
                   name);
            ctx_p->fallback_tfm = NULL;
        } else {
            fallback_req_size = crypto_skcipher_reqsize(ctx_p->fallback_tfm);
        }
    }

    crypto_skcipher_set_reqsize(__crypto_skcipher_cast(tfm),
                    sizeof(struct cipher_req_ctx) + fallback_req_size);

    /* Allocate key buffer, cache line aligned */
    ctx_p->user.key = kzalloc(max_key_buf_size, GFP_KERNEL);
    if (!ctx_p->user.key)
        goto free_fallback;

    dev_dbg(dev, "Allocated key buffer in context. key=@%p\n",
        ctx_p->user.key);

    /* Map key buffer */
    ctx_p->user.key_dma_addr = dma_map_single(dev, ctx_p->user.key,
                          max_key_buf_size,
                          DMA_TO_DEVICE);
    if (dma_mapping_error(dev, ctx_p->user.key_dma_addr)) {
        dev_err(dev, "Mapping Key %u B at va=%pK for DMA failed\n",
            max_key_buf_size, ctx_p->user.key);
        goto free_key;
    }
    dev_dbg(dev, "Mapped key %u B at va=%pK to dma=%pad\n",
        max_key_buf_size, ctx_p->user.key, &ctx_p->user.key_dma_addr);

    return 0;

free_key:
    kfree(ctx_p->user.key);
free_fallback:
    crypto_free_skcipher(ctx_p->fallback_tfm);
    crypto_free_shash(ctx_p->shash_tfm);

    return -ENOMEM;
}

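/*
 * cc_cipher_exit() - tfm exit: undo cc_cipher_init(). Frees the ESSIV
 * helper tfms if they were allocated, unmaps the key buffer and frees
 * it with kfree_sensitive() so key material does not linger in memory.
 */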
static void cc_cipher_exit(struct crypto_tfm *tfm)
{
    struct crypto_alg *alg = tfm->__crt_alg;
    struct cc_crypto_alg *cc_alg =
            container_of(alg, struct cc_crypto_alg,
                     skcipher_alg.base);
    unsigned int max_key_buf_size = cc_alg->skcipher_alg.max_keysize;
    struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
    struct device *dev = drvdata_to_dev(ctx_p->drvdata);

    dev_dbg(dev, "Clearing context @%p for %s\n",
        crypto_tfm_ctx(tfm), crypto_tfm_alg_name(tfm));

    if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {
        /* Free hash tfm for essiv */
        crypto_free_shash(ctx_p->shash_tfm);
        ctx_p->shash_tfm = NULL;
        crypto_free_skcipher(ctx_p->fallback_tfm);
        ctx_p->fallback_tfm = NULL;
    }

    /* Unmap key buffer */
    dma_unmap_single(dev, ctx_p->user.key_dma_addr, max_key_buf_size,
             DMA_TO_DEVICE);
    dev_dbg(dev, "Unmapped key buffer key_dma_addr=%pad\n",
        &ctx_p->user.key_dma_addr);

    /* Free key buffer in context */
    dev_dbg(dev, "Free key buffer in context. key=@%p\n", ctx_p->user.key);
    kfree_sensitive(ctx_p->user.key);
}

struct tdes_keys {
    u8  key1[DES_KEY_SIZE];
    u8  key2[DES_KEY_SIZE];
    u8  key3[DES_KEY_SIZE];
};

static enum cc_hw_crypto_key cc_slot_to_hw_key(u8 slot_num)
{
    switch (slot_num) {
    case 0:
        return KFDE0_KEY;
    case 1:
        return KFDE1_KEY;
    case 2:
        return KFDE2_KEY;
    case 3:
        return KFDE3_KEY;
    }
    return END_OF_KEYS;
}

static u8 cc_slot_to_cpp_key(u8 slot_num)
{
    return (slot_num - CC_FIRST_CPP_KEY_SLOT);
}

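/*
 * cc_slot_to_key_type() - Classify a protected-key slot number: slots in
 * the HW (FDE) range map to CC_HW_PROTECTED_KEY, slots in the CPP range
 * to CC_POLICY_PROTECTED_KEY, anything else is invalid.
 */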
static inline enum cc_key_type cc_slot_to_key_type(u8 slot_num)
{
    if (slot_num >= CC_FIRST_HW_KEY_SLOT && slot_num <= CC_LAST_HW_KEY_SLOT)
        return CC_HW_PROTECTED_KEY;
    else if (slot_num >= CC_FIRST_CPP_KEY_SLOT &&
         slot_num <= CC_LAST_CPP_KEY_SLOT)
        return CC_POLICY_PROTECTED_KEY;
    else
        return CC_INVALID_PROTECTED_KEY;
}

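/*
 * cc_cipher_sethkey() - setkey handler for the protected-key ("paes" /
 * "psm4") algorithms. The key blob is not key material but a
 * struct cc_hkey_info token naming the HW key slot(s) and the real key
 * length. The token is validated, the slot numbers are resolved, and
 * the context key type is set to HW or policy (CPP) protected.
 */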
static int cc_cipher_sethkey(struct crypto_skcipher *sktfm, const u8 *key,
                 unsigned int keylen)
{
    struct crypto_tfm *tfm = crypto_skcipher_tfm(sktfm);
    struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
    struct device *dev = drvdata_to_dev(ctx_p->drvdata);
    struct cc_hkey_info hki;

    dev_dbg(dev, "Setting HW key in context @%p for %s. keylen=%u\n",
        ctx_p, crypto_tfm_alg_name(tfm), keylen);
    dump_byte_array("key", key, keylen);

    /* STAT_PHASE_0: Init and sanity checks */

    /* This checks the size of the protected key token */
    if (keylen != sizeof(hki)) {
        dev_err(dev, "Unsupported protected key size %d.\n", keylen);
        return -EINVAL;
    }

    memcpy(&hki, key, keylen);

    /* The real key len for crypto op is the size of the HW key
     * referenced by the HW key slot, not the hardware key token
     */
    keylen = hki.keylen;

    if (validate_keys_sizes(ctx_p, keylen)) {
        dev_dbg(dev, "Unsupported key size %d.\n", keylen);
        return -EINVAL;
    }

    ctx_p->keylen = keylen;
    ctx_p->fallback_on = false;

    switch (cc_slot_to_key_type(hki.hw_key1)) {
    case CC_HW_PROTECTED_KEY:
        if (ctx_p->flow_mode == S_DIN_to_SM4) {
            dev_err(dev, "Only AES HW protected keys are supported\n");
            return -EINVAL;
        }

        ctx_p->hw.key1_slot = cc_slot_to_hw_key(hki.hw_key1);
        if (ctx_p->hw.key1_slot == END_OF_KEYS) {
            dev_err(dev, "Unsupported hw key1 number (%d)\n",
                hki.hw_key1);
            return -EINVAL;
        }

        if (ctx_p->cipher_mode == DRV_CIPHER_XTS ||
            ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {
            if (hki.hw_key1 == hki.hw_key2) {
                dev_err(dev, "Illegal hw key numbers (%d,%d)\n",
                    hki.hw_key1, hki.hw_key2);
                return -EINVAL;
            }

            ctx_p->hw.key2_slot = cc_slot_to_hw_key(hki.hw_key2);
            if (ctx_p->hw.key2_slot == END_OF_KEYS) {
                dev_err(dev, "Unsupported hw key2 number (%d)\n",
                    hki.hw_key2);
                return -EINVAL;
            }
        }

        ctx_p->key_type = CC_HW_PROTECTED_KEY;
        dev_dbg(dev, "HW protected key %d/%d set.\n",
            ctx_p->hw.key1_slot, ctx_p->hw.key2_slot);
        break;

    case CC_POLICY_PROTECTED_KEY:
        if (ctx_p->drvdata->hw_rev < CC_HW_REV_713) {
            dev_err(dev, "CPP keys not supported in this hardware revision.\n");
            return -EINVAL;
        }

        if (ctx_p->cipher_mode != DRV_CIPHER_CBC &&
            ctx_p->cipher_mode != DRV_CIPHER_CTR) {
            dev_err(dev, "CPP keys only supported in CBC or CTR modes.\n");
            return -EINVAL;
        }

        ctx_p->cpp.slot = cc_slot_to_cpp_key(hki.hw_key1);
        if (ctx_p->flow_mode == S_DIN_to_AES)
            ctx_p->cpp.alg = CC_CPP_AES;
        else /* Must be SM4 due to sethkey registration */
            ctx_p->cpp.alg = CC_CPP_SM4;
        ctx_p->key_type = CC_POLICY_PROTECTED_KEY;
        dev_dbg(dev, "policy protected key alg: %d slot: %d.\n",
            ctx_p->cpp.alg, ctx_p->cpp.slot);
        break;

    default:
        dev_err(dev, "Unsupported protected key (%d)\n", hki.hw_key1);
        return -EINVAL;
    }

    return 0;
}

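/*
 * cc_cipher_setkey() - setkey handler for regular (unprotected) keys.
 * Validates the key size, screens weak DES and XTS keys, and copies the
 * key into the DMA-mapped context buffer. For ESSIV only 256 bit keys
 * are handled in HW; other sizes are routed to the fallback tfm when
 * one exists. The ESSIV second key is derived here as
 * key2 = SHA-256(key1) and stored in the second half of the buffer.
 */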
static int cc_cipher_setkey(struct crypto_skcipher *sktfm, const u8 *key,
                unsigned int keylen)
{
    struct crypto_tfm *tfm = crypto_skcipher_tfm(sktfm);
    struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
    struct device *dev = drvdata_to_dev(ctx_p->drvdata);
    struct cc_crypto_alg *cc_alg =
            container_of(tfm->__crt_alg, struct cc_crypto_alg,
                     skcipher_alg.base);
    unsigned int max_key_buf_size = cc_alg->skcipher_alg.max_keysize;

    dev_dbg(dev, "Setting key in context @%p for %s. keylen=%u\n",
        ctx_p, crypto_tfm_alg_name(tfm), keylen);
    dump_byte_array("key", key, keylen);

    /* STAT_PHASE_0: Init and sanity checks */

    if (validate_keys_sizes(ctx_p, keylen)) {
        dev_dbg(dev, "Invalid key size %d.\n", keylen);
        return -EINVAL;
    }

    if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {

        /* We only support 256 bit ESSIV-CBC-AES keys */
        if (keylen != AES_KEYSIZE_256) {
            unsigned int flags = crypto_tfm_get_flags(tfm) & CRYPTO_TFM_REQ_MASK;

            if (likely(ctx_p->fallback_tfm)) {
                ctx_p->fallback_on = true;
                crypto_skcipher_clear_flags(ctx_p->fallback_tfm,
                                CRYPTO_TFM_REQ_MASK);
                crypto_skcipher_set_flags(ctx_p->fallback_tfm, flags);
                return crypto_skcipher_setkey(ctx_p->fallback_tfm, key, keylen);
            }

            dev_dbg(dev, "Unsupported key size %d and no fallback.\n", keylen);
            return -EINVAL;
        }

        /* Internal ESSIV key buffer is double sized */
        max_key_buf_size <<= 1;
    }

    ctx_p->fallback_on = false;
    ctx_p->key_type = CC_UNPROTECTED_KEY;

    /*
     * Verify DES weak keys
     * Note that we're dropping the expanded key since the
     * HW does the expansion on its own.
     */
    if (ctx_p->flow_mode == S_DIN_to_DES) {
        if ((keylen == DES3_EDE_KEY_SIZE &&
             verify_skcipher_des3_key(sktfm, key)) ||
            verify_skcipher_des_key(sktfm, key)) {
            dev_dbg(dev, "weak DES key");
            return -EINVAL;
        }
    }

    if (ctx_p->cipher_mode == DRV_CIPHER_XTS &&
        xts_check_key(tfm, key, keylen)) {
        dev_dbg(dev, "weak XTS key");
        return -EINVAL;
    }

    /* STAT_PHASE_1: Copy key to ctx */
    dma_sync_single_for_cpu(dev, ctx_p->user.key_dma_addr,
                max_key_buf_size, DMA_TO_DEVICE);

    memcpy(ctx_p->user.key, key, keylen);

    if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {
        /* sha256 for key2 - use sw implementation */
        int err;

        err = crypto_shash_tfm_digest(ctx_p->shash_tfm,
                          ctx_p->user.key, keylen,
                          ctx_p->user.key + keylen);
        if (err) {
            dev_err(dev, "Failed to hash ESSIV key.\n");
            return err;
        }

        keylen <<= 1;
    }
    dma_sync_single_for_device(dev, ctx_p->user.key_dma_addr,
                   max_key_buf_size, DMA_TO_DEVICE);
    ctx_p->keylen = keylen;

    dev_dbg(dev, "return safely");
    return 0;
}

static int cc_out_setup_mode(struct cc_cipher_ctx *ctx_p)
{
    switch (ctx_p->flow_mode) {
    case S_DIN_to_AES:
        return S_AES_to_DOUT;
    case S_DIN_to_DES:
        return S_DES_to_DOUT;
    case S_DIN_to_SM4:
        return S_SM4_to_DOUT;
    default:
        return ctx_p->flow_mode;
    }
}

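/*
 * cc_setup_readiv_desc() - Append a descriptor that reads the updated IV
 * back from the HW state into the request IV buffer once processing is
 * done, so chained requests can continue where this one left off. CTR
 * and OFB keep their IV in STATE1, the CBC variants in STATE0, and
 * XTS/ESSIV write back a full AES block from STATE1. Not needed for ECB
 * and not possible for CPP keys.
 */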
static void cc_setup_readiv_desc(struct crypto_tfm *tfm,
                 struct cipher_req_ctx *req_ctx,
                 unsigned int ivsize, struct cc_hw_desc desc[],
                 unsigned int *seq_size)
{
    struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
    struct device *dev = drvdata_to_dev(ctx_p->drvdata);
    int cipher_mode = ctx_p->cipher_mode;
    int flow_mode = cc_out_setup_mode(ctx_p);
    int direction = req_ctx->gen_ctx.op_type;
    dma_addr_t iv_dma_addr = req_ctx->gen_ctx.iv_dma_addr;

    if (ctx_p->key_type == CC_POLICY_PROTECTED_KEY)
        return;

    switch (cipher_mode) {
    case DRV_CIPHER_ECB:
        break;
    case DRV_CIPHER_CBC:
    case DRV_CIPHER_CBC_CTS:
    case DRV_CIPHER_CTR:
    case DRV_CIPHER_OFB:
        /* Read next IV */
        hw_desc_init(&desc[*seq_size]);
        set_dout_dlli(&desc[*seq_size], iv_dma_addr, ivsize, NS_BIT, 1);
        set_cipher_config0(&desc[*seq_size], direction);
        set_flow_mode(&desc[*seq_size], flow_mode);
        set_cipher_mode(&desc[*seq_size], cipher_mode);
        if (cipher_mode == DRV_CIPHER_CTR ||
            cipher_mode == DRV_CIPHER_OFB) {
            set_setup_mode(&desc[*seq_size], SETUP_WRITE_STATE1);
        } else {
            set_setup_mode(&desc[*seq_size], SETUP_WRITE_STATE0);
        }
        set_queue_last_ind(ctx_p->drvdata, &desc[*seq_size]);
        (*seq_size)++;
        break;
    case DRV_CIPHER_XTS:
    case DRV_CIPHER_ESSIV:
        /* IV */
        hw_desc_init(&desc[*seq_size]);
        set_setup_mode(&desc[*seq_size], SETUP_WRITE_STATE1);
        set_cipher_mode(&desc[*seq_size], cipher_mode);
        set_cipher_config0(&desc[*seq_size], direction);
        set_flow_mode(&desc[*seq_size], flow_mode);
        set_dout_dlli(&desc[*seq_size], iv_dma_addr, CC_AES_BLOCK_SIZE,
                 NS_BIT, 1);
        set_queue_last_ind(ctx_p->drvdata, &desc[*seq_size]);
        (*seq_size)++;
        break;
    default:
        dev_err(dev, "Unsupported cipher mode (%d)\n", cipher_mode);
    }
}

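/*
 * cc_setup_state_desc() - Append a descriptor loading the IV into the HW
 * state before processing. CTR and OFB load into STATE1, the CBC
 * variants into STATE0. ECB has no IV, and the XTS/ESSIV tweak is
 * loaded later by cc_setup_xex_state_desc(), after the XEX key.
 */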
static void cc_setup_state_desc(struct crypto_tfm *tfm,
                 struct cipher_req_ctx *req_ctx,
                 unsigned int ivsize, unsigned int nbytes,
                 struct cc_hw_desc desc[],
                 unsigned int *seq_size)
{
    struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
    struct device *dev = drvdata_to_dev(ctx_p->drvdata);
    int cipher_mode = ctx_p->cipher_mode;
    int flow_mode = ctx_p->flow_mode;
    int direction = req_ctx->gen_ctx.op_type;
    dma_addr_t iv_dma_addr = req_ctx->gen_ctx.iv_dma_addr;

    switch (cipher_mode) {
    case DRV_CIPHER_ECB:
        break;
    case DRV_CIPHER_CBC:
    case DRV_CIPHER_CBC_CTS:
    case DRV_CIPHER_CTR:
    case DRV_CIPHER_OFB:
        /* Load IV */
        hw_desc_init(&desc[*seq_size]);
        set_din_type(&desc[*seq_size], DMA_DLLI, iv_dma_addr, ivsize,
                 NS_BIT);
        set_cipher_config0(&desc[*seq_size], direction);
        set_flow_mode(&desc[*seq_size], flow_mode);
        set_cipher_mode(&desc[*seq_size], cipher_mode);
        if (cipher_mode == DRV_CIPHER_CTR ||
            cipher_mode == DRV_CIPHER_OFB) {
            set_setup_mode(&desc[*seq_size], SETUP_LOAD_STATE1);
        } else {
            set_setup_mode(&desc[*seq_size], SETUP_LOAD_STATE0);
        }
        (*seq_size)++;
        break;
    case DRV_CIPHER_XTS:
    case DRV_CIPHER_ESSIV:
        break;
    default:
        dev_err(dev, "Unsupported cipher mode (%d)\n", cipher_mode);
    }
}

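/*
 * cc_setup_xex_state_desc() - For XTS and ESSIV, append descriptors that
 * load the second (XEX/tweak) key and then the IV into STATE1. For XTS
 * key2 is the upper half of the user key (or a second HW key slot); for
 * ESSIV it is the SHA-256 digest stored after the user key, so key_len
 * is forced to SHA256_DIGEST_SIZE. All other modes need no XEX setup.
 */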
static void cc_setup_xex_state_desc(struct crypto_tfm *tfm,
                 struct cipher_req_ctx *req_ctx,
                 unsigned int ivsize, unsigned int nbytes,
                 struct cc_hw_desc desc[],
                 unsigned int *seq_size)
{
    struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
    struct device *dev = drvdata_to_dev(ctx_p->drvdata);
    int cipher_mode = ctx_p->cipher_mode;
    int flow_mode = ctx_p->flow_mode;
    int direction = req_ctx->gen_ctx.op_type;
    dma_addr_t key_dma_addr = ctx_p->user.key_dma_addr;
    unsigned int key_len = (ctx_p->keylen / 2);
    dma_addr_t iv_dma_addr = req_ctx->gen_ctx.iv_dma_addr;
    unsigned int key_offset = key_len;

    switch (cipher_mode) {
    case DRV_CIPHER_ECB:
        break;
    case DRV_CIPHER_CBC:
    case DRV_CIPHER_CBC_CTS:
    case DRV_CIPHER_CTR:
    case DRV_CIPHER_OFB:
        break;
    case DRV_CIPHER_XTS:
    case DRV_CIPHER_ESSIV:

        if (cipher_mode == DRV_CIPHER_ESSIV)
            key_len = SHA256_DIGEST_SIZE;

        /* load XEX key */
        hw_desc_init(&desc[*seq_size]);
        set_cipher_mode(&desc[*seq_size], cipher_mode);
        set_cipher_config0(&desc[*seq_size], direction);
        if (cc_key_type(tfm) == CC_HW_PROTECTED_KEY) {
            set_hw_crypto_key(&desc[*seq_size],
                      ctx_p->hw.key2_slot);
        } else {
            set_din_type(&desc[*seq_size], DMA_DLLI,
                     (key_dma_addr + key_offset),
                     key_len, NS_BIT);
        }
        set_xex_data_unit_size(&desc[*seq_size], nbytes);
        set_flow_mode(&desc[*seq_size], S_DIN_to_AES2);
        set_key_size_aes(&desc[*seq_size], key_len);
        set_setup_mode(&desc[*seq_size], SETUP_LOAD_XEX_KEY);
        (*seq_size)++;

        /* Load IV */
        hw_desc_init(&desc[*seq_size]);
        set_setup_mode(&desc[*seq_size], SETUP_LOAD_STATE1);
        set_cipher_mode(&desc[*seq_size], cipher_mode);
        set_cipher_config0(&desc[*seq_size], direction);
        set_key_size_aes(&desc[*seq_size], key_len);
        set_flow_mode(&desc[*seq_size], flow_mode);
        set_din_type(&desc[*seq_size], DMA_DLLI, iv_dma_addr,
                 CC_AES_BLOCK_SIZE, NS_BIT);
        (*seq_size)++;
        break;
    default:
        dev_err(dev, "Unsupported cipher mode (%d)\n", cipher_mode);
    }
}

static int cc_out_flow_mode(struct cc_cipher_ctx *ctx_p)
{
    switch (ctx_p->flow_mode) {
    case S_DIN_to_AES:
        return DIN_AES_DOUT;
    case S_DIN_to_DES:
        return DIN_DES_DOUT;
    case S_DIN_to_SM4:
        return DIN_SM4_DOUT;
    default:
        return ctx_p->flow_mode;
    }
}

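/*
 * cc_setup_key_desc() - Append the key-load descriptor. CPP keys are
 * referenced by slot, HW (FDE) keys by their key slot, and regular keys
 * are DMAed from the context buffer. For 192 bit AES keys the DMA reads
 * AES_MAX_KEY_SIZE bytes; this is safe because the key buffer is
 * zero-allocated, and the real key size is still programmed separately.
 * XTS/ESSIV load only the first half of the key here; the second half
 * is handled by cc_setup_xex_state_desc().
 */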
static void cc_setup_key_desc(struct crypto_tfm *tfm,
                  struct cipher_req_ctx *req_ctx,
                  unsigned int nbytes, struct cc_hw_desc desc[],
                  unsigned int *seq_size)
{
    struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
    struct device *dev = drvdata_to_dev(ctx_p->drvdata);
    int cipher_mode = ctx_p->cipher_mode;
    int flow_mode = ctx_p->flow_mode;
    int direction = req_ctx->gen_ctx.op_type;
    dma_addr_t key_dma_addr = ctx_p->user.key_dma_addr;
    unsigned int key_len = ctx_p->keylen;
    unsigned int din_size;

    switch (cipher_mode) {
    case DRV_CIPHER_CBC:
    case DRV_CIPHER_CBC_CTS:
    case DRV_CIPHER_CTR:
    case DRV_CIPHER_OFB:
    case DRV_CIPHER_ECB:
        /* Load key */
        hw_desc_init(&desc[*seq_size]);
        set_cipher_mode(&desc[*seq_size], cipher_mode);
        set_cipher_config0(&desc[*seq_size], direction);

        if (cc_key_type(tfm) == CC_POLICY_PROTECTED_KEY) {
            /* We use the AES key size coding for all CPP algs */
            set_key_size_aes(&desc[*seq_size], key_len);
            set_cpp_crypto_key(&desc[*seq_size], ctx_p->cpp.slot);
            flow_mode = cc_out_flow_mode(ctx_p);
        } else {
            if (flow_mode == S_DIN_to_AES) {
                if (cc_key_type(tfm) == CC_HW_PROTECTED_KEY) {
                    set_hw_crypto_key(&desc[*seq_size],
                              ctx_p->hw.key1_slot);
                } else {
                    /* CC_POLICY_UNPROTECTED_KEY
                     * Invalid keys are filtered out in
                     * sethkey()
                     */
                    din_size = (key_len == 24) ?
                        AES_MAX_KEY_SIZE : key_len;

                    set_din_type(&desc[*seq_size], DMA_DLLI,
                             key_dma_addr, din_size,
                             NS_BIT);
                }
                set_key_size_aes(&desc[*seq_size], key_len);
            } else {
                /*des*/
                set_din_type(&desc[*seq_size], DMA_DLLI,
                         key_dma_addr, key_len, NS_BIT);
                set_key_size_des(&desc[*seq_size], key_len);
            }
            set_setup_mode(&desc[*seq_size], SETUP_LOAD_KEY0);
        }
        set_flow_mode(&desc[*seq_size], flow_mode);
        (*seq_size)++;
        break;
    case DRV_CIPHER_XTS:
    case DRV_CIPHER_ESSIV:
        /* Load AES key */
        hw_desc_init(&desc[*seq_size]);
        set_cipher_mode(&desc[*seq_size], cipher_mode);
        set_cipher_config0(&desc[*seq_size], direction);
        if (cc_key_type(tfm) == CC_HW_PROTECTED_KEY) {
            set_hw_crypto_key(&desc[*seq_size],
                      ctx_p->hw.key1_slot);
        } else {
            set_din_type(&desc[*seq_size], DMA_DLLI, key_dma_addr,
                     (key_len / 2), NS_BIT);
        }
        set_key_size_aes(&desc[*seq_size], (key_len / 2));
        set_flow_mode(&desc[*seq_size], flow_mode);
        set_setup_mode(&desc[*seq_size], SETUP_LOAD_KEY0);
        (*seq_size)++;
        break;
    default:
        dev_err(dev, "Unsupported cipher mode (%d)\n", cipher_mode);
    }
}

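/*
 * cc_setup_mlli_desc() - If the buffer manager mapped the request as an
 * MLLI (multi-entry scatter list), append a BYPASS descriptor that DMAs
 * the MLLI table from DRAM into the dedicated SRAM area where the data
 * flow descriptors expect to find it. A no-op for direct DLLI mappings.
 */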
static void cc_setup_mlli_desc(struct crypto_tfm *tfm,
                   struct cipher_req_ctx *req_ctx,
                   struct scatterlist *dst, struct scatterlist *src,
                   unsigned int nbytes, void *areq,
                   struct cc_hw_desc desc[], unsigned int *seq_size)
{
    struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
    struct device *dev = drvdata_to_dev(ctx_p->drvdata);

    if (req_ctx->dma_buf_type == CC_DMA_BUF_MLLI) {
        /* bypass */
        dev_dbg(dev, " bypass params addr %pad length 0x%X addr 0x%08X\n",
            &req_ctx->mlli_params.mlli_dma_addr,
            req_ctx->mlli_params.mlli_len,
            ctx_p->drvdata->mlli_sram_addr);
        hw_desc_init(&desc[*seq_size]);
        set_din_type(&desc[*seq_size], DMA_DLLI,
                 req_ctx->mlli_params.mlli_dma_addr,
                 req_ctx->mlli_params.mlli_len, NS_BIT);
        set_dout_sram(&desc[*seq_size],
                  ctx_p->drvdata->mlli_sram_addr,
                  req_ctx->mlli_params.mlli_len);
        set_flow_mode(&desc[*seq_size], BYPASS);
        (*seq_size)++;
    }
}

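/*
 * cc_setup_flow_desc() - Append the actual data-processing descriptor:
 * either a direct DLLI src-to-dst transfer or an MLLI transfer through
 * the SRAM tables set up by cc_setup_mlli_desc(). When no read-IV
 * descriptor will follow (CPP keys or ECB), this descriptor is flagged
 * as the last one in the queue so completion is signalled here.
 */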
static void cc_setup_flow_desc(struct crypto_tfm *tfm,
                   struct cipher_req_ctx *req_ctx,
                   struct scatterlist *dst, struct scatterlist *src,
                   unsigned int nbytes, struct cc_hw_desc desc[],
                   unsigned int *seq_size)
{
    struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
    struct device *dev = drvdata_to_dev(ctx_p->drvdata);
    unsigned int flow_mode = cc_out_flow_mode(ctx_p);
    bool last_desc = (ctx_p->key_type == CC_POLICY_PROTECTED_KEY ||
              ctx_p->cipher_mode == DRV_CIPHER_ECB);

    /* Process */
    if (req_ctx->dma_buf_type == CC_DMA_BUF_DLLI) {
        dev_dbg(dev, " data params addr %pad length 0x%X\n",
            &sg_dma_address(src), nbytes);
        dev_dbg(dev, " data params addr %pad length 0x%X\n",
            &sg_dma_address(dst), nbytes);
        hw_desc_init(&desc[*seq_size]);
        set_din_type(&desc[*seq_size], DMA_DLLI, sg_dma_address(src),
                 nbytes, NS_BIT);
        set_dout_dlli(&desc[*seq_size], sg_dma_address(dst),
                  nbytes, NS_BIT, (!last_desc ? 0 : 1));
        if (last_desc)
            set_queue_last_ind(ctx_p->drvdata, &desc[*seq_size]);

        set_flow_mode(&desc[*seq_size], flow_mode);
        (*seq_size)++;
    } else {
        hw_desc_init(&desc[*seq_size]);
        set_din_type(&desc[*seq_size], DMA_MLLI,
                 ctx_p->drvdata->mlli_sram_addr,
                 req_ctx->in_mlli_nents, NS_BIT);
        if (req_ctx->out_nents == 0) {
            dev_dbg(dev, " din/dout params addr 0x%08X addr 0x%08X\n",
                ctx_p->drvdata->mlli_sram_addr,
                ctx_p->drvdata->mlli_sram_addr);
            set_dout_mlli(&desc[*seq_size],
                      ctx_p->drvdata->mlli_sram_addr,
                      req_ctx->in_mlli_nents, NS_BIT,
                      (!last_desc ? 0 : 1));
        } else {
            dev_dbg(dev, " din/dout params addr 0x%08X addr 0x%08X\n",
                ctx_p->drvdata->mlli_sram_addr,
                ctx_p->drvdata->mlli_sram_addr +
                (u32)LLI_ENTRY_BYTE_SIZE * req_ctx->in_nents);
            set_dout_mlli(&desc[*seq_size],
                      (ctx_p->drvdata->mlli_sram_addr +
                       (LLI_ENTRY_BYTE_SIZE *
                    req_ctx->in_mlli_nents)),
                      req_ctx->out_mlli_nents, NS_BIT,
                      (!last_desc ? 0 : 1));
        }
        if (last_desc)
            set_queue_last_ind(ctx_p->drvdata, &desc[*seq_size]);

        set_flow_mode(&desc[*seq_size], flow_mode);
        (*seq_size)++;
    }
}

static void cc_cipher_complete(struct device *dev, void *cc_req, int err)
{
    struct skcipher_request *req = (struct skcipher_request *)cc_req;
    struct scatterlist *dst = req->dst;
    struct scatterlist *src = req->src;
    struct cipher_req_ctx *req_ctx = skcipher_request_ctx(req);
    struct crypto_skcipher *sk_tfm = crypto_skcipher_reqtfm(req);
    unsigned int ivsize = crypto_skcipher_ivsize(sk_tfm);

    if (err != -EINPROGRESS) {
        /* Not a BACKLOG notification */
        cc_unmap_cipher_request(dev, req_ctx, ivsize, src, dst);
        memcpy(req->iv, req_ctx->iv, ivsize);
        kfree_sensitive(req_ctx->iv);
    }

    skcipher_request_complete(req, err);
}

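/*
 * cc_cipher_process() - Common encrypt/decrypt path. Validates the
 * request size, short-circuits zero-length requests, and dispatches to
 * the fallback tfm when one was armed by setkey. Otherwise it copies
 * the (possibly stack-allocated) IV into a DMAable buffer, maps the
 * data buffers, builds the descriptor sequence (state, MLLI, key, XEX
 * state, data flow, read-IV) and queues it to the HW. On synchronous
 * completion or failure the buffers are unmapped and the IV copy freed.
 */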
static int cc_cipher_process(struct skcipher_request *req,
                 enum drv_crypto_direction direction)
{
    struct crypto_skcipher *sk_tfm = crypto_skcipher_reqtfm(req);
    struct crypto_tfm *tfm = crypto_skcipher_tfm(sk_tfm);
    struct cipher_req_ctx *req_ctx = skcipher_request_ctx(req);
    unsigned int ivsize = crypto_skcipher_ivsize(sk_tfm);
    struct scatterlist *dst = req->dst;
    struct scatterlist *src = req->src;
    unsigned int nbytes = req->cryptlen;
    void *iv = req->iv;
    struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
    struct device *dev = drvdata_to_dev(ctx_p->drvdata);
    struct cc_hw_desc desc[MAX_SKCIPHER_SEQ_LEN];
    struct cc_crypto_req cc_req = {};
    int rc;
    unsigned int seq_len = 0;
    gfp_t flags = cc_gfp_flags(&req->base);

    dev_dbg(dev, "%s req=%p iv=%p nbytes=%d\n",
        ((direction == DRV_CRYPTO_DIRECTION_ENCRYPT) ?
        "Encrypt" : "Decrypt"), req, iv, nbytes);

    /* STAT_PHASE_0: Init and sanity checks */

    if (validate_data_size(ctx_p, nbytes)) {
        dev_dbg(dev, "Unsupported data size %d.\n", nbytes);
        rc = -EINVAL;
        goto exit_process;
    }
    if (nbytes == 0) {
        /* No data to process is valid */
        rc = 0;
        goto exit_process;
    }

    if (ctx_p->fallback_on) {
        struct skcipher_request *subreq = skcipher_request_ctx(req);

        *subreq = *req;
        skcipher_request_set_tfm(subreq, ctx_p->fallback_tfm);
        if (direction == DRV_CRYPTO_DIRECTION_ENCRYPT)
            return crypto_skcipher_encrypt(subreq);
        else
            return crypto_skcipher_decrypt(subreq);
    }

    /* The IV we are handed may be allocated from the stack so
     * we must copy it to a DMAable buffer before use.
     */
    req_ctx->iv = kmemdup(iv, ivsize, flags);
    if (!req_ctx->iv) {
        rc = -ENOMEM;
        goto exit_process;
    }

    /* Setup request structure */
    cc_req.user_cb = cc_cipher_complete;
    cc_req.user_arg = req;

    /* Setup CPP operation details */
    if (ctx_p->key_type == CC_POLICY_PROTECTED_KEY) {
        cc_req.cpp.is_cpp = true;
        cc_req.cpp.alg = ctx_p->cpp.alg;
        cc_req.cpp.slot = ctx_p->cpp.slot;
    }

    /* Setup request context */
    req_ctx->gen_ctx.op_type = direction;

    /* STAT_PHASE_1: Map buffers */

    rc = cc_map_cipher_request(ctx_p->drvdata, req_ctx, ivsize, nbytes,
                      req_ctx->iv, src, dst, flags);
    if (rc) {
        dev_err(dev, "map_request() failed\n");
        goto exit_process;
    }

    /* STAT_PHASE_2: Create sequence */

    /* Setup state (IV) */
    cc_setup_state_desc(tfm, req_ctx, ivsize, nbytes, desc, &seq_len);
    /* Setup MLLI line, if needed */
    cc_setup_mlli_desc(tfm, req_ctx, dst, src, nbytes, req, desc, &seq_len);
    /* Setup key */
    cc_setup_key_desc(tfm, req_ctx, nbytes, desc, &seq_len);
    /* Setup state (IV and XEX key) */
    cc_setup_xex_state_desc(tfm, req_ctx, ivsize, nbytes, desc, &seq_len);
    /* Data processing */
    cc_setup_flow_desc(tfm, req_ctx, dst, src, nbytes, desc, &seq_len);
    /* Read next IV */
    cc_setup_readiv_desc(tfm, req_ctx, ivsize, desc, &seq_len);

    /* STAT_PHASE_3: Lock HW and push sequence */

    rc = cc_send_request(ctx_p->drvdata, &cc_req, desc, seq_len,
                 &req->base);
    if (rc != -EINPROGRESS && rc != -EBUSY) {
        /* Failed to send the request or request completed
         * synchronously
         */
        cc_unmap_cipher_request(dev, req_ctx, ivsize, src, dst);
    }

exit_process:
    if (rc != -EINPROGRESS && rc != -EBUSY)
        kfree_sensitive(req_ctx->iv);

    return rc;
}

static int cc_cipher_encrypt(struct skcipher_request *req)
{
    struct cipher_req_ctx *req_ctx = skcipher_request_ctx(req);

    memset(req_ctx, 0, sizeof(*req_ctx));

    return cc_cipher_process(req, DRV_CRYPTO_DIRECTION_ENCRYPT);
}

static int cc_cipher_decrypt(struct skcipher_request *req)
{
    struct cipher_req_ctx *req_ctx = skcipher_request_ctx(req);

    memset(req_ctx, 0, sizeof(*req_ctx));

    return cc_cipher_process(req, DRV_CRYPTO_DIRECTION_DECRYPT);
}

/* Block cipher alg */
static const struct cc_alg_template skcipher_algs[] = {
    {
        .name = "xts(paes)",
        .driver_name = "xts-paes-ccree",
        .blocksize = 1,
        .template_skcipher = {
            .setkey = cc_cipher_sethkey,
            .encrypt = cc_cipher_encrypt,
            .decrypt = cc_cipher_decrypt,
            .min_keysize = CC_HW_KEY_SIZE,
            .max_keysize = CC_HW_KEY_SIZE,
            .ivsize = AES_BLOCK_SIZE,
            },
        .cipher_mode = DRV_CIPHER_XTS,
        .flow_mode = S_DIN_to_AES,
        .min_hw_rev = CC_HW_REV_630,
        .std_body = CC_STD_NIST,
        .sec_func = true,
    },
    {
        .name = "essiv(cbc(paes),sha256)",
        .driver_name = "essiv-paes-ccree",
        .blocksize = AES_BLOCK_SIZE,
        .template_skcipher = {
            .setkey = cc_cipher_sethkey,
            .encrypt = cc_cipher_encrypt,
            .decrypt = cc_cipher_decrypt,
            .min_keysize = CC_HW_KEY_SIZE,
            .max_keysize = CC_HW_KEY_SIZE,
            .ivsize = AES_BLOCK_SIZE,
            },
        .cipher_mode = DRV_CIPHER_ESSIV,
        .flow_mode = S_DIN_to_AES,
        .min_hw_rev = CC_HW_REV_712,
        .std_body = CC_STD_NIST,
        .sec_func = true,
    },
    {
        .name = "ecb(paes)",
        .driver_name = "ecb-paes-ccree",
        .blocksize = AES_BLOCK_SIZE,
        .template_skcipher = {
            .setkey = cc_cipher_sethkey,
            .encrypt = cc_cipher_encrypt,
            .decrypt = cc_cipher_decrypt,
            .min_keysize = CC_HW_KEY_SIZE,
            .max_keysize = CC_HW_KEY_SIZE,
            .ivsize = 0,
            },
        .cipher_mode = DRV_CIPHER_ECB,
        .flow_mode = S_DIN_to_AES,
        .min_hw_rev = CC_HW_REV_712,
        .std_body = CC_STD_NIST,
        .sec_func = true,
    },
    {
        .name = "cbc(paes)",
        .driver_name = "cbc-paes-ccree",
        .blocksize = AES_BLOCK_SIZE,
        .template_skcipher = {
            .setkey = cc_cipher_sethkey,
            .encrypt = cc_cipher_encrypt,
            .decrypt = cc_cipher_decrypt,
            .min_keysize = CC_HW_KEY_SIZE,
            .max_keysize = CC_HW_KEY_SIZE,
            .ivsize = AES_BLOCK_SIZE,
            },
        .cipher_mode = DRV_CIPHER_CBC,
        .flow_mode = S_DIN_to_AES,
        .min_hw_rev = CC_HW_REV_712,
        .std_body = CC_STD_NIST,
        .sec_func = true,
    },
    {
        .name = "ofb(paes)",
        .driver_name = "ofb-paes-ccree",
        .blocksize = AES_BLOCK_SIZE,
        .template_skcipher = {
            .setkey = cc_cipher_sethkey,
            .encrypt = cc_cipher_encrypt,
            .decrypt = cc_cipher_decrypt,
            .min_keysize = CC_HW_KEY_SIZE,
            .max_keysize = CC_HW_KEY_SIZE,
            .ivsize = AES_BLOCK_SIZE,
            },
        .cipher_mode = DRV_CIPHER_OFB,
        .flow_mode = S_DIN_to_AES,
        .min_hw_rev = CC_HW_REV_712,
        .std_body = CC_STD_NIST,
        .sec_func = true,
    },
    {
        .name = "cts(cbc(paes))",
        .driver_name = "cts-cbc-paes-ccree",
        .blocksize = AES_BLOCK_SIZE,
        .template_skcipher = {
            .setkey = cc_cipher_sethkey,
            .encrypt = cc_cipher_encrypt,
            .decrypt = cc_cipher_decrypt,
            .min_keysize = CC_HW_KEY_SIZE,
            .max_keysize = CC_HW_KEY_SIZE,
            .ivsize = AES_BLOCK_SIZE,
            },
        .cipher_mode = DRV_CIPHER_CBC_CTS,
        .flow_mode = S_DIN_to_AES,
        .min_hw_rev = CC_HW_REV_712,
        .std_body = CC_STD_NIST,
        .sec_func = true,
    },
    {
        .name = "ctr(paes)",
        .driver_name = "ctr-paes-ccree",
        .blocksize = 1,
        .template_skcipher = {
            .setkey = cc_cipher_sethkey,
            .encrypt = cc_cipher_encrypt,
            .decrypt = cc_cipher_decrypt,
            .min_keysize = CC_HW_KEY_SIZE,
            .max_keysize = CC_HW_KEY_SIZE,
            .ivsize = AES_BLOCK_SIZE,
            },
        .cipher_mode = DRV_CIPHER_CTR,
        .flow_mode = S_DIN_to_AES,
        .min_hw_rev = CC_HW_REV_712,
        .std_body = CC_STD_NIST,
        .sec_func = true,
    },
    {
        /* See https://www.mail-archive.com/linux-crypto@vger.kernel.org/msg40576.html
         * for the reason why this differs from the generic
         * implementation.
         */
        .name = "xts(aes)",
        .driver_name = "xts-aes-ccree",
        .blocksize = 1,
        .template_skcipher = {
            .setkey = cc_cipher_setkey,
            .encrypt = cc_cipher_encrypt,
            .decrypt = cc_cipher_decrypt,
            .min_keysize = AES_MIN_KEY_SIZE * 2,
            .max_keysize = AES_MAX_KEY_SIZE * 2,
            .ivsize = AES_BLOCK_SIZE,
            },
        .cipher_mode = DRV_CIPHER_XTS,
        .flow_mode = S_DIN_to_AES,
        .min_hw_rev = CC_HW_REV_630,
        .std_body = CC_STD_NIST,
    },
    {
        .name = "essiv(cbc(aes),sha256)",
        .driver_name = "essiv-aes-ccree",
        .blocksize = AES_BLOCK_SIZE,
        .template_skcipher = {
            .setkey = cc_cipher_setkey,
            .encrypt = cc_cipher_encrypt,
            .decrypt = cc_cipher_decrypt,
            .min_keysize = AES_MIN_KEY_SIZE,
            .max_keysize = AES_MAX_KEY_SIZE,
            .ivsize = AES_BLOCK_SIZE,
            },
        .cipher_mode = DRV_CIPHER_ESSIV,
        .flow_mode = S_DIN_to_AES,
        .min_hw_rev = CC_HW_REV_712,
        .std_body = CC_STD_NIST,
    },
    {
        .name = "ecb(aes)",
        .driver_name = "ecb-aes-ccree",
        .blocksize = AES_BLOCK_SIZE,
        .template_skcipher = {
            .setkey = cc_cipher_setkey,
            .encrypt = cc_cipher_encrypt,
            .decrypt = cc_cipher_decrypt,
            .min_keysize = AES_MIN_KEY_SIZE,
            .max_keysize = AES_MAX_KEY_SIZE,
            .ivsize = 0,
            },
        .cipher_mode = DRV_CIPHER_ECB,
        .flow_mode = S_DIN_to_AES,
        .min_hw_rev = CC_HW_REV_630,
        .std_body = CC_STD_NIST,
    },
    {
        .name = "cbc(aes)",
        .driver_name = "cbc-aes-ccree",
        .blocksize = AES_BLOCK_SIZE,
        .template_skcipher = {
            .setkey = cc_cipher_setkey,
            .encrypt = cc_cipher_encrypt,
            .decrypt = cc_cipher_decrypt,
            .min_keysize = AES_MIN_KEY_SIZE,
            .max_keysize = AES_MAX_KEY_SIZE,
            .ivsize = AES_BLOCK_SIZE,
            },
        .cipher_mode = DRV_CIPHER_CBC,
        .flow_mode = S_DIN_to_AES,
        .min_hw_rev = CC_HW_REV_630,
        .std_body = CC_STD_NIST,
    },
    {
        .name = "ofb(aes)",
        .driver_name = "ofb-aes-ccree",
        .blocksize = 1,
        .template_skcipher = {
            .setkey = cc_cipher_setkey,
            .encrypt = cc_cipher_encrypt,
            .decrypt = cc_cipher_decrypt,
            .min_keysize = AES_MIN_KEY_SIZE,
            .max_keysize = AES_MAX_KEY_SIZE,
            .ivsize = AES_BLOCK_SIZE,
            },
        .cipher_mode = DRV_CIPHER_OFB,
        .flow_mode = S_DIN_to_AES,
        .min_hw_rev = CC_HW_REV_630,
        .std_body = CC_STD_NIST,
    },
    {
        .name = "cts(cbc(aes))",
        .driver_name = "cts-cbc-aes-ccree",
        .blocksize = AES_BLOCK_SIZE,
        .template_skcipher = {
            .setkey = cc_cipher_setkey,
            .encrypt = cc_cipher_encrypt,
            .decrypt = cc_cipher_decrypt,
            .min_keysize = AES_MIN_KEY_SIZE,
            .max_keysize = AES_MAX_KEY_SIZE,
            .ivsize = AES_BLOCK_SIZE,
            },
        .cipher_mode = DRV_CIPHER_CBC_CTS,
        .flow_mode = S_DIN_to_AES,
        .min_hw_rev = CC_HW_REV_630,
        .std_body = CC_STD_NIST,
    },
    {
        .name = "ctr(aes)",
        .driver_name = "ctr-aes-ccree",
        .blocksize = 1,
        .template_skcipher = {
            .setkey = cc_cipher_setkey,
            .encrypt = cc_cipher_encrypt,
            .decrypt = cc_cipher_decrypt,
            .min_keysize = AES_MIN_KEY_SIZE,
            .max_keysize = AES_MAX_KEY_SIZE,
            .ivsize = AES_BLOCK_SIZE,
            },
        .cipher_mode = DRV_CIPHER_CTR,
        .flow_mode = S_DIN_to_AES,
        .min_hw_rev = CC_HW_REV_630,
        .std_body = CC_STD_NIST,
    },
    {
        .name = "cbc(des3_ede)",
        .driver_name = "cbc-3des-ccree",
        .blocksize = DES3_EDE_BLOCK_SIZE,
        .template_skcipher = {
            .setkey = cc_cipher_setkey,
            .encrypt = cc_cipher_encrypt,
            .decrypt = cc_cipher_decrypt,
            .min_keysize = DES3_EDE_KEY_SIZE,
            .max_keysize = DES3_EDE_KEY_SIZE,
            .ivsize = DES3_EDE_BLOCK_SIZE,
            },
        .cipher_mode = DRV_CIPHER_CBC,
        .flow_mode = S_DIN_to_DES,
        .min_hw_rev = CC_HW_REV_630,
        .std_body = CC_STD_NIST,
    },
    {
        .name = "ecb(des3_ede)",
        .driver_name = "ecb-3des-ccree",
        .blocksize = DES3_EDE_BLOCK_SIZE,
        .template_skcipher = {
            .setkey = cc_cipher_setkey,
            .encrypt = cc_cipher_encrypt,
            .decrypt = cc_cipher_decrypt,
            .min_keysize = DES3_EDE_KEY_SIZE,
            .max_keysize = DES3_EDE_KEY_SIZE,
            .ivsize = 0,
            },
        .cipher_mode = DRV_CIPHER_ECB,
        .flow_mode = S_DIN_to_DES,
        .min_hw_rev = CC_HW_REV_630,
        .std_body = CC_STD_NIST,
    },
    {
        .name = "cbc(des)",
        .driver_name = "cbc-des-ccree",
        .blocksize = DES_BLOCK_SIZE,
        .template_skcipher = {
            .setkey = cc_cipher_setkey,
            .encrypt = cc_cipher_encrypt,
            .decrypt = cc_cipher_decrypt,
            .min_keysize = DES_KEY_SIZE,
            .max_keysize = DES_KEY_SIZE,
            .ivsize = DES_BLOCK_SIZE,
            },
        .cipher_mode = DRV_CIPHER_CBC,
        .flow_mode = S_DIN_to_DES,
        .min_hw_rev = CC_HW_REV_630,
        .std_body = CC_STD_NIST,
    },
    {
        .name = "ecb(des)",
        .driver_name = "ecb-des-ccree",
        .blocksize = DES_BLOCK_SIZE,
        .template_skcipher = {
            .setkey = cc_cipher_setkey,
            .encrypt = cc_cipher_encrypt,
            .decrypt = cc_cipher_decrypt,
            .min_keysize = DES_KEY_SIZE,
            .max_keysize = DES_KEY_SIZE,
            .ivsize = 0,
            },
        .cipher_mode = DRV_CIPHER_ECB,
        .flow_mode = S_DIN_to_DES,
        .min_hw_rev = CC_HW_REV_630,
        .std_body = CC_STD_NIST,
    },
    {
        .name = "cbc(sm4)",
        .driver_name = "cbc-sm4-ccree",
        .blocksize = SM4_BLOCK_SIZE,
        .template_skcipher = {
            .setkey = cc_cipher_setkey,
            .encrypt = cc_cipher_encrypt,
            .decrypt = cc_cipher_decrypt,
            .min_keysize = SM4_KEY_SIZE,
            .max_keysize = SM4_KEY_SIZE,
            .ivsize = SM4_BLOCK_SIZE,
            },
        .cipher_mode = DRV_CIPHER_CBC,
        .flow_mode = S_DIN_to_SM4,
        .min_hw_rev = CC_HW_REV_713,
        .std_body = CC_STD_OSCCA,
    },
    {
        .name = "ecb(sm4)",
        .driver_name = "ecb-sm4-ccree",
        .blocksize = SM4_BLOCK_SIZE,
        .template_skcipher = {
            .setkey = cc_cipher_setkey,
            .encrypt = cc_cipher_encrypt,
            .decrypt = cc_cipher_decrypt,
            .min_keysize = SM4_KEY_SIZE,
            .max_keysize = SM4_KEY_SIZE,
            .ivsize = 0,
            },
        .cipher_mode = DRV_CIPHER_ECB,
        .flow_mode = S_DIN_to_SM4,
        .min_hw_rev = CC_HW_REV_713,
        .std_body = CC_STD_OSCCA,
    },
    {
        .name = "ctr(sm4)",
        .driver_name = "ctr-sm4-ccree",
        .blocksize = 1,
        .template_skcipher = {
            .setkey = cc_cipher_setkey,
            .encrypt = cc_cipher_encrypt,
            .decrypt = cc_cipher_decrypt,
            .min_keysize = SM4_KEY_SIZE,
            .max_keysize = SM4_KEY_SIZE,
            .ivsize = SM4_BLOCK_SIZE,
            },
        .cipher_mode = DRV_CIPHER_CTR,
        .flow_mode = S_DIN_to_SM4,
        .min_hw_rev = CC_HW_REV_713,
        .std_body = CC_STD_OSCCA,
    },
    {
        .name = "cbc(psm4)",
        .driver_name = "cbc-psm4-ccree",
        .blocksize = SM4_BLOCK_SIZE,
        .template_skcipher = {
            .setkey = cc_cipher_sethkey,
            .encrypt = cc_cipher_encrypt,
            .decrypt = cc_cipher_decrypt,
            .min_keysize = CC_HW_KEY_SIZE,
            .max_keysize = CC_HW_KEY_SIZE,
            .ivsize = SM4_BLOCK_SIZE,
            },
        .cipher_mode = DRV_CIPHER_CBC,
        .flow_mode = S_DIN_to_SM4,
        .min_hw_rev = CC_HW_REV_713,
        .std_body = CC_STD_OSCCA,
        .sec_func = true,
    },
    {
        .name = "ctr(psm4)",
        .driver_name = "ctr-psm4-ccree",
        .blocksize = SM4_BLOCK_SIZE,
        .template_skcipher = {
            .setkey = cc_cipher_sethkey,
            .encrypt = cc_cipher_encrypt,
            .decrypt = cc_cipher_decrypt,
            .min_keysize = CC_HW_KEY_SIZE,
            .max_keysize = CC_HW_KEY_SIZE,
            .ivsize = SM4_BLOCK_SIZE,
            },
        .cipher_mode = DRV_CIPHER_CTR,
        .flow_mode = S_DIN_to_SM4,
        .min_hw_rev = CC_HW_REV_713,
        .std_body = CC_STD_OSCCA,
        .sec_func = true,
    },
};

static struct cc_crypto_alg *cc_create_alg(const struct cc_alg_template *tmpl,
                       struct device *dev)
{
    struct cc_crypto_alg *t_alg;
    struct skcipher_alg *alg;

    t_alg = devm_kzalloc(dev, sizeof(*t_alg), GFP_KERNEL);
    if (!t_alg)
        return ERR_PTR(-ENOMEM);

    alg = &t_alg->skcipher_alg;

    memcpy(alg, &tmpl->template_skcipher, sizeof(*alg));

    snprintf(alg->base.cra_name, CRYPTO_MAX_ALG_NAME, "%s", tmpl->name);
    snprintf(alg->base.cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s",
         tmpl->driver_name);
    alg->base.cra_module = THIS_MODULE;
    alg->base.cra_priority = CC_CRA_PRIO;
    alg->base.cra_blocksize = tmpl->blocksize;
    alg->base.cra_alignmask = 0;
    alg->base.cra_ctxsize = sizeof(struct cc_cipher_ctx);

    alg->base.cra_init = cc_cipher_init;
    alg->base.cra_exit = cc_cipher_exit;
    alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY;

    t_alg->cipher_mode = tmpl->cipher_mode;
    t_alg->flow_mode = tmpl->flow_mode;

    return t_alg;
}

int cc_cipher_free(struct cc_drvdata *drvdata)
{
    struct cc_crypto_alg *t_alg, *n;

    /* Remove registered algs */
    list_for_each_entry_safe(t_alg, n, &drvdata->alg_list, entry) {
        crypto_unregister_skcipher(&t_alg->skcipher_alg);
        list_del(&t_alg->entry);
    }
    return 0;
}

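/*
 * cc_cipher_alloc() - Walk the skcipher_algs template table and register
 * every algorithm the probed device can support, skipping entries whose
 * minimum HW revision, standards body (NIST/OSCCA) or security function
 * requirements are not met. On any failure all algorithms registered so
 * far are torn down again via cc_cipher_free().
 */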
int cc_cipher_alloc(struct cc_drvdata *drvdata)
{
    struct cc_crypto_alg *t_alg;
    struct device *dev = drvdata_to_dev(drvdata);
    int rc = -ENOMEM;
    int alg;

    INIT_LIST_HEAD(&drvdata->alg_list);

    /* Linux crypto */
    dev_dbg(dev, "Number of algorithms = %zu\n",
        ARRAY_SIZE(skcipher_algs));
    for (alg = 0; alg < ARRAY_SIZE(skcipher_algs); alg++) {
        if ((skcipher_algs[alg].min_hw_rev > drvdata->hw_rev) ||
            !(drvdata->std_bodies & skcipher_algs[alg].std_body) ||
            (drvdata->sec_disabled && skcipher_algs[alg].sec_func))
            continue;

        dev_dbg(dev, "creating %s\n", skcipher_algs[alg].driver_name);
        t_alg = cc_create_alg(&skcipher_algs[alg], dev);
        if (IS_ERR(t_alg)) {
            rc = PTR_ERR(t_alg);
            dev_err(dev, "%s alg allocation failed\n",
                skcipher_algs[alg].driver_name);
            goto fail0;
        }
        t_alg->drvdata = drvdata;

        dev_dbg(dev, "registering %s\n",
            skcipher_algs[alg].driver_name);
        rc = crypto_register_skcipher(&t_alg->skcipher_alg);
        dev_dbg(dev, "%s alg registration rc = %x\n",
            t_alg->skcipher_alg.base.cra_driver_name, rc);
        if (rc) {
            dev_err(dev, "%s alg registration failed\n",
                t_alg->skcipher_alg.base.cra_driver_name);
            goto fail0;
        }

        list_add_tail(&t_alg->entry, &drvdata->alg_list);
        dev_dbg(dev, "Registered %s\n",
            t_alg->skcipher_alg.base.cra_driver_name);
    }
    return 0;

fail0:
    cc_cipher_free(drvdata);
    return rc;
}