Back to home page

OSCL-LXR

 
 

    


0001 // SPDX-License-Identifier: GPL-2.0
0002 /*
0003  * Copyright (C) 2017 Marvell
0004  *
0005  * Antoine Tenart <antoine.tenart@free-electrons.com>
0006  */
0007 
0008 #include <asm/unaligned.h>
0009 #include <linux/device.h>
0010 #include <linux/dma-mapping.h>
0011 #include <linux/dmapool.h>
0012 #include <crypto/aead.h>
0013 #include <crypto/aes.h>
0014 #include <crypto/authenc.h>
0015 #include <crypto/chacha.h>
0016 #include <crypto/ctr.h>
0017 #include <crypto/internal/des.h>
0018 #include <crypto/gcm.h>
0019 #include <crypto/ghash.h>
0020 #include <crypto/poly1305.h>
0021 #include <crypto/sha1.h>
0022 #include <crypto/sha2.h>
0023 #include <crypto/sm3.h>
0024 #include <crypto/sm4.h>
0025 #include <crypto/xts.h>
0026 #include <crypto/skcipher.h>
0027 #include <crypto/internal/aead.h>
0028 #include <crypto/internal/skcipher.h>
0029 
0030 #include "safexcel.h"
0031 
/* Cipher operation direction, as selected per request */
enum safexcel_cipher_direction {
	SAFEXCEL_ENCRYPT,
	SAFEXCEL_DECRYPT,
};
0036 
/* Basic cipher algorithm handled by the engine for this transform */
enum safexcel_cipher_alg {
	SAFEXCEL_DES,
	SAFEXCEL_3DES,
	SAFEXCEL_AES,
	SAFEXCEL_CHACHA20,
	SAFEXCEL_SM4,
};
0044 
/* Per-transform (tfm) context, shared by skcipher and AEAD algorithms */
struct safexcel_cipher_ctx {
	struct safexcel_context base;
	struct safexcel_crypto_priv *priv;

	u32 mode;	/* CONTEXT_CONTROL_CRYPTO_MODE_* for control1 */
	enum safexcel_cipher_alg alg;
	u8 aead; /* !=0=AEAD, 2=IPSec ESP AEAD, 3=IPsec ESP GMAC */
	u8 xcm;  /* 0=authenc, 1=GCM, 2 reserved for CCM */
	u8 aadskip;	/* assoclen bytes not hashed (subtracted in aead_token) */
	u8 blocksz;	/* IV size in bytes as loaded into the cdesc token */
	u32 ivmask;	/* cdesc option flags used when loading a plain IV */
	u32 ctrinit;	/* initial counter value, 0 or 1 (stored big endian) */

	__le32 key[16];	/* cipher key as little-endian 32 bit words */
	u32 nonce;	/* RFC3686/ESP salt taken from the tail of the key */
	unsigned int key_len, xts;	/* xts: shift applied to key_len when
					 * selecting the AES key size */

	/* All the below is AEAD specific */
	u32 hash_alg;	/* CONTEXT_CONTROL_CRYPTO_ALG_* hash selector */
	u32 state_sz;	/* hash state (ipad/opad) size in bytes */

	/* NOTE(review): not used in this part of the file - presumably a
	 * GHASH key helper and a SW fallback tfm; confirm against the
	 * cra_init/setkey code elsewhere in the driver.
	 */
	struct crypto_cipher *hkaes;
	struct crypto_aead *fback;
};
0069 
/* Per-request state, kept in the crypto request context */
struct safexcel_cipher_req {
	enum safexcel_cipher_direction direction;
	/* Number of result descriptors associated to the request */
	unsigned int rdescs;
	bool needs_inv;	/* NOTE(review): presumably requests a context record
			 * invalidation first - confirm with the send path */
	int  nr_src, nr_dst;	/* scatterlist entry counts for src/dst DMA */
};
0077 
0078 static int safexcel_skcipher_iv(struct safexcel_cipher_ctx *ctx, u8 *iv,
0079                 struct safexcel_command_desc *cdesc)
0080 {
0081     if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD) {
0082         cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;
0083         /* 32 bit nonce */
0084         cdesc->control_data.token[0] = ctx->nonce;
0085         /* 64 bit IV part */
0086         memcpy(&cdesc->control_data.token[1], iv, 8);
0087         /* 32 bit counter, start at 0 or 1 (big endian!) */
0088         cdesc->control_data.token[3] =
0089             (__force u32)cpu_to_be32(ctx->ctrinit);
0090         return 4;
0091     }
0092     if (ctx->alg == SAFEXCEL_CHACHA20) {
0093         cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;
0094         /* 96 bit nonce part */
0095         memcpy(&cdesc->control_data.token[0], &iv[4], 12);
0096         /* 32 bit counter */
0097         cdesc->control_data.token[3] = *(u32 *)iv;
0098         return 4;
0099     }
0100 
0101     cdesc->control_data.options |= ctx->ivmask;
0102     memcpy(cdesc->control_data.token, iv, ctx->blocksz);
0103     return ctx->blocksz / sizeof(u32);
0104 }
0105 
0106 static void safexcel_skcipher_token(struct safexcel_cipher_ctx *ctx, u8 *iv,
0107                     struct safexcel_command_desc *cdesc,
0108                     struct safexcel_token *atoken,
0109                     u32 length)
0110 {
0111     struct safexcel_token *token;
0112     int ivlen;
0113 
0114     ivlen = safexcel_skcipher_iv(ctx, iv, cdesc);
0115     if (ivlen == 4) {
0116         /* No space in cdesc, instruction moves to atoken */
0117         cdesc->additional_cdata_size = 1;
0118         token = atoken;
0119     } else {
0120         /* Everything fits in cdesc */
0121         token = (struct safexcel_token *)(cdesc->control_data.token + 2);
0122         /* Need to pad with NOP */
0123         eip197_noop_token(&token[1]);
0124     }
0125 
0126     token->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
0127     token->packet_length = length;
0128     token->stat = EIP197_TOKEN_STAT_LAST_PACKET |
0129               EIP197_TOKEN_STAT_LAST_HASH;
0130     token->instructions = EIP197_TOKEN_INS_LAST |
0131                   EIP197_TOKEN_INS_TYPE_CRYPTO |
0132                   EIP197_TOKEN_INS_TYPE_OUTPUT;
0133 }
0134 
0135 static void safexcel_aead_iv(struct safexcel_cipher_ctx *ctx, u8 *iv,
0136                  struct safexcel_command_desc *cdesc)
0137 {
0138     if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD ||
0139         ctx->aead & EIP197_AEAD_TYPE_IPSEC_ESP) { /* _ESP and _ESP_GMAC */
0140         /* 32 bit nonce */
0141         cdesc->control_data.token[0] = ctx->nonce;
0142         /* 64 bit IV part */
0143         memcpy(&cdesc->control_data.token[1], iv, 8);
0144         /* 32 bit counter, start at 0 or 1 (big endian!) */
0145         cdesc->control_data.token[3] =
0146             (__force u32)cpu_to_be32(ctx->ctrinit);
0147         return;
0148     }
0149     if (ctx->xcm == EIP197_XCM_MODE_GCM || ctx->alg == SAFEXCEL_CHACHA20) {
0150         /* 96 bit IV part */
0151         memcpy(&cdesc->control_data.token[0], iv, 12);
0152         /* 32 bit counter, start at 0 or 1 (big endian!) */
0153         cdesc->control_data.token[3] =
0154             (__force u32)cpu_to_be32(ctx->ctrinit);
0155         return;
0156     }
0157     /* CBC */
0158     memcpy(cdesc->control_data.token, iv, ctx->blocksz);
0159 }
0160 
/*
 * Build the full instruction token sequence for one AEAD operation.
 *
 * The embedded IV words go into the command descriptor; the variable
 * length instruction list goes into the additional token area @atoken.
 * For decryption the ICV is part of the input, so @cryptlen is reduced
 * by @digestsize up front. The final token word count is stored in
 * cdesc->additional_cdata_size.
 */
static void safexcel_aead_token(struct safexcel_cipher_ctx *ctx, u8 *iv,
				struct safexcel_command_desc *cdesc,
				struct safexcel_token *atoken,
				enum safexcel_cipher_direction direction,
				u32 cryptlen, u32 assoclen, u32 digestsize)
{
	struct safexcel_token *aadref;
	int atoksize = 2; /* Start with minimum size */
	/* AAD length as seen by the engine, minus the skipped bytes */
	int assocadj = assoclen - ctx->aadskip, aadalign;

	/* Always 4 dwords of embedded IV  for AEAD modes */
	cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;

	if (direction == SAFEXCEL_DECRYPT)
		cryptlen -= digestsize;

	if (unlikely(ctx->xcm == EIP197_XCM_MODE_CCM)) {
		/* Construct IV block B0 for the CBC-MAC */
		u8 *final_iv = (u8 *)cdesc->control_data.token;
		u8 *cbcmaciv = (u8 *)&atoken[1];
		__le32 *aadlen = (__le32 *)&atoken[5];

		if (ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP) {
			/* Length + nonce */
			cdesc->control_data.token[0] = ctx->nonce;
			/* Fixup flags byte */
			*(__le32 *)cbcmaciv =
				cpu_to_le32(ctx->nonce |
					    ((assocadj > 0) << 6) |
					    ((digestsize - 2) << 2));
			/* 64 bit IV part */
			memcpy(&cdesc->control_data.token[1], iv, 8);
			memcpy(cbcmaciv + 4, iv, 8);
			/* Start counter at 0 */
			cdesc->control_data.token[3] = 0;
			/* Message length */
			*(__be32 *)(cbcmaciv + 12) = cpu_to_be32(cryptlen);
		} else {
			/* Variable length IV part */
			memcpy(final_iv, iv, 15 - iv[0]);
			memcpy(cbcmaciv, iv, 15 - iv[0]);
			/* Start variable length counter at 0 */
			memset(final_iv + 15 - iv[0], 0, iv[0] + 1);
			memset(cbcmaciv + 15 - iv[0], 0, iv[0] - 1);
			/* fixup flags byte */
			cbcmaciv[0] |= ((assocadj > 0) << 6) |
				       ((digestsize - 2) << 2);
			/* insert lower 2 bytes of message length */
			cbcmaciv[14] = cryptlen >> 8;
			cbcmaciv[15] = cryptlen & 255;
		}

		/* Insert B0 block (+ optional AAD length) from token data */
		atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
		atoken->packet_length = AES_BLOCK_SIZE +
					((assocadj > 0) << 1);
		atoken->stat = 0;
		atoken->instructions = EIP197_TOKEN_INS_ORIGIN_TOKEN |
				       EIP197_TOKEN_INS_TYPE_HASH;

		if (likely(assocadj)) {
			/* 16 bit AAD length, byteswapped to big endian */
			*aadlen = cpu_to_le32((assocadj >> 8) |
					      (assocadj & 255) << 8);
			atoken += 6;
			atoksize += 7;
		} else {
			atoken += 5;
			atoksize += 6;
		}

		/* Process AAD data */
		aadref = atoken;
		atoken->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
		atoken->packet_length = assocadj;
		atoken->stat = 0;
		atoken->instructions = EIP197_TOKEN_INS_TYPE_HASH;
		atoken++;

		/* For CCM only, align AAD data towards hash engine */
		atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
		aadalign = (assocadj + 2) & 15;
		atoken->packet_length = assocadj && aadalign ?
						16 - aadalign :
						0;
		if (likely(cryptlen)) {
			atoken->stat = 0;
			atoken->instructions = EIP197_TOKEN_INS_TYPE_HASH;
		} else {
			/* No payload: AAD padding is the last hash data */
			atoken->stat = EIP197_TOKEN_STAT_LAST_HASH;
			atoken->instructions = EIP197_TOKEN_INS_LAST |
					       EIP197_TOKEN_INS_TYPE_HASH;
		}
	} else {
		safexcel_aead_iv(ctx, iv, cdesc);

		/* Process AAD data */
		aadref = atoken;
		atoken->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
		atoken->packet_length = assocadj;
		atoken->stat = EIP197_TOKEN_STAT_LAST_HASH;
		atoken->instructions = EIP197_TOKEN_INS_LAST |
				       EIP197_TOKEN_INS_TYPE_HASH;
	}
	atoken++;

	if (ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP) {
		/* For ESP mode (and not GMAC), skip over the IV */
		atoken->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
		atoken->packet_length = EIP197_AEAD_IPSEC_IV_SIZE;
		atoken->stat = 0;
		atoken->instructions = 0;
		atoken++;
		atoksize++;
	} else if (unlikely(ctx->alg == SAFEXCEL_CHACHA20 &&
			    direction == SAFEXCEL_DECRYPT)) {
		/* Poly-chacha decryption needs a dummy NOP here ... */
		atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
		atoken->packet_length = 16; /* According to Op Manual */
		atoken->stat = 0;
		atoken->instructions = 0;
		atoken++;
		atoksize++;
	}

	if  (ctx->xcm) {
		/* For GCM and CCM, obtain enc(Y0) */
		atoken->opcode = EIP197_TOKEN_OPCODE_INSERT_REMRES;
		atoken->packet_length = 0;
		atoken->stat = 0;
		atoken->instructions = AES_BLOCK_SIZE;
		atoken++;

		atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
		atoken->packet_length = AES_BLOCK_SIZE;
		atoken->stat = 0;
		atoken->instructions = EIP197_TOKEN_INS_TYPE_OUTPUT |
				       EIP197_TOKEN_INS_TYPE_CRYPTO;
		atoken++;
		atoksize += 2;
	}

	if (likely(cryptlen || ctx->alg == SAFEXCEL_CHACHA20)) {
		/* Fixup stat field for AAD direction instruction */
		aadref->stat = 0;

		/* Process crypto data */
		atoken->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
		atoken->packet_length = cryptlen;

		if (unlikely(ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP_GMAC)) {
			/* Fixup instruction field for AAD dir instruction */
			aadref->instructions = EIP197_TOKEN_INS_TYPE_HASH;

			/* Do not send to crypt engine in case of GMAC */
			atoken->instructions = EIP197_TOKEN_INS_LAST |
					       EIP197_TOKEN_INS_TYPE_HASH |
					       EIP197_TOKEN_INS_TYPE_OUTPUT;
		} else {
			atoken->instructions = EIP197_TOKEN_INS_LAST |
					       EIP197_TOKEN_INS_TYPE_CRYPTO |
					       EIP197_TOKEN_INS_TYPE_HASH |
					       EIP197_TOKEN_INS_TYPE_OUTPUT;
		}

		/* Only the remainder modulo 16 bytes is needed below */
		cryptlen &= 15;
		if (unlikely(ctx->xcm == EIP197_XCM_MODE_CCM && cryptlen)) {
			atoken->stat = 0;
			/* For CCM only, pad crypto data to the hash engine */
			atoken++;
			atoksize++;
			atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
			atoken->packet_length = 16 - cryptlen;
			atoken->stat = EIP197_TOKEN_STAT_LAST_HASH;
			atoken->instructions = EIP197_TOKEN_INS_TYPE_HASH;
		} else {
			atoken->stat = EIP197_TOKEN_STAT_LAST_HASH;
		}
		atoken++;
		atoksize++;
	}

	if (direction == SAFEXCEL_ENCRYPT) {
		/* Append ICV */
		atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
		atoken->packet_length = digestsize;
		atoken->stat = EIP197_TOKEN_STAT_LAST_HASH |
			       EIP197_TOKEN_STAT_LAST_PACKET;
		atoken->instructions = EIP197_TOKEN_INS_TYPE_OUTPUT |
				       EIP197_TOKEN_INS_INSERT_HASH_DIGEST;
	} else {
		/* Extract ICV */
		atoken->opcode = EIP197_TOKEN_OPCODE_RETRIEVE;
		atoken->packet_length = digestsize;
		atoken->stat = EIP197_TOKEN_STAT_LAST_HASH |
			       EIP197_TOKEN_STAT_LAST_PACKET;
		atoken->instructions = EIP197_TOKEN_INS_INSERT_HASH_DIGEST;
		atoken++;
		atoksize++;

		/* Verify ICV */
		atoken->opcode = EIP197_TOKEN_OPCODE_VERIFY;
		atoken->packet_length = digestsize |
					EIP197_TOKEN_HASH_RESULT_VERIFY;
		atoken->stat = EIP197_TOKEN_STAT_LAST_HASH |
			       EIP197_TOKEN_STAT_LAST_PACKET;
		atoken->instructions = EIP197_TOKEN_INS_TYPE_OUTPUT;
	}

	/* Fixup length of the token in the command descriptor */
	cdesc->additional_cdata_size = atoksize;
}
0371 
0372 static int safexcel_skcipher_aes_setkey(struct crypto_skcipher *ctfm,
0373                     const u8 *key, unsigned int len)
0374 {
0375     struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
0376     struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
0377     struct safexcel_crypto_priv *priv = ctx->base.priv;
0378     struct crypto_aes_ctx aes;
0379     int ret, i;
0380 
0381     ret = aes_expandkey(&aes, key, len);
0382     if (ret)
0383         return ret;
0384 
0385     if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
0386         for (i = 0; i < len / sizeof(u32); i++) {
0387             if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
0388                 ctx->base.needs_inv = true;
0389                 break;
0390             }
0391         }
0392     }
0393 
0394     for (i = 0; i < len / sizeof(u32); i++)
0395         ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
0396 
0397     ctx->key_len = len;
0398 
0399     memzero_explicit(&aes, sizeof(aes));
0400     return 0;
0401 }
0402 
0403 static int safexcel_aead_setkey(struct crypto_aead *ctfm, const u8 *key,
0404                 unsigned int len)
0405 {
0406     struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
0407     struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
0408     struct safexcel_crypto_priv *priv = ctx->base.priv;
0409     struct crypto_authenc_keys keys;
0410     struct crypto_aes_ctx aes;
0411     int err = -EINVAL, i;
0412     const char *alg;
0413 
0414     if (unlikely(crypto_authenc_extractkeys(&keys, key, len)))
0415         goto badkey;
0416 
0417     if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD) {
0418         /* Must have at least space for the nonce here */
0419         if (unlikely(keys.enckeylen < CTR_RFC3686_NONCE_SIZE))
0420             goto badkey;
0421         /* last 4 bytes of key are the nonce! */
0422         ctx->nonce = *(u32 *)(keys.enckey + keys.enckeylen -
0423                       CTR_RFC3686_NONCE_SIZE);
0424         /* exclude the nonce here */
0425         keys.enckeylen -= CTR_RFC3686_NONCE_SIZE;
0426     }
0427 
0428     /* Encryption key */
0429     switch (ctx->alg) {
0430     case SAFEXCEL_DES:
0431         err = verify_aead_des_key(ctfm, keys.enckey, keys.enckeylen);
0432         if (unlikely(err))
0433             goto badkey;
0434         break;
0435     case SAFEXCEL_3DES:
0436         err = verify_aead_des3_key(ctfm, keys.enckey, keys.enckeylen);
0437         if (unlikely(err))
0438             goto badkey;
0439         break;
0440     case SAFEXCEL_AES:
0441         err = aes_expandkey(&aes, keys.enckey, keys.enckeylen);
0442         if (unlikely(err))
0443             goto badkey;
0444         break;
0445     case SAFEXCEL_SM4:
0446         if (unlikely(keys.enckeylen != SM4_KEY_SIZE))
0447             goto badkey;
0448         break;
0449     default:
0450         dev_err(priv->dev, "aead: unsupported cipher algorithm\n");
0451         goto badkey;
0452     }
0453 
0454     if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
0455         for (i = 0; i < keys.enckeylen / sizeof(u32); i++) {
0456             if (le32_to_cpu(ctx->key[i]) !=
0457                 ((u32 *)keys.enckey)[i]) {
0458                 ctx->base.needs_inv = true;
0459                 break;
0460             }
0461         }
0462     }
0463 
0464     /* Auth key */
0465     switch (ctx->hash_alg) {
0466     case CONTEXT_CONTROL_CRYPTO_ALG_SHA1:
0467         alg = "safexcel-sha1";
0468         break;
0469     case CONTEXT_CONTROL_CRYPTO_ALG_SHA224:
0470         alg = "safexcel-sha224";
0471         break;
0472     case CONTEXT_CONTROL_CRYPTO_ALG_SHA256:
0473         alg = "safexcel-sha256";
0474         break;
0475     case CONTEXT_CONTROL_CRYPTO_ALG_SHA384:
0476         alg = "safexcel-sha384";
0477         break;
0478     case CONTEXT_CONTROL_CRYPTO_ALG_SHA512:
0479         alg = "safexcel-sha512";
0480         break;
0481     case CONTEXT_CONTROL_CRYPTO_ALG_SM3:
0482         alg = "safexcel-sm3";
0483         break;
0484     default:
0485         dev_err(priv->dev, "aead: unsupported hash algorithm\n");
0486         goto badkey;
0487     }
0488 
0489     if (safexcel_hmac_setkey(&ctx->base, keys.authkey, keys.authkeylen,
0490                  alg, ctx->state_sz))
0491         goto badkey;
0492 
0493     /* Now copy the keys into the context */
0494     for (i = 0; i < keys.enckeylen / sizeof(u32); i++)
0495         ctx->key[i] = cpu_to_le32(((u32 *)keys.enckey)[i]);
0496     ctx->key_len = keys.enckeylen;
0497 
0498     memzero_explicit(&keys, sizeof(keys));
0499     return 0;
0500 
0501 badkey:
0502     memzero_explicit(&keys, sizeof(keys));
0503     return err;
0504 }
0505 
/*
 * Fill in the control0/control1 context words of the first command
 * descriptor: operation type and direction, cipher/hash algorithm
 * selection and the context record size in 32 bit words.
 *
 * Returns 0 on success or -EINVAL for an unsupported AES key size.
 */
static int safexcel_context_control(struct safexcel_cipher_ctx *ctx,
				    struct crypto_async_request *async,
				    struct safexcel_cipher_req *sreq,
				    struct safexcel_command_desc *cdesc)
{
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	/* Context record starts with just the key */
	int ctrl_size = ctx->key_len / sizeof(u32);

	cdesc->control_data.control1 = ctx->mode;

	if (ctx->aead) {
		/* Take in account the ipad+opad digests */
		if (ctx->xcm) {
			/* GCM/CCM: a single hash state follows the key */
			ctrl_size += ctx->state_sz / sizeof(u32);
			cdesc->control_data.control0 =
				CONTEXT_CONTROL_KEY_EN |
				CONTEXT_CONTROL_DIGEST_XCM |
				ctx->hash_alg |
				CONTEXT_CONTROL_SIZE(ctrl_size);
		} else if (ctx->alg == SAFEXCEL_CHACHA20) {
			/* Chacha20-Poly1305 */
			cdesc->control_data.control0 =
				CONTEXT_CONTROL_KEY_EN |
				CONTEXT_CONTROL_CRYPTO_ALG_CHACHA20 |
				(sreq->direction == SAFEXCEL_ENCRYPT ?
					CONTEXT_CONTROL_TYPE_ENCRYPT_HASH_OUT :
					CONTEXT_CONTROL_TYPE_HASH_DECRYPT_IN) |
				ctx->hash_alg |
				CONTEXT_CONTROL_SIZE(ctrl_size);
			/* Direction and algorithm fully set: done */
			return 0;
		} else {
			/* HMAC authenc: ipad and opad both follow the key */
			ctrl_size += ctx->state_sz / sizeof(u32) * 2;
			cdesc->control_data.control0 =
				CONTEXT_CONTROL_KEY_EN |
				CONTEXT_CONTROL_DIGEST_HMAC |
				ctx->hash_alg |
				CONTEXT_CONTROL_SIZE(ctrl_size);
		}

		/* Hash-then-crypt vs crypt-then-hash per mode/direction */
		if (sreq->direction == SAFEXCEL_ENCRYPT &&
		    (ctx->xcm == EIP197_XCM_MODE_CCM ||
		     ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP_GMAC))
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_TYPE_HASH_ENCRYPT_OUT;
		else if (sreq->direction == SAFEXCEL_ENCRYPT)
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_TYPE_ENCRYPT_HASH_OUT;
		else if (ctx->xcm == EIP197_XCM_MODE_CCM)
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_TYPE_DECRYPT_HASH_IN;
		else
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_TYPE_HASH_DECRYPT_IN;
	} else {
		/* Plain skcipher: crypto only, no hash involved */
		if (sreq->direction == SAFEXCEL_ENCRYPT)
			cdesc->control_data.control0 =
				CONTEXT_CONTROL_TYPE_CRYPTO_OUT |
				CONTEXT_CONTROL_KEY_EN |
				CONTEXT_CONTROL_SIZE(ctrl_size);
		else
			cdesc->control_data.control0 =
				CONTEXT_CONTROL_TYPE_CRYPTO_IN |
				CONTEXT_CONTROL_KEY_EN |
				CONTEXT_CONTROL_SIZE(ctrl_size);
	}

	/* Cipher algorithm selection */
	if (ctx->alg == SAFEXCEL_DES) {
		cdesc->control_data.control0 |=
			CONTEXT_CONTROL_CRYPTO_ALG_DES;
	} else if (ctx->alg == SAFEXCEL_3DES) {
		cdesc->control_data.control0 |=
			CONTEXT_CONTROL_CRYPTO_ALG_3DES;
	} else if (ctx->alg == SAFEXCEL_AES) {
		/*
		 * key_len is shifted by ctx->xts - presumably because an
		 * XTS key holds two halves; confirm with the XTS setkey.
		 */
		switch (ctx->key_len >> ctx->xts) {
		case AES_KEYSIZE_128:
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_CRYPTO_ALG_AES128;
			break;
		case AES_KEYSIZE_192:
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_CRYPTO_ALG_AES192;
			break;
		case AES_KEYSIZE_256:
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_CRYPTO_ALG_AES256;
			break;
		default:
			dev_err(priv->dev, "aes keysize not supported: %u\n",
				ctx->key_len >> ctx->xts);
			return -EINVAL;
		}
	} else if (ctx->alg == SAFEXCEL_CHACHA20) {
		cdesc->control_data.control0 |=
			CONTEXT_CONTROL_CRYPTO_ALG_CHACHA20;
	} else if (ctx->alg == SAFEXCEL_SM4) {
		cdesc->control_data.control0 |=
			CONTEXT_CONTROL_CRYPTO_ALG_SM4;
	}

	return 0;
}
0607 
/*
 * Completion handler for one skcipher request: consume and error-check
 * all result descriptors belonging to the request, unmap the DMA
 * scatterlists and, for CBC encryption, copy the last ciphertext block
 * back into the request IV for chaining.
 *
 * Returns the number of result descriptors processed; *ret carries the
 * request status and *should_complete is set to true.
 */
static int safexcel_handle_req_result(struct safexcel_crypto_priv *priv, int ring,
				      struct crypto_async_request *async,
				      struct scatterlist *src,
				      struct scatterlist *dst,
				      unsigned int cryptlen,
				      struct safexcel_cipher_req *sreq,
				      bool *should_complete, int *ret)
{
	struct skcipher_request *areq = skcipher_request_cast(async);
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(areq);
	struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(skcipher);
	struct safexcel_result_desc *rdesc;
	int ndesc = 0;

	*ret = 0;

	if (unlikely(!sreq->rdescs))
		return 0;

	while (sreq->rdescs--) {
		rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
		if (IS_ERR(rdesc)) {
			dev_err(priv->dev,
				"cipher: result: could not retrieve the result descriptor\n");
			*ret = PTR_ERR(rdesc);
			break;
		}

		/* Only the first error encountered is reported */
		if (likely(!*ret))
			*ret = safexcel_rdesc_check_errors(priv, rdesc);

		ndesc++;
	}

	safexcel_complete(priv, ring);

	if (src == dst) {
		/* In-place operation was mapped bidirectionally */
		dma_unmap_sg(priv->dev, src, sreq->nr_src, DMA_BIDIRECTIONAL);
	} else {
		dma_unmap_sg(priv->dev, src, sreq->nr_src, DMA_TO_DEVICE);
		dma_unmap_sg(priv->dev, dst, sreq->nr_dst, DMA_FROM_DEVICE);
	}

	/*
	 * Update IV in req from last crypto output word for CBC modes
	 * (the decrypt direction is handled in safexcel_send_req before
	 * the input data may be overwritten in-place)
	 */
	if ((!ctx->aead) && (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CBC) &&
	    (sreq->direction == SAFEXCEL_ENCRYPT)) {
		/* For encrypt take the last output word */
		sg_pcopy_to_buffer(dst, sreq->nr_dst, areq->iv,
				   crypto_skcipher_ivsize(skcipher),
				   (cryptlen -
				    crypto_skcipher_ivsize(skcipher)));
	}

	*should_complete = true;

	return ndesc;
}
0667 
0668 static int safexcel_send_req(struct crypto_async_request *base, int ring,
0669                  struct safexcel_cipher_req *sreq,
0670                  struct scatterlist *src, struct scatterlist *dst,
0671                  unsigned int cryptlen, unsigned int assoclen,
0672                  unsigned int digestsize, u8 *iv, int *commands,
0673                  int *results)
0674 {
0675     struct skcipher_request *areq = skcipher_request_cast(base);
0676     struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(areq);
0677     struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
0678     struct safexcel_crypto_priv *priv = ctx->base.priv;
0679     struct safexcel_command_desc *cdesc;
0680     struct safexcel_command_desc *first_cdesc = NULL;
0681     struct safexcel_result_desc *rdesc, *first_rdesc = NULL;
0682     struct scatterlist *sg;
0683     unsigned int totlen;
0684     unsigned int totlen_src = cryptlen + assoclen;
0685     unsigned int totlen_dst = totlen_src;
0686     struct safexcel_token *atoken;
0687     int n_cdesc = 0, n_rdesc = 0;
0688     int queued, i, ret = 0;
0689     bool first = true;
0690 
0691     sreq->nr_src = sg_nents_for_len(src, totlen_src);
0692 
0693     if (ctx->aead) {
0694         /*
0695          * AEAD has auth tag appended to output for encrypt and
0696          * removed from the output for decrypt!
0697          */
0698         if (sreq->direction == SAFEXCEL_DECRYPT)
0699             totlen_dst -= digestsize;
0700         else
0701             totlen_dst += digestsize;
0702 
0703         memcpy(ctx->base.ctxr->data + ctx->key_len / sizeof(u32),
0704                &ctx->base.ipad, ctx->state_sz);
0705         if (!ctx->xcm)
0706             memcpy(ctx->base.ctxr->data + (ctx->key_len +
0707                    ctx->state_sz) / sizeof(u32), &ctx->base.opad,
0708                    ctx->state_sz);
0709     } else if ((ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CBC) &&
0710            (sreq->direction == SAFEXCEL_DECRYPT)) {
0711         /*
0712          * Save IV from last crypto input word for CBC modes in decrypt
0713          * direction. Need to do this first in case of inplace operation
0714          * as it will be overwritten.
0715          */
0716         sg_pcopy_to_buffer(src, sreq->nr_src, areq->iv,
0717                    crypto_skcipher_ivsize(skcipher),
0718                    (totlen_src -
0719                     crypto_skcipher_ivsize(skcipher)));
0720     }
0721 
0722     sreq->nr_dst = sg_nents_for_len(dst, totlen_dst);
0723 
0724     /*
0725      * Remember actual input length, source buffer length may be
0726      * updated in case of inline operation below.
0727      */
0728     totlen = totlen_src;
0729     queued = totlen_src;
0730 
0731     if (src == dst) {
0732         sreq->nr_src = max(sreq->nr_src, sreq->nr_dst);
0733         sreq->nr_dst = sreq->nr_src;
0734         if (unlikely((totlen_src || totlen_dst) &&
0735             (sreq->nr_src <= 0))) {
0736             dev_err(priv->dev, "In-place buffer not large enough (need %d bytes)!",
0737                 max(totlen_src, totlen_dst));
0738             return -EINVAL;
0739         }
0740         dma_map_sg(priv->dev, src, sreq->nr_src, DMA_BIDIRECTIONAL);
0741     } else {
0742         if (unlikely(totlen_src && (sreq->nr_src <= 0))) {
0743             dev_err(priv->dev, "Source buffer not large enough (need %d bytes)!",
0744                 totlen_src);
0745             return -EINVAL;
0746         }
0747         dma_map_sg(priv->dev, src, sreq->nr_src, DMA_TO_DEVICE);
0748 
0749         if (unlikely(totlen_dst && (sreq->nr_dst <= 0))) {
0750             dev_err(priv->dev, "Dest buffer not large enough (need %d bytes)!",
0751                 totlen_dst);
0752             dma_unmap_sg(priv->dev, src, sreq->nr_src,
0753                      DMA_TO_DEVICE);
0754             return -EINVAL;
0755         }
0756         dma_map_sg(priv->dev, dst, sreq->nr_dst, DMA_FROM_DEVICE);
0757     }
0758 
0759     memcpy(ctx->base.ctxr->data, ctx->key, ctx->key_len);
0760 
0761     if (!totlen) {
0762         /*
0763          * The EIP97 cannot deal with zero length input packets!
0764          * So stuff a dummy command descriptor indicating a 1 byte
0765          * (dummy) input packet, using the context record as source.
0766          */
0767         first_cdesc = safexcel_add_cdesc(priv, ring,
0768                          1, 1, ctx->base.ctxr_dma,
0769                          1, 1, ctx->base.ctxr_dma,
0770                          &atoken);
0771         if (IS_ERR(first_cdesc)) {
0772             /* No space left in the command descriptor ring */
0773             ret = PTR_ERR(first_cdesc);
0774             goto cdesc_rollback;
0775         }
0776         n_cdesc = 1;
0777         goto skip_cdesc;
0778     }
0779 
0780     /* command descriptors */
0781     for_each_sg(src, sg, sreq->nr_src, i) {
0782         int len = sg_dma_len(sg);
0783 
0784         /* Do not overflow the request */
0785         if (queued < len)
0786             len = queued;
0787 
0788         cdesc = safexcel_add_cdesc(priv, ring, !n_cdesc,
0789                        !(queued - len),
0790                        sg_dma_address(sg), len, totlen,
0791                        ctx->base.ctxr_dma, &atoken);
0792         if (IS_ERR(cdesc)) {
0793             /* No space left in the command descriptor ring */
0794             ret = PTR_ERR(cdesc);
0795             goto cdesc_rollback;
0796         }
0797 
0798         if (!n_cdesc)
0799             first_cdesc = cdesc;
0800 
0801         n_cdesc++;
0802         queued -= len;
0803         if (!queued)
0804             break;
0805     }
0806 skip_cdesc:
0807     /* Add context control words and token to first command descriptor */
0808     safexcel_context_control(ctx, base, sreq, first_cdesc);
0809     if (ctx->aead)
0810         safexcel_aead_token(ctx, iv, first_cdesc, atoken,
0811                     sreq->direction, cryptlen,
0812                     assoclen, digestsize);
0813     else
0814         safexcel_skcipher_token(ctx, iv, first_cdesc, atoken,
0815                     cryptlen);
0816 
0817     /* result descriptors */
0818     for_each_sg(dst, sg, sreq->nr_dst, i) {
0819         bool last = (i == sreq->nr_dst - 1);
0820         u32 len = sg_dma_len(sg);
0821 
0822         /* only allow the part of the buffer we know we need */
0823         if (len > totlen_dst)
0824             len = totlen_dst;
0825         if (unlikely(!len))
0826             break;
0827         totlen_dst -= len;
0828 
0829         /* skip over AAD space in buffer - not written */
0830         if (assoclen) {
0831             if (assoclen >= len) {
0832                 assoclen -= len;
0833                 continue;
0834             }
0835             rdesc = safexcel_add_rdesc(priv, ring, first, last,
0836                            sg_dma_address(sg) +
0837                            assoclen,
0838                            len - assoclen);
0839             assoclen = 0;
0840         } else {
0841             rdesc = safexcel_add_rdesc(priv, ring, first, last,
0842                            sg_dma_address(sg),
0843                            len);
0844         }
0845         if (IS_ERR(rdesc)) {
0846             /* No space left in the result descriptor ring */
0847             ret = PTR_ERR(rdesc);
0848             goto rdesc_rollback;
0849         }
0850         if (first) {
0851             first_rdesc = rdesc;
0852             first = false;
0853         }
0854         n_rdesc++;
0855     }
0856 
0857     if (unlikely(first)) {
0858         /*
0859          * Special case: AEAD decrypt with only AAD data.
0860          * In this case there is NO output data from the engine,
0861          * but the engine still needs a result descriptor!
0862          * Create a dummy one just for catching the result token.
0863          */
0864         rdesc = safexcel_add_rdesc(priv, ring, true, true, 0, 0);
0865         if (IS_ERR(rdesc)) {
0866             /* No space left in the result descriptor ring */
0867             ret = PTR_ERR(rdesc);
0868             goto rdesc_rollback;
0869         }
0870         first_rdesc = rdesc;
0871         n_rdesc = 1;
0872     }
0873 
0874     safexcel_rdr_req_set(priv, ring, first_rdesc, base);
0875 
0876     *commands = n_cdesc;
0877     *results = n_rdesc;
0878     return 0;
0879 
0880 rdesc_rollback:
0881     for (i = 0; i < n_rdesc; i++)
0882         safexcel_ring_rollback_wptr(priv, &priv->ring[ring].rdr);
0883 cdesc_rollback:
0884     for (i = 0; i < n_cdesc; i++)
0885         safexcel_ring_rollback_wptr(priv, &priv->ring[ring].cdr);
0886 
0887     if (src == dst) {
0888         dma_unmap_sg(priv->dev, src, sreq->nr_src, DMA_BIDIRECTIONAL);
0889     } else {
0890         dma_unmap_sg(priv->dev, src, sreq->nr_src, DMA_TO_DEVICE);
0891         dma_unmap_sg(priv->dev, dst, sreq->nr_dst, DMA_FROM_DEVICE);
0892     }
0893 
0894     return ret;
0895 }
0896 
/*
 * Process the result of a context-record invalidation request.
 * Consumes all result descriptors belonging to the request, then either
 * frees the context record (when the tfm is being torn down) or moves the
 * request to a freshly selected ring and re-enqueues it for processing.
 * Returns the number of result descriptors consumed.
 */
static int safexcel_handle_inv_result(struct safexcel_crypto_priv *priv,
                      int ring,
                      struct crypto_async_request *base,
                      struct safexcel_cipher_req *sreq,
                      bool *should_complete, int *ret)
{
    struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
    struct safexcel_result_desc *rdesc;
    int ndesc = 0, enq_ret;

    *ret = 0;

    /* Nothing to do if no result descriptors were produced */
    if (unlikely(!sreq->rdescs))
        return 0;

    /* Drain every result descriptor, keeping the first error seen */
    while (sreq->rdescs--) {
        rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
        if (IS_ERR(rdesc)) {
            dev_err(priv->dev,
                "cipher: invalidate: could not retrieve the result descriptor\n");
            *ret = PTR_ERR(rdesc);
            break;
        }

        if (likely(!*ret))
            *ret = safexcel_rdesc_check_errors(priv, rdesc);

        ndesc++;
    }

    safexcel_complete(priv, ring);

    /* Transform is going away: release the context record and finish */
    if (ctx->base.exit_inv) {
        dma_pool_free(priv->context_pool, ctx->base.ctxr,
                  ctx->base.ctxr_dma);

        *should_complete = true;

        return ndesc;
    }

    /* Context was only invalidated: rebind to a ring and resubmit */
    ring = safexcel_select_ring(priv);
    ctx->base.ring = ring;

    spin_lock_bh(&priv->ring[ring].queue_lock);
    enq_ret = crypto_enqueue_request(&priv->ring[ring].queue, base);
    spin_unlock_bh(&priv->ring[ring].queue_lock);

    if (enq_ret != -EINPROGRESS)
        *ret = enq_ret;

    queue_work(priv->ring[ring].workqueue,
           &priv->ring[ring].work_data.work);

    /* Not complete yet: the re-enqueued request still has to run */
    *should_complete = false;

    return ndesc;
}
0955 
0956 static int safexcel_skcipher_handle_result(struct safexcel_crypto_priv *priv,
0957                        int ring,
0958                        struct crypto_async_request *async,
0959                        bool *should_complete, int *ret)
0960 {
0961     struct skcipher_request *req = skcipher_request_cast(async);
0962     struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
0963     int err;
0964 
0965     if (sreq->needs_inv) {
0966         sreq->needs_inv = false;
0967         err = safexcel_handle_inv_result(priv, ring, async, sreq,
0968                          should_complete, ret);
0969     } else {
0970         err = safexcel_handle_req_result(priv, ring, async, req->src,
0971                          req->dst, req->cryptlen, sreq,
0972                          should_complete, ret);
0973     }
0974 
0975     return err;
0976 }
0977 
0978 static int safexcel_aead_handle_result(struct safexcel_crypto_priv *priv,
0979                        int ring,
0980                        struct crypto_async_request *async,
0981                        bool *should_complete, int *ret)
0982 {
0983     struct aead_request *req = aead_request_cast(async);
0984     struct crypto_aead *tfm = crypto_aead_reqtfm(req);
0985     struct safexcel_cipher_req *sreq = aead_request_ctx(req);
0986     int err;
0987 
0988     if (sreq->needs_inv) {
0989         sreq->needs_inv = false;
0990         err = safexcel_handle_inv_result(priv, ring, async, sreq,
0991                          should_complete, ret);
0992     } else {
0993         err = safexcel_handle_req_result(priv, ring, async, req->src,
0994                          req->dst,
0995                          req->cryptlen + crypto_aead_authsize(tfm),
0996                          sreq, should_complete, ret);
0997     }
0998 
0999     return err;
1000 }
1001 
1002 static int safexcel_cipher_send_inv(struct crypto_async_request *base,
1003                     int ring, int *commands, int *results)
1004 {
1005     struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
1006     struct safexcel_crypto_priv *priv = ctx->base.priv;
1007     int ret;
1008 
1009     ret = safexcel_invalidate_cache(base, priv, ctx->base.ctxr_dma, ring);
1010     if (unlikely(ret))
1011         return ret;
1012 
1013     *commands = 1;
1014     *results = 1;
1015 
1016     return 0;
1017 }
1018 
1019 static int safexcel_skcipher_send(struct crypto_async_request *async, int ring,
1020                   int *commands, int *results)
1021 {
1022     struct skcipher_request *req = skcipher_request_cast(async);
1023     struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
1024     struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
1025     struct safexcel_crypto_priv *priv = ctx->base.priv;
1026     int ret;
1027 
1028     BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && sreq->needs_inv);
1029 
1030     if (sreq->needs_inv) {
1031         ret = safexcel_cipher_send_inv(async, ring, commands, results);
1032     } else {
1033         struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
1034         u8 input_iv[AES_BLOCK_SIZE];
1035 
1036         /*
1037          * Save input IV in case of CBC decrypt mode
1038          * Will be overwritten with output IV prior to use!
1039          */
1040         memcpy(input_iv, req->iv, crypto_skcipher_ivsize(skcipher));
1041 
1042         ret = safexcel_send_req(async, ring, sreq, req->src,
1043                     req->dst, req->cryptlen, 0, 0, input_iv,
1044                     commands, results);
1045     }
1046 
1047     sreq->rdescs = *results;
1048     return ret;
1049 }
1050 
1051 static int safexcel_aead_send(struct crypto_async_request *async, int ring,
1052                   int *commands, int *results)
1053 {
1054     struct aead_request *req = aead_request_cast(async);
1055     struct crypto_aead *tfm = crypto_aead_reqtfm(req);
1056     struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
1057     struct safexcel_cipher_req *sreq = aead_request_ctx(req);
1058     struct safexcel_crypto_priv *priv = ctx->base.priv;
1059     int ret;
1060 
1061     BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && sreq->needs_inv);
1062 
1063     if (sreq->needs_inv)
1064         ret = safexcel_cipher_send_inv(async, ring, commands, results);
1065     else
1066         ret = safexcel_send_req(async, ring, sreq, req->src, req->dst,
1067                     req->cryptlen, req->assoclen,
1068                     crypto_aead_authsize(tfm), req->iv,
1069                     commands, results);
1070     sreq->rdescs = *results;
1071     return ret;
1072 }
1073 
/*
 * Synchronously invalidate the context record of a cipher transform.
 * Marks the context for exit invalidation, enqueues the (dummy) request
 * on the context's current ring, kicks the ring worker and blocks until
 * the invalidation completes. Returns 0 on success or the completion
 * error reported by the engine.
 */
static int safexcel_cipher_exit_inv(struct crypto_tfm *tfm,
                    struct crypto_async_request *base,
                    struct safexcel_cipher_req *sreq,
                    struct safexcel_inv_result *result)
{
    struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
    struct safexcel_crypto_priv *priv = ctx->base.priv;
    int ring = ctx->base.ring;

    init_completion(&result->completion);

    /* NOTE(review): base->tfm is expected to equal tfm here, making
     * this re-fetch a no-op — confirm against the callers. */
    ctx = crypto_tfm_ctx(base->tfm);
    /* exit_inv tells the result handler to free the context record */
    ctx->base.exit_inv = true;
    sreq->needs_inv = true;

    spin_lock_bh(&priv->ring[ring].queue_lock);
    crypto_enqueue_request(&priv->ring[ring].queue, base);
    spin_unlock_bh(&priv->ring[ring].queue_lock);

    queue_work(priv->ring[ring].workqueue,
           &priv->ring[ring].work_data.work);

    /* Block until safexcel_inv_complete() fires */
    wait_for_completion(&result->completion);

    if (result->error) {
        dev_warn(priv->dev,
            "cipher: sync: invalidate: completion error %d\n",
             result->error);
        return result->error;
    }

    return 0;
}
1107 
1108 static int safexcel_skcipher_exit_inv(struct crypto_tfm *tfm)
1109 {
1110     EIP197_REQUEST_ON_STACK(req, skcipher, EIP197_SKCIPHER_REQ_SIZE);
1111     struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
1112     struct safexcel_inv_result result = {};
1113 
1114     memset(req, 0, sizeof(struct skcipher_request));
1115 
1116     skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
1117                       safexcel_inv_complete, &result);
1118     skcipher_request_set_tfm(req, __crypto_skcipher_cast(tfm));
1119 
1120     return safexcel_cipher_exit_inv(tfm, &req->base, sreq, &result);
1121 }
1122 
1123 static int safexcel_aead_exit_inv(struct crypto_tfm *tfm)
1124 {
1125     EIP197_REQUEST_ON_STACK(req, aead, EIP197_AEAD_REQ_SIZE);
1126     struct safexcel_cipher_req *sreq = aead_request_ctx(req);
1127     struct safexcel_inv_result result = {};
1128 
1129     memset(req, 0, sizeof(struct aead_request));
1130 
1131     aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
1132                   safexcel_inv_complete, &result);
1133     aead_request_set_tfm(req, __crypto_aead_cast(tfm));
1134 
1135     return safexcel_cipher_exit_inv(tfm, &req->base, sreq, &result);
1136 }
1137 
/*
 * Common entry point for queueing a cipher request towards the engine.
 * Allocates the DMA context record on first use, flags a pending cache
 * invalidation when the key changed under a TRC cache, then enqueues the
 * request on the context's ring and kicks the ring worker.
 * Returns the crypto_enqueue_request() status (-EINPROGRESS on success)
 * or -ENOMEM if the context record could not be allocated.
 */
static int safexcel_queue_req(struct crypto_async_request *base,
            struct safexcel_cipher_req *sreq,
            enum safexcel_cipher_direction dir)
{
    struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
    struct safexcel_crypto_priv *priv = ctx->base.priv;
    int ret, ring;

    sreq->needs_inv = false;
    sreq->direction = dir;

    if (ctx->base.ctxr) {
        /* Existing record: invalidate it first if the key changed
         * (only relevant when the engine caches transform records) */
        if (priv->flags & EIP197_TRC_CACHE && ctx->base.needs_inv) {
            sreq->needs_inv = true;
            ctx->base.needs_inv = false;
        }
    } else {
        /* First request on this tfm: pick a ring and allocate the
         * context record from the DMA pool */
        ctx->base.ring = safexcel_select_ring(priv);
        ctx->base.ctxr = dma_pool_zalloc(priv->context_pool,
                         EIP197_GFP_FLAGS(*base),
                         &ctx->base.ctxr_dma);
        if (!ctx->base.ctxr)
            return -ENOMEM;
    }

    ring = ctx->base.ring;

    spin_lock_bh(&priv->ring[ring].queue_lock);
    ret = crypto_enqueue_request(&priv->ring[ring].queue, base);
    spin_unlock_bh(&priv->ring[ring].queue_lock);

    queue_work(priv->ring[ring].workqueue,
           &priv->ring[ring].work_data.work);

    return ret;
}
1174 
1175 static int safexcel_encrypt(struct skcipher_request *req)
1176 {
1177     return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
1178             SAFEXCEL_ENCRYPT);
1179 }
1180 
1181 static int safexcel_decrypt(struct skcipher_request *req)
1182 {
1183     return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
1184             SAFEXCEL_DECRYPT);
1185 }
1186 
1187 static int safexcel_skcipher_cra_init(struct crypto_tfm *tfm)
1188 {
1189     struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1190     struct safexcel_alg_template *tmpl =
1191         container_of(tfm->__crt_alg, struct safexcel_alg_template,
1192                  alg.skcipher.base);
1193 
1194     crypto_skcipher_set_reqsize(__crypto_skcipher_cast(tfm),
1195                     sizeof(struct safexcel_cipher_req));
1196 
1197     ctx->base.priv = tmpl->priv;
1198 
1199     ctx->base.send = safexcel_skcipher_send;
1200     ctx->base.handle_result = safexcel_skcipher_handle_result;
1201     ctx->ivmask = EIP197_OPTION_4_TOKEN_IV_CMD;
1202     ctx->ctrinit = 1;
1203     return 0;
1204 }
1205 
/*
 * Common cipher transform teardown: scrub the key material and, if a
 * context record exists, scrub its contents as well.
 * Returns nonzero (-ENOMEM) when no context record was ever allocated,
 * which the callers use as a "nothing to invalidate, stop here" signal.
 */
static int safexcel_cipher_cra_exit(struct crypto_tfm *tfm)
{
    struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

    /* memzero_explicit so the key wipe cannot be optimized away */
    memzero_explicit(ctx->key, sizeof(ctx->key));

    /* context not allocated, skip invalidation */
    if (!ctx->base.ctxr)
        return -ENOMEM;

    memzero_explicit(ctx->base.ctxr->data, sizeof(ctx->base.ctxr->data));
    return 0;
}
1219 
1220 static void safexcel_skcipher_cra_exit(struct crypto_tfm *tfm)
1221 {
1222     struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1223     struct safexcel_crypto_priv *priv = ctx->base.priv;
1224     int ret;
1225 
1226     if (safexcel_cipher_cra_exit(tfm))
1227         return;
1228 
1229     if (priv->flags & EIP197_TRC_CACHE) {
1230         ret = safexcel_skcipher_exit_inv(tfm);
1231         if (ret)
1232             dev_warn(priv->dev, "skcipher: invalidation error %d\n",
1233                  ret);
1234     } else {
1235         dma_pool_free(priv->context_pool, ctx->base.ctxr,
1236                   ctx->base.ctxr_dma);
1237     }
1238 }
1239 
1240 static void safexcel_aead_cra_exit(struct crypto_tfm *tfm)
1241 {
1242     struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1243     struct safexcel_crypto_priv *priv = ctx->base.priv;
1244     int ret;
1245 
1246     if (safexcel_cipher_cra_exit(tfm))
1247         return;
1248 
1249     if (priv->flags & EIP197_TRC_CACHE) {
1250         ret = safexcel_aead_exit_inv(tfm);
1251         if (ret)
1252             dev_warn(priv->dev, "aead: invalidation error %d\n",
1253                  ret);
1254     } else {
1255         dma_pool_free(priv->context_pool, ctx->base.ctxr,
1256                   ctx->base.ctxr_dma);
1257     }
1258 }
1259 
1260 static int safexcel_skcipher_aes_ecb_cra_init(struct crypto_tfm *tfm)
1261 {
1262     struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1263 
1264     safexcel_skcipher_cra_init(tfm);
1265     ctx->alg  = SAFEXCEL_AES;
1266     ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
1267     ctx->blocksz = 0;
1268     ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1269     return 0;
1270 }
1271 
/* ecb(aes): hardware AES-ECB offload (no IV) */
struct safexcel_alg_template safexcel_alg_ecb_aes = {
    .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
    .algo_mask = SAFEXCEL_ALG_AES,
    .alg.skcipher = {
        .setkey = safexcel_skcipher_aes_setkey,
        .encrypt = safexcel_encrypt,
        .decrypt = safexcel_decrypt,
        .min_keysize = AES_MIN_KEY_SIZE,
        .max_keysize = AES_MAX_KEY_SIZE,
        .base = {
            .cra_name = "ecb(aes)",
            .cra_driver_name = "safexcel-ecb-aes",
            .cra_priority = SAFEXCEL_CRA_PRIORITY,
            .cra_flags = CRYPTO_ALG_ASYNC |
                     CRYPTO_ALG_ALLOCATES_MEMORY |
                     CRYPTO_ALG_KERN_DRIVER_ONLY,
            .cra_blocksize = AES_BLOCK_SIZE,
            .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
            .cra_alignmask = 0,
            .cra_init = safexcel_skcipher_aes_ecb_cra_init,
            .cra_exit = safexcel_skcipher_cra_exit,
            .cra_module = THIS_MODULE,
        },
    },
};
1297 
1298 static int safexcel_skcipher_aes_cbc_cra_init(struct crypto_tfm *tfm)
1299 {
1300     struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1301 
1302     safexcel_skcipher_cra_init(tfm);
1303     ctx->alg  = SAFEXCEL_AES;
1304     ctx->blocksz = AES_BLOCK_SIZE;
1305     ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
1306     return 0;
1307 }
1308 
/* cbc(aes): hardware AES-CBC offload, 16-byte IV */
struct safexcel_alg_template safexcel_alg_cbc_aes = {
    .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
    .algo_mask = SAFEXCEL_ALG_AES,
    .alg.skcipher = {
        .setkey = safexcel_skcipher_aes_setkey,
        .encrypt = safexcel_encrypt,
        .decrypt = safexcel_decrypt,
        .min_keysize = AES_MIN_KEY_SIZE,
        .max_keysize = AES_MAX_KEY_SIZE,
        .ivsize = AES_BLOCK_SIZE,
        .base = {
            .cra_name = "cbc(aes)",
            .cra_driver_name = "safexcel-cbc-aes",
            .cra_priority = SAFEXCEL_CRA_PRIORITY,
            .cra_flags = CRYPTO_ALG_ASYNC |
                     CRYPTO_ALG_ALLOCATES_MEMORY |
                     CRYPTO_ALG_KERN_DRIVER_ONLY,
            .cra_blocksize = AES_BLOCK_SIZE,
            .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
            .cra_alignmask = 0,
            .cra_init = safexcel_skcipher_aes_cbc_cra_init,
            .cra_exit = safexcel_skcipher_cra_exit,
            .cra_module = THIS_MODULE,
        },
    },
};
1335 
1336 static int safexcel_skcipher_aes_cfb_cra_init(struct crypto_tfm *tfm)
1337 {
1338     struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1339 
1340     safexcel_skcipher_cra_init(tfm);
1341     ctx->alg  = SAFEXCEL_AES;
1342     ctx->blocksz = AES_BLOCK_SIZE;
1343     ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CFB;
1344     return 0;
1345 }
1346 
/* cfb(aes): hardware AES-CFB offload (stream mode, blocksize 1) */
struct safexcel_alg_template safexcel_alg_cfb_aes = {
    .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
    .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_AES_XFB,
    .alg.skcipher = {
        .setkey = safexcel_skcipher_aes_setkey,
        .encrypt = safexcel_encrypt,
        .decrypt = safexcel_decrypt,
        .min_keysize = AES_MIN_KEY_SIZE,
        .max_keysize = AES_MAX_KEY_SIZE,
        .ivsize = AES_BLOCK_SIZE,
        .base = {
            .cra_name = "cfb(aes)",
            .cra_driver_name = "safexcel-cfb-aes",
            .cra_priority = SAFEXCEL_CRA_PRIORITY,
            .cra_flags = CRYPTO_ALG_ASYNC |
                     CRYPTO_ALG_ALLOCATES_MEMORY |
                     CRYPTO_ALG_KERN_DRIVER_ONLY,
            .cra_blocksize = 1,
            .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
            .cra_alignmask = 0,
            .cra_init = safexcel_skcipher_aes_cfb_cra_init,
            .cra_exit = safexcel_skcipher_cra_exit,
            .cra_module = THIS_MODULE,
        },
    },
};
1373 
1374 static int safexcel_skcipher_aes_ofb_cra_init(struct crypto_tfm *tfm)
1375 {
1376     struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1377 
1378     safexcel_skcipher_cra_init(tfm);
1379     ctx->alg  = SAFEXCEL_AES;
1380     ctx->blocksz = AES_BLOCK_SIZE;
1381     ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_OFB;
1382     return 0;
1383 }
1384 
/* ofb(aes): hardware AES-OFB offload (stream mode, blocksize 1) */
struct safexcel_alg_template safexcel_alg_ofb_aes = {
    .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
    .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_AES_XFB,
    .alg.skcipher = {
        .setkey = safexcel_skcipher_aes_setkey,
        .encrypt = safexcel_encrypt,
        .decrypt = safexcel_decrypt,
        .min_keysize = AES_MIN_KEY_SIZE,
        .max_keysize = AES_MAX_KEY_SIZE,
        .ivsize = AES_BLOCK_SIZE,
        .base = {
            .cra_name = "ofb(aes)",
            .cra_driver_name = "safexcel-ofb-aes",
            .cra_priority = SAFEXCEL_CRA_PRIORITY,
            .cra_flags = CRYPTO_ALG_ASYNC |
                     CRYPTO_ALG_ALLOCATES_MEMORY |
                     CRYPTO_ALG_KERN_DRIVER_ONLY,
            .cra_blocksize = 1,
            .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
            .cra_alignmask = 0,
            .cra_init = safexcel_skcipher_aes_ofb_cra_init,
            .cra_exit = safexcel_skcipher_cra_exit,
            .cra_module = THIS_MODULE,
        },
    },
};
1411 
1412 static int safexcel_skcipher_aesctr_setkey(struct crypto_skcipher *ctfm,
1413                        const u8 *key, unsigned int len)
1414 {
1415     struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
1416     struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1417     struct safexcel_crypto_priv *priv = ctx->base.priv;
1418     struct crypto_aes_ctx aes;
1419     int ret, i;
1420     unsigned int keylen;
1421 
1422     /* last 4 bytes of key are the nonce! */
1423     ctx->nonce = *(u32 *)(key + len - CTR_RFC3686_NONCE_SIZE);
1424     /* exclude the nonce here */
1425     keylen = len - CTR_RFC3686_NONCE_SIZE;
1426     ret = aes_expandkey(&aes, key, keylen);
1427     if (ret)
1428         return ret;
1429 
1430     if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
1431         for (i = 0; i < keylen / sizeof(u32); i++) {
1432             if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
1433                 ctx->base.needs_inv = true;
1434                 break;
1435             }
1436         }
1437     }
1438 
1439     for (i = 0; i < keylen / sizeof(u32); i++)
1440         ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
1441 
1442     ctx->key_len = keylen;
1443 
1444     memzero_explicit(&aes, sizeof(aes));
1445     return 0;
1446 }
1447 
1448 static int safexcel_skcipher_aes_ctr_cra_init(struct crypto_tfm *tfm)
1449 {
1450     struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1451 
1452     safexcel_skcipher_cra_init(tfm);
1453     ctx->alg  = SAFEXCEL_AES;
1454     ctx->blocksz = AES_BLOCK_SIZE;
1455     ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
1456     return 0;
1457 }
1458 
/* rfc3686(ctr(aes)): AES-CTR with a 4-byte nonce appended to the key */
struct safexcel_alg_template safexcel_alg_ctr_aes = {
    .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
    .algo_mask = SAFEXCEL_ALG_AES,
    .alg.skcipher = {
        .setkey = safexcel_skcipher_aesctr_setkey,
        .encrypt = safexcel_encrypt,
        .decrypt = safexcel_decrypt,
        /* Add nonce size */
        .min_keysize = AES_MIN_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
        .max_keysize = AES_MAX_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
        .ivsize = CTR_RFC3686_IV_SIZE,
        .base = {
            .cra_name = "rfc3686(ctr(aes))",
            .cra_driver_name = "safexcel-ctr-aes",
            .cra_priority = SAFEXCEL_CRA_PRIORITY,
            .cra_flags = CRYPTO_ALG_ASYNC |
                     CRYPTO_ALG_ALLOCATES_MEMORY |
                     CRYPTO_ALG_KERN_DRIVER_ONLY,
            .cra_blocksize = 1,
            .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
            .cra_alignmask = 0,
            .cra_init = safexcel_skcipher_aes_ctr_cra_init,
            .cra_exit = safexcel_skcipher_cra_exit,
            .cra_module = THIS_MODULE,
        },
    },
};
1486 
/*
 * Set the DES key. Validates the key via the crypto API helper and
 * flags the context for invalidation when a cached record exists and
 * the key actually changed.
 */
static int safexcel_des_setkey(struct crypto_skcipher *ctfm, const u8 *key,
                   unsigned int len)
{
    struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(ctfm);
    struct safexcel_crypto_priv *priv = ctx->base.priv;
    int ret;

    ret = verify_skcipher_des_key(ctfm, key);
    if (ret)
        return ret;

    /* if the context exists and the key changed, need to invalidate it */
    if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
        if (memcmp(ctx->key, key, len))
            ctx->base.needs_inv = true;

    memcpy(ctx->key, key, len);
    ctx->key_len = len;

    return 0;
}
1508 
1509 static int safexcel_skcipher_des_cbc_cra_init(struct crypto_tfm *tfm)
1510 {
1511     struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1512 
1513     safexcel_skcipher_cra_init(tfm);
1514     ctx->alg  = SAFEXCEL_DES;
1515     ctx->blocksz = DES_BLOCK_SIZE;
1516     ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1517     ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
1518     return 0;
1519 }
1520 
/* cbc(des): hardware DES-CBC offload, 8-byte IV */
struct safexcel_alg_template safexcel_alg_cbc_des = {
    .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
    .algo_mask = SAFEXCEL_ALG_DES,
    .alg.skcipher = {
        .setkey = safexcel_des_setkey,
        .encrypt = safexcel_encrypt,
        .decrypt = safexcel_decrypt,
        .min_keysize = DES_KEY_SIZE,
        .max_keysize = DES_KEY_SIZE,
        .ivsize = DES_BLOCK_SIZE,
        .base = {
            .cra_name = "cbc(des)",
            .cra_driver_name = "safexcel-cbc-des",
            .cra_priority = SAFEXCEL_CRA_PRIORITY,
            .cra_flags = CRYPTO_ALG_ASYNC |
                     CRYPTO_ALG_ALLOCATES_MEMORY |
                     CRYPTO_ALG_KERN_DRIVER_ONLY,
            .cra_blocksize = DES_BLOCK_SIZE,
            .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
            .cra_alignmask = 0,
            .cra_init = safexcel_skcipher_des_cbc_cra_init,
            .cra_exit = safexcel_skcipher_cra_exit,
            .cra_module = THIS_MODULE,
        },
    },
};
1547 
1548 static int safexcel_skcipher_des_ecb_cra_init(struct crypto_tfm *tfm)
1549 {
1550     struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1551 
1552     safexcel_skcipher_cra_init(tfm);
1553     ctx->alg  = SAFEXCEL_DES;
1554     ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
1555     ctx->blocksz = 0;
1556     ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1557     return 0;
1558 }
1559 
/* ecb(des): hardware DES-ECB offload (no IV) */
struct safexcel_alg_template safexcel_alg_ecb_des = {
    .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
    .algo_mask = SAFEXCEL_ALG_DES,
    .alg.skcipher = {
        .setkey = safexcel_des_setkey,
        .encrypt = safexcel_encrypt,
        .decrypt = safexcel_decrypt,
        .min_keysize = DES_KEY_SIZE,
        .max_keysize = DES_KEY_SIZE,
        .base = {
            .cra_name = "ecb(des)",
            .cra_driver_name = "safexcel-ecb-des",
            .cra_priority = SAFEXCEL_CRA_PRIORITY,
            .cra_flags = CRYPTO_ALG_ASYNC |
                     CRYPTO_ALG_ALLOCATES_MEMORY |
                     CRYPTO_ALG_KERN_DRIVER_ONLY,
            .cra_blocksize = DES_BLOCK_SIZE,
            .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
            .cra_alignmask = 0,
            .cra_init = safexcel_skcipher_des_ecb_cra_init,
            .cra_exit = safexcel_skcipher_cra_exit,
            .cra_module = THIS_MODULE,
        },
    },
};
1585 
/*
 * Set the 3DES (EDE) key. Validates the key via the crypto API helper
 * and flags the context for invalidation when a cached record exists
 * and the key actually changed.
 */
static int safexcel_des3_ede_setkey(struct crypto_skcipher *ctfm,
                   const u8 *key, unsigned int len)
{
    struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(ctfm);
    struct safexcel_crypto_priv *priv = ctx->base.priv;
    int err;

    err = verify_skcipher_des3_key(ctfm, key);
    if (err)
        return err;

    /* if the context exists and the key changed, need to invalidate it */
    if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
        if (memcmp(ctx->key, key, len))
            ctx->base.needs_inv = true;

    memcpy(ctx->key, key, len);
    ctx->key_len = len;

    return 0;
}
1607 
1608 static int safexcel_skcipher_des3_cbc_cra_init(struct crypto_tfm *tfm)
1609 {
1610     struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1611 
1612     safexcel_skcipher_cra_init(tfm);
1613     ctx->alg  = SAFEXCEL_3DES;
1614     ctx->blocksz = DES3_EDE_BLOCK_SIZE;
1615     ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1616     ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
1617     return 0;
1618 }
1619 
/* cbc(des3_ede): hardware 3DES-CBC offload, 8-byte IV */
struct safexcel_alg_template safexcel_alg_cbc_des3_ede = {
    .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
    .algo_mask = SAFEXCEL_ALG_DES,
    .alg.skcipher = {
        .setkey = safexcel_des3_ede_setkey,
        .encrypt = safexcel_encrypt,
        .decrypt = safexcel_decrypt,
        .min_keysize = DES3_EDE_KEY_SIZE,
        .max_keysize = DES3_EDE_KEY_SIZE,
        .ivsize = DES3_EDE_BLOCK_SIZE,
        .base = {
            .cra_name = "cbc(des3_ede)",
            .cra_driver_name = "safexcel-cbc-des3_ede",
            .cra_priority = SAFEXCEL_CRA_PRIORITY,
            .cra_flags = CRYPTO_ALG_ASYNC |
                     CRYPTO_ALG_ALLOCATES_MEMORY |
                     CRYPTO_ALG_KERN_DRIVER_ONLY,
            .cra_blocksize = DES3_EDE_BLOCK_SIZE,
            .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
            .cra_alignmask = 0,
            .cra_init = safexcel_skcipher_des3_cbc_cra_init,
            .cra_exit = safexcel_skcipher_cra_exit,
            .cra_module = THIS_MODULE,
        },
    },
};
1646 
1647 static int safexcel_skcipher_des3_ecb_cra_init(struct crypto_tfm *tfm)
1648 {
1649     struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1650 
1651     safexcel_skcipher_cra_init(tfm);
1652     ctx->alg  = SAFEXCEL_3DES;
1653     ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
1654     ctx->blocksz = 0;
1655     ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1656     return 0;
1657 }
1658 
/* ecb(des3_ede): hardware 3DES-ECB offload (no IV) */
struct safexcel_alg_template safexcel_alg_ecb_des3_ede = {
    .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
    .algo_mask = SAFEXCEL_ALG_DES,
    .alg.skcipher = {
        .setkey = safexcel_des3_ede_setkey,
        .encrypt = safexcel_encrypt,
        .decrypt = safexcel_decrypt,
        .min_keysize = DES3_EDE_KEY_SIZE,
        .max_keysize = DES3_EDE_KEY_SIZE,
        .base = {
            .cra_name = "ecb(des3_ede)",
            .cra_driver_name = "safexcel-ecb-des3_ede",
            .cra_priority = SAFEXCEL_CRA_PRIORITY,
            .cra_flags = CRYPTO_ALG_ASYNC |
                     CRYPTO_ALG_ALLOCATES_MEMORY |
                     CRYPTO_ALG_KERN_DRIVER_ONLY,
            .cra_blocksize = DES3_EDE_BLOCK_SIZE,
            .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
            .cra_alignmask = 0,
            .cra_init = safexcel_skcipher_des3_ecb_cra_init,
            .cra_exit = safexcel_skcipher_cra_exit,
            .cra_module = THIS_MODULE,
        },
    },
};
1684 
1685 static int safexcel_aead_encrypt(struct aead_request *req)
1686 {
1687     struct safexcel_cipher_req *creq = aead_request_ctx(req);
1688 
1689     return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
1690 }
1691 
1692 static int safexcel_aead_decrypt(struct aead_request *req)
1693 {
1694     struct safexcel_cipher_req *creq = aead_request_ctx(req);
1695 
1696     return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
1697 }
1698 
1699 static int safexcel_aead_cra_init(struct crypto_tfm *tfm)
1700 {
1701     struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1702     struct safexcel_alg_template *tmpl =
1703         container_of(tfm->__crt_alg, struct safexcel_alg_template,
1704                  alg.aead.base);
1705 
1706     crypto_aead_set_reqsize(__crypto_aead_cast(tfm),
1707                 sizeof(struct safexcel_cipher_req));
1708 
1709     ctx->base.priv = tmpl->priv;
1710 
1711     ctx->alg  = SAFEXCEL_AES; /* default */
1712     ctx->blocksz = AES_BLOCK_SIZE;
1713     ctx->ivmask = EIP197_OPTION_4_TOKEN_IV_CMD;
1714     ctx->ctrinit = 1;
1715     ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC; /* default */
1716     ctx->aead = true;
1717     ctx->base.send = safexcel_aead_send;
1718     ctx->base.handle_result = safexcel_aead_handle_result;
1719     return 0;
1720 }
1721 
1722 static int safexcel_aead_sha1_cra_init(struct crypto_tfm *tfm)
1723 {
1724     struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1725 
1726     safexcel_aead_cra_init(tfm);
1727     ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
1728     ctx->state_sz = SHA1_DIGEST_SIZE;
1729     return 0;
1730 }
1731 
1732 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_aes = {
1733     .type = SAFEXCEL_ALG_TYPE_AEAD,
1734     .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA1,
1735     .alg.aead = {
1736         .setkey = safexcel_aead_setkey,
1737         .encrypt = safexcel_aead_encrypt,
1738         .decrypt = safexcel_aead_decrypt,
1739         .ivsize = AES_BLOCK_SIZE,
1740         .maxauthsize = SHA1_DIGEST_SIZE,
1741         .base = {
1742             .cra_name = "authenc(hmac(sha1),cbc(aes))",
1743             .cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-aes",
1744             .cra_priority = SAFEXCEL_CRA_PRIORITY,
1745             .cra_flags = CRYPTO_ALG_ASYNC |
1746                      CRYPTO_ALG_ALLOCATES_MEMORY |
1747                      CRYPTO_ALG_KERN_DRIVER_ONLY,
1748             .cra_blocksize = AES_BLOCK_SIZE,
1749             .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1750             .cra_alignmask = 0,
1751             .cra_init = safexcel_aead_sha1_cra_init,
1752             .cra_exit = safexcel_aead_cra_exit,
1753             .cra_module = THIS_MODULE,
1754         },
1755     },
1756 };
1757 
1758 static int safexcel_aead_sha256_cra_init(struct crypto_tfm *tfm)
1759 {
1760     struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1761 
1762     safexcel_aead_cra_init(tfm);
1763     ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA256;
1764     ctx->state_sz = SHA256_DIGEST_SIZE;
1765     return 0;
1766 }
1767 
1768 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_cbc_aes = {
1769     .type = SAFEXCEL_ALG_TYPE_AEAD,
1770     .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
1771     .alg.aead = {
1772         .setkey = safexcel_aead_setkey,
1773         .encrypt = safexcel_aead_encrypt,
1774         .decrypt = safexcel_aead_decrypt,
1775         .ivsize = AES_BLOCK_SIZE,
1776         .maxauthsize = SHA256_DIGEST_SIZE,
1777         .base = {
1778             .cra_name = "authenc(hmac(sha256),cbc(aes))",
1779             .cra_driver_name = "safexcel-authenc-hmac-sha256-cbc-aes",
1780             .cra_priority = SAFEXCEL_CRA_PRIORITY,
1781             .cra_flags = CRYPTO_ALG_ASYNC |
1782                      CRYPTO_ALG_ALLOCATES_MEMORY |
1783                      CRYPTO_ALG_KERN_DRIVER_ONLY,
1784             .cra_blocksize = AES_BLOCK_SIZE,
1785             .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1786             .cra_alignmask = 0,
1787             .cra_init = safexcel_aead_sha256_cra_init,
1788             .cra_exit = safexcel_aead_cra_exit,
1789             .cra_module = THIS_MODULE,
1790         },
1791     },
1792 };
1793 
1794 static int safexcel_aead_sha224_cra_init(struct crypto_tfm *tfm)
1795 {
1796     struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1797 
1798     safexcel_aead_cra_init(tfm);
1799     ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA224;
1800     ctx->state_sz = SHA256_DIGEST_SIZE;
1801     return 0;
1802 }
1803 
1804 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_cbc_aes = {
1805     .type = SAFEXCEL_ALG_TYPE_AEAD,
1806     .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
1807     .alg.aead = {
1808         .setkey = safexcel_aead_setkey,
1809         .encrypt = safexcel_aead_encrypt,
1810         .decrypt = safexcel_aead_decrypt,
1811         .ivsize = AES_BLOCK_SIZE,
1812         .maxauthsize = SHA224_DIGEST_SIZE,
1813         .base = {
1814             .cra_name = "authenc(hmac(sha224),cbc(aes))",
1815             .cra_driver_name = "safexcel-authenc-hmac-sha224-cbc-aes",
1816             .cra_priority = SAFEXCEL_CRA_PRIORITY,
1817             .cra_flags = CRYPTO_ALG_ASYNC |
1818                      CRYPTO_ALG_ALLOCATES_MEMORY |
1819                      CRYPTO_ALG_KERN_DRIVER_ONLY,
1820             .cra_blocksize = AES_BLOCK_SIZE,
1821             .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1822             .cra_alignmask = 0,
1823             .cra_init = safexcel_aead_sha224_cra_init,
1824             .cra_exit = safexcel_aead_cra_exit,
1825             .cra_module = THIS_MODULE,
1826         },
1827     },
1828 };
1829 
1830 static int safexcel_aead_sha512_cra_init(struct crypto_tfm *tfm)
1831 {
1832     struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1833 
1834     safexcel_aead_cra_init(tfm);
1835     ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA512;
1836     ctx->state_sz = SHA512_DIGEST_SIZE;
1837     return 0;
1838 }
1839 
1840 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_cbc_aes = {
1841     .type = SAFEXCEL_ALG_TYPE_AEAD,
1842     .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
1843     .alg.aead = {
1844         .setkey = safexcel_aead_setkey,
1845         .encrypt = safexcel_aead_encrypt,
1846         .decrypt = safexcel_aead_decrypt,
1847         .ivsize = AES_BLOCK_SIZE,
1848         .maxauthsize = SHA512_DIGEST_SIZE,
1849         .base = {
1850             .cra_name = "authenc(hmac(sha512),cbc(aes))",
1851             .cra_driver_name = "safexcel-authenc-hmac-sha512-cbc-aes",
1852             .cra_priority = SAFEXCEL_CRA_PRIORITY,
1853             .cra_flags = CRYPTO_ALG_ASYNC |
1854                      CRYPTO_ALG_ALLOCATES_MEMORY |
1855                      CRYPTO_ALG_KERN_DRIVER_ONLY,
1856             .cra_blocksize = AES_BLOCK_SIZE,
1857             .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1858             .cra_alignmask = 0,
1859             .cra_init = safexcel_aead_sha512_cra_init,
1860             .cra_exit = safexcel_aead_cra_exit,
1861             .cra_module = THIS_MODULE,
1862         },
1863     },
1864 };
1865 
1866 static int safexcel_aead_sha384_cra_init(struct crypto_tfm *tfm)
1867 {
1868     struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1869 
1870     safexcel_aead_cra_init(tfm);
1871     ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA384;
1872     ctx->state_sz = SHA512_DIGEST_SIZE;
1873     return 0;
1874 }
1875 
1876 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_cbc_aes = {
1877     .type = SAFEXCEL_ALG_TYPE_AEAD,
1878     .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
1879     .alg.aead = {
1880         .setkey = safexcel_aead_setkey,
1881         .encrypt = safexcel_aead_encrypt,
1882         .decrypt = safexcel_aead_decrypt,
1883         .ivsize = AES_BLOCK_SIZE,
1884         .maxauthsize = SHA384_DIGEST_SIZE,
1885         .base = {
1886             .cra_name = "authenc(hmac(sha384),cbc(aes))",
1887             .cra_driver_name = "safexcel-authenc-hmac-sha384-cbc-aes",
1888             .cra_priority = SAFEXCEL_CRA_PRIORITY,
1889             .cra_flags = CRYPTO_ALG_ASYNC |
1890                      CRYPTO_ALG_ALLOCATES_MEMORY |
1891                      CRYPTO_ALG_KERN_DRIVER_ONLY,
1892             .cra_blocksize = AES_BLOCK_SIZE,
1893             .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1894             .cra_alignmask = 0,
1895             .cra_init = safexcel_aead_sha384_cra_init,
1896             .cra_exit = safexcel_aead_cra_exit,
1897             .cra_module = THIS_MODULE,
1898         },
1899     },
1900 };
1901 
1902 static int safexcel_aead_sha1_des3_cra_init(struct crypto_tfm *tfm)
1903 {
1904     struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1905 
1906     safexcel_aead_sha1_cra_init(tfm);
1907     ctx->alg = SAFEXCEL_3DES; /* override default */
1908     ctx->blocksz = DES3_EDE_BLOCK_SIZE;
1909     ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1910     return 0;
1911 }
1912 
1913 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_des3_ede = {
1914     .type = SAFEXCEL_ALG_TYPE_AEAD,
1915     .algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA1,
1916     .alg.aead = {
1917         .setkey = safexcel_aead_setkey,
1918         .encrypt = safexcel_aead_encrypt,
1919         .decrypt = safexcel_aead_decrypt,
1920         .ivsize = DES3_EDE_BLOCK_SIZE,
1921         .maxauthsize = SHA1_DIGEST_SIZE,
1922         .base = {
1923             .cra_name = "authenc(hmac(sha1),cbc(des3_ede))",
1924             .cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-des3_ede",
1925             .cra_priority = SAFEXCEL_CRA_PRIORITY,
1926             .cra_flags = CRYPTO_ALG_ASYNC |
1927                      CRYPTO_ALG_ALLOCATES_MEMORY |
1928                      CRYPTO_ALG_KERN_DRIVER_ONLY,
1929             .cra_blocksize = DES3_EDE_BLOCK_SIZE,
1930             .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1931             .cra_alignmask = 0,
1932             .cra_init = safexcel_aead_sha1_des3_cra_init,
1933             .cra_exit = safexcel_aead_cra_exit,
1934             .cra_module = THIS_MODULE,
1935         },
1936     },
1937 };
1938 
1939 static int safexcel_aead_sha256_des3_cra_init(struct crypto_tfm *tfm)
1940 {
1941     struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1942 
1943     safexcel_aead_sha256_cra_init(tfm);
1944     ctx->alg = SAFEXCEL_3DES; /* override default */
1945     ctx->blocksz = DES3_EDE_BLOCK_SIZE;
1946     ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1947     return 0;
1948 }
1949 
1950 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_cbc_des3_ede = {
1951     .type = SAFEXCEL_ALG_TYPE_AEAD,
1952     .algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_256,
1953     .alg.aead = {
1954         .setkey = safexcel_aead_setkey,
1955         .encrypt = safexcel_aead_encrypt,
1956         .decrypt = safexcel_aead_decrypt,
1957         .ivsize = DES3_EDE_BLOCK_SIZE,
1958         .maxauthsize = SHA256_DIGEST_SIZE,
1959         .base = {
1960             .cra_name = "authenc(hmac(sha256),cbc(des3_ede))",
1961             .cra_driver_name = "safexcel-authenc-hmac-sha256-cbc-des3_ede",
1962             .cra_priority = SAFEXCEL_CRA_PRIORITY,
1963             .cra_flags = CRYPTO_ALG_ASYNC |
1964                      CRYPTO_ALG_ALLOCATES_MEMORY |
1965                      CRYPTO_ALG_KERN_DRIVER_ONLY,
1966             .cra_blocksize = DES3_EDE_BLOCK_SIZE,
1967             .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1968             .cra_alignmask = 0,
1969             .cra_init = safexcel_aead_sha256_des3_cra_init,
1970             .cra_exit = safexcel_aead_cra_exit,
1971             .cra_module = THIS_MODULE,
1972         },
1973     },
1974 };
1975 
1976 static int safexcel_aead_sha224_des3_cra_init(struct crypto_tfm *tfm)
1977 {
1978     struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1979 
1980     safexcel_aead_sha224_cra_init(tfm);
1981     ctx->alg = SAFEXCEL_3DES; /* override default */
1982     ctx->blocksz = DES3_EDE_BLOCK_SIZE;
1983     ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1984     return 0;
1985 }
1986 
1987 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_cbc_des3_ede = {
1988     .type = SAFEXCEL_ALG_TYPE_AEAD,
1989     .algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_256,
1990     .alg.aead = {
1991         .setkey = safexcel_aead_setkey,
1992         .encrypt = safexcel_aead_encrypt,
1993         .decrypt = safexcel_aead_decrypt,
1994         .ivsize = DES3_EDE_BLOCK_SIZE,
1995         .maxauthsize = SHA224_DIGEST_SIZE,
1996         .base = {
1997             .cra_name = "authenc(hmac(sha224),cbc(des3_ede))",
1998             .cra_driver_name = "safexcel-authenc-hmac-sha224-cbc-des3_ede",
1999             .cra_priority = SAFEXCEL_CRA_PRIORITY,
2000             .cra_flags = CRYPTO_ALG_ASYNC |
2001                      CRYPTO_ALG_ALLOCATES_MEMORY |
2002                      CRYPTO_ALG_KERN_DRIVER_ONLY,
2003             .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2004             .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2005             .cra_alignmask = 0,
2006             .cra_init = safexcel_aead_sha224_des3_cra_init,
2007             .cra_exit = safexcel_aead_cra_exit,
2008             .cra_module = THIS_MODULE,
2009         },
2010     },
2011 };
2012 
2013 static int safexcel_aead_sha512_des3_cra_init(struct crypto_tfm *tfm)
2014 {
2015     struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2016 
2017     safexcel_aead_sha512_cra_init(tfm);
2018     ctx->alg = SAFEXCEL_3DES; /* override default */
2019     ctx->blocksz = DES3_EDE_BLOCK_SIZE;
2020     ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2021     return 0;
2022 }
2023 
2024 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_cbc_des3_ede = {
2025     .type = SAFEXCEL_ALG_TYPE_AEAD,
2026     .algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_512,
2027     .alg.aead = {
2028         .setkey = safexcel_aead_setkey,
2029         .encrypt = safexcel_aead_encrypt,
2030         .decrypt = safexcel_aead_decrypt,
2031         .ivsize = DES3_EDE_BLOCK_SIZE,
2032         .maxauthsize = SHA512_DIGEST_SIZE,
2033         .base = {
2034             .cra_name = "authenc(hmac(sha512),cbc(des3_ede))",
2035             .cra_driver_name = "safexcel-authenc-hmac-sha512-cbc-des3_ede",
2036             .cra_priority = SAFEXCEL_CRA_PRIORITY,
2037             .cra_flags = CRYPTO_ALG_ASYNC |
2038                      CRYPTO_ALG_ALLOCATES_MEMORY |
2039                      CRYPTO_ALG_KERN_DRIVER_ONLY,
2040             .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2041             .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2042             .cra_alignmask = 0,
2043             .cra_init = safexcel_aead_sha512_des3_cra_init,
2044             .cra_exit = safexcel_aead_cra_exit,
2045             .cra_module = THIS_MODULE,
2046         },
2047     },
2048 };
2049 
2050 static int safexcel_aead_sha384_des3_cra_init(struct crypto_tfm *tfm)
2051 {
2052     struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2053 
2054     safexcel_aead_sha384_cra_init(tfm);
2055     ctx->alg = SAFEXCEL_3DES; /* override default */
2056     ctx->blocksz = DES3_EDE_BLOCK_SIZE;
2057     ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2058     return 0;
2059 }
2060 
2061 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_cbc_des3_ede = {
2062     .type = SAFEXCEL_ALG_TYPE_AEAD,
2063     .algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_512,
2064     .alg.aead = {
2065         .setkey = safexcel_aead_setkey,
2066         .encrypt = safexcel_aead_encrypt,
2067         .decrypt = safexcel_aead_decrypt,
2068         .ivsize = DES3_EDE_BLOCK_SIZE,
2069         .maxauthsize = SHA384_DIGEST_SIZE,
2070         .base = {
2071             .cra_name = "authenc(hmac(sha384),cbc(des3_ede))",
2072             .cra_driver_name = "safexcel-authenc-hmac-sha384-cbc-des3_ede",
2073             .cra_priority = SAFEXCEL_CRA_PRIORITY,
2074             .cra_flags = CRYPTO_ALG_ASYNC |
2075                      CRYPTO_ALG_ALLOCATES_MEMORY |
2076                      CRYPTO_ALG_KERN_DRIVER_ONLY,
2077             .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2078             .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2079             .cra_alignmask = 0,
2080             .cra_init = safexcel_aead_sha384_des3_cra_init,
2081             .cra_exit = safexcel_aead_cra_exit,
2082             .cra_module = THIS_MODULE,
2083         },
2084     },
2085 };
2086 
2087 static int safexcel_aead_sha1_des_cra_init(struct crypto_tfm *tfm)
2088 {
2089     struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2090 
2091     safexcel_aead_sha1_cra_init(tfm);
2092     ctx->alg = SAFEXCEL_DES; /* override default */
2093     ctx->blocksz = DES_BLOCK_SIZE;
2094     ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2095     return 0;
2096 }
2097 
2098 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_des = {
2099     .type = SAFEXCEL_ALG_TYPE_AEAD,
2100     .algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA1,
2101     .alg.aead = {
2102         .setkey = safexcel_aead_setkey,
2103         .encrypt = safexcel_aead_encrypt,
2104         .decrypt = safexcel_aead_decrypt,
2105         .ivsize = DES_BLOCK_SIZE,
2106         .maxauthsize = SHA1_DIGEST_SIZE,
2107         .base = {
2108             .cra_name = "authenc(hmac(sha1),cbc(des))",
2109             .cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-des",
2110             .cra_priority = SAFEXCEL_CRA_PRIORITY,
2111             .cra_flags = CRYPTO_ALG_ASYNC |
2112                      CRYPTO_ALG_ALLOCATES_MEMORY |
2113                      CRYPTO_ALG_KERN_DRIVER_ONLY,
2114             .cra_blocksize = DES_BLOCK_SIZE,
2115             .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2116             .cra_alignmask = 0,
2117             .cra_init = safexcel_aead_sha1_des_cra_init,
2118             .cra_exit = safexcel_aead_cra_exit,
2119             .cra_module = THIS_MODULE,
2120         },
2121     },
2122 };
2123 
2124 static int safexcel_aead_sha256_des_cra_init(struct crypto_tfm *tfm)
2125 {
2126     struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2127 
2128     safexcel_aead_sha256_cra_init(tfm);
2129     ctx->alg = SAFEXCEL_DES; /* override default */
2130     ctx->blocksz = DES_BLOCK_SIZE;
2131     ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2132     return 0;
2133 }
2134 
2135 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_cbc_des = {
2136     .type = SAFEXCEL_ALG_TYPE_AEAD,
2137     .algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_256,
2138     .alg.aead = {
2139         .setkey = safexcel_aead_setkey,
2140         .encrypt = safexcel_aead_encrypt,
2141         .decrypt = safexcel_aead_decrypt,
2142         .ivsize = DES_BLOCK_SIZE,
2143         .maxauthsize = SHA256_DIGEST_SIZE,
2144         .base = {
2145             .cra_name = "authenc(hmac(sha256),cbc(des))",
2146             .cra_driver_name = "safexcel-authenc-hmac-sha256-cbc-des",
2147             .cra_priority = SAFEXCEL_CRA_PRIORITY,
2148             .cra_flags = CRYPTO_ALG_ASYNC |
2149                      CRYPTO_ALG_ALLOCATES_MEMORY |
2150                      CRYPTO_ALG_KERN_DRIVER_ONLY,
2151             .cra_blocksize = DES_BLOCK_SIZE,
2152             .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2153             .cra_alignmask = 0,
2154             .cra_init = safexcel_aead_sha256_des_cra_init,
2155             .cra_exit = safexcel_aead_cra_exit,
2156             .cra_module = THIS_MODULE,
2157         },
2158     },
2159 };
2160 
2161 static int safexcel_aead_sha224_des_cra_init(struct crypto_tfm *tfm)
2162 {
2163     struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2164 
2165     safexcel_aead_sha224_cra_init(tfm);
2166     ctx->alg = SAFEXCEL_DES; /* override default */
2167     ctx->blocksz = DES_BLOCK_SIZE;
2168     ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2169     return 0;
2170 }
2171 
2172 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_cbc_des = {
2173     .type = SAFEXCEL_ALG_TYPE_AEAD,
2174     .algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_256,
2175     .alg.aead = {
2176         .setkey = safexcel_aead_setkey,
2177         .encrypt = safexcel_aead_encrypt,
2178         .decrypt = safexcel_aead_decrypt,
2179         .ivsize = DES_BLOCK_SIZE,
2180         .maxauthsize = SHA224_DIGEST_SIZE,
2181         .base = {
2182             .cra_name = "authenc(hmac(sha224),cbc(des))",
2183             .cra_driver_name = "safexcel-authenc-hmac-sha224-cbc-des",
2184             .cra_priority = SAFEXCEL_CRA_PRIORITY,
2185             .cra_flags = CRYPTO_ALG_ASYNC |
2186                      CRYPTO_ALG_ALLOCATES_MEMORY |
2187                      CRYPTO_ALG_KERN_DRIVER_ONLY,
2188             .cra_blocksize = DES_BLOCK_SIZE,
2189             .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2190             .cra_alignmask = 0,
2191             .cra_init = safexcel_aead_sha224_des_cra_init,
2192             .cra_exit = safexcel_aead_cra_exit,
2193             .cra_module = THIS_MODULE,
2194         },
2195     },
2196 };
2197 
2198 static int safexcel_aead_sha512_des_cra_init(struct crypto_tfm *tfm)
2199 {
2200     struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2201 
2202     safexcel_aead_sha512_cra_init(tfm);
2203     ctx->alg = SAFEXCEL_DES; /* override default */
2204     ctx->blocksz = DES_BLOCK_SIZE;
2205     ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2206     return 0;
2207 }
2208 
2209 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_cbc_des = {
2210     .type = SAFEXCEL_ALG_TYPE_AEAD,
2211     .algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_512,
2212     .alg.aead = {
2213         .setkey = safexcel_aead_setkey,
2214         .encrypt = safexcel_aead_encrypt,
2215         .decrypt = safexcel_aead_decrypt,
2216         .ivsize = DES_BLOCK_SIZE,
2217         .maxauthsize = SHA512_DIGEST_SIZE,
2218         .base = {
2219             .cra_name = "authenc(hmac(sha512),cbc(des))",
2220             .cra_driver_name = "safexcel-authenc-hmac-sha512-cbc-des",
2221             .cra_priority = SAFEXCEL_CRA_PRIORITY,
2222             .cra_flags = CRYPTO_ALG_ASYNC |
2223                      CRYPTO_ALG_ALLOCATES_MEMORY |
2224                      CRYPTO_ALG_KERN_DRIVER_ONLY,
2225             .cra_blocksize = DES_BLOCK_SIZE,
2226             .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2227             .cra_alignmask = 0,
2228             .cra_init = safexcel_aead_sha512_des_cra_init,
2229             .cra_exit = safexcel_aead_cra_exit,
2230             .cra_module = THIS_MODULE,
2231         },
2232     },
2233 };
2234 
2235 static int safexcel_aead_sha384_des_cra_init(struct crypto_tfm *tfm)
2236 {
2237     struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2238 
2239     safexcel_aead_sha384_cra_init(tfm);
2240     ctx->alg = SAFEXCEL_DES; /* override default */
2241     ctx->blocksz = DES_BLOCK_SIZE;
2242     ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2243     return 0;
2244 }
2245 
2246 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_cbc_des = {
2247     .type = SAFEXCEL_ALG_TYPE_AEAD,
2248     .algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_512,
2249     .alg.aead = {
2250         .setkey = safexcel_aead_setkey,
2251         .encrypt = safexcel_aead_encrypt,
2252         .decrypt = safexcel_aead_decrypt,
2253         .ivsize = DES_BLOCK_SIZE,
2254         .maxauthsize = SHA384_DIGEST_SIZE,
2255         .base = {
2256             .cra_name = "authenc(hmac(sha384),cbc(des))",
2257             .cra_driver_name = "safexcel-authenc-hmac-sha384-cbc-des",
2258             .cra_priority = SAFEXCEL_CRA_PRIORITY,
2259             .cra_flags = CRYPTO_ALG_ASYNC |
2260                      CRYPTO_ALG_ALLOCATES_MEMORY |
2261                      CRYPTO_ALG_KERN_DRIVER_ONLY,
2262             .cra_blocksize = DES_BLOCK_SIZE,
2263             .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2264             .cra_alignmask = 0,
2265             .cra_init = safexcel_aead_sha384_des_cra_init,
2266             .cra_exit = safexcel_aead_cra_exit,
2267             .cra_module = THIS_MODULE,
2268         },
2269     },
2270 };
2271 
2272 static int safexcel_aead_sha1_ctr_cra_init(struct crypto_tfm *tfm)
2273 {
2274     struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2275 
2276     safexcel_aead_sha1_cra_init(tfm);
2277     ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
2278     return 0;
2279 }
2280 
2281 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_ctr_aes = {
2282     .type = SAFEXCEL_ALG_TYPE_AEAD,
2283     .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA1,
2284     .alg.aead = {
2285         .setkey = safexcel_aead_setkey,
2286         .encrypt = safexcel_aead_encrypt,
2287         .decrypt = safexcel_aead_decrypt,
2288         .ivsize = CTR_RFC3686_IV_SIZE,
2289         .maxauthsize = SHA1_DIGEST_SIZE,
2290         .base = {
2291             .cra_name = "authenc(hmac(sha1),rfc3686(ctr(aes)))",
2292             .cra_driver_name = "safexcel-authenc-hmac-sha1-ctr-aes",
2293             .cra_priority = SAFEXCEL_CRA_PRIORITY,
2294             .cra_flags = CRYPTO_ALG_ASYNC |
2295                      CRYPTO_ALG_ALLOCATES_MEMORY |
2296                      CRYPTO_ALG_KERN_DRIVER_ONLY,
2297             .cra_blocksize = 1,
2298             .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2299             .cra_alignmask = 0,
2300             .cra_init = safexcel_aead_sha1_ctr_cra_init,
2301             .cra_exit = safexcel_aead_cra_exit,
2302             .cra_module = THIS_MODULE,
2303         },
2304     },
2305 };
2306 
2307 static int safexcel_aead_sha256_ctr_cra_init(struct crypto_tfm *tfm)
2308 {
2309     struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2310 
2311     safexcel_aead_sha256_cra_init(tfm);
2312     ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
2313     return 0;
2314 }
2315 
2316 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_ctr_aes = {
2317     .type = SAFEXCEL_ALG_TYPE_AEAD,
2318     .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
2319     .alg.aead = {
2320         .setkey = safexcel_aead_setkey,
2321         .encrypt = safexcel_aead_encrypt,
2322         .decrypt = safexcel_aead_decrypt,
2323         .ivsize = CTR_RFC3686_IV_SIZE,
2324         .maxauthsize = SHA256_DIGEST_SIZE,
2325         .base = {
2326             .cra_name = "authenc(hmac(sha256),rfc3686(ctr(aes)))",
2327             .cra_driver_name = "safexcel-authenc-hmac-sha256-ctr-aes",
2328             .cra_priority = SAFEXCEL_CRA_PRIORITY,
2329             .cra_flags = CRYPTO_ALG_ASYNC |
2330                      CRYPTO_ALG_ALLOCATES_MEMORY |
2331                      CRYPTO_ALG_KERN_DRIVER_ONLY,
2332             .cra_blocksize = 1,
2333             .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2334             .cra_alignmask = 0,
2335             .cra_init = safexcel_aead_sha256_ctr_cra_init,
2336             .cra_exit = safexcel_aead_cra_exit,
2337             .cra_module = THIS_MODULE,
2338         },
2339     },
2340 };
2341 
2342 static int safexcel_aead_sha224_ctr_cra_init(struct crypto_tfm *tfm)
2343 {
2344     struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2345 
2346     safexcel_aead_sha224_cra_init(tfm);
2347     ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
2348     return 0;
2349 }
2350 
2351 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_ctr_aes = {
2352     .type = SAFEXCEL_ALG_TYPE_AEAD,
2353     .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
2354     .alg.aead = {
2355         .setkey = safexcel_aead_setkey,
2356         .encrypt = safexcel_aead_encrypt,
2357         .decrypt = safexcel_aead_decrypt,
2358         .ivsize = CTR_RFC3686_IV_SIZE,
2359         .maxauthsize = SHA224_DIGEST_SIZE,
2360         .base = {
2361             .cra_name = "authenc(hmac(sha224),rfc3686(ctr(aes)))",
2362             .cra_driver_name = "safexcel-authenc-hmac-sha224-ctr-aes",
2363             .cra_priority = SAFEXCEL_CRA_PRIORITY,
2364             .cra_flags = CRYPTO_ALG_ASYNC |
2365                      CRYPTO_ALG_ALLOCATES_MEMORY |
2366                      CRYPTO_ALG_KERN_DRIVER_ONLY,
2367             .cra_blocksize = 1,
2368             .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2369             .cra_alignmask = 0,
2370             .cra_init = safexcel_aead_sha224_ctr_cra_init,
2371             .cra_exit = safexcel_aead_cra_exit,
2372             .cra_module = THIS_MODULE,
2373         },
2374     },
2375 };
2376 
2377 static int safexcel_aead_sha512_ctr_cra_init(struct crypto_tfm *tfm)
2378 {
2379     struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2380 
2381     safexcel_aead_sha512_cra_init(tfm);
2382     ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
2383     return 0;
2384 }
2385 
2386 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_ctr_aes = {
2387     .type = SAFEXCEL_ALG_TYPE_AEAD,
2388     .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
2389     .alg.aead = {
2390         .setkey = safexcel_aead_setkey,
2391         .encrypt = safexcel_aead_encrypt,
2392         .decrypt = safexcel_aead_decrypt,
2393         .ivsize = CTR_RFC3686_IV_SIZE,
2394         .maxauthsize = SHA512_DIGEST_SIZE,
2395         .base = {
2396             .cra_name = "authenc(hmac(sha512),rfc3686(ctr(aes)))",
2397             .cra_driver_name = "safexcel-authenc-hmac-sha512-ctr-aes",
2398             .cra_priority = SAFEXCEL_CRA_PRIORITY,
2399             .cra_flags = CRYPTO_ALG_ASYNC |
2400                      CRYPTO_ALG_ALLOCATES_MEMORY |
2401                      CRYPTO_ALG_KERN_DRIVER_ONLY,
2402             .cra_blocksize = 1,
2403             .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2404             .cra_alignmask = 0,
2405             .cra_init = safexcel_aead_sha512_ctr_cra_init,
2406             .cra_exit = safexcel_aead_cra_exit,
2407             .cra_module = THIS_MODULE,
2408         },
2409     },
2410 };
2411 
2412 static int safexcel_aead_sha384_ctr_cra_init(struct crypto_tfm *tfm)
2413 {
2414     struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2415 
2416     safexcel_aead_sha384_cra_init(tfm);
2417     ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
2418     return 0;
2419 }
2420 
/*
 * AEAD: AES-CTR (RFC3686 flavour, nonce carried in the key) with
 * HMAC-SHA384 authentication. SHA384 runs on the SHA2-512 hardware
 * engine, hence the SAFEXCEL_ALG_SHA2_512 bit in algo_mask.
 */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_ctr_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SHA384_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha384),rfc3686(ctr(aes)))",
			.cra_driver_name = "safexcel-authenc-hmac-sha384-ctr-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha384_ctr_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2446 
2447 static int safexcel_skcipher_aesxts_setkey(struct crypto_skcipher *ctfm,
2448                        const u8 *key, unsigned int len)
2449 {
2450     struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
2451     struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2452     struct safexcel_crypto_priv *priv = ctx->base.priv;
2453     struct crypto_aes_ctx aes;
2454     int ret, i;
2455     unsigned int keylen;
2456 
2457     /* Check for illegal XTS keys */
2458     ret = xts_verify_key(ctfm, key, len);
2459     if (ret)
2460         return ret;
2461 
2462     /* Only half of the key data is cipher key */
2463     keylen = (len >> 1);
2464     ret = aes_expandkey(&aes, key, keylen);
2465     if (ret)
2466         return ret;
2467 
2468     if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
2469         for (i = 0; i < keylen / sizeof(u32); i++) {
2470             if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
2471                 ctx->base.needs_inv = true;
2472                 break;
2473             }
2474         }
2475     }
2476 
2477     for (i = 0; i < keylen / sizeof(u32); i++)
2478         ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
2479 
2480     /* The other half is the tweak key */
2481     ret = aes_expandkey(&aes, (u8 *)(key + keylen), keylen);
2482     if (ret)
2483         return ret;
2484 
2485     if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
2486         for (i = 0; i < keylen / sizeof(u32); i++) {
2487             if (le32_to_cpu(ctx->key[i + keylen / sizeof(u32)]) !=
2488                 aes.key_enc[i]) {
2489                 ctx->base.needs_inv = true;
2490                 break;
2491             }
2492         }
2493     }
2494 
2495     for (i = 0; i < keylen / sizeof(u32); i++)
2496         ctx->key[i + keylen / sizeof(u32)] =
2497             cpu_to_le32(aes.key_enc[i]);
2498 
2499     ctx->key_len = keylen << 1;
2500 
2501     memzero_explicit(&aes, sizeof(aes));
2502     return 0;
2503 }
2504 
2505 static int safexcel_skcipher_aes_xts_cra_init(struct crypto_tfm *tfm)
2506 {
2507     struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2508 
2509     safexcel_skcipher_cra_init(tfm);
2510     ctx->alg  = SAFEXCEL_AES;
2511     ctx->blocksz = AES_BLOCK_SIZE;
2512     ctx->xts  = 1;
2513     ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_XTS;
2514     return 0;
2515 }
2516 
2517 static int safexcel_encrypt_xts(struct skcipher_request *req)
2518 {
2519     if (req->cryptlen < XTS_BLOCK_SIZE)
2520         return -EINVAL;
2521     return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
2522                   SAFEXCEL_ENCRYPT);
2523 }
2524 
2525 static int safexcel_decrypt_xts(struct skcipher_request *req)
2526 {
2527     if (req->cryptlen < XTS_BLOCK_SIZE)
2528         return -EINVAL;
2529     return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
2530                   SAFEXCEL_DECRYPT);
2531 }
2532 
/* xts(aes): AES in XTS mode; the key is two AES keys back to back */
struct safexcel_alg_template safexcel_alg_xts_aes = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_AES_XTS,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_aesxts_setkey,
		.encrypt = safexcel_encrypt_xts,
		.decrypt = safexcel_decrypt_xts,
		/* XTS actually uses 2 AES keys glued together */
		.min_keysize = AES_MIN_KEY_SIZE * 2,
		.max_keysize = AES_MAX_KEY_SIZE * 2,
		.ivsize = XTS_BLOCK_SIZE,
		.base = {
			.cra_name = "xts(aes)",
			.cra_driver_name = "safexcel-xts-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = XTS_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_aes_xts_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2560 
2561 static int safexcel_aead_gcm_setkey(struct crypto_aead *ctfm, const u8 *key,
2562                     unsigned int len)
2563 {
2564     struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
2565     struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2566     struct safexcel_crypto_priv *priv = ctx->base.priv;
2567     struct crypto_aes_ctx aes;
2568     u32 hashkey[AES_BLOCK_SIZE >> 2];
2569     int ret, i;
2570 
2571     ret = aes_expandkey(&aes, key, len);
2572     if (ret) {
2573         memzero_explicit(&aes, sizeof(aes));
2574         return ret;
2575     }
2576 
2577     if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
2578         for (i = 0; i < len / sizeof(u32); i++) {
2579             if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
2580                 ctx->base.needs_inv = true;
2581                 break;
2582             }
2583         }
2584     }
2585 
2586     for (i = 0; i < len / sizeof(u32); i++)
2587         ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
2588 
2589     ctx->key_len = len;
2590 
2591     /* Compute hash key by encrypting zeroes with cipher key */
2592     crypto_cipher_clear_flags(ctx->hkaes, CRYPTO_TFM_REQ_MASK);
2593     crypto_cipher_set_flags(ctx->hkaes, crypto_aead_get_flags(ctfm) &
2594                 CRYPTO_TFM_REQ_MASK);
2595     ret = crypto_cipher_setkey(ctx->hkaes, key, len);
2596     if (ret)
2597         return ret;
2598 
2599     memset(hashkey, 0, AES_BLOCK_SIZE);
2600     crypto_cipher_encrypt_one(ctx->hkaes, (u8 *)hashkey, (u8 *)hashkey);
2601 
2602     if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
2603         for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++) {
2604             if (be32_to_cpu(ctx->base.ipad.be[i]) != hashkey[i]) {
2605                 ctx->base.needs_inv = true;
2606                 break;
2607             }
2608         }
2609     }
2610 
2611     for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++)
2612         ctx->base.ipad.be[i] = cpu_to_be32(hashkey[i]);
2613 
2614     memzero_explicit(hashkey, AES_BLOCK_SIZE);
2615     memzero_explicit(&aes, sizeof(aes));
2616     return 0;
2617 }
2618 
2619 static int safexcel_aead_gcm_cra_init(struct crypto_tfm *tfm)
2620 {
2621     struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2622 
2623     safexcel_aead_cra_init(tfm);
2624     ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_GHASH;
2625     ctx->state_sz = GHASH_BLOCK_SIZE;
2626     ctx->xcm = EIP197_XCM_MODE_GCM;
2627     ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_XCM; /* override default */
2628 
2629     ctx->hkaes = crypto_alloc_cipher("aes", 0, 0);
2630     return PTR_ERR_OR_ZERO(ctx->hkaes);
2631 }
2632 
2633 static void safexcel_aead_gcm_cra_exit(struct crypto_tfm *tfm)
2634 {
2635     struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2636 
2637     crypto_free_cipher(ctx->hkaes);
2638     safexcel_aead_cra_exit(tfm);
2639 }
2640 
static int safexcel_aead_gcm_setauthsize(struct crypto_aead *tfm,
					 unsigned int authsize)
{
	/* Delegate tag length validation to the generic GCM helper */
	return crypto_gcm_check_authsize(authsize);
}
2646 
/* gcm(aes): AES-GCM AEAD, GHASH key derived in software at setkey time */
struct safexcel_alg_template safexcel_alg_gcm = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_GHASH,
	.alg.aead = {
		.setkey = safexcel_aead_gcm_setkey,
		.setauthsize = safexcel_aead_gcm_setauthsize,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = GCM_AES_IV_SIZE,
		.maxauthsize = GHASH_DIGEST_SIZE,
		.base = {
			.cra_name = "gcm(aes)",
			.cra_driver_name = "safexcel-gcm-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_gcm_cra_init,
			.cra_exit = safexcel_aead_gcm_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2673 
2674 static int safexcel_aead_ccm_setkey(struct crypto_aead *ctfm, const u8 *key,
2675                     unsigned int len)
2676 {
2677     struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
2678     struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2679     struct safexcel_crypto_priv *priv = ctx->base.priv;
2680     struct crypto_aes_ctx aes;
2681     int ret, i;
2682 
2683     ret = aes_expandkey(&aes, key, len);
2684     if (ret) {
2685         memzero_explicit(&aes, sizeof(aes));
2686         return ret;
2687     }
2688 
2689     if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
2690         for (i = 0; i < len / sizeof(u32); i++) {
2691             if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
2692                 ctx->base.needs_inv = true;
2693                 break;
2694             }
2695         }
2696     }
2697 
2698     for (i = 0; i < len / sizeof(u32); i++) {
2699         ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
2700         ctx->base.ipad.be[i + 2 * AES_BLOCK_SIZE / sizeof(u32)] =
2701             cpu_to_be32(aes.key_enc[i]);
2702     }
2703 
2704     ctx->key_len = len;
2705     ctx->state_sz = 2 * AES_BLOCK_SIZE + len;
2706 
2707     if (len == AES_KEYSIZE_192)
2708         ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC192;
2709     else if (len == AES_KEYSIZE_256)
2710         ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC256;
2711     else
2712         ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;
2713 
2714     memzero_explicit(&aes, sizeof(aes));
2715     return 0;
2716 }
2717 
2718 static int safexcel_aead_ccm_cra_init(struct crypto_tfm *tfm)
2719 {
2720     struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2721 
2722     safexcel_aead_cra_init(tfm);
2723     ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;
2724     ctx->state_sz = 3 * AES_BLOCK_SIZE;
2725     ctx->xcm = EIP197_XCM_MODE_CCM;
2726     ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_XCM; /* override default */
2727     ctx->ctrinit = 0;
2728     return 0;
2729 }
2730 
2731 static int safexcel_aead_ccm_setauthsize(struct crypto_aead *tfm,
2732                      unsigned int authsize)
2733 {
2734     /* Borrowed from crypto/ccm.c */
2735     switch (authsize) {
2736     case 4:
2737     case 6:
2738     case 8:
2739     case 10:
2740     case 12:
2741     case 14:
2742     case 16:
2743         break;
2744     default:
2745         return -EINVAL;
2746     }
2747 
2748     return 0;
2749 }
2750 
2751 static int safexcel_ccm_encrypt(struct aead_request *req)
2752 {
2753     struct safexcel_cipher_req *creq = aead_request_ctx(req);
2754 
2755     if (req->iv[0] < 1 || req->iv[0] > 7)
2756         return -EINVAL;
2757 
2758     return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
2759 }
2760 
2761 static int safexcel_ccm_decrypt(struct aead_request *req)
2762 {
2763     struct safexcel_cipher_req *creq = aead_request_ctx(req);
2764 
2765     if (req->iv[0] < 1 || req->iv[0] > 7)
2766         return -EINVAL;
2767 
2768     return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
2769 }
2770 
/* ccm(aes): AES-CCM AEAD via the engine's XCBC/CBC-MAC support */
struct safexcel_alg_template safexcel_alg_ccm = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_CBC_MAC_ALL,
	.alg.aead = {
		.setkey = safexcel_aead_ccm_setkey,
		.setauthsize = safexcel_aead_ccm_setauthsize,
		.encrypt = safexcel_ccm_encrypt,
		.decrypt = safexcel_ccm_decrypt,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = AES_BLOCK_SIZE,
		.base = {
			.cra_name = "ccm(aes)",
			.cra_driver_name = "safexcel-ccm-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_ccm_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2797 
2798 static void safexcel_chacha20_setkey(struct safexcel_cipher_ctx *ctx,
2799                      const u8 *key)
2800 {
2801     struct safexcel_crypto_priv *priv = ctx->base.priv;
2802 
2803     if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
2804         if (memcmp(ctx->key, key, CHACHA_KEY_SIZE))
2805             ctx->base.needs_inv = true;
2806 
2807     memcpy(ctx->key, key, CHACHA_KEY_SIZE);
2808     ctx->key_len = CHACHA_KEY_SIZE;
2809 }
2810 
2811 static int safexcel_skcipher_chacha20_setkey(struct crypto_skcipher *ctfm,
2812                          const u8 *key, unsigned int len)
2813 {
2814     struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(ctfm);
2815 
2816     if (len != CHACHA_KEY_SIZE)
2817         return -EINVAL;
2818 
2819     safexcel_chacha20_setkey(ctx, key);
2820 
2821     return 0;
2822 }
2823 
2824 static int safexcel_skcipher_chacha20_cra_init(struct crypto_tfm *tfm)
2825 {
2826     struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2827 
2828     safexcel_skcipher_cra_init(tfm);
2829     ctx->alg  = SAFEXCEL_CHACHA20;
2830     ctx->ctrinit = 0;
2831     ctx->mode = CONTEXT_CONTROL_CHACHA20_MODE_256_32;
2832     return 0;
2833 }
2834 
/* chacha20: plain ChaCha20 stream cipher */
struct safexcel_alg_template safexcel_alg_chacha20 = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_CHACHA20,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_chacha20_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = CHACHA_KEY_SIZE,
		.max_keysize = CHACHA_KEY_SIZE,
		.ivsize = CHACHA_IV_SIZE,
		.base = {
			.cra_name = "chacha20",
			.cra_driver_name = "safexcel-chacha20",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_chacha20_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2861 
2862 static int safexcel_aead_chachapoly_setkey(struct crypto_aead *ctfm,
2863                     const u8 *key, unsigned int len)
2864 {
2865     struct safexcel_cipher_ctx *ctx = crypto_aead_ctx(ctfm);
2866 
2867     if (ctx->aead  == EIP197_AEAD_TYPE_IPSEC_ESP &&
2868         len > EIP197_AEAD_IPSEC_NONCE_SIZE) {
2869         /* ESP variant has nonce appended to key */
2870         len -= EIP197_AEAD_IPSEC_NONCE_SIZE;
2871         ctx->nonce = *(u32 *)(key + len);
2872     }
2873     if (len != CHACHA_KEY_SIZE)
2874         return -EINVAL;
2875 
2876     safexcel_chacha20_setkey(ctx, key);
2877 
2878     return 0;
2879 }
2880 
2881 static int safexcel_aead_chachapoly_setauthsize(struct crypto_aead *tfm,
2882                      unsigned int authsize)
2883 {
2884     if (authsize != POLY1305_DIGEST_SIZE)
2885         return -EINVAL;
2886     return 0;
2887 }
2888 
/*
 * Run a ChaCha20-Poly1305 request: large requests go to the hardware,
 * small/degenerate ones are rerouted to the software fallback AEAD.
 */
static int safexcel_aead_chachapoly_crypt(struct aead_request *req,
					  enum safexcel_cipher_direction dir)
{
	struct safexcel_cipher_req *creq = aead_request_ctx(req);
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_tfm *tfm = crypto_aead_tfm(aead);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	/*
	 * NOTE: subreq deliberately aliases creq — both are views of the
	 * same request context, which safexcel_aead_fallback_cra_init()
	 * sized to fit a full fallback aead_request. Only one view is
	 * used on any given path below.
	 */
	struct aead_request *subreq = aead_request_ctx(req);
	/* One spare u32 at the end for the appended ESP nonce */
	u32 key[CHACHA_KEY_SIZE / sizeof(u32) + 1];
	int ret = 0;

	/*
	 * Instead of wasting time detecting umpteen silly corner cases,
	 * just dump all "small" requests to the fallback implementation.
	 * HW would not be faster on such small requests anyway.
	 */
	if (likely((ctx->aead != EIP197_AEAD_TYPE_IPSEC_ESP ||
		    req->assoclen >= EIP197_AEAD_IPSEC_IV_SIZE) &&
		   req->cryptlen > POLY1305_DIGEST_SIZE)) {
		return safexcel_queue_req(&req->base, creq, dir);
	}

	/* HW cannot do full (AAD+payload) zero length, use fallback */
	memcpy(key, ctx->key, CHACHA_KEY_SIZE);
	if (ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP) {
		/* ESP variant has nonce appended to the key */
		key[CHACHA_KEY_SIZE / sizeof(u32)] = ctx->nonce;
		ret = crypto_aead_setkey(ctx->fback, (u8 *)key,
					 CHACHA_KEY_SIZE +
					 EIP197_AEAD_IPSEC_NONCE_SIZE);
	} else {
		ret = crypto_aead_setkey(ctx->fback, (u8 *)key,
					 CHACHA_KEY_SIZE);
	}
	if (ret) {
		/* Mirror the fallback's request flags back to our tfm */
		crypto_aead_clear_flags(aead, CRYPTO_TFM_REQ_MASK);
		crypto_aead_set_flags(aead, crypto_aead_get_flags(ctx->fback) &
					    CRYPTO_TFM_REQ_MASK);
		return ret;
	}

	/* Hand the entire request over to the software fallback */
	aead_request_set_tfm(subreq, ctx->fback);
	aead_request_set_callback(subreq, req->base.flags, req->base.complete,
				  req->base.data);
	aead_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
			       req->iv);
	aead_request_set_ad(subreq, req->assoclen);

	return (dir ==  SAFEXCEL_ENCRYPT) ?
		crypto_aead_encrypt(subreq) :
		crypto_aead_decrypt(subreq);
}
2941 
2942 static int safexcel_aead_chachapoly_encrypt(struct aead_request *req)
2943 {
2944     return safexcel_aead_chachapoly_crypt(req, SAFEXCEL_ENCRYPT);
2945 }
2946 
2947 static int safexcel_aead_chachapoly_decrypt(struct aead_request *req)
2948 {
2949     return safexcel_aead_chachapoly_crypt(req, SAFEXCEL_DECRYPT);
2950 }
2951 
2952 static int safexcel_aead_fallback_cra_init(struct crypto_tfm *tfm)
2953 {
2954     struct crypto_aead *aead = __crypto_aead_cast(tfm);
2955     struct aead_alg *alg = crypto_aead_alg(aead);
2956     struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2957 
2958     safexcel_aead_cra_init(tfm);
2959 
2960     /* Allocate fallback implementation */
2961     ctx->fback = crypto_alloc_aead(alg->base.cra_name, 0,
2962                        CRYPTO_ALG_ASYNC |
2963                        CRYPTO_ALG_NEED_FALLBACK);
2964     if (IS_ERR(ctx->fback))
2965         return PTR_ERR(ctx->fback);
2966 
2967     crypto_aead_set_reqsize(aead, max(sizeof(struct safexcel_cipher_req),
2968                       sizeof(struct aead_request) +
2969                       crypto_aead_reqsize(ctx->fback)));
2970 
2971     return 0;
2972 }
2973 
2974 static int safexcel_aead_chachapoly_cra_init(struct crypto_tfm *tfm)
2975 {
2976     struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2977 
2978     safexcel_aead_fallback_cra_init(tfm);
2979     ctx->alg  = SAFEXCEL_CHACHA20;
2980     ctx->mode = CONTEXT_CONTROL_CHACHA20_MODE_256_32 |
2981             CONTEXT_CONTROL_CHACHA20_MODE_CALC_OTK;
2982     ctx->ctrinit = 0;
2983     ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_POLY1305;
2984     ctx->state_sz = 0; /* Precomputed by HW */
2985     return 0;
2986 }
2987 
2988 static void safexcel_aead_fallback_cra_exit(struct crypto_tfm *tfm)
2989 {
2990     struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2991 
2992     crypto_free_aead(ctx->fback);
2993     safexcel_aead_cra_exit(tfm);
2994 }
2995 
/* rfc7539(chacha20,poly1305): ChaCha20-Poly1305 AEAD with SW fallback */
struct safexcel_alg_template safexcel_alg_chachapoly = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_CHACHA20 | SAFEXCEL_ALG_POLY1305,
	.alg.aead = {
		.setkey = safexcel_aead_chachapoly_setkey,
		.setauthsize = safexcel_aead_chachapoly_setauthsize,
		.encrypt = safexcel_aead_chachapoly_encrypt,
		.decrypt = safexcel_aead_chachapoly_decrypt,
		.ivsize = CHACHAPOLY_IV_SIZE,
		.maxauthsize = POLY1305_DIGEST_SIZE,
		.base = {
			.cra_name = "rfc7539(chacha20,poly1305)",
			.cra_driver_name = "safexcel-chacha20-poly1305",
			/* +1 to put it above HW chacha + SW poly */
			.cra_priority = SAFEXCEL_CRA_PRIORITY + 1,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY |
				     CRYPTO_ALG_NEED_FALLBACK,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_chachapoly_cra_init,
			.cra_exit = safexcel_aead_fallback_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
3024 
3025 static int safexcel_aead_chachapolyesp_cra_init(struct crypto_tfm *tfm)
3026 {
3027     struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3028     int ret;
3029 
3030     ret = safexcel_aead_chachapoly_cra_init(tfm);
3031     ctx->aead  = EIP197_AEAD_TYPE_IPSEC_ESP;
3032     ctx->aadskip = EIP197_AEAD_IPSEC_IV_SIZE;
3033     return ret;
3034 }
3035 
/*
 * rfc7539esp(chacha20,poly1305): IPsec ESP variant; the 4-byte nonce is
 * part of the key, so the advertised ivsize shrinks accordingly.
 */
struct safexcel_alg_template safexcel_alg_chachapoly_esp = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_CHACHA20 | SAFEXCEL_ALG_POLY1305,
	.alg.aead = {
		.setkey = safexcel_aead_chachapoly_setkey,
		.setauthsize = safexcel_aead_chachapoly_setauthsize,
		.encrypt = safexcel_aead_chachapoly_encrypt,
		.decrypt = safexcel_aead_chachapoly_decrypt,
		.ivsize = CHACHAPOLY_IV_SIZE - EIP197_AEAD_IPSEC_NONCE_SIZE,
		.maxauthsize = POLY1305_DIGEST_SIZE,
		.base = {
			.cra_name = "rfc7539esp(chacha20,poly1305)",
			.cra_driver_name = "safexcel-chacha20-poly1305-esp",
			/* +1 to put it above HW chacha + SW poly */
			.cra_priority = SAFEXCEL_CRA_PRIORITY + 1,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY |
				     CRYPTO_ALG_NEED_FALLBACK,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_chachapolyesp_cra_init,
			.cra_exit = safexcel_aead_fallback_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
3064 
3065 static int safexcel_skcipher_sm4_setkey(struct crypto_skcipher *ctfm,
3066                     const u8 *key, unsigned int len)
3067 {
3068     struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
3069     struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3070     struct safexcel_crypto_priv *priv = ctx->base.priv;
3071 
3072     if (len != SM4_KEY_SIZE)
3073         return -EINVAL;
3074 
3075     if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
3076         if (memcmp(ctx->key, key, SM4_KEY_SIZE))
3077             ctx->base.needs_inv = true;
3078 
3079     memcpy(ctx->key, key, SM4_KEY_SIZE);
3080     ctx->key_len = SM4_KEY_SIZE;
3081 
3082     return 0;
3083 }
3084 
3085 static int safexcel_sm4_blk_encrypt(struct skcipher_request *req)
3086 {
3087     /* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
3088     if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
3089         return -EINVAL;
3090     else
3091         return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
3092                       SAFEXCEL_ENCRYPT);
3093 }
3094 
3095 static int safexcel_sm4_blk_decrypt(struct skcipher_request *req)
3096 {
3097     /* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
3098     if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
3099         return -EINVAL;
3100     else
3101         return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
3102                       SAFEXCEL_DECRYPT);
3103 }
3104 
3105 static int safexcel_skcipher_sm4_ecb_cra_init(struct crypto_tfm *tfm)
3106 {
3107     struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3108 
3109     safexcel_skcipher_cra_init(tfm);
3110     ctx->alg  = SAFEXCEL_SM4;
3111     ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
3112     ctx->blocksz = 0;
3113     ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
3114     return 0;
3115 }
3116 
/* ecb(sm4): SM4 in ECB mode, no IV */
struct safexcel_alg_template safexcel_alg_ecb_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_SM4,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_sm4_setkey,
		.encrypt = safexcel_sm4_blk_encrypt,
		.decrypt = safexcel_sm4_blk_decrypt,
		.min_keysize = SM4_KEY_SIZE,
		.max_keysize = SM4_KEY_SIZE,
		.base = {
			.cra_name = "ecb(sm4)",
			.cra_driver_name = "safexcel-ecb-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = SM4_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_sm4_ecb_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
3142 
3143 static int safexcel_skcipher_sm4_cbc_cra_init(struct crypto_tfm *tfm)
3144 {
3145     struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3146 
3147     safexcel_skcipher_cra_init(tfm);
3148     ctx->alg  = SAFEXCEL_SM4;
3149     ctx->blocksz = SM4_BLOCK_SIZE;
3150     ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
3151     return 0;
3152 }
3153 
/* cbc(sm4): SM4 in CBC mode */
struct safexcel_alg_template safexcel_alg_cbc_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_SM4,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_sm4_setkey,
		.encrypt = safexcel_sm4_blk_encrypt,
		.decrypt = safexcel_sm4_blk_decrypt,
		.min_keysize = SM4_KEY_SIZE,
		.max_keysize = SM4_KEY_SIZE,
		.ivsize = SM4_BLOCK_SIZE,
		.base = {
			.cra_name = "cbc(sm4)",
			.cra_driver_name = "safexcel-cbc-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = SM4_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_sm4_cbc_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
3180 
3181 static int safexcel_skcipher_sm4_ofb_cra_init(struct crypto_tfm *tfm)
3182 {
3183     struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3184 
3185     safexcel_skcipher_cra_init(tfm);
3186     ctx->alg  = SAFEXCEL_SM4;
3187     ctx->blocksz = SM4_BLOCK_SIZE;
3188     ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_OFB;
3189     return 0;
3190 }
3191 
/* ofb(sm4): SM4 in OFB mode (stream mode, blocksize 1) */
struct safexcel_alg_template safexcel_alg_ofb_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_AES_XFB,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_sm4_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = SM4_KEY_SIZE,
		.max_keysize = SM4_KEY_SIZE,
		.ivsize = SM4_BLOCK_SIZE,
		.base = {
			.cra_name = "ofb(sm4)",
			.cra_driver_name = "safexcel-ofb-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_sm4_ofb_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
3218 
3219 static int safexcel_skcipher_sm4_cfb_cra_init(struct crypto_tfm *tfm)
3220 {
3221     struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3222 
3223     safexcel_skcipher_cra_init(tfm);
3224     ctx->alg  = SAFEXCEL_SM4;
3225     ctx->blocksz = SM4_BLOCK_SIZE;
3226     ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CFB;
3227     return 0;
3228 }
3229 
/* cfb(sm4): SM4 in CFB mode (stream mode, blocksize 1) */
struct safexcel_alg_template safexcel_alg_cfb_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_AES_XFB,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_sm4_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = SM4_KEY_SIZE,
		.max_keysize = SM4_KEY_SIZE,
		.ivsize = SM4_BLOCK_SIZE,
		.base = {
			.cra_name = "cfb(sm4)",
			.cra_driver_name = "safexcel-cfb-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_sm4_cfb_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
3256 
3257 static int safexcel_skcipher_sm4ctr_setkey(struct crypto_skcipher *ctfm,
3258                        const u8 *key, unsigned int len)
3259 {
3260     struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
3261     struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3262 
3263     /* last 4 bytes of key are the nonce! */
3264     ctx->nonce = *(u32 *)(key + len - CTR_RFC3686_NONCE_SIZE);
3265     /* exclude the nonce here */
3266     len -= CTR_RFC3686_NONCE_SIZE;
3267 
3268     return safexcel_skcipher_sm4_setkey(ctfm, key, len);
3269 }
3270 
3271 static int safexcel_skcipher_sm4_ctr_cra_init(struct crypto_tfm *tfm)
3272 {
3273     struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3274 
3275     safexcel_skcipher_cra_init(tfm);
3276     ctx->alg  = SAFEXCEL_SM4;
3277     ctx->blocksz = SM4_BLOCK_SIZE;
3278     ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
3279     return 0;
3280 }
3281 
/* rfc3686(ctr(sm4)): SM4 counter mode, nonce carried in the key */
struct safexcel_alg_template safexcel_alg_ctr_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_SM4,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_sm4ctr_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		/* Add nonce size */
		.min_keysize = SM4_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
		.max_keysize = SM4_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.base = {
			.cra_name = "rfc3686(ctr(sm4))",
			.cra_driver_name = "safexcel-ctr-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_sm4_ctr_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
3309 
3310 static int safexcel_aead_sm4_blk_encrypt(struct aead_request *req)
3311 {
3312     /* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
3313     if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
3314         return -EINVAL;
3315 
3316     return safexcel_queue_req(&req->base, aead_request_ctx(req),
3317                   SAFEXCEL_ENCRYPT);
3318 }
3319 
3320 static int safexcel_aead_sm4_blk_decrypt(struct aead_request *req)
3321 {
3322     struct crypto_aead *tfm = crypto_aead_reqtfm(req);
3323 
3324     /* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
3325     if ((req->cryptlen - crypto_aead_authsize(tfm)) & (SM4_BLOCK_SIZE - 1))
3326         return -EINVAL;
3327 
3328     return safexcel_queue_req(&req->base, aead_request_ctx(req),
3329                   SAFEXCEL_DECRYPT);
3330 }
3331 
3332 static int safexcel_aead_sm4cbc_sha1_cra_init(struct crypto_tfm *tfm)
3333 {
3334     struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3335 
3336     safexcel_aead_cra_init(tfm);
3337     ctx->alg = SAFEXCEL_SM4;
3338     ctx->blocksz = SM4_BLOCK_SIZE;
3339     ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
3340     ctx->state_sz = SHA1_DIGEST_SIZE;
3341     return 0;
3342 }
3343 
/* authenc(hmac(sha1),cbc(sm4)): SM4-CBC encryption with HMAC-SHA1 auth */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SHA1,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		/* _blk_ entries reject inputs not a multiple of the blocksize */
		.encrypt = safexcel_aead_sm4_blk_encrypt,
		.decrypt = safexcel_aead_sm4_blk_decrypt,
		.ivsize = SM4_BLOCK_SIZE,
		.maxauthsize = SHA1_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha1),cbc(sm4))",
			.cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = SM4_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sm4cbc_sha1_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
3369 
3370 static int safexcel_aead_fallback_setkey(struct crypto_aead *ctfm,
3371                      const u8 *key, unsigned int len)
3372 {
3373     struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
3374     struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3375 
3376     /* Keep fallback cipher synchronized */
3377     return crypto_aead_setkey(ctx->fback, (u8 *)key, len) ?:
3378            safexcel_aead_setkey(ctfm, key, len);
3379 }
3380 
3381 static int safexcel_aead_fallback_setauthsize(struct crypto_aead *ctfm,
3382                           unsigned int authsize)
3383 {
3384     struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
3385     struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3386 
3387     /* Keep fallback cipher synchronized */
3388     return crypto_aead_setauthsize(ctx->fback, authsize);
3389 }
3390 
3391 static int safexcel_aead_fallback_crypt(struct aead_request *req,
3392                     enum safexcel_cipher_direction dir)
3393 {
3394     struct crypto_aead *aead = crypto_aead_reqtfm(req);
3395     struct crypto_tfm *tfm = crypto_aead_tfm(aead);
3396     struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3397     struct aead_request *subreq = aead_request_ctx(req);
3398 
3399     aead_request_set_tfm(subreq, ctx->fback);
3400     aead_request_set_callback(subreq, req->base.flags, req->base.complete,
3401                   req->base.data);
3402     aead_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
3403                    req->iv);
3404     aead_request_set_ad(subreq, req->assoclen);
3405 
3406     return (dir ==  SAFEXCEL_ENCRYPT) ?
3407         crypto_aead_encrypt(subreq) :
3408         crypto_aead_decrypt(subreq);
3409 }
3410 
3411 static int safexcel_aead_sm4cbc_sm3_encrypt(struct aead_request *req)
3412 {
3413     struct safexcel_cipher_req *creq = aead_request_ctx(req);
3414 
3415     /* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
3416     if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
3417         return -EINVAL;
3418     else if (req->cryptlen || req->assoclen) /* If input length > 0 only */
3419         return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
3420 
3421     /* HW cannot do full (AAD+payload) zero length, use fallback */
3422     return safexcel_aead_fallback_crypt(req, SAFEXCEL_ENCRYPT);
3423 }
3424 
3425 static int safexcel_aead_sm4cbc_sm3_decrypt(struct aead_request *req)
3426 {
3427     struct safexcel_cipher_req *creq = aead_request_ctx(req);
3428     struct crypto_aead *tfm = crypto_aead_reqtfm(req);
3429 
3430     /* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
3431     if ((req->cryptlen - crypto_aead_authsize(tfm)) & (SM4_BLOCK_SIZE - 1))
3432         return -EINVAL;
3433     else if (req->cryptlen > crypto_aead_authsize(tfm) || req->assoclen)
3434         /* If input length > 0 only */
3435         return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
3436 
3437     /* HW cannot do full (AAD+payload) zero length, use fallback */
3438     return safexcel_aead_fallback_crypt(req, SAFEXCEL_DECRYPT);
3439 }
3440 
3441 static int safexcel_aead_sm4cbc_sm3_cra_init(struct crypto_tfm *tfm)
3442 {
3443     struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3444 
3445     safexcel_aead_fallback_cra_init(tfm);
3446     ctx->alg = SAFEXCEL_SM4;
3447     ctx->blocksz = SM4_BLOCK_SIZE;
3448     ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SM3;
3449     ctx->state_sz = SM3_DIGEST_SIZE;
3450     return 0;
3451 }
3452 
/*
 * authenc(hmac(sm3),cbc(sm4)): SM4-CBC with HMAC-SM3 auth.  Needs a
 * software fallback for the zero-length-input case the engine can't do.
 */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sm3_cbc_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SM3,
	.alg.aead = {
		/* fallback variants keep the software cipher's key in sync */
		.setkey = safexcel_aead_fallback_setkey,
		.setauthsize = safexcel_aead_fallback_setauthsize,
		.encrypt = safexcel_aead_sm4cbc_sm3_encrypt,
		.decrypt = safexcel_aead_sm4cbc_sm3_decrypt,
		.ivsize = SM4_BLOCK_SIZE,
		.maxauthsize = SM3_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sm3),cbc(sm4))",
			.cra_driver_name = "safexcel-authenc-hmac-sm3-cbc-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY |
				     CRYPTO_ALG_NEED_FALLBACK,
			.cra_blocksize = SM4_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sm4cbc_sm3_cra_init,
			.cra_exit = safexcel_aead_fallback_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
3480 
3481 static int safexcel_aead_sm4ctr_sha1_cra_init(struct crypto_tfm *tfm)
3482 {
3483     struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3484 
3485     safexcel_aead_sm4cbc_sha1_cra_init(tfm);
3486     ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
3487     return 0;
3488 }
3489 
/* authenc(hmac(sha1),rfc3686(ctr(sm4))): SM4-CTR with HMAC-SHA1 auth */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_ctr_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SHA1,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SHA1_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha1),rfc3686(ctr(sm4)))",
			.cra_driver_name = "safexcel-authenc-hmac-sha1-ctr-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			/* CTR is a stream cipher: any input length is valid */
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sm4ctr_sha1_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
3515 
3516 static int safexcel_aead_sm4ctr_sm3_cra_init(struct crypto_tfm *tfm)
3517 {
3518     struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3519 
3520     safexcel_aead_sm4cbc_sm3_cra_init(tfm);
3521     ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
3522     return 0;
3523 }
3524 
/* authenc(hmac(sm3),rfc3686(ctr(sm4))): SM4-CTR with HMAC-SM3 auth */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sm3_ctr_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SM3,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SM3_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sm3),rfc3686(ctr(sm4)))",
			.cra_driver_name = "safexcel-authenc-hmac-sm3-ctr-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			/* CTR is a stream cipher: any input length is valid */
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sm4ctr_sm3_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
3550 
3551 static int safexcel_rfc4106_gcm_setkey(struct crypto_aead *ctfm, const u8 *key,
3552                        unsigned int len)
3553 {
3554     struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
3555     struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3556 
3557     /* last 4 bytes of key are the nonce! */
3558     ctx->nonce = *(u32 *)(key + len - CTR_RFC3686_NONCE_SIZE);
3559 
3560     len -= CTR_RFC3686_NONCE_SIZE;
3561     return safexcel_aead_gcm_setkey(ctfm, key, len);
3562 }
3563 
/* RFC4106 restricts the ICV to 8, 12 or 16 bytes; delegate the check. */
static int safexcel_rfc4106_gcm_setauthsize(struct crypto_aead *tfm,
					    unsigned int authsize)
{
	return crypto_rfc4106_check_authsize(authsize);
}
3569 
3570 static int safexcel_rfc4106_encrypt(struct aead_request *req)
3571 {
3572     return crypto_ipsec_check_assoclen(req->assoclen) ?:
3573            safexcel_aead_encrypt(req);
3574 }
3575 
3576 static int safexcel_rfc4106_decrypt(struct aead_request *req)
3577 {
3578     return crypto_ipsec_check_assoclen(req->assoclen) ?:
3579            safexcel_aead_decrypt(req);
3580 }
3581 
3582 static int safexcel_rfc4106_gcm_cra_init(struct crypto_tfm *tfm)
3583 {
3584     struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3585     int ret;
3586 
3587     ret = safexcel_aead_gcm_cra_init(tfm);
3588     ctx->aead  = EIP197_AEAD_TYPE_IPSEC_ESP;
3589     ctx->aadskip = EIP197_AEAD_IPSEC_IV_SIZE;
3590     return ret;
3591 }
3592 
3593 struct safexcel_alg_template safexcel_alg_rfc4106_gcm = {
3594     .type = SAFEXCEL_ALG_TYPE_AEAD,
3595     .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_GHASH,
3596     .alg.aead = {
3597         .setkey = safexcel_rfc4106_gcm_setkey,
3598         .setauthsize = safexcel_rfc4106_gcm_setauthsize,
3599         .encrypt = safexcel_rfc4106_encrypt,
3600         .decrypt = safexcel_rfc4106_decrypt,
3601         .ivsize = GCM_RFC4106_IV_SIZE,
3602         .maxauthsize = GHASH_DIGEST_SIZE,
3603         .base = {
3604             .cra_name = "rfc4106(gcm(aes))",
3605             .cra_driver_name = "safexcel-rfc4106-gcm-aes",
3606             .cra_priority = SAFEXCEL_CRA_PRIORITY,
3607             .cra_flags = CRYPTO_ALG_ASYNC |
3608                      CRYPTO_ALG_ALLOCATES_MEMORY |
3609                      CRYPTO_ALG_KERN_DRIVER_ONLY,
3610             .cra_blocksize = 1,
3611             .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3612             .cra_alignmask = 0,
3613             .cra_init = safexcel_rfc4106_gcm_cra_init,
3614             .cra_exit = safexcel_aead_gcm_cra_exit,
3615         },
3616     },
3617 };
3618 
3619 static int safexcel_rfc4543_gcm_setauthsize(struct crypto_aead *tfm,
3620                         unsigned int authsize)
3621 {
3622     if (authsize != GHASH_DIGEST_SIZE)
3623         return -EINVAL;
3624 
3625     return 0;
3626 }
3627 
3628 static int safexcel_rfc4543_gcm_cra_init(struct crypto_tfm *tfm)
3629 {
3630     struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3631     int ret;
3632 
3633     ret = safexcel_aead_gcm_cra_init(tfm);
3634     ctx->aead  = EIP197_AEAD_TYPE_IPSEC_ESP_GMAC;
3635     return ret;
3636 }
3637 
3638 struct safexcel_alg_template safexcel_alg_rfc4543_gcm = {
3639     .type = SAFEXCEL_ALG_TYPE_AEAD,
3640     .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_GHASH,
3641     .alg.aead = {
3642         .setkey = safexcel_rfc4106_gcm_setkey,
3643         .setauthsize = safexcel_rfc4543_gcm_setauthsize,
3644         .encrypt = safexcel_rfc4106_encrypt,
3645         .decrypt = safexcel_rfc4106_decrypt,
3646         .ivsize = GCM_RFC4543_IV_SIZE,
3647         .maxauthsize = GHASH_DIGEST_SIZE,
3648         .base = {
3649             .cra_name = "rfc4543(gcm(aes))",
3650             .cra_driver_name = "safexcel-rfc4543-gcm-aes",
3651             .cra_priority = SAFEXCEL_CRA_PRIORITY,
3652             .cra_flags = CRYPTO_ALG_ASYNC |
3653                      CRYPTO_ALG_ALLOCATES_MEMORY |
3654                      CRYPTO_ALG_KERN_DRIVER_ONLY,
3655             .cra_blocksize = 1,
3656             .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3657             .cra_alignmask = 0,
3658             .cra_init = safexcel_rfc4543_gcm_cra_init,
3659             .cra_exit = safexcel_aead_gcm_cra_exit,
3660         },
3661     },
3662 };
3663 
/*
 * rfc4309(ccm(aes)) setkey: key material is the AES key with a 3-byte salt
 * appended.  Pack the CCM L byte plus the salt into ctx->nonce, then program
 * the CCM key.  Note the packing is byte-wise through a u8 view of the u32
 * nonce, so the in-memory layout is endian-independent.
 */
static int safexcel_rfc4309_ccm_setkey(struct crypto_aead *ctfm, const u8 *key,
				       unsigned int len)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	/* First byte of the nonce = L = always 3 for RFC4309 (4 byte ctr) */
	*(u8 *)&ctx->nonce = EIP197_AEAD_IPSEC_COUNTER_SIZE - 1;
	/* last 3 bytes of key are the nonce! */
	memcpy((u8 *)&ctx->nonce + 1, key + len -
	       EIP197_AEAD_IPSEC_CCM_NONCE_SIZE,
	       EIP197_AEAD_IPSEC_CCM_NONCE_SIZE);

	/* exclude the nonce from the key handed to the CCM setkey */
	len -= EIP197_AEAD_IPSEC_CCM_NONCE_SIZE;
	return safexcel_aead_ccm_setkey(ctfm, key, len);
}
3680 
/* RFC4309 restricts the CCM ICV to 8, 12 or 16 bytes. */
static int safexcel_rfc4309_ccm_setauthsize(struct crypto_aead *tfm,
					    unsigned int authsize)
{
	/* Borrowed from crypto/ccm.c */
	if (authsize != 8 && authsize != 12 && authsize != 16)
		return -EINVAL;

	return 0;
}
3696 
3697 static int safexcel_rfc4309_ccm_encrypt(struct aead_request *req)
3698 {
3699     struct safexcel_cipher_req *creq = aead_request_ctx(req);
3700 
3701     /* Borrowed from crypto/ccm.c */
3702     if (req->assoclen != 16 && req->assoclen != 20)
3703         return -EINVAL;
3704 
3705     return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
3706 }
3707 
3708 static int safexcel_rfc4309_ccm_decrypt(struct aead_request *req)
3709 {
3710     struct safexcel_cipher_req *creq = aead_request_ctx(req);
3711 
3712     /* Borrowed from crypto/ccm.c */
3713     if (req->assoclen != 16 && req->assoclen != 20)
3714         return -EINVAL;
3715 
3716     return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
3717 }
3718 
3719 static int safexcel_rfc4309_ccm_cra_init(struct crypto_tfm *tfm)
3720 {
3721     struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3722     int ret;
3723 
3724     ret = safexcel_aead_ccm_cra_init(tfm);
3725     ctx->aead  = EIP197_AEAD_TYPE_IPSEC_ESP;
3726     ctx->aadskip = EIP197_AEAD_IPSEC_IV_SIZE;
3727     return ret;
3728 }
3729 
/* rfc4309(ccm(aes)): AES-CCM for IPsec ESP with an implicit 3-byte salt */
struct safexcel_alg_template safexcel_alg_rfc4309_ccm = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_CBC_MAC_ALL,
	.alg.aead = {
		.setkey = safexcel_rfc4309_ccm_setkey,
		.setauthsize = safexcel_rfc4309_ccm_setauthsize,
		.encrypt = safexcel_rfc4309_ccm_encrypt,
		.decrypt = safexcel_rfc4309_ccm_decrypt,
		.ivsize = EIP197_AEAD_IPSEC_IV_SIZE,
		.maxauthsize = AES_BLOCK_SIZE,
		.base = {
			.cra_name = "rfc4309(ccm(aes))",
			.cra_driver_name = "safexcel-rfc4309-ccm-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_rfc4309_ccm_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};