// SPDX-License-Identifier: GPL-2.0-only
/*
 * Copyright (c) 2010-2014, The Linux Foundation. All rights reserved.
 */

#include <linux/err.h>
#include <linux/interrupt.h>
#include <linux/types.h>
#include <crypto/scatterwalk.h>
#include <crypto/sha1.h>
#include <crypto/sha2.h>

#include "cipher.h"
#include "common.h"
#include "core.h"
#include "regs-v5.h"
#include "sha.h"
#include "aead.h"

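/* Plain MMIO accessors for the crypto engine register file */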
static inline u32 qce_read(struct qce_device *qce, u32 offset)
{
	return readl(qce->base + offset);
}

static inline void qce_write(struct qce_device *qce, u32 offset, u32 val)
{
	writel(val, qce->base + offset);
}

static inline void qce_write_array(struct qce_device *qce, u32 offset,
				   const u32 *val, unsigned int len)
{
	int i;

	for (i = 0; i < len; i++)
		qce_write(qce, offset + i * sizeof(u32), val[i]);
}

static inline void
qce_clear_array(struct qce_device *qce, u32 offset, unsigned int len)
{
	int i;

	for (i = 0; i < len; i++)
		qce_write(qce, offset + i * sizeof(u32), 0);
}

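/*
 * Build a CRYPTO_CONFIG value: the request size in 64-bit beats and the
 * pipe pair to use. The engine's own interrupts are all masked,
 * presumably because completion is signalled through the DMA channel
 * rather than by the crypto block itself.
 */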
static u32 qce_config_reg(struct qce_device *qce, int little)
{
	u32 beats = (qce->burst_size >> 3) - 1;
	u32 pipe_pair = qce->pipe_pair_id;
	u32 config;

	config = (beats << REQ_SIZE_SHIFT) & REQ_SIZE_MASK;
	config |= BIT(MASK_DOUT_INTR_SHIFT) | BIT(MASK_DIN_INTR_SHIFT) |
		  BIT(MASK_OP_DONE_INTR_SHIFT) | BIT(MASK_ERR_INTR_SHIFT);
	config |= (pipe_pair << PIPE_SET_SELECT_SHIFT) & PIPE_SET_SELECT_MASK;
	/* HIGH_SPD_EN_N is presumably active low: keep its bit cleared */
	config &= ~BIT(HIGH_SPD_EN_N_SHIFT);

	if (little)
		config |= BIT(LITTLE_ENDIAN_MODE_SHIFT);

	return config;
}

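/* Repack a byte stream into the big-endian 32-bit words the engine expects */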
void qce_cpu_to_be32p_array(__be32 *dst, const u8 *src, unsigned int len)
{
	__be32 *d = dst;
	const u8 *s = src;
	unsigned int n;

	n = len / sizeof(u32);
	for (; n > 0; n--) {
		*d = cpu_to_be32p((const __u32 *)s);
		s += sizeof(__u32);
		d++;
	}
}

static void qce_setup_config(struct qce_device *qce)
{
	u32 config;

	/* get big endianness */
	config = qce_config_reg(qce, 0);

	/* clear status register */
	qce_write(qce, REG_STATUS, 0);
	qce_write(qce, REG_CONFIG, config);
}

static inline void qce_crypto_go(struct qce_device *qce, bool result_dump)
{
	if (result_dump)
		qce_write(qce, REG_GOPROC, BIT(GO_SHIFT) | BIT(RESULTS_DUMP_SHIFT));
	else
		qce_write(qce, REG_GOPROC, BIT(GO_SHIFT));
}

#if defined(CONFIG_CRYPTO_DEV_QCE_SHA) || defined(CONFIG_CRYPTO_DEV_QCE_AEAD)
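/*
 * Translate the algorithm flags into an AUTH_SEG_CFG value: hash
 * algorithm, key and digest sizes, mode, and where the hash pass sits
 * relative to the cipher pass.
 */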
static u32 qce_auth_cfg(unsigned long flags, u32 key_size, u32 auth_size)
{
	u32 cfg = 0;

	if (IS_CCM(flags) || IS_CMAC(flags))
		cfg |= AUTH_ALG_AES << AUTH_ALG_SHIFT;
	else
		cfg |= AUTH_ALG_SHA << AUTH_ALG_SHIFT;

	if (IS_CCM(flags) || IS_CMAC(flags)) {
		if (key_size == AES_KEYSIZE_128)
			cfg |= AUTH_KEY_SZ_AES128 << AUTH_KEY_SIZE_SHIFT;
		else if (key_size == AES_KEYSIZE_256)
			cfg |= AUTH_KEY_SZ_AES256 << AUTH_KEY_SIZE_SHIFT;
	}

	if (IS_SHA1(flags) || IS_SHA1_HMAC(flags))
		cfg |= AUTH_SIZE_SHA1 << AUTH_SIZE_SHIFT;
	else if (IS_SHA256(flags) || IS_SHA256_HMAC(flags))
		cfg |= AUTH_SIZE_SHA256 << AUTH_SIZE_SHIFT;
	else if (IS_CMAC(flags))
		cfg |= AUTH_SIZE_ENUM_16_BYTES << AUTH_SIZE_SHIFT;
	else if (IS_CCM(flags))
		cfg |= (auth_size - 1) << AUTH_SIZE_SHIFT;

	if (IS_SHA1(flags) || IS_SHA256(flags))
		cfg |= AUTH_MODE_HASH << AUTH_MODE_SHIFT;
	else if (IS_SHA1_HMAC(flags) || IS_SHA256_HMAC(flags))
		cfg |= AUTH_MODE_HMAC << AUTH_MODE_SHIFT;
	else if (IS_CCM(flags))
		cfg |= AUTH_MODE_CCM << AUTH_MODE_SHIFT;
	else if (IS_CMAC(flags))
		cfg |= AUTH_MODE_CMAC << AUTH_MODE_SHIFT;

	if (IS_SHA(flags) || IS_SHA_HMAC(flags))
		cfg |= AUTH_POS_BEFORE << AUTH_POS_SHIFT;

	if (IS_CCM(flags))
		cfg |= QCE_MAX_NONCE_WORDS << AUTH_NONCE_NUM_WORDS_SHIFT;

	return cfg;
}
#endif

#ifdef CONFIG_CRYPTO_DEV_QCE_SHA
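/* Program the engine for one hash pass (SHA-1/SHA-256, HMAC, or AES-CMAC) */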
static int qce_setup_regs_ahash(struct crypto_async_request *async_req)
{
	struct ahash_request *req = ahash_request_cast(async_req);
	struct crypto_ahash *ahash = __crypto_ahash_cast(async_req->tfm);
	struct qce_sha_reqctx *rctx = ahash_request_ctx(req);
	struct qce_alg_template *tmpl = to_ahash_tmpl(async_req->tfm);
	struct qce_device *qce = tmpl->qce;
	unsigned int digestsize = crypto_ahash_digestsize(ahash);
	unsigned int blocksize = crypto_tfm_alg_blocksize(async_req->tfm);
	__be32 auth[SHA256_DIGEST_SIZE / sizeof(__be32)] = {0};
	__be32 mackey[QCE_SHA_HMAC_KEY_SIZE / sizeof(__be32)] = {0};
	u32 auth_cfg = 0, config;
	unsigned int iv_words;

	/* if not the last, the size has to be on the block boundary */
	if (!rctx->last_blk && req->nbytes % blocksize)
		return -EINVAL;

	qce_setup_config(qce);

	if (IS_CMAC(rctx->flags)) {
		qce_write(qce, REG_AUTH_SEG_CFG, 0);
		qce_write(qce, REG_ENCR_SEG_CFG, 0);
		qce_write(qce, REG_ENCR_SEG_SIZE, 0);
		qce_clear_array(qce, REG_AUTH_IV0, 16);
		qce_clear_array(qce, REG_AUTH_KEY0, 16);
		qce_clear_array(qce, REG_AUTH_BYTECNT0, 4);

		auth_cfg = qce_auth_cfg(rctx->flags, rctx->authklen, digestsize);
	}

	if (IS_SHA_HMAC(rctx->flags) || IS_CMAC(rctx->flags)) {
		u32 authkey_words = rctx->authklen / sizeof(u32);

		qce_cpu_to_be32p_array(mackey, rctx->authkey, rctx->authklen);
		qce_write_array(qce, REG_AUTH_KEY0, (u32 *)mackey,
				authkey_words);
	}

	if (IS_CMAC(rctx->flags))
		goto go_proc;

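	/*
	 * First block: rctx->digest holds the standard initial hash state,
	 * presumably already in hardware byte order; later blocks resume
	 * from the read-back intermediate digest, which must be
	 * byte-swapped on the way in.
	 */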
	if (rctx->first_blk)
		memcpy(auth, rctx->digest, digestsize);
	else
		qce_cpu_to_be32p_array(auth, rctx->digest, digestsize);

	iv_words = (IS_SHA1(rctx->flags) || IS_SHA1_HMAC(rctx->flags)) ? 5 : 8;
	qce_write_array(qce, REG_AUTH_IV0, (u32 *)auth, iv_words);

	if (rctx->first_blk)
		qce_clear_array(qce, REG_AUTH_BYTECNT0, 4);
	else
		qce_write_array(qce, REG_AUTH_BYTECNT0,
				(u32 *)rctx->byte_count, 2);

	auth_cfg = qce_auth_cfg(rctx->flags, 0, digestsize);

	if (rctx->last_blk)
		auth_cfg |= BIT(AUTH_LAST_SHIFT);
	else
		auth_cfg &= ~BIT(AUTH_LAST_SHIFT);

	if (rctx->first_blk)
		auth_cfg |= BIT(AUTH_FIRST_SHIFT);
	else
		auth_cfg &= ~BIT(AUTH_FIRST_SHIFT);

go_proc:
	qce_write(qce, REG_AUTH_SEG_CFG, auth_cfg);
	qce_write(qce, REG_AUTH_SEG_SIZE, req->nbytes);
	qce_write(qce, REG_AUTH_SEG_START, 0);
	qce_write(qce, REG_ENCR_SEG_CFG, 0);
	qce_write(qce, REG_SEG_SIZE, req->nbytes);

	/* get little endianness */
	config = qce_config_reg(qce, 1);
	qce_write(qce, REG_CONFIG, config);

	qce_crypto_go(qce, true);

	return 0;
}
#endif

#if defined(CONFIG_CRYPTO_DEV_QCE_SKCIPHER) || defined(CONFIG_CRYPTO_DEV_QCE_AEAD)
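/* Translate the algorithm flags into an ENCR_SEG_CFG value: cipher, key size, mode */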
static u32 qce_encr_cfg(unsigned long flags, u32 aes_key_size)
{
	u32 cfg = 0;

	if (IS_AES(flags)) {
		if (aes_key_size == AES_KEYSIZE_128)
			cfg |= ENCR_KEY_SZ_AES128 << ENCR_KEY_SZ_SHIFT;
		else if (aes_key_size == AES_KEYSIZE_256)
			cfg |= ENCR_KEY_SZ_AES256 << ENCR_KEY_SZ_SHIFT;
	}

	if (IS_AES(flags))
		cfg |= ENCR_ALG_AES << ENCR_ALG_SHIFT;
	else if (IS_DES(flags) || IS_3DES(flags))
		cfg |= ENCR_ALG_DES << ENCR_ALG_SHIFT;

	if (IS_DES(flags))
		cfg |= ENCR_KEY_SZ_DES << ENCR_KEY_SZ_SHIFT;

	if (IS_3DES(flags))
		cfg |= ENCR_KEY_SZ_3DES << ENCR_KEY_SZ_SHIFT;

	switch (flags & QCE_MODE_MASK) {
	case QCE_MODE_ECB:
		cfg |= ENCR_MODE_ECB << ENCR_MODE_SHIFT;
		break;
	case QCE_MODE_CBC:
		cfg |= ENCR_MODE_CBC << ENCR_MODE_SHIFT;
		break;
	case QCE_MODE_CTR:
		cfg |= ENCR_MODE_CTR << ENCR_MODE_SHIFT;
		break;
	case QCE_MODE_XTS:
		cfg |= ENCR_MODE_XTS << ENCR_MODE_SHIFT;
		break;
	case QCE_MODE_CCM:
		cfg |= ENCR_MODE_CCM << ENCR_MODE_SHIFT;
		cfg |= LAST_CCM_XFR << LAST_CCM_SHIFT;
		break;
	default:
		return ~0;
	}

	return cfg;
}
#endif

#ifdef CONFIG_CRYPTO_DEV_QCE_SKCIPHER
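/*
 * The engine apparently wants the XTS tweak byte-reversed and
 * right-aligned in a zero-padded 16-byte block; build that here.
 */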
static void qce_xts_swapiv(__be32 *dst, const u8 *src, unsigned int ivsize)
{
	u8 swap[QCE_AES_IV_LENGTH];
	u32 i, j;

	if (ivsize > QCE_AES_IV_LENGTH)
		return;

	memset(swap, 0, QCE_AES_IV_LENGTH);

	for (i = (QCE_AES_IV_LENGTH - ivsize), j = ivsize - 1;
	     i < QCE_AES_IV_LENGTH; i++, j--)
		swap[i] = src[j];

	qce_cpu_to_be32p_array(dst, swap, QCE_AES_IV_LENGTH);
}

static void qce_xtskey(struct qce_device *qce, const u8 *enckey,
		       unsigned int enckeylen, unsigned int cryptlen)
{
	u32 xtskey[QCE_MAX_CIPHER_KEY_SIZE / sizeof(u32)] = {0};
	unsigned int xtsklen = enckeylen / (2 * sizeof(u32));

	qce_cpu_to_be32p_array((__be32 *)xtskey, enckey + enckeylen / 2,
			       enckeylen / 2);
	qce_write_array(qce, REG_ENCR_XTS_KEY0, xtskey, xtsklen);

	/* Set data unit size to cryptlen. Anything else causes
	 * crypto engine to return back incorrect results.
	 */
	qce_write(qce, REG_ENCR_XTS_DU_SIZE, cryptlen);
}

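/* Program the engine for one AES/DES/3DES skcipher transfer */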
static int qce_setup_regs_skcipher(struct crypto_async_request *async_req)
{
	struct skcipher_request *req = skcipher_request_cast(async_req);
	struct qce_cipher_reqctx *rctx = skcipher_request_ctx(req);
	struct qce_cipher_ctx *ctx = crypto_tfm_ctx(async_req->tfm);
	struct qce_alg_template *tmpl = to_cipher_tmpl(crypto_skcipher_reqtfm(req));
	struct qce_device *qce = tmpl->qce;
	__be32 enckey[QCE_MAX_CIPHER_KEY_SIZE / sizeof(__be32)] = {0};
	__be32 enciv[QCE_MAX_IV_SIZE / sizeof(__be32)] = {0};
	unsigned int enckey_words, enciv_words;
	unsigned int keylen;
	u32 encr_cfg = 0, auth_cfg = 0, config;
	unsigned int ivsize = rctx->ivsize;
	unsigned long flags = rctx->flags;

	qce_setup_config(qce);

	if (IS_XTS(flags))
		keylen = ctx->enc_keylen / 2;
	else
		keylen = ctx->enc_keylen;

	qce_cpu_to_be32p_array(enckey, ctx->enc_key, keylen);
	enckey_words = keylen / sizeof(u32);

	qce_write(qce, REG_AUTH_SEG_CFG, auth_cfg);

	encr_cfg = qce_encr_cfg(flags, keylen);

	if (IS_DES(flags)) {
		enciv_words = 2;
		enckey_words = 2;
	} else if (IS_3DES(flags)) {
		enciv_words = 2;
		enckey_words = 6;
	} else if (IS_AES(flags)) {
		if (IS_XTS(flags))
			qce_xtskey(qce, ctx->enc_key, ctx->enc_keylen,
				   rctx->cryptlen);
		enciv_words = 4;
	} else {
		return -EINVAL;
	}

	qce_write_array(qce, REG_ENCR_KEY0, (u32 *)enckey, enckey_words);

	if (!IS_ECB(flags)) {
		if (IS_XTS(flags))
			qce_xts_swapiv(enciv, rctx->iv, ivsize);
		else
			qce_cpu_to_be32p_array(enciv, rctx->iv, ivsize);

		qce_write_array(qce, REG_CNTR0_IV0, (u32 *)enciv, enciv_words);
	}

	if (IS_ENCRYPT(flags))
		encr_cfg |= BIT(ENCODE_SHIFT);

	qce_write(qce, REG_ENCR_SEG_CFG, encr_cfg);
	qce_write(qce, REG_ENCR_SEG_SIZE, rctx->cryptlen);
	qce_write(qce, REG_ENCR_SEG_START, 0);

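	/* Presumably opens the counter mask so all 128 CTR bits increment */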
	if (IS_CTR(flags)) {
		qce_write(qce, REG_CNTR_MASK, ~0);
		qce_write(qce, REG_CNTR_MASK0, ~0);
		qce_write(qce, REG_CNTR_MASK1, ~0);
		qce_write(qce, REG_CNTR_MASK2, ~0);
	}

	qce_write(qce, REG_SEG_SIZE, rctx->cryptlen);

	/* get little endianness */
	config = qce_config_reg(qce, 1);
	qce_write(qce, REG_CONFIG, config);

	qce_crypto_go(qce, true);

	return 0;
}
#endif

#ifdef CONFIG_CRYPTO_DEV_QCE_AEAD
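/* Standard SHA initial states, loaded into AUTH_IVn for the HMAC modes */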
static const u32 std_iv_sha1[SHA256_DIGEST_SIZE / sizeof(u32)] = {
	SHA1_H0, SHA1_H1, SHA1_H2, SHA1_H3, SHA1_H4, 0, 0, 0
};

static const u32 std_iv_sha256[SHA256_DIGEST_SIZE / sizeof(u32)] = {
	SHA256_H0, SHA256_H1, SHA256_H2, SHA256_H3,
	SHA256_H4, SHA256_H5, SHA256_H6, SHA256_H7
};

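/* Unpack big-endian words into CPU order, returning the number of words */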
static unsigned int qce_be32_to_cpu_array(u32 *dst, const u8 *src, unsigned int len)
{
	u32 *d = dst;
	const u8 *s = src;
	unsigned int n;

	n = len / sizeof(u32);
	for (; n > 0; n--) {
		*d = be32_to_cpup((const __be32 *)s);
		s += sizeof(u32);
		d++;
	}
	return DIV_ROUND_UP(len, sizeof(u32));
}

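/* Program the engine for one AEAD transfer (CCM, or cipher chained with HMAC) */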
static int qce_setup_regs_aead(struct crypto_async_request *async_req)
{
	struct aead_request *req = aead_request_cast(async_req);
	struct qce_aead_reqctx *rctx = aead_request_ctx(req);
	struct qce_aead_ctx *ctx = crypto_tfm_ctx(async_req->tfm);
	struct qce_alg_template *tmpl = to_aead_tmpl(crypto_aead_reqtfm(req));
	struct qce_device *qce = tmpl->qce;
	u32 enckey[QCE_MAX_CIPHER_KEY_SIZE / sizeof(u32)] = {0};
	u32 enciv[QCE_MAX_IV_SIZE / sizeof(u32)] = {0};
	u32 authkey[QCE_SHA_HMAC_KEY_SIZE / sizeof(u32)] = {0};
	u32 authiv[SHA256_DIGEST_SIZE / sizeof(u32)] = {0};
	u32 authnonce[QCE_MAX_NONCE / sizeof(u32)] = {0};
	unsigned int enc_keylen = ctx->enc_keylen;
	unsigned int auth_keylen = ctx->auth_keylen;
	unsigned int enc_ivsize = rctx->ivsize;
	unsigned int auth_ivsize = 0;
	unsigned int enckey_words, enciv_words;
	unsigned int authkey_words, authiv_words, authnonce_words;
	unsigned long flags = rctx->flags;
	u32 encr_cfg, auth_cfg, config, totallen;
	u32 iv_last_word;

	qce_setup_config(qce);

	/* Write encryption key */
	enckey_words = qce_be32_to_cpu_array(enckey, ctx->enc_key, enc_keylen);
	qce_write_array(qce, REG_ENCR_KEY0, enckey, enckey_words);

	/* Write encryption iv */
	enciv_words = qce_be32_to_cpu_array(enciv, rctx->iv, enc_ivsize);
	qce_write_array(qce, REG_CNTR0_IV0, enciv, enciv_words);

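	/*
	 * CCM bookkeeping: CNTR3_IV3 gets the last IV word plus one
	 * (presumably the first payload counter, since counter zero is
	 * reserved for the MAC), the initial counter block is kept in
	 * ENCR_CCM_INT_CNTR, and the counter mask is fully opened.
	 */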
	if (IS_CCM(rctx->flags)) {
		iv_last_word = enciv[enciv_words - 1];
		qce_write(qce, REG_CNTR3_IV3, iv_last_word + 1);
		qce_write_array(qce, REG_ENCR_CCM_INT_CNTR0, (u32 *)enciv, enciv_words);
		qce_write(qce, REG_CNTR_MASK, ~0);
		qce_write(qce, REG_CNTR_MASK0, ~0);
		qce_write(qce, REG_CNTR_MASK1, ~0);
		qce_write(qce, REG_CNTR_MASK2, ~0);
	}

	/* Clear auth_ivX and auth_keyX registers */
	qce_clear_array(qce, REG_AUTH_IV0, 16);
	qce_clear_array(qce, REG_AUTH_KEY0, 16);

	/* Clear byte count */
	qce_clear_array(qce, REG_AUTH_BYTECNT0, 4);

	/* Write authentication key */
	authkey_words = qce_be32_to_cpu_array(authkey, ctx->auth_key, auth_keylen);
	qce_write_array(qce, REG_AUTH_KEY0, (u32 *)authkey, authkey_words);

	/* Write initial authentication IV only for HMAC algorithms */
	if (IS_SHA_HMAC(rctx->flags)) {
		/* Write default authentication iv */
		if (IS_SHA1_HMAC(rctx->flags)) {
			auth_ivsize = SHA1_DIGEST_SIZE;
			memcpy(authiv, std_iv_sha1, auth_ivsize);
		} else if (IS_SHA256_HMAC(rctx->flags)) {
			auth_ivsize = SHA256_DIGEST_SIZE;
			memcpy(authiv, std_iv_sha256, auth_ivsize);
		}
		authiv_words = auth_ivsize / sizeof(u32);
		qce_write_array(qce, REG_AUTH_IV0, (u32 *)authiv, authiv_words);
	} else if (IS_CCM(rctx->flags)) {
		/* Write nonce for CCM algorithms */
		authnonce_words = qce_be32_to_cpu_array(authnonce, rctx->ccm_nonce, QCE_MAX_NONCE);
		qce_write_array(qce, REG_AUTH_INFO_NONCE0, authnonce, authnonce_words);
	}

	/* Set up ENCR_SEG_CFG */
	encr_cfg = qce_encr_cfg(flags, enc_keylen);
	if (IS_ENCRYPT(flags))
		encr_cfg |= BIT(ENCODE_SHIFT);
	qce_write(qce, REG_ENCR_SEG_CFG, encr_cfg);

	/* Set up AUTH_SEG_CFG */
	auth_cfg = qce_auth_cfg(rctx->flags, auth_keylen, ctx->authsize);
	auth_cfg |= BIT(AUTH_LAST_SHIFT);
	auth_cfg |= BIT(AUTH_FIRST_SHIFT);
	if (IS_ENCRYPT(flags)) {
		if (IS_CCM(rctx->flags))
			auth_cfg |= AUTH_POS_BEFORE << AUTH_POS_SHIFT;
		else
			auth_cfg |= AUTH_POS_AFTER << AUTH_POS_SHIFT;
	} else {
		if (IS_CCM(rctx->flags))
			auth_cfg |= AUTH_POS_AFTER << AUTH_POS_SHIFT;
		else
			auth_cfg |= AUTH_POS_BEFORE << AUTH_POS_SHIFT;
	}
	qce_write(qce, REG_AUTH_SEG_CFG, auth_cfg);

	totallen = rctx->cryptlen + rctx->assoclen;

	/* Set the encryption size and start offset */
	if (IS_CCM(rctx->flags) && IS_DECRYPT(rctx->flags))
		qce_write(qce, REG_ENCR_SEG_SIZE, rctx->cryptlen + ctx->authsize);
	else
		qce_write(qce, REG_ENCR_SEG_SIZE, rctx->cryptlen);
	qce_write(qce, REG_ENCR_SEG_START, rctx->assoclen & 0xffff);

	/* Set the authentication size and start offset */
	qce_write(qce, REG_AUTH_SEG_SIZE, totallen);
	qce_write(qce, REG_AUTH_SEG_START, 0);

	/* Write total length */
	if (IS_CCM(rctx->flags) && IS_DECRYPT(rctx->flags))
		qce_write(qce, REG_SEG_SIZE, totallen + ctx->authsize);
	else
		qce_write(qce, REG_SEG_SIZE, totallen);

	/* get little endianness */
	config = qce_config_reg(qce, 1);
	qce_write(qce, REG_CONFIG, config);

	/* Start the process */
	qce_crypto_go(qce, !IS_CCM(flags));

	return 0;
}
#endif

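/* Entry point: program the engine for the given request type and start it */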
int qce_start(struct crypto_async_request *async_req, u32 type)
{
	switch (type) {
#ifdef CONFIG_CRYPTO_DEV_QCE_SKCIPHER
	case CRYPTO_ALG_TYPE_SKCIPHER:
		return qce_setup_regs_skcipher(async_req);
#endif
#ifdef CONFIG_CRYPTO_DEV_QCE_SHA
	case CRYPTO_ALG_TYPE_AHASH:
		return qce_setup_regs_ahash(async_req);
#endif
#ifdef CONFIG_CRYPTO_DEV_QCE_AEAD
	case CRYPTO_ALG_TYPE_AEAD:
		return qce_setup_regs_aead(async_req);
#endif
	default:
		return -EINVAL;
	}
}

#define STATUS_ERRORS	\
		(BIT(SW_ERR_SHIFT) | BIT(AXI_ERR_SHIFT) | BIT(HSD_ERR_SHIFT))

int qce_check_status(struct qce_device *qce, u32 *status)
{
	int ret = 0;

	*status = qce_read(qce, REG_STATUS);

	/*
	 * Don't use the result dump status here; the operation may not be
	 * complete. Use the status just read from the device instead. If
	 * result_status from the result dump is ever used, it must be
	 * byte-swapped first, since the device is set to little endian.
	 */
	if (*status & STATUS_ERRORS || !(*status & BIT(OPERATION_DONE_SHIFT)))
		ret = -ENXIO;
	else if (*status & BIT(MAC_FAILED_SHIFT))
		ret = -EBADMSG;

	return ret;
}

void qce_get_version(struct qce_device *qce, u32 *major, u32 *minor, u32 *step)
{
	u32 val;

	val = qce_read(qce, REG_VERSION);
	*major = (val & CORE_MAJOR_REV_MASK) >> CORE_MAJOR_REV_SHIFT;
	*minor = (val & CORE_MINOR_REV_MASK) >> CORE_MINOR_REV_SHIFT;
	*step = (val & CORE_STEP_REV_MASK) >> CORE_STEP_REV_SHIFT;
}