// SPDX-License-Identifier: GPL-2.0
/*
 * Copyright (C) 2017 Marvell
 *
 * Antoine Tenart <antoine.tenart@free-electrons.com>
 */

#include <crypto/aes.h>
#include <crypto/hmac.h>
#include <crypto/md5.h>
#include <crypto/sha1.h>
#include <crypto/sha2.h>
#include <crypto/sha3.h>
#include <crypto/skcipher.h>
#include <crypto/sm3.h>
#include <crypto/internal/cipher.h>
#include <linux/device.h>
#include <linux/dma-mapping.h>
#include <linux/dmapool.h>

#include "safexcel.h"

struct safexcel_ahash_ctx {
    struct safexcel_context base;

    u32 alg;
    u8  key_sz;
    bool cbcmac;
    bool do_fallback;
    bool fb_init_done;
    bool fb_do_setkey;

    struct crypto_cipher *kaes;
    struct crypto_ahash *fback;
    struct crypto_shash *shpre;
    struct shash_desc *shdesc;
};

struct safexcel_ahash_req {
    bool last_req;
    bool finish;
    bool hmac;
    bool needs_inv;
    bool hmac_zlen;
    bool len_is_le;
    bool not_first;
    bool xcbcmac;

    int nents;
    dma_addr_t result_dma;

    u32 digest;

    u8 state_sz;    /* expected state size, only set once */
    u8 block_sz;    /* block size, only set once */
    u8 digest_sz;   /* output digest size, only set once */
    __le32 state[SHA3_512_BLOCK_SIZE /
             sizeof(__le32)] __aligned(sizeof(__le32));

    u64 len;
    u64 processed;

    u8 cache[HASH_CACHE_SIZE] __aligned(sizeof(u32));
    dma_addr_t cache_dma;
    unsigned int cache_sz;

    u8 cache_next[HASH_CACHE_SIZE] __aligned(sizeof(u32));
};

static inline u64 safexcel_queued_len(struct safexcel_ahash_req *req)
{
    return req->len - req->processed;
}

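/*
 * Build the EIP197 instruction token sequence for a hash operation:
 * token[0] runs the input data through the hash engine, token[1]
 * optionally inserts zero padding up to the next 16 byte boundary for
 * CBC-MAC (e.g. a 20 byte final chunk gets 16 - (20 & 15) = 12 pad
 * bytes), and token[2] inserts the resulting digest into the output.
 */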
static void safexcel_hash_token(struct safexcel_command_desc *cdesc,
                u32 input_length, u32 result_length,
                bool cbcmac)
{
    struct safexcel_token *token =
        (struct safexcel_token *)cdesc->control_data.token;

    token[0].opcode = EIP197_TOKEN_OPCODE_DIRECTION;
    token[0].packet_length = input_length;
    token[0].instructions = EIP197_TOKEN_INS_TYPE_HASH;

    input_length &= 15;
    if (unlikely(cbcmac && input_length)) {
        token[0].stat = 0;
        token[1].opcode = EIP197_TOKEN_OPCODE_INSERT;
        token[1].packet_length = 16 - input_length;
        token[1].stat = EIP197_TOKEN_STAT_LAST_HASH;
        token[1].instructions = EIP197_TOKEN_INS_TYPE_HASH;
    } else {
        token[0].stat = EIP197_TOKEN_STAT_LAST_HASH;
        eip197_noop_token(&token[1]);
    }

    token[2].opcode = EIP197_TOKEN_OPCODE_INSERT;
    token[2].stat = EIP197_TOKEN_STAT_LAST_HASH |
            EIP197_TOKEN_STAT_LAST_PACKET;
    token[2].packet_length = result_length;
    token[2].instructions = EIP197_TOKEN_INS_TYPE_OUTPUT |
                EIP197_TOKEN_INS_INSERT_HASH_DIGEST;

    eip197_noop_token(&token[3]);
}

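/*
 * Program the per-transform context record. Depending on the request
 * phase, the context data holds the precomputed key material
 * (XCBC/CBC-MAC), the intermediate inner digest, the inner digest plus
 * a 32-bit processed-blocks counter (hash continuation/finish), or the
 * inner plus outer digest (single-pass HMAC finish).
 */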
static void safexcel_context_control(struct safexcel_ahash_ctx *ctx,
                     struct safexcel_ahash_req *req,
                     struct safexcel_command_desc *cdesc)
{
    struct safexcel_crypto_priv *priv = ctx->base.priv;
    u64 count = 0;

    cdesc->control_data.control0 = ctx->alg;
    cdesc->control_data.control1 = 0;

    /*
     * Copy the input digest if needed, and set up the context
     * fields. Do this now as we need it to set up the first command
     * descriptor.
     */
    if (unlikely(req->digest == CONTEXT_CONTROL_DIGEST_XCM)) {
        if (req->xcbcmac)
            memcpy(ctx->base.ctxr->data, &ctx->base.ipad, ctx->key_sz);
        else
            memcpy(ctx->base.ctxr->data, req->state, req->state_sz);

        if (!req->finish && req->xcbcmac)
            cdesc->control_data.control0 |=
                CONTEXT_CONTROL_DIGEST_XCM |
                CONTEXT_CONTROL_TYPE_HASH_OUT  |
                CONTEXT_CONTROL_NO_FINISH_HASH |
                CONTEXT_CONTROL_SIZE(req->state_sz /
                             sizeof(u32));
        else
            cdesc->control_data.control0 |=
                CONTEXT_CONTROL_DIGEST_XCM |
                CONTEXT_CONTROL_TYPE_HASH_OUT  |
                CONTEXT_CONTROL_SIZE(req->state_sz /
                             sizeof(u32));
        return;
    } else if (!req->processed) {
        /* First - and possibly only - block of basic hash only */
        if (req->finish)
            cdesc->control_data.control0 |= req->digest |
                CONTEXT_CONTROL_TYPE_HASH_OUT |
                CONTEXT_CONTROL_RESTART_HASH  |
                /* ensure it's not 0! */
                CONTEXT_CONTROL_SIZE(1);
        else
            cdesc->control_data.control0 |= req->digest |
                CONTEXT_CONTROL_TYPE_HASH_OUT  |
                CONTEXT_CONTROL_RESTART_HASH   |
                CONTEXT_CONTROL_NO_FINISH_HASH |
                /* ensure it's not 0! */
                CONTEXT_CONTROL_SIZE(1);
        return;
    }

    /* Hash continuation or HMAC, setup (inner) digest from state */
    memcpy(ctx->base.ctxr->data, req->state, req->state_sz);

    if (req->finish) {
        /* Compute digest count for hash/HMAC finish operations */
        if ((req->digest == CONTEXT_CONTROL_DIGEST_PRECOMPUTED) ||
            req->hmac_zlen || (req->processed != req->block_sz)) {
            count = req->processed / EIP197_COUNTER_BLOCK_SIZE;

            /* This is a hardware limitation, as the
             * counter must fit into a u32. This represents
             * a fairly big amount of input data, so we
             * shouldn't see this.
             */
            if (unlikely(count & 0xffffffff00000000ULL)) {
                dev_warn(priv->dev,
                     "Input data is too big\n");
                return;
            }
        }

        if ((req->digest == CONTEXT_CONTROL_DIGEST_PRECOMPUTED) ||
            /* Special case: zero length HMAC */
            req->hmac_zlen ||
            /* PE HW < 4.4 cannot do HMAC continue, fake using hash */
            (req->processed != req->block_sz)) {
            /* Basic hash continue operation, need digest + cnt */
            cdesc->control_data.control0 |=
                CONTEXT_CONTROL_SIZE((req->state_sz >> 2) + 1) |
                CONTEXT_CONTROL_TYPE_HASH_OUT |
                CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
            /* For zero-len HMAC, don't finalize, already padded! */
            if (req->hmac_zlen)
                cdesc->control_data.control0 |=
                    CONTEXT_CONTROL_NO_FINISH_HASH;
            cdesc->control_data.control1 |=
                CONTEXT_CONTROL_DIGEST_CNT;
            ctx->base.ctxr->data[req->state_sz >> 2] =
                cpu_to_le32(count);
            req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;

            /* Clear zero-length HMAC flag for next operation! */
            req->hmac_zlen = false;
        } else { /* HMAC */
            /* Need outer digest for HMAC finalization */
            memcpy(ctx->base.ctxr->data + (req->state_sz >> 2),
                   &ctx->base.opad, req->state_sz);

            /* Single pass HMAC - no digest count */
            cdesc->control_data.control0 |=
                CONTEXT_CONTROL_SIZE(req->state_sz >> 1) |
                CONTEXT_CONTROL_TYPE_HASH_OUT |
                CONTEXT_CONTROL_DIGEST_HMAC;
        }
    } else { /* Hash continuation, do not finish yet */
        cdesc->control_data.control0 |=
            CONTEXT_CONTROL_SIZE(req->state_sz >> 2) |
            CONTEXT_CONTROL_DIGEST_PRECOMPUTED |
            CONTEXT_CONTROL_TYPE_HASH_OUT |
            CONTEXT_CONTROL_NO_FINISH_HASH;
    }
}

static int safexcel_ahash_enqueue(struct ahash_request *areq);

static int safexcel_handle_req_result(struct safexcel_crypto_priv *priv,
                      int ring,
                      struct crypto_async_request *async,
                      bool *should_complete, int *ret)
{
    struct safexcel_result_desc *rdesc;
    struct ahash_request *areq = ahash_request_cast(async);
    struct crypto_ahash *ahash = crypto_ahash_reqtfm(areq);
    struct safexcel_ahash_req *sreq = ahash_request_ctx(areq);
    struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(ahash);
    u64 cache_len;

    *ret = 0;

    rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
    if (IS_ERR(rdesc)) {
        dev_err(priv->dev,
            "hash: result: could not retrieve the result descriptor\n");
        *ret = PTR_ERR(rdesc);
    } else {
        *ret = safexcel_rdesc_check_errors(priv, rdesc);
    }

    safexcel_complete(priv, ring);

    if (sreq->nents) {
        dma_unmap_sg(priv->dev, areq->src, sreq->nents, DMA_TO_DEVICE);
        sreq->nents = 0;
    }

    if (sreq->result_dma) {
        dma_unmap_single(priv->dev, sreq->result_dma, sreq->digest_sz,
                 DMA_FROM_DEVICE);
        sreq->result_dma = 0;
    }

    if (sreq->cache_dma) {
        dma_unmap_single(priv->dev, sreq->cache_dma, sreq->cache_sz,
                 DMA_TO_DEVICE);
        sreq->cache_dma = 0;
        sreq->cache_sz = 0;
    }

    if (sreq->finish) {
        if (sreq->hmac &&
            (sreq->digest != CONTEXT_CONTROL_DIGEST_HMAC)) {
            /* Faking HMAC using hash - need to do outer hash */
            memcpy(sreq->cache, sreq->state,
                   crypto_ahash_digestsize(ahash));

            memcpy(sreq->state, &ctx->base.opad, sreq->digest_sz);

            sreq->len = sreq->block_sz +
                    crypto_ahash_digestsize(ahash);
            sreq->processed = sreq->block_sz;
            sreq->hmac = 0;

            if (priv->flags & EIP197_TRC_CACHE)
                ctx->base.needs_inv = true;
            areq->nbytes = 0;
            safexcel_ahash_enqueue(areq);

            *should_complete = false; /* Not done yet */
            return 1;
        }

        if (unlikely(sreq->digest == CONTEXT_CONTROL_DIGEST_XCM &&
                 ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_CRC32)) {
            /* Undo final XOR with 0xffffffff ... */
            *(__le32 *)areq->result = ~sreq->state[0];
        } else {
            memcpy(areq->result, sreq->state,
                   crypto_ahash_digestsize(ahash));
        }
    }

    cache_len = safexcel_queued_len(sreq);
    if (cache_len)
        memcpy(sreq->cache, sreq->cache_next, cache_len);

    *should_complete = true;

    return 1;
}

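/*
 * Build the command and result descriptors for one hash request: any
 * data still cached from previous update() calls is sent first, then
 * the current scatterlist is mapped and queued. Data that does not
 * fill a whole block (or the very last full block, when more data may
 * still follow) is copied to cache_next for the next invocation.
 */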
static int safexcel_ahash_send_req(struct crypto_async_request *async, int ring,
                   int *commands, int *results)
{
    struct ahash_request *areq = ahash_request_cast(async);
    struct safexcel_ahash_req *req = ahash_request_ctx(areq);
    struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
    struct safexcel_crypto_priv *priv = ctx->base.priv;
    struct safexcel_command_desc *cdesc, *first_cdesc = NULL;
    struct safexcel_result_desc *rdesc;
    struct scatterlist *sg;
    struct safexcel_token *dmmy;
    int i, extra = 0, n_cdesc = 0, ret = 0, cache_len, skip = 0;
    u64 queued, len;

    queued = safexcel_queued_len(req);
    if (queued <= HASH_CACHE_SIZE)
        cache_len = queued;
    else
        cache_len = queued - areq->nbytes;

    if (!req->finish && !req->last_req) {
        /* If this is not the last request and the queued data does not
         * fit into full cache blocks, cache it for the next send call.
         */
        extra = queued & (HASH_CACHE_SIZE - 1);

        /* If this is not the last request and the queued data
         * is a multiple of a block, cache the last one for now.
         */
        if (!extra)
            extra = HASH_CACHE_SIZE;

        sg_pcopy_to_buffer(areq->src, sg_nents(areq->src),
                   req->cache_next, extra,
                   areq->nbytes - extra);

        queued -= extra;

        if (!queued) {
            *commands = 0;
            *results = 0;
            return 0;
        }

        extra = 0;
    }

    if (unlikely(req->xcbcmac && req->processed > AES_BLOCK_SIZE)) {
        if (unlikely(cache_len < AES_BLOCK_SIZE)) {
            /*
             * Cache contains less than one full block;
             * pad it out to a full AES block below.
             */
            extra = AES_BLOCK_SIZE - cache_len;
            if (queued > cache_len) {
                /* More data follows: borrow bytes */
                u64 tmp = queued - cache_len;

                skip = min_t(u64, tmp, extra);
                sg_pcopy_to_buffer(areq->src,
                    sg_nents(areq->src),
                    req->cache + cache_len,
                    skip, 0);
            }
            extra -= skip;
            memset(req->cache + cache_len + skip, 0, extra);
            if (!ctx->cbcmac && extra) {
                // 10- padding for XCBC-MAC & CMAC
                req->cache[cache_len + skip] = 0x80;
                // HW will use K2 instead of K3 - compensate!
                for (i = 0; i < AES_BLOCK_SIZE / 4; i++) {
                    u32 *cache = (void *)req->cache;
                    u32 *ipad = ctx->base.ipad.word;
                    u32 x;

                    x = ipad[i] ^ ipad[i + 4];
                    cache[i] ^= swab(x);
                }
            }
            cache_len = AES_BLOCK_SIZE;
            queued += extra;
        }

        /* XCBC continue: XOR previous result into 1st word */
        crypto_xor(req->cache, (const u8 *)req->state, AES_BLOCK_SIZE);
    }

    len = queued;
    /* Add a command descriptor for the cached data, if any */
    if (cache_len) {
        req->cache_dma = dma_map_single(priv->dev, req->cache,
                        cache_len, DMA_TO_DEVICE);
        if (dma_mapping_error(priv->dev, req->cache_dma))
            return -EINVAL;

        req->cache_sz = cache_len;
        first_cdesc = safexcel_add_cdesc(priv, ring, 1,
                         (cache_len == len),
                         req->cache_dma, cache_len,
                         len, ctx->base.ctxr_dma,
                         &dmmy);
        if (IS_ERR(first_cdesc)) {
            ret = PTR_ERR(first_cdesc);
            goto unmap_cache;
        }
        n_cdesc++;

        queued -= cache_len;
        if (!queued)
            goto send_command;
    }

    /* Now handle the current ahash request buffer(s) */
    req->nents = dma_map_sg(priv->dev, areq->src,
                sg_nents_for_len(areq->src,
                         areq->nbytes),
                DMA_TO_DEVICE);
    if (!req->nents) {
        ret = -ENOMEM;
        goto cdesc_rollback;
    }

    for_each_sg(areq->src, sg, req->nents, i) {
        int sglen = sg_dma_len(sg);

        if (unlikely(sglen <= skip)) {
            skip -= sglen;
            continue;
        }

        /* Do not overflow the request */
        if ((queued + skip) <= sglen)
            sglen = queued;
        else
            sglen -= skip;

        cdesc = safexcel_add_cdesc(priv, ring, !n_cdesc,
                       !(queued - sglen),
                       sg_dma_address(sg) + skip, sglen,
                       len, ctx->base.ctxr_dma, &dmmy);
        if (IS_ERR(cdesc)) {
            ret = PTR_ERR(cdesc);
            goto unmap_sg;
        }

        if (!n_cdesc)
            first_cdesc = cdesc;
        n_cdesc++;

        queued -= sglen;
        if (!queued)
            break;
        skip = 0;
    }

send_command:
    /* Setup the context options */
    safexcel_context_control(ctx, req, first_cdesc);

    /* Add the token */
    safexcel_hash_token(first_cdesc, len, req->digest_sz, ctx->cbcmac);

    req->result_dma = dma_map_single(priv->dev, req->state, req->digest_sz,
                     DMA_FROM_DEVICE);
    if (dma_mapping_error(priv->dev, req->result_dma)) {
        ret = -EINVAL;
        goto unmap_sg;
    }

    /* Add a result descriptor */
    rdesc = safexcel_add_rdesc(priv, ring, 1, 1, req->result_dma,
                   req->digest_sz);
    if (IS_ERR(rdesc)) {
        ret = PTR_ERR(rdesc);
        goto unmap_result;
    }

    safexcel_rdr_req_set(priv, ring, rdesc, &areq->base);

    req->processed += len - extra;

    *commands = n_cdesc;
    *results = 1;
    return 0;

unmap_result:
    dma_unmap_single(priv->dev, req->result_dma, req->digest_sz,
             DMA_FROM_DEVICE);
unmap_sg:
    if (req->nents) {
        dma_unmap_sg(priv->dev, areq->src, req->nents, DMA_TO_DEVICE);
        req->nents = 0;
    }
cdesc_rollback:
    for (i = 0; i < n_cdesc; i++)
        safexcel_ring_rollback_wptr(priv, &priv->ring[ring].cdr);
unmap_cache:
    if (req->cache_dma) {
        dma_unmap_single(priv->dev, req->cache_dma, req->cache_sz,
                 DMA_TO_DEVICE);
        req->cache_dma = 0;
        req->cache_sz = 0;
    }

    return ret;
}

static int safexcel_handle_inv_result(struct safexcel_crypto_priv *priv,
                      int ring,
                      struct crypto_async_request *async,
                      bool *should_complete, int *ret)
{
    struct safexcel_result_desc *rdesc;
    struct ahash_request *areq = ahash_request_cast(async);
    struct crypto_ahash *ahash = crypto_ahash_reqtfm(areq);
    struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(ahash);
    int enq_ret;

    *ret = 0;

    rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
    if (IS_ERR(rdesc)) {
        dev_err(priv->dev,
            "hash: invalidate: could not retrieve the result descriptor\n");
        *ret = PTR_ERR(rdesc);
    } else {
        *ret = safexcel_rdesc_check_errors(priv, rdesc);
    }

    safexcel_complete(priv, ring);

    if (ctx->base.exit_inv) {
        dma_pool_free(priv->context_pool, ctx->base.ctxr,
                  ctx->base.ctxr_dma);

        *should_complete = true;
        return 1;
    }

    ring = safexcel_select_ring(priv);
    ctx->base.ring = ring;

    spin_lock_bh(&priv->ring[ring].queue_lock);
    enq_ret = crypto_enqueue_request(&priv->ring[ring].queue, async);
    spin_unlock_bh(&priv->ring[ring].queue_lock);

    if (enq_ret != -EINPROGRESS)
        *ret = enq_ret;

    queue_work(priv->ring[ring].workqueue,
           &priv->ring[ring].work_data.work);

    *should_complete = false;

    return 1;
}

static int safexcel_handle_result(struct safexcel_crypto_priv *priv, int ring,
                  struct crypto_async_request *async,
                  bool *should_complete, int *ret)
{
    struct ahash_request *areq = ahash_request_cast(async);
    struct safexcel_ahash_req *req = ahash_request_ctx(areq);
    int err;

    BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && req->needs_inv);

    if (req->needs_inv) {
        req->needs_inv = false;
        err = safexcel_handle_inv_result(priv, ring, async,
                         should_complete, ret);
    } else {
        err = safexcel_handle_req_result(priv, ring, async,
                         should_complete, ret);
    }

    return err;
}

static int safexcel_ahash_send_inv(struct crypto_async_request *async,
                   int ring, int *commands, int *results)
{
    struct ahash_request *areq = ahash_request_cast(async);
    struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
    int ret;

    ret = safexcel_invalidate_cache(async, ctx->base.priv,
                    ctx->base.ctxr_dma, ring);
    if (unlikely(ret))
        return ret;

    *commands = 1;
    *results = 1;

    return 0;
}

static int safexcel_ahash_send(struct crypto_async_request *async,
                   int ring, int *commands, int *results)
{
    struct ahash_request *areq = ahash_request_cast(async);
    struct safexcel_ahash_req *req = ahash_request_ctx(areq);
    int ret;

    if (req->needs_inv)
        ret = safexcel_ahash_send_inv(async, ring, commands, results);
    else
        ret = safexcel_ahash_send_req(async, ring, commands, results);

    return ret;
}

static int safexcel_ahash_exit_inv(struct crypto_tfm *tfm)
{
    struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
    struct safexcel_crypto_priv *priv = ctx->base.priv;
    EIP197_REQUEST_ON_STACK(req, ahash, EIP197_AHASH_REQ_SIZE);
    struct safexcel_ahash_req *rctx = ahash_request_ctx(req);
    struct safexcel_inv_result result = {};
    int ring = ctx->base.ring;

    memset(req, 0, EIP197_AHASH_REQ_SIZE);

    /* create invalidation request */
    init_completion(&result.completion);
    ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
                   safexcel_inv_complete, &result);

    ahash_request_set_tfm(req, __crypto_ahash_cast(tfm));
    ctx = crypto_tfm_ctx(req->base.tfm);
    ctx->base.exit_inv = true;
    rctx->needs_inv = true;

    spin_lock_bh(&priv->ring[ring].queue_lock);
    crypto_enqueue_request(&priv->ring[ring].queue, &req->base);
    spin_unlock_bh(&priv->ring[ring].queue_lock);

    queue_work(priv->ring[ring].workqueue,
           &priv->ring[ring].work_data.work);

    wait_for_completion(&result.completion);

    if (result.error) {
        dev_warn(priv->dev, "hash: completion error (%d)\n",
             result.error);
        return result.error;
    }

    return 0;
}

/* safexcel_ahash_cache: cache data until at least one request can be sent to
 * the engine, i.e. when there is at least one block's worth of data in the
 * pipe.
 */
static int safexcel_ahash_cache(struct ahash_request *areq)
{
    struct safexcel_ahash_req *req = ahash_request_ctx(areq);
    u64 cache_len;

    /* cache_len: everything accepted by the driver but not sent yet, i.e.
     * tot sz handled by update() - last req sz - tot sz handled by send()
     */
    cache_len = safexcel_queued_len(req);

    /*
     * In case there aren't enough bytes to proceed (less than a
     * block size), cache the data until we have enough.
     */
    if (cache_len + areq->nbytes <= HASH_CACHE_SIZE) {
        sg_pcopy_to_buffer(areq->src, sg_nents(areq->src),
                   req->cache + cache_len,
                   areq->nbytes, 0);
        return 0;
    }

    /* We couldn't cache all the data */
    return -E2BIG;
}

static int safexcel_ahash_enqueue(struct ahash_request *areq)
{
    struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
    struct safexcel_ahash_req *req = ahash_request_ctx(areq);
    struct safexcel_crypto_priv *priv = ctx->base.priv;
    int ret, ring;

    req->needs_inv = false;

    if (ctx->base.ctxr) {
        if (priv->flags & EIP197_TRC_CACHE && !ctx->base.needs_inv &&
             /* invalidate for *any* non-XCBC continuation */
           ((req->not_first && !req->xcbcmac) ||
             /* invalidate if (i)digest changed */
             memcmp(ctx->base.ctxr->data, req->state, req->state_sz) ||
             /* invalidate for HMAC finish with odigest changed */
             (req->finish && req->hmac &&
              memcmp(ctx->base.ctxr->data + (req->state_sz>>2),
                 &ctx->base.opad, req->state_sz))))
            /*
             * We're still setting needs_inv here, even though it is
             * cleared right away, because the needs_inv flag can be
             * set in other functions and we want to keep the same
             * logic.
             */
            ctx->base.needs_inv = true;

        if (ctx->base.needs_inv) {
            ctx->base.needs_inv = false;
            req->needs_inv = true;
        }
    } else {
        ctx->base.ring = safexcel_select_ring(priv);
        ctx->base.ctxr = dma_pool_zalloc(priv->context_pool,
                         EIP197_GFP_FLAGS(areq->base),
                         &ctx->base.ctxr_dma);
        if (!ctx->base.ctxr)
            return -ENOMEM;
    }
    req->not_first = true;

    ring = ctx->base.ring;

    spin_lock_bh(&priv->ring[ring].queue_lock);
    ret = crypto_enqueue_request(&priv->ring[ring].queue, &areq->base);
    spin_unlock_bh(&priv->ring[ring].queue_lock);

    queue_work(priv->ring[ring].workqueue,
           &priv->ring[ring].work_data.work);

    return ret;
}

static int safexcel_ahash_update(struct ahash_request *areq)
{
    struct safexcel_ahash_req *req = ahash_request_ctx(areq);
    int ret;

    /* If the request is 0 length, do nothing */
    if (!areq->nbytes)
        return 0;

    /* Add request to the cache if it fits */
    ret = safexcel_ahash_cache(areq);

    /* Update total request length */
    req->len += areq->nbytes;

    /* If not all data could fit into the cache, go process the excess.
     * Also go process immediately for an HMAC IV precompute, which
     * will never be finished at all, but needs to be processed anyway.
     */
    if ((ret && !req->finish) || req->last_req)
        return safexcel_ahash_enqueue(areq);

    return 0;
}

static int safexcel_ahash_final(struct ahash_request *areq)
{
    struct safexcel_ahash_req *req = ahash_request_ctx(areq);
    struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));

    req->finish = true;

    if (unlikely(!req->len && !areq->nbytes)) {
        /*
         * If we have an overall 0 length *hash* request:
         * The HW cannot do 0 length hash, so we provide the correct
         * result directly here.
         */
        if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_MD5)
            memcpy(areq->result, md5_zero_message_hash,
                   MD5_DIGEST_SIZE);
        else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA1)
            memcpy(areq->result, sha1_zero_message_hash,
                   SHA1_DIGEST_SIZE);
        else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA224)
            memcpy(areq->result, sha224_zero_message_hash,
                   SHA224_DIGEST_SIZE);
        else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA256)
            memcpy(areq->result, sha256_zero_message_hash,
                   SHA256_DIGEST_SIZE);
        else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA384)
            memcpy(areq->result, sha384_zero_message_hash,
                   SHA384_DIGEST_SIZE);
        else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA512)
            memcpy(areq->result, sha512_zero_message_hash,
                   SHA512_DIGEST_SIZE);
        else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SM3) {
            memcpy(areq->result,
                   EIP197_SM3_ZEROM_HASH, SM3_DIGEST_SIZE);
        }

        return 0;
    } else if (unlikely(req->digest == CONTEXT_CONTROL_DIGEST_XCM &&
                ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_MD5 &&
                req->len == sizeof(u32) && !areq->nbytes)) {
        /* Zero length CRC32 */
        memcpy(areq->result, &ctx->base.ipad, sizeof(u32));
        return 0;
    } else if (unlikely(ctx->cbcmac && req->len == AES_BLOCK_SIZE &&
                !areq->nbytes)) {
        /* Zero length CBC MAC */
        memset(areq->result, 0, AES_BLOCK_SIZE);
        return 0;
    } else if (unlikely(req->xcbcmac && req->len == AES_BLOCK_SIZE &&
                !areq->nbytes)) {
        /* Zero length (X)CBC/CMAC */
        int i;

        for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++) {
            u32 *result = (void *)areq->result;

            /* K3 */
            result[i] = swab(ctx->base.ipad.word[i + 4]);
        }
        areq->result[0] ^= 0x80;            // 10- padding
        crypto_cipher_encrypt_one(ctx->kaes, areq->result, areq->result);
        return 0;
    } else if (unlikely(req->hmac &&
                (req->len == req->block_sz) &&
                !areq->nbytes)) {
        /*
         * If we have an overall 0 length *HMAC* request:
         * For HMAC, we need to finalize the inner digest
         * and then perform the outer hash.
         */

        /* generate pad block in the cache */
        /* start with a hash block of all zeroes */
        memset(req->cache, 0, req->block_sz);
        /* set the first byte to 0x80 to 'append a 1 bit' */
        req->cache[0] = 0x80;
        /* encode the length in bits; only 2 bytes of the
         * length field are ever needed here
         */
        if (req->len_is_le) {
            /* Little endian length word (e.g. MD5) */
            req->cache[req->block_sz-8] = (req->block_sz << 3) &
                              255;
            req->cache[req->block_sz-7] = (req->block_sz >> 5);
        } else {
            /* Big endian length word (e.g. any SHA) */
            req->cache[req->block_sz-2] = (req->block_sz >> 5);
            req->cache[req->block_sz-1] = (req->block_sz << 3) &
                              255;
        }
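        /*
         * e.g. for a 64 byte block (SHA-1/SHA-256): 64 << 3 = 512
         * = 0x200 bits, so the bytes written above are 0x00 0x02
         * (little endian, offsets 56/57) or 0x02 0x00 (big endian,
         * offsets 62/63).
         */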

        req->len += req->block_sz; /* plus 1 hash block */

        /* Set special zero-length HMAC flag */
        req->hmac_zlen = true;

        /* Finalize HMAC */
        req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
    } else if (req->hmac) {
        /* Finalize HMAC */
        req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
    }

    return safexcel_ahash_enqueue(areq);
}

static int safexcel_ahash_finup(struct ahash_request *areq)
{
    struct safexcel_ahash_req *req = ahash_request_ctx(areq);

    req->finish = true;

    safexcel_ahash_update(areq);
    return safexcel_ahash_final(areq);
}

static int safexcel_ahash_export(struct ahash_request *areq, void *out)
{
    struct safexcel_ahash_req *req = ahash_request_ctx(areq);
    struct safexcel_ahash_export_state *export = out;

    export->len = req->len;
    export->processed = req->processed;

    export->digest = req->digest;

    memcpy(export->state, req->state, req->state_sz);
    memcpy(export->cache, req->cache, HASH_CACHE_SIZE);

    return 0;
}

static int safexcel_ahash_import(struct ahash_request *areq, const void *in)
{
    struct safexcel_ahash_req *req = ahash_request_ctx(areq);
    const struct safexcel_ahash_export_state *export = in;
    int ret;

    ret = crypto_ahash_init(areq);
    if (ret)
        return ret;

    req->len = export->len;
    req->processed = export->processed;

    req->digest = export->digest;

    memcpy(req->cache, export->cache, HASH_CACHE_SIZE);
    memcpy(req->state, export->state, req->state_sz);

    return 0;
}

static int safexcel_ahash_cra_init(struct crypto_tfm *tfm)
{
    struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
    struct safexcel_alg_template *tmpl =
        container_of(__crypto_ahash_alg(tfm->__crt_alg),
                 struct safexcel_alg_template, alg.ahash);

    ctx->base.priv = tmpl->priv;
    ctx->base.send = safexcel_ahash_send;
    ctx->base.handle_result = safexcel_handle_result;
    ctx->fb_do_setkey = false;

    crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
                 sizeof(struct safexcel_ahash_req));
    return 0;
}

static int safexcel_sha1_init(struct ahash_request *areq)
{
    struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
    struct safexcel_ahash_req *req = ahash_request_ctx(areq);

    memset(req, 0, sizeof(*req));

    ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
    req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
    req->state_sz = SHA1_DIGEST_SIZE;
    req->digest_sz = SHA1_DIGEST_SIZE;
    req->block_sz = SHA1_BLOCK_SIZE;

    return 0;
}

static int safexcel_sha1_digest(struct ahash_request *areq)
{
    int ret = safexcel_sha1_init(areq);

    if (ret)
        return ret;

    return safexcel_ahash_finup(areq);
}

static void safexcel_ahash_cra_exit(struct crypto_tfm *tfm)
{
    struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
    struct safexcel_crypto_priv *priv = ctx->base.priv;
    int ret;

    /* context not allocated, skip invalidation */
    if (!ctx->base.ctxr)
        return;

    if (priv->flags & EIP197_TRC_CACHE) {
        ret = safexcel_ahash_exit_inv(tfm);
        if (ret)
            dev_warn(priv->dev, "hash: invalidation error %d\n", ret);
    } else {
        dma_pool_free(priv->context_pool, ctx->base.ctxr,
                  ctx->base.ctxr_dma);
    }
}

struct safexcel_alg_template safexcel_alg_sha1 = {
    .type = SAFEXCEL_ALG_TYPE_AHASH,
    .algo_mask = SAFEXCEL_ALG_SHA1,
    .alg.ahash = {
        .init = safexcel_sha1_init,
        .update = safexcel_ahash_update,
        .final = safexcel_ahash_final,
        .finup = safexcel_ahash_finup,
        .digest = safexcel_sha1_digest,
        .export = safexcel_ahash_export,
        .import = safexcel_ahash_import,
        .halg = {
            .digestsize = SHA1_DIGEST_SIZE,
            .statesize = sizeof(struct safexcel_ahash_export_state),
            .base = {
                .cra_name = "sha1",
                .cra_driver_name = "safexcel-sha1",
                .cra_priority = SAFEXCEL_CRA_PRIORITY,
                .cra_flags = CRYPTO_ALG_ASYNC |
                         CRYPTO_ALG_ALLOCATES_MEMORY |
                         CRYPTO_ALG_KERN_DRIVER_ONLY,
                .cra_blocksize = SHA1_BLOCK_SIZE,
                .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
                .cra_init = safexcel_ahash_cra_init,
                .cra_exit = safexcel_ahash_cra_exit,
                .cra_module = THIS_MODULE,
            },
        },
    },
};

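/*
 * Illustrative sketch (not part of the driver): a kernel user reaches
 * this implementation through the generic crypto API whenever
 * "safexcel-sha1" wins the priority based algorithm selection, e.g.:
 *
 *	struct crypto_ahash *tfm = crypto_alloc_ahash("sha1", 0, 0);
 *	struct ahash_request *req = ahash_request_alloc(tfm, GFP_KERNEL);
 *	struct scatterlist sg;
 *
 *	sg_init_one(&sg, data, len);
 *	ahash_request_set_crypt(req, &sg, digest, len);
 *	crypto_ahash_digest(req);	// may return -EINPROGRESS (async)
 *
 * Error handling and async completion waiting are omitted here.
 */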
static int safexcel_hmac_sha1_init(struct ahash_request *areq)
{
    struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
    struct safexcel_ahash_req *req = ahash_request_ctx(areq);

    memset(req, 0, sizeof(*req));

    /* Start from ipad precompute */
    memcpy(req->state, &ctx->base.ipad, SHA1_DIGEST_SIZE);
    /* Already processed the key^ipad part now! */
    req->len    = SHA1_BLOCK_SIZE;
    req->processed  = SHA1_BLOCK_SIZE;

    ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
    req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
    req->state_sz = SHA1_DIGEST_SIZE;
    req->digest_sz = SHA1_DIGEST_SIZE;
    req->block_sz = SHA1_BLOCK_SIZE;
    req->hmac = true;

    return 0;
}

static int safexcel_hmac_sha1_digest(struct ahash_request *areq)
{
    int ret = safexcel_hmac_sha1_init(areq);

    if (ret)
        return ret;

    return safexcel_ahash_finup(areq);
}

struct safexcel_ahash_result {
    struct completion completion;
    int error;
};

static void safexcel_ahash_complete(struct crypto_async_request *req, int error)
{
    struct safexcel_ahash_result *result = req->data;

    if (error == -EINPROGRESS)
        return;

    result->error = error;
    complete(&result->completion);
}

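/*
 * Derive the HMAC inner and outer pad blocks per RFC 2104: the key
 * (hashed first if longer than one block) is zero padded to the block
 * size and XORed with repeated 0x36 (ipad) resp. repeated 0x5c (opad).
 */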
static int safexcel_hmac_init_pad(struct ahash_request *areq,
                  unsigned int blocksize, const u8 *key,
                  unsigned int keylen, u8 *ipad, u8 *opad)
{
    struct safexcel_ahash_result result;
    struct scatterlist sg;
    int ret, i;
    u8 *keydup;

    if (keylen <= blocksize) {
        memcpy(ipad, key, keylen);
    } else {
        keydup = kmemdup(key, keylen, GFP_KERNEL);
        if (!keydup)
            return -ENOMEM;

        ahash_request_set_callback(areq, CRYPTO_TFM_REQ_MAY_BACKLOG,
                       safexcel_ahash_complete, &result);
        sg_init_one(&sg, keydup, keylen);
        ahash_request_set_crypt(areq, &sg, ipad, keylen);
        init_completion(&result.completion);

        ret = crypto_ahash_digest(areq);
        if (ret == -EINPROGRESS || ret == -EBUSY) {
            wait_for_completion_interruptible(&result.completion);
            ret = result.error;
        }

        /* Avoid leaking */
        kfree_sensitive(keydup);

        if (ret)
            return ret;

        keylen = crypto_ahash_digestsize(crypto_ahash_reqtfm(areq));
    }

    memset(ipad + keylen, 0, blocksize - keylen);
    memcpy(opad, ipad, blocksize);

    for (i = 0; i < blocksize; i++) {
        ipad[i] ^= HMAC_IPAD_VALUE;
        opad[i] ^= HMAC_OPAD_VALUE;
    }

    return 0;
}

static int safexcel_hmac_init_iv(struct ahash_request *areq,
                 unsigned int blocksize, u8 *pad, void *state)
{
    struct safexcel_ahash_result result;
    struct safexcel_ahash_req *req;
    struct scatterlist sg;
    int ret;

    ahash_request_set_callback(areq, CRYPTO_TFM_REQ_MAY_BACKLOG,
                   safexcel_ahash_complete, &result);
    sg_init_one(&sg, pad, blocksize);
    ahash_request_set_crypt(areq, &sg, pad, blocksize);
    init_completion(&result.completion);

    ret = crypto_ahash_init(areq);
    if (ret)
        return ret;

    req = ahash_request_ctx(areq);
    req->hmac = true;
    req->last_req = true;

    ret = crypto_ahash_update(areq);
    if (ret && ret != -EINPROGRESS && ret != -EBUSY)
        return ret;

    wait_for_completion_interruptible(&result.completion);
    if (result.error)
        return result.error;

    return crypto_ahash_export(areq, state);
}

static int __safexcel_hmac_setkey(const char *alg, const u8 *key,
                  unsigned int keylen,
                  void *istate, void *ostate)
{
    struct ahash_request *areq;
    struct crypto_ahash *tfm;
    unsigned int blocksize;
    u8 *ipad, *opad;
    int ret;

    tfm = crypto_alloc_ahash(alg, 0, 0);
    if (IS_ERR(tfm))
        return PTR_ERR(tfm);

    areq = ahash_request_alloc(tfm, GFP_KERNEL);
    if (!areq) {
        ret = -ENOMEM;
        goto free_ahash;
    }

    crypto_ahash_clear_flags(tfm, ~0);
    blocksize = crypto_tfm_alg_blocksize(crypto_ahash_tfm(tfm));

    ipad = kcalloc(2, blocksize, GFP_KERNEL);
    if (!ipad) {
        ret = -ENOMEM;
        goto free_request;
    }

    opad = ipad + blocksize;

    ret = safexcel_hmac_init_pad(areq, blocksize, key, keylen, ipad, opad);
    if (ret)
        goto free_ipad;

    ret = safexcel_hmac_init_iv(areq, blocksize, ipad, istate);
    if (ret)
        goto free_ipad;

    ret = safexcel_hmac_init_iv(areq, blocksize, opad, ostate);

free_ipad:
    kfree(ipad);
free_request:
    ahash_request_free(areq);
free_ahash:
    crypto_free_ahash(tfm);

    return ret;
}

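/*
 * istate and ostate receive the exported partial hashes of one block of
 * key^ipad resp. key^opad; the engine later continues from these
 * precomputed inner/outer digests instead of hashing the padded key
 * itself.
 */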
int safexcel_hmac_setkey(struct safexcel_context *base, const u8 *key,
             unsigned int keylen, const char *alg,
             unsigned int state_sz)
{
    struct safexcel_crypto_priv *priv = base->priv;
    struct safexcel_ahash_export_state istate, ostate;
    int ret;

    ret = __safexcel_hmac_setkey(alg, key, keylen, &istate, &ostate);
    if (ret)
        return ret;

    if (priv->flags & EIP197_TRC_CACHE && base->ctxr &&
        (memcmp(&base->ipad, istate.state, state_sz) ||
         memcmp(&base->opad, ostate.state, state_sz)))
        base->needs_inv = true;

    memcpy(&base->ipad, &istate.state, state_sz);
    memcpy(&base->opad, &ostate.state, state_sz);

    return 0;
}

static int safexcel_hmac_alg_setkey(struct crypto_ahash *tfm, const u8 *key,
                    unsigned int keylen, const char *alg,
                    unsigned int state_sz)
{
    struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);

    return safexcel_hmac_setkey(&ctx->base, key, keylen, alg, state_sz);
}

static int safexcel_hmac_sha1_setkey(struct crypto_ahash *tfm, const u8 *key,
                     unsigned int keylen)
{
    return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sha1",
                    SHA1_DIGEST_SIZE);
}

struct safexcel_alg_template safexcel_alg_hmac_sha1 = {
    .type = SAFEXCEL_ALG_TYPE_AHASH,
    .algo_mask = SAFEXCEL_ALG_SHA1,
    .alg.ahash = {
        .init = safexcel_hmac_sha1_init,
        .update = safexcel_ahash_update,
        .final = safexcel_ahash_final,
        .finup = safexcel_ahash_finup,
        .digest = safexcel_hmac_sha1_digest,
        .setkey = safexcel_hmac_sha1_setkey,
        .export = safexcel_ahash_export,
        .import = safexcel_ahash_import,
        .halg = {
            .digestsize = SHA1_DIGEST_SIZE,
            .statesize = sizeof(struct safexcel_ahash_export_state),
            .base = {
                .cra_name = "hmac(sha1)",
                .cra_driver_name = "safexcel-hmac-sha1",
                .cra_priority = SAFEXCEL_CRA_PRIORITY,
                .cra_flags = CRYPTO_ALG_ASYNC |
                         CRYPTO_ALG_ALLOCATES_MEMORY |
                         CRYPTO_ALG_KERN_DRIVER_ONLY,
                .cra_blocksize = SHA1_BLOCK_SIZE,
                .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
                .cra_init = safexcel_ahash_cra_init,
                .cra_exit = safexcel_ahash_cra_exit,
                .cra_module = THIS_MODULE,
            },
        },
    },
};

static int safexcel_sha256_init(struct ahash_request *areq)
{
    struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
    struct safexcel_ahash_req *req = ahash_request_ctx(areq);

    memset(req, 0, sizeof(*req));

    ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA256;
    req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
    req->state_sz = SHA256_DIGEST_SIZE;
    req->digest_sz = SHA256_DIGEST_SIZE;
    req->block_sz = SHA256_BLOCK_SIZE;

    return 0;
}

static int safexcel_sha256_digest(struct ahash_request *areq)
{
    int ret = safexcel_sha256_init(areq);

    if (ret)
        return ret;

    return safexcel_ahash_finup(areq);
}

struct safexcel_alg_template safexcel_alg_sha256 = {
    .type = SAFEXCEL_ALG_TYPE_AHASH,
    .algo_mask = SAFEXCEL_ALG_SHA2_256,
    .alg.ahash = {
        .init = safexcel_sha256_init,
        .update = safexcel_ahash_update,
        .final = safexcel_ahash_final,
        .finup = safexcel_ahash_finup,
        .digest = safexcel_sha256_digest,
        .export = safexcel_ahash_export,
        .import = safexcel_ahash_import,
        .halg = {
            .digestsize = SHA256_DIGEST_SIZE,
            .statesize = sizeof(struct safexcel_ahash_export_state),
            .base = {
                .cra_name = "sha256",
                .cra_driver_name = "safexcel-sha256",
                .cra_priority = SAFEXCEL_CRA_PRIORITY,
                .cra_flags = CRYPTO_ALG_ASYNC |
                         CRYPTO_ALG_ALLOCATES_MEMORY |
                         CRYPTO_ALG_KERN_DRIVER_ONLY,
                .cra_blocksize = SHA256_BLOCK_SIZE,
                .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
                .cra_init = safexcel_ahash_cra_init,
                .cra_exit = safexcel_ahash_cra_exit,
                .cra_module = THIS_MODULE,
            },
        },
    },
};

static int safexcel_sha224_init(struct ahash_request *areq)
{
    struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
    struct safexcel_ahash_req *req = ahash_request_ctx(areq);

    memset(req, 0, sizeof(*req));

    ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA224;
    req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
    req->state_sz = SHA256_DIGEST_SIZE;
    req->digest_sz = SHA256_DIGEST_SIZE;
    req->block_sz = SHA256_BLOCK_SIZE;

    return 0;
}

static int safexcel_sha224_digest(struct ahash_request *areq)
{
    int ret = safexcel_sha224_init(areq);

    if (ret)
        return ret;

    return safexcel_ahash_finup(areq);
}

struct safexcel_alg_template safexcel_alg_sha224 = {
    .type = SAFEXCEL_ALG_TYPE_AHASH,
    .algo_mask = SAFEXCEL_ALG_SHA2_256,
    .alg.ahash = {
        .init = safexcel_sha224_init,
        .update = safexcel_ahash_update,
        .final = safexcel_ahash_final,
        .finup = safexcel_ahash_finup,
        .digest = safexcel_sha224_digest,
        .export = safexcel_ahash_export,
        .import = safexcel_ahash_import,
        .halg = {
            .digestsize = SHA224_DIGEST_SIZE,
            .statesize = sizeof(struct safexcel_ahash_export_state),
            .base = {
                .cra_name = "sha224",
                .cra_driver_name = "safexcel-sha224",
                .cra_priority = SAFEXCEL_CRA_PRIORITY,
                .cra_flags = CRYPTO_ALG_ASYNC |
                         CRYPTO_ALG_ALLOCATES_MEMORY |
                         CRYPTO_ALG_KERN_DRIVER_ONLY,
                .cra_blocksize = SHA224_BLOCK_SIZE,
                .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
                .cra_init = safexcel_ahash_cra_init,
                .cra_exit = safexcel_ahash_cra_exit,
                .cra_module = THIS_MODULE,
            },
        },
    },
};

static int safexcel_hmac_sha224_setkey(struct crypto_ahash *tfm, const u8 *key,
                       unsigned int keylen)
{
    return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sha224",
                    SHA256_DIGEST_SIZE);
}

static int safexcel_hmac_sha224_init(struct ahash_request *areq)
{
    struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
    struct safexcel_ahash_req *req = ahash_request_ctx(areq);

    memset(req, 0, sizeof(*req));

    /* Start from ipad precompute */
    memcpy(req->state, &ctx->base.ipad, SHA256_DIGEST_SIZE);
    /* Already processed the key^ipad part now! */
    req->len    = SHA256_BLOCK_SIZE;
    req->processed  = SHA256_BLOCK_SIZE;

    ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA224;
    req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
    req->state_sz = SHA256_DIGEST_SIZE;
    req->digest_sz = SHA256_DIGEST_SIZE;
    req->block_sz = SHA256_BLOCK_SIZE;
    req->hmac = true;

    return 0;
}

static int safexcel_hmac_sha224_digest(struct ahash_request *areq)
{
    int ret = safexcel_hmac_sha224_init(areq);

    if (ret)
        return ret;

    return safexcel_ahash_finup(areq);
}

struct safexcel_alg_template safexcel_alg_hmac_sha224 = {
    .type = SAFEXCEL_ALG_TYPE_AHASH,
    .algo_mask = SAFEXCEL_ALG_SHA2_256,
    .alg.ahash = {
        .init = safexcel_hmac_sha224_init,
        .update = safexcel_ahash_update,
        .final = safexcel_ahash_final,
        .finup = safexcel_ahash_finup,
        .digest = safexcel_hmac_sha224_digest,
        .setkey = safexcel_hmac_sha224_setkey,
        .export = safexcel_ahash_export,
        .import = safexcel_ahash_import,
        .halg = {
            .digestsize = SHA224_DIGEST_SIZE,
            .statesize = sizeof(struct safexcel_ahash_export_state),
            .base = {
                .cra_name = "hmac(sha224)",
                .cra_driver_name = "safexcel-hmac-sha224",
                .cra_priority = SAFEXCEL_CRA_PRIORITY,
                .cra_flags = CRYPTO_ALG_ASYNC |
                         CRYPTO_ALG_ALLOCATES_MEMORY |
                         CRYPTO_ALG_KERN_DRIVER_ONLY,
                .cra_blocksize = SHA224_BLOCK_SIZE,
                .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
                .cra_init = safexcel_ahash_cra_init,
                .cra_exit = safexcel_ahash_cra_exit,
                .cra_module = THIS_MODULE,
            },
        },
    },
};

static int safexcel_hmac_sha256_setkey(struct crypto_ahash *tfm, const u8 *key,
                     unsigned int keylen)
{
    return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sha256",
                    SHA256_DIGEST_SIZE);
}

static int safexcel_hmac_sha256_init(struct ahash_request *areq)
{
    struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
    struct safexcel_ahash_req *req = ahash_request_ctx(areq);

    memset(req, 0, sizeof(*req));

    /* Start from ipad precompute */
    memcpy(req->state, &ctx->base.ipad, SHA256_DIGEST_SIZE);
    /* Already processed the key^ipad part now! */
    req->len    = SHA256_BLOCK_SIZE;
    req->processed  = SHA256_BLOCK_SIZE;

    ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA256;
    req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
    req->state_sz = SHA256_DIGEST_SIZE;
    req->digest_sz = SHA256_DIGEST_SIZE;
    req->block_sz = SHA256_BLOCK_SIZE;
    req->hmac = true;

    return 0;
}

static int safexcel_hmac_sha256_digest(struct ahash_request *areq)
{
    int ret = safexcel_hmac_sha256_init(areq);

    if (ret)
        return ret;

    return safexcel_ahash_finup(areq);
}

struct safexcel_alg_template safexcel_alg_hmac_sha256 = {
    .type = SAFEXCEL_ALG_TYPE_AHASH,
    .algo_mask = SAFEXCEL_ALG_SHA2_256,
    .alg.ahash = {
        .init = safexcel_hmac_sha256_init,
        .update = safexcel_ahash_update,
        .final = safexcel_ahash_final,
        .finup = safexcel_ahash_finup,
        .digest = safexcel_hmac_sha256_digest,
        .setkey = safexcel_hmac_sha256_setkey,
        .export = safexcel_ahash_export,
        .import = safexcel_ahash_import,
        .halg = {
            .digestsize = SHA256_DIGEST_SIZE,
            .statesize = sizeof(struct safexcel_ahash_export_state),
            .base = {
                .cra_name = "hmac(sha256)",
                .cra_driver_name = "safexcel-hmac-sha256",
                .cra_priority = SAFEXCEL_CRA_PRIORITY,
                .cra_flags = CRYPTO_ALG_ASYNC |
                         CRYPTO_ALG_ALLOCATES_MEMORY |
                         CRYPTO_ALG_KERN_DRIVER_ONLY,
                .cra_blocksize = SHA256_BLOCK_SIZE,
                .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
                .cra_init = safexcel_ahash_cra_init,
                .cra_exit = safexcel_ahash_cra_exit,
                .cra_module = THIS_MODULE,
            },
        },
    },
};

static int safexcel_sha512_init(struct ahash_request *areq)
{
    struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
    struct safexcel_ahash_req *req = ahash_request_ctx(areq);

    memset(req, 0, sizeof(*req));

    ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA512;
    req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
    req->state_sz = SHA512_DIGEST_SIZE;
    req->digest_sz = SHA512_DIGEST_SIZE;
    req->block_sz = SHA512_BLOCK_SIZE;

    return 0;
}

static int safexcel_sha512_digest(struct ahash_request *areq)
{
    int ret = safexcel_sha512_init(areq);

    if (ret)
        return ret;

    return safexcel_ahash_finup(areq);
}

struct safexcel_alg_template safexcel_alg_sha512 = {
    .type = SAFEXCEL_ALG_TYPE_AHASH,
    .algo_mask = SAFEXCEL_ALG_SHA2_512,
    .alg.ahash = {
        .init = safexcel_sha512_init,
        .update = safexcel_ahash_update,
        .final = safexcel_ahash_final,
        .finup = safexcel_ahash_finup,
        .digest = safexcel_sha512_digest,
        .export = safexcel_ahash_export,
        .import = safexcel_ahash_import,
        .halg = {
            .digestsize = SHA512_DIGEST_SIZE,
            .statesize = sizeof(struct safexcel_ahash_export_state),
            .base = {
                .cra_name = "sha512",
                .cra_driver_name = "safexcel-sha512",
                .cra_priority = SAFEXCEL_CRA_PRIORITY,
                .cra_flags = CRYPTO_ALG_ASYNC |
                         CRYPTO_ALG_ALLOCATES_MEMORY |
                         CRYPTO_ALG_KERN_DRIVER_ONLY,
                .cra_blocksize = SHA512_BLOCK_SIZE,
                .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
                .cra_init = safexcel_ahash_cra_init,
                .cra_exit = safexcel_ahash_cra_exit,
                .cra_module = THIS_MODULE,
            },
        },
    },
};

static int safexcel_sha384_init(struct ahash_request *areq)
{
    struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
    struct safexcel_ahash_req *req = ahash_request_ctx(areq);

    memset(req, 0, sizeof(*req));

    ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA384;
    req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
    req->state_sz = SHA512_DIGEST_SIZE;
    req->digest_sz = SHA512_DIGEST_SIZE;
    req->block_sz = SHA512_BLOCK_SIZE;

    return 0;
}

static int safexcel_sha384_digest(struct ahash_request *areq)
{
    int ret = safexcel_sha384_init(areq);

    if (ret)
        return ret;

    return safexcel_ahash_finup(areq);
}

struct safexcel_alg_template safexcel_alg_sha384 = {
    .type = SAFEXCEL_ALG_TYPE_AHASH,
    .algo_mask = SAFEXCEL_ALG_SHA2_512,
    .alg.ahash = {
        .init = safexcel_sha384_init,
        .update = safexcel_ahash_update,
        .final = safexcel_ahash_final,
        .finup = safexcel_ahash_finup,
        .digest = safexcel_sha384_digest,
        .export = safexcel_ahash_export,
        .import = safexcel_ahash_import,
        .halg = {
            .digestsize = SHA384_DIGEST_SIZE,
            .statesize = sizeof(struct safexcel_ahash_export_state),
            .base = {
                .cra_name = "sha384",
                .cra_driver_name = "safexcel-sha384",
                .cra_priority = SAFEXCEL_CRA_PRIORITY,
                .cra_flags = CRYPTO_ALG_ASYNC |
                         CRYPTO_ALG_ALLOCATES_MEMORY |
                         CRYPTO_ALG_KERN_DRIVER_ONLY,
                .cra_blocksize = SHA384_BLOCK_SIZE,
                .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
                .cra_init = safexcel_ahash_cra_init,
                .cra_exit = safexcel_ahash_cra_exit,
                .cra_module = THIS_MODULE,
            },
        },
    },
};

static int safexcel_hmac_sha512_setkey(struct crypto_ahash *tfm, const u8 *key,
                       unsigned int keylen)
{
    return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sha512",
1640                     SHA512_DIGEST_SIZE);
1641 }
1642 
1643 static int safexcel_hmac_sha512_init(struct ahash_request *areq)
1644 {
1645     struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1646     struct safexcel_ahash_req *req = ahash_request_ctx(areq);
1647 
1648     memset(req, 0, sizeof(*req));
1649 
1650     /* Start from ipad precompute */
1651     memcpy(req->state, &ctx->base.ipad, SHA512_DIGEST_SIZE);
1652     /* Already processed the key^ipad part now! */
1653     req->len    = SHA512_BLOCK_SIZE;
1654     req->processed  = SHA512_BLOCK_SIZE;
1655 
1656     ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA512;
1657     req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1658     req->state_sz = SHA512_DIGEST_SIZE;
1659     req->digest_sz = SHA512_DIGEST_SIZE;
1660     req->block_sz = SHA512_BLOCK_SIZE;
1661     req->hmac = true;
1662 
1663     return 0;
1664 }
1665 
1666 static int safexcel_hmac_sha512_digest(struct ahash_request *areq)
1667 {
1668     int ret = safexcel_hmac_sha512_init(areq);
1669 
1670     if (ret)
1671         return ret;
1672 
1673     return safexcel_ahash_finup(areq);
1674 }
1675 
1676 struct safexcel_alg_template safexcel_alg_hmac_sha512 = {
1677     .type = SAFEXCEL_ALG_TYPE_AHASH,
1678     .algo_mask = SAFEXCEL_ALG_SHA2_512,
1679     .alg.ahash = {
1680         .init = safexcel_hmac_sha512_init,
1681         .update = safexcel_ahash_update,
1682         .final = safexcel_ahash_final,
1683         .finup = safexcel_ahash_finup,
1684         .digest = safexcel_hmac_sha512_digest,
1685         .setkey = safexcel_hmac_sha512_setkey,
1686         .export = safexcel_ahash_export,
1687         .import = safexcel_ahash_import,
1688         .halg = {
1689             .digestsize = SHA512_DIGEST_SIZE,
1690             .statesize = sizeof(struct safexcel_ahash_export_state),
1691             .base = {
1692                 .cra_name = "hmac(sha512)",
1693                 .cra_driver_name = "safexcel-hmac-sha512",
1694                 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1695                 .cra_flags = CRYPTO_ALG_ASYNC |
1696                          CRYPTO_ALG_ALLOCATES_MEMORY |
1697                          CRYPTO_ALG_KERN_DRIVER_ONLY,
1698                 .cra_blocksize = SHA512_BLOCK_SIZE,
1699                 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1700                 .cra_init = safexcel_ahash_cra_init,
1701                 .cra_exit = safexcel_ahash_cra_exit,
1702                 .cra_module = THIS_MODULE,
1703             },
1704         },
1705     },
1706 };
1707 
1708 static int safexcel_hmac_sha384_setkey(struct crypto_ahash *tfm, const u8 *key,
1709                        unsigned int keylen)
1710 {
1711     return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sha384",
1712                     SHA512_DIGEST_SIZE);
1713 }
1714 
1715 static int safexcel_hmac_sha384_init(struct ahash_request *areq)
1716 {
1717     struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1718     struct safexcel_ahash_req *req = ahash_request_ctx(areq);
1719 
1720     memset(req, 0, sizeof(*req));
1721 
1722     /* Start from ipad precompute */
1723     memcpy(req->state, &ctx->base.ipad, SHA512_DIGEST_SIZE);
1724     /* Already processed the key^ipad part now! */
1725     req->len    = SHA512_BLOCK_SIZE;
1726     req->processed  = SHA512_BLOCK_SIZE;
1727 
1728     ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA384;
1729     req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1730     req->state_sz = SHA512_DIGEST_SIZE;
1731     req->digest_sz = SHA512_DIGEST_SIZE;
1732     req->block_sz = SHA512_BLOCK_SIZE;
1733     req->hmac = true;
1734 
1735     return 0;
1736 }
1737 
1738 static int safexcel_hmac_sha384_digest(struct ahash_request *areq)
1739 {
1740     int ret = safexcel_hmac_sha384_init(areq);
1741 
1742     if (ret)
1743         return ret;
1744 
1745     return safexcel_ahash_finup(areq);
1746 }
1747 
1748 struct safexcel_alg_template safexcel_alg_hmac_sha384 = {
1749     .type = SAFEXCEL_ALG_TYPE_AHASH,
1750     .algo_mask = SAFEXCEL_ALG_SHA2_512,
1751     .alg.ahash = {
1752         .init = safexcel_hmac_sha384_init,
1753         .update = safexcel_ahash_update,
1754         .final = safexcel_ahash_final,
1755         .finup = safexcel_ahash_finup,
1756         .digest = safexcel_hmac_sha384_digest,
1757         .setkey = safexcel_hmac_sha384_setkey,
1758         .export = safexcel_ahash_export,
1759         .import = safexcel_ahash_import,
1760         .halg = {
1761             .digestsize = SHA384_DIGEST_SIZE,
1762             .statesize = sizeof(struct safexcel_ahash_export_state),
1763             .base = {
1764                 .cra_name = "hmac(sha384)",
1765                 .cra_driver_name = "safexcel-hmac-sha384",
1766                 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1767                 .cra_flags = CRYPTO_ALG_ASYNC |
1768                          CRYPTO_ALG_ALLOCATES_MEMORY |
1769                          CRYPTO_ALG_KERN_DRIVER_ONLY,
1770                 .cra_blocksize = SHA384_BLOCK_SIZE,
1771                 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1772                 .cra_init = safexcel_ahash_cra_init,
1773                 .cra_exit = safexcel_ahash_cra_exit,
1774                 .cra_module = THIS_MODULE,
1775             },
1776         },
1777     },
1778 };
1779 
1780 static int safexcel_md5_init(struct ahash_request *areq)
1781 {
1782     struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1783     struct safexcel_ahash_req *req = ahash_request_ctx(areq);
1784 
1785     memset(req, 0, sizeof(*req));
1786 
1787     ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_MD5;
1788     req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1789     req->state_sz = MD5_DIGEST_SIZE;
1790     req->digest_sz = MD5_DIGEST_SIZE;
1791     req->block_sz = MD5_HMAC_BLOCK_SIZE;
1792 
1793     return 0;
1794 }
1795 
1796 static int safexcel_md5_digest(struct ahash_request *areq)
1797 {
1798     int ret = safexcel_md5_init(areq);
1799 
1800     if (ret)
1801         return ret;
1802 
1803     return safexcel_ahash_finup(areq);
1804 }
1805 
1806 struct safexcel_alg_template safexcel_alg_md5 = {
1807     .type = SAFEXCEL_ALG_TYPE_AHASH,
1808     .algo_mask = SAFEXCEL_ALG_MD5,
1809     .alg.ahash = {
1810         .init = safexcel_md5_init,
1811         .update = safexcel_ahash_update,
1812         .final = safexcel_ahash_final,
1813         .finup = safexcel_ahash_finup,
1814         .digest = safexcel_md5_digest,
1815         .export = safexcel_ahash_export,
1816         .import = safexcel_ahash_import,
1817         .halg = {
1818             .digestsize = MD5_DIGEST_SIZE,
1819             .statesize = sizeof(struct safexcel_ahash_export_state),
1820             .base = {
1821                 .cra_name = "md5",
1822                 .cra_driver_name = "safexcel-md5",
1823                 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1824                 .cra_flags = CRYPTO_ALG_ASYNC |
1825                          CRYPTO_ALG_ALLOCATES_MEMORY |
1826                          CRYPTO_ALG_KERN_DRIVER_ONLY,
1827                 .cra_blocksize = MD5_HMAC_BLOCK_SIZE,
1828                 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1829                 .cra_init = safexcel_ahash_cra_init,
1830                 .cra_exit = safexcel_ahash_cra_exit,
1831                 .cra_module = THIS_MODULE,
1832             },
1833         },
1834     },
1835 };
1836 
1837 static int safexcel_hmac_md5_init(struct ahash_request *areq)
1838 {
1839     struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1840     struct safexcel_ahash_req *req = ahash_request_ctx(areq);
1841 
1842     memset(req, 0, sizeof(*req));
1843 
1844     /* Start from ipad precompute */
1845     memcpy(req->state, &ctx->base.ipad, MD5_DIGEST_SIZE);
1846     /* Already processed the key^ipad part now! */
1847     req->len    = MD5_HMAC_BLOCK_SIZE;
1848     req->processed  = MD5_HMAC_BLOCK_SIZE;
1849 
1850     ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_MD5;
1851     req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1852     req->state_sz = MD5_DIGEST_SIZE;
1853     req->digest_sz = MD5_DIGEST_SIZE;
1854     req->block_sz = MD5_HMAC_BLOCK_SIZE;
1855     req->len_is_le = true; /* MD5 is little endian! ... */
1856     req->hmac = true;
1857 
1858     return 0;
1859 }
1860 
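/*
 * Editorial note on len_is_le above: MD5 (RFC 1321) appends the 64-bit
 * message bit count little-endian in its final padding block, whereas the
 * SHA family appends it big-endian, so the code building the final block
 * must know which byte order to emit. For a 3-byte message (24 = 0x18
 * bits) the length field is 18 00 00 00 00 00 00 00 for MD5 but
 * 00 00 00 00 00 00 00 18 for SHA-1/SHA-2.
 */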
1861 static int safexcel_hmac_md5_setkey(struct crypto_ahash *tfm, const u8 *key,
1862                      unsigned int keylen)
1863 {
1864     return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-md5",
1865                     MD5_DIGEST_SIZE);
1866 }
1867 
1868 static int safexcel_hmac_md5_digest(struct ahash_request *areq)
1869 {
1870     int ret = safexcel_hmac_md5_init(areq);
1871 
1872     if (ret)
1873         return ret;
1874 
1875     return safexcel_ahash_finup(areq);
1876 }
1877 
1878 struct safexcel_alg_template safexcel_alg_hmac_md5 = {
1879     .type = SAFEXCEL_ALG_TYPE_AHASH,
1880     .algo_mask = SAFEXCEL_ALG_MD5,
1881     .alg.ahash = {
1882         .init = safexcel_hmac_md5_init,
1883         .update = safexcel_ahash_update,
1884         .final = safexcel_ahash_final,
1885         .finup = safexcel_ahash_finup,
1886         .digest = safexcel_hmac_md5_digest,
1887         .setkey = safexcel_hmac_md5_setkey,
1888         .export = safexcel_ahash_export,
1889         .import = safexcel_ahash_import,
1890         .halg = {
1891             .digestsize = MD5_DIGEST_SIZE,
1892             .statesize = sizeof(struct safexcel_ahash_export_state),
1893             .base = {
1894                 .cra_name = "hmac(md5)",
1895                 .cra_driver_name = "safexcel-hmac-md5",
1896                 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1897                 .cra_flags = CRYPTO_ALG_ASYNC |
1898                          CRYPTO_ALG_ALLOCATES_MEMORY |
1899                          CRYPTO_ALG_KERN_DRIVER_ONLY,
1900                 .cra_blocksize = MD5_HMAC_BLOCK_SIZE,
1901                 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1902                 .cra_init = safexcel_ahash_cra_init,
1903                 .cra_exit = safexcel_ahash_cra_exit,
1904                 .cra_module = THIS_MODULE,
1905             },
1906         },
1907     },
1908 };
1909 
1910 static int safexcel_crc32_cra_init(struct crypto_tfm *tfm)
1911 {
1912     struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
1913     int ret = safexcel_ahash_cra_init(tfm);
1914 
1915     /* Default 'key' is all zeroes */
1916     memset(&ctx->base.ipad, 0, sizeof(u32));
1917     return ret;
1918 }
1919 
1920 static int safexcel_crc32_init(struct ahash_request *areq)
1921 {
1922     struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1923     struct safexcel_ahash_req *req = ahash_request_ctx(areq);
1924 
1925     memset(req, 0, sizeof(*req));
1926 
1927     /* Start from loaded key */
1928     req->state[0]   = cpu_to_le32(~ctx->base.ipad.word[0]);
1929     /* Set processed to non-zero to enable invalidation detection */
1930     req->len    = sizeof(u32);
1931     req->processed  = sizeof(u32);
1932 
1933     ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_CRC32;
1934     req->digest = CONTEXT_CONTROL_DIGEST_XCM;
1935     req->state_sz = sizeof(u32);
1936     req->digest_sz = sizeof(u32);
1937     req->block_sz = sizeof(u32);
1938 
1939     return 0;
1940 }
1941 
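/*
 * Editor's illustration, not driver code: why the loaded key is inverted.
 * A conventional CRC-32 (IEEE 802.3) implementation runs its shift
 * register starting from ~seed and complements the result on output, so
 * the all-zeroes default key corresponds to the familiar 0xFFFFFFFF
 * initial register value. A minimal bitwise reference using the common
 * reflected polynomial 0xEDB88320, shown for comparison only:
 */
#include <stddef.h>
#include <stdint.h>

static uint32_t crc32_ref(uint32_t seed, const uint8_t *p, size_t len)
{
    uint32_t crc = ~seed;   /* mirrors req->state[0] = ~ipad above */
    int i;

    while (len--) {
        crc ^= *p++;
        for (i = 0; i < 8; i++)
            crc = (crc & 1) ? (crc >> 1) ^ 0xEDB88320 : crc >> 1;
    }
    return ~crc;            /* conventional final complement */
}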
1942 static int safexcel_crc32_setkey(struct crypto_ahash *tfm, const u8 *key,
1943                  unsigned int keylen)
1944 {
1945     struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
1946 
1947     if (keylen != sizeof(u32))
1948         return -EINVAL;
1949 
1950     memcpy(&ctx->base.ipad, key, sizeof(u32));
1951     return 0;
1952 }
1953 
1954 static int safexcel_crc32_digest(struct ahash_request *areq)
1955 {
1956     return safexcel_crc32_init(areq) ?: safexcel_ahash_finup(areq);
1957 }
1958 
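/*
 * Editorial note: "x ?: y" is the GNU C conditional with omitted middle
 * operand (the "elvis" operator), equivalent to x ? x : y with x
 * evaluated only once. Since the init helpers return 0 or a negative
 * errno, this runs safexcel_ahash_finup() only when initialization
 * succeeded and propagates the error code otherwise. The same idiom
 * recurs in the digest entry points below.
 */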
1959 struct safexcel_alg_template safexcel_alg_crc32 = {
1960     .type = SAFEXCEL_ALG_TYPE_AHASH,
1961     .algo_mask = 0,
1962     .alg.ahash = {
1963         .init = safexcel_crc32_init,
1964         .update = safexcel_ahash_update,
1965         .final = safexcel_ahash_final,
1966         .finup = safexcel_ahash_finup,
1967         .digest = safexcel_crc32_digest,
1968         .setkey = safexcel_crc32_setkey,
1969         .export = safexcel_ahash_export,
1970         .import = safexcel_ahash_import,
1971         .halg = {
1972             .digestsize = sizeof(u32),
1973             .statesize = sizeof(struct safexcel_ahash_export_state),
1974             .base = {
1975                 .cra_name = "crc32",
1976                 .cra_driver_name = "safexcel-crc32",
1977                 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1978                 .cra_flags = CRYPTO_ALG_OPTIONAL_KEY |
1979                          CRYPTO_ALG_ASYNC |
1980                          CRYPTO_ALG_ALLOCATES_MEMORY |
1981                          CRYPTO_ALG_KERN_DRIVER_ONLY,
1982                 .cra_blocksize = 1,
1983                 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1984                 .cra_init = safexcel_crc32_cra_init,
1985                 .cra_exit = safexcel_ahash_cra_exit,
1986                 .cra_module = THIS_MODULE,
1987             },
1988         },
1989     },
1990 };
1991 
1992 static int safexcel_cbcmac_init(struct ahash_request *areq)
1993 {
1994     struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1995     struct safexcel_ahash_req *req = ahash_request_ctx(areq);
1996 
1997     memset(req, 0, sizeof(*req));
1998 
1999     /* Start from loaded keys */
2000     memcpy(req->state, &ctx->base.ipad, ctx->key_sz);
2001     /* Set processed to non-zero to enable invalidation detection */
2002     req->len    = AES_BLOCK_SIZE;
2003     req->processed  = AES_BLOCK_SIZE;
2004 
2005     req->digest   = CONTEXT_CONTROL_DIGEST_XCM;
2006     req->state_sz = ctx->key_sz;
2007     req->digest_sz = AES_BLOCK_SIZE;
2008     req->block_sz = AES_BLOCK_SIZE;
2009     req->xcbcmac  = true;
2010 
2011     return 0;
2012 }
2013 
2014 static int safexcel_cbcmac_setkey(struct crypto_ahash *tfm, const u8 *key,
2015                  unsigned int len)
2016 {
2017     struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
2018     struct crypto_aes_ctx aes;
2019     int ret, i;
2020 
2021     ret = aes_expandkey(&aes, key, len);
2022     if (ret)
2023         return ret;
2024 
2025     memset(&ctx->base.ipad, 0, 2 * AES_BLOCK_SIZE);
2026     for (i = 0; i < len / sizeof(u32); i++)
2027         ctx->base.ipad.be[i + 8] = cpu_to_be32(aes.key_enc[i]);
2028 
2029     if (len == AES_KEYSIZE_192) {
2030         ctx->alg    = CONTEXT_CONTROL_CRYPTO_ALG_XCBC192;
2031         ctx->key_sz = AES_MAX_KEY_SIZE + 2 * AES_BLOCK_SIZE;
2032     } else if (len == AES_KEYSIZE_256) {
2033         ctx->alg    = CONTEXT_CONTROL_CRYPTO_ALG_XCBC256;
2034         ctx->key_sz = AES_MAX_KEY_SIZE + 2 * AES_BLOCK_SIZE;
2035     } else {
2036         ctx->alg    = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;
2037         ctx->key_sz = AES_MIN_KEY_SIZE + 2 * AES_BLOCK_SIZE;
2038     }
2039     ctx->cbcmac  = true;
2040 
2041     memzero_explicit(&aes, sizeof(aes));
2042     return 0;
2043 }
2044 
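/*
 * Editor's illustration, not driver code: plain CBC-MAC is AES-CBC with a
 * zero IV, keeping only the last ciphertext block. The context image
 * built by the setkey above leaves 2 * AES_BLOCK_SIZE of zeroes in front
 * of the key so the same layout can also carry the XCBC/CMAC subkeys
 * (see the setkey handlers below). A sketch of the underlying MAC,
 * assuming OpenSSL's low-level AES primitives purely for demonstration;
 * padding of partial final blocks is left out:
 */
#include <openssl/aes.h>
#include <string.h>

/* msg must be a whole number of 16-byte blocks here */
static void cbcmac_ref(const AES_KEY *key, const unsigned char *msg,
                       size_t nblocks, unsigned char mac[AES_BLOCK_SIZE])
{
    unsigned char c[AES_BLOCK_SIZE] = { 0 };    /* zero IV */
    size_t i, j;

    for (i = 0; i < nblocks; i++) {
        for (j = 0; j < AES_BLOCK_SIZE; j++)
            c[j] ^= msg[i * AES_BLOCK_SIZE + j];
        AES_encrypt(c, c, key);     /* chain through the block cipher */
    }
    memcpy(mac, c, AES_BLOCK_SIZE); /* MAC = last CBC output block */
}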
2045 static int safexcel_cbcmac_digest(struct ahash_request *areq)
2046 {
2047     return safexcel_cbcmac_init(areq) ?: safexcel_ahash_finup(areq);
2048 }
2049 
2050 struct safexcel_alg_template safexcel_alg_cbcmac = {
2051     .type = SAFEXCEL_ALG_TYPE_AHASH,
2052     .algo_mask = 0,
2053     .alg.ahash = {
2054         .init = safexcel_cbcmac_init,
2055         .update = safexcel_ahash_update,
2056         .final = safexcel_ahash_final,
2057         .finup = safexcel_ahash_finup,
2058         .digest = safexcel_cbcmac_digest,
2059         .setkey = safexcel_cbcmac_setkey,
2060         .export = safexcel_ahash_export,
2061         .import = safexcel_ahash_import,
2062         .halg = {
2063             .digestsize = AES_BLOCK_SIZE,
2064             .statesize = sizeof(struct safexcel_ahash_export_state),
2065             .base = {
2066                 .cra_name = "cbcmac(aes)",
2067                 .cra_driver_name = "safexcel-cbcmac-aes",
2068                 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2069                 .cra_flags = CRYPTO_ALG_ASYNC |
2070                          CRYPTO_ALG_ALLOCATES_MEMORY |
2071                          CRYPTO_ALG_KERN_DRIVER_ONLY,
2072                 .cra_blocksize = 1,
2073                 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2074                 .cra_init = safexcel_ahash_cra_init,
2075                 .cra_exit = safexcel_ahash_cra_exit,
2076                 .cra_module = THIS_MODULE,
2077             },
2078         },
2079     },
2080 };
2081 
2082 static int safexcel_xcbcmac_setkey(struct crypto_ahash *tfm, const u8 *key,
2083                  unsigned int len)
2084 {
2085     struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
2086     struct crypto_aes_ctx aes;
2087     u32 key_tmp[3 * AES_BLOCK_SIZE / sizeof(u32)];
2088     int ret, i;
2089 
2090     ret = aes_expandkey(&aes, key, len);
2091     if (ret)
2092         return ret;
2093 
2094     /* precompute the XCBC key material */
2095     crypto_cipher_clear_flags(ctx->kaes, CRYPTO_TFM_REQ_MASK);
2096     crypto_cipher_set_flags(ctx->kaes, crypto_ahash_get_flags(tfm) &
2097                 CRYPTO_TFM_REQ_MASK);
2098     ret = crypto_cipher_setkey(ctx->kaes, key, len);
2099     if (ret)
2100         return ret;
2101 
2102     crypto_cipher_encrypt_one(ctx->kaes, (u8 *)key_tmp + 2 * AES_BLOCK_SIZE,
2103         "\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1");
2104     crypto_cipher_encrypt_one(ctx->kaes, (u8 *)key_tmp,
2105         "\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2");
2106     crypto_cipher_encrypt_one(ctx->kaes, (u8 *)key_tmp + AES_BLOCK_SIZE,
2107         "\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3");
2108     for (i = 0; i < 3 * AES_BLOCK_SIZE / sizeof(u32); i++)
2109         ctx->base.ipad.word[i] = swab(key_tmp[i]);
2110 
2111     crypto_cipher_clear_flags(ctx->kaes, CRYPTO_TFM_REQ_MASK);
2112     crypto_cipher_set_flags(ctx->kaes, crypto_ahash_get_flags(tfm) &
2113                 CRYPTO_TFM_REQ_MASK);
2114     ret = crypto_cipher_setkey(ctx->kaes,
2115                    (u8 *)key_tmp + 2 * AES_BLOCK_SIZE,
2116                    AES_MIN_KEY_SIZE);
2117     if (ret)
2118         return ret;
2119 
2120     ctx->alg    = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;
2121     ctx->key_sz = AES_MIN_KEY_SIZE + 2 * AES_BLOCK_SIZE;
2122     ctx->cbcmac = false;
2123 
2124     memzero_explicit(&aes, sizeof(aes));
2125     return 0;
2126 }
2127 
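/*
 * Editorial note: the setkey above implements the RFC 3566 XCBC subkey
 * schedule:
 *   K1 = AES_K(0x01 repeated 16 times) - the CBC-MAC key, loaded into kaes
 *   K2 = AES_K(0x02 repeated 16 times) - XORed into a complete final block
 *   K3 = AES_K(0x03 repeated 16 times) - XORed into a padded final block
 * key_tmp[] is laid out as K2 | K3 | K1 so that K2 and K3 land in front
 * of the MAC key in the ipad buffer, matching the cbcmac context layout,
 * while swab() converts each 32-bit word to the byte order the engine
 * expects.
 */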
2128 static int safexcel_xcbcmac_cra_init(struct crypto_tfm *tfm)
2129 {
2130     struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
2131 
2132     safexcel_ahash_cra_init(tfm);
2133     ctx->kaes = crypto_alloc_cipher("aes", 0, 0);
2134     return PTR_ERR_OR_ZERO(ctx->kaes);
2135 }
2136 
2137 static void safexcel_xcbcmac_cra_exit(struct crypto_tfm *tfm)
2138 {
2139     struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
2140 
2141     crypto_free_cipher(ctx->kaes);
2142     safexcel_ahash_cra_exit(tfm);
2143 }
2144 
2145 struct safexcel_alg_template safexcel_alg_xcbcmac = {
2146     .type = SAFEXCEL_ALG_TYPE_AHASH,
2147     .algo_mask = 0,
2148     .alg.ahash = {
2149         .init = safexcel_cbcmac_init,
2150         .update = safexcel_ahash_update,
2151         .final = safexcel_ahash_final,
2152         .finup = safexcel_ahash_finup,
2153         .digest = safexcel_cbcmac_digest,
2154         .setkey = safexcel_xcbcmac_setkey,
2155         .export = safexcel_ahash_export,
2156         .import = safexcel_ahash_import,
2157         .halg = {
2158             .digestsize = AES_BLOCK_SIZE,
2159             .statesize = sizeof(struct safexcel_ahash_export_state),
2160             .base = {
2161                 .cra_name = "xcbc(aes)",
2162                 .cra_driver_name = "safexcel-xcbc-aes",
2163                 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2164                 .cra_flags = CRYPTO_ALG_ASYNC |
2165                          CRYPTO_ALG_ALLOCATES_MEMORY |
2166                          CRYPTO_ALG_KERN_DRIVER_ONLY,
2167                 .cra_blocksize = AES_BLOCK_SIZE,
2168                 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2169                 .cra_init = safexcel_xcbcmac_cra_init,
2170                 .cra_exit = safexcel_xcbcmac_cra_exit,
2171                 .cra_module = THIS_MODULE,
2172             },
2173         },
2174     },
2175 };
2176 
2177 static int safexcel_cmac_setkey(struct crypto_ahash *tfm, const u8 *key,
2178                 unsigned int len)
2179 {
2180     struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
2181     struct crypto_aes_ctx aes;
2182     __be64 consts[4];
2183     u64 _const[2];
2184     u8 msb_mask, gfmask;
2185     int ret, i;
2186 
2187     ret = aes_expandkey(&aes, key, len);
2188     if (ret)
2189         return ret;
2190 
2191     for (i = 0; i < len / sizeof(u32); i++)
2192         ctx->base.ipad.word[i + 8] = swab(aes.key_enc[i]);
2193 
2194     /* precompute the CMAC key material */
2195     crypto_cipher_clear_flags(ctx->kaes, CRYPTO_TFM_REQ_MASK);
2196     crypto_cipher_set_flags(ctx->kaes, crypto_ahash_get_flags(tfm) &
2197                 CRYPTO_TFM_REQ_MASK);
2198     ret = crypto_cipher_setkey(ctx->kaes, key, len);
2199     if (ret)
2200         return ret;
2201 
2202     /* code below borrowed from crypto/cmac.c */
2203     /* encrypt the zero block */
2204     memset(consts, 0, AES_BLOCK_SIZE);
2205     crypto_cipher_encrypt_one(ctx->kaes, (u8 *)consts, (u8 *)consts);
2206 
2207     gfmask = 0x87;
2208     _const[0] = be64_to_cpu(consts[1]);
2209     _const[1] = be64_to_cpu(consts[0]);
2210 
2211     /* gf(2^128) multiply zero-ciphertext with u and u^2 */
2212     for (i = 0; i < 4; i += 2) {
2213         msb_mask = ((s64)_const[1] >> 63) & gfmask;
2214         _const[1] = (_const[1] << 1) | (_const[0] >> 63);
2215         _const[0] = (_const[0] << 1) ^ msb_mask;
2216 
2217         consts[i + 0] = cpu_to_be64(_const[1]);
2218         consts[i + 1] = cpu_to_be64(_const[0]);
2219     }
2220     /* end of code borrowed from crypto/cmac.c */
2221 
2222     for (i = 0; i < 2 * AES_BLOCK_SIZE / sizeof(u32); i++)
2223         ctx->base.ipad.be[i] = cpu_to_be32(((u32 *)consts)[i]);
2224 
2225     if (len == AES_KEYSIZE_192) {
2226         ctx->alg    = CONTEXT_CONTROL_CRYPTO_ALG_XCBC192;
2227         ctx->key_sz = AES_MAX_KEY_SIZE + 2 * AES_BLOCK_SIZE;
2228     } else if (len == AES_KEYSIZE_256) {
2229         ctx->alg    = CONTEXT_CONTROL_CRYPTO_ALG_XCBC256;
2230         ctx->key_sz = AES_MAX_KEY_SIZE + 2 * AES_BLOCK_SIZE;
2231     } else {
2232         ctx->alg    = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;
2233         ctx->key_sz = AES_MIN_KEY_SIZE + 2 * AES_BLOCK_SIZE;
2234     }
2235     ctx->cbcmac = false;
2236 
2237     memzero_explicit(&aes, sizeof(aes));
2238     return 0;
2239 }
2240 
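/*
 * Editor's illustration, not driver code: the doubling loop above is the
 * NIST SP 800-38B (CMAC) subkey derivation. With L = AES_K(0^16), K1 is
 * L shifted left by one bit across all 128 bits, XORing 0x87 into the
 * low byte if a bit falls off the top (reduction modulo
 * x^128 + x^7 + x^2 + x + 1), and K2 is K1 doubled the same way. A
 * byte-wise reference of one doubling step:
 */
#include <stdint.h>

static void gf128_double(uint8_t out[16], const uint8_t in[16])
{
    uint8_t carry = in[0] >> 7; /* MSB that shifts out */
    int i;

    for (i = 0; i < 15; i++)
        out[i] = (uint8_t)((in[i] << 1) | (in[i + 1] >> 7));
    out[15] = (uint8_t)((in[15] << 1) ^ (carry ? 0x87 : 0x00));
}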
2241 struct safexcel_alg_template safexcel_alg_cmac = {
2242     .type = SAFEXCEL_ALG_TYPE_AHASH,
2243     .algo_mask = 0,
2244     .alg.ahash = {
2245         .init = safexcel_cbcmac_init,
2246         .update = safexcel_ahash_update,
2247         .final = safexcel_ahash_final,
2248         .finup = safexcel_ahash_finup,
2249         .digest = safexcel_cbcmac_digest,
2250         .setkey = safexcel_cmac_setkey,
2251         .export = safexcel_ahash_export,
2252         .import = safexcel_ahash_import,
2253         .halg = {
2254             .digestsize = AES_BLOCK_SIZE,
2255             .statesize = sizeof(struct safexcel_ahash_export_state),
2256             .base = {
2257                 .cra_name = "cmac(aes)",
2258                 .cra_driver_name = "safexcel-cmac-aes",
2259                 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2260                 .cra_flags = CRYPTO_ALG_ASYNC |
2261                          CRYPTO_ALG_ALLOCATES_MEMORY |
2262                          CRYPTO_ALG_KERN_DRIVER_ONLY,
2263                 .cra_blocksize = AES_BLOCK_SIZE,
2264                 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2265                 .cra_init = safexcel_xcbcmac_cra_init,
2266                 .cra_exit = safexcel_xcbcmac_cra_exit,
2267                 .cra_module = THIS_MODULE,
2268             },
2269         },
2270     },
2271 };
2272 
2273 static int safexcel_sm3_init(struct ahash_request *areq)
2274 {
2275     struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
2276     struct safexcel_ahash_req *req = ahash_request_ctx(areq);
2277 
2278     memset(req, 0, sizeof(*req));
2279 
2280     ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SM3;
2281     req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
2282     req->state_sz = SM3_DIGEST_SIZE;
2283     req->digest_sz = SM3_DIGEST_SIZE;
2284     req->block_sz = SM3_BLOCK_SIZE;
2285 
2286     return 0;
2287 }
2288 
2289 static int safexcel_sm3_digest(struct ahash_request *areq)
2290 {
2291     int ret = safexcel_sm3_init(areq);
2292 
2293     if (ret)
2294         return ret;
2295 
2296     return safexcel_ahash_finup(areq);
2297 }
2298 
2299 struct safexcel_alg_template safexcel_alg_sm3 = {
2300     .type = SAFEXCEL_ALG_TYPE_AHASH,
2301     .algo_mask = SAFEXCEL_ALG_SM3,
2302     .alg.ahash = {
2303         .init = safexcel_sm3_init,
2304         .update = safexcel_ahash_update,
2305         .final = safexcel_ahash_final,
2306         .finup = safexcel_ahash_finup,
2307         .digest = safexcel_sm3_digest,
2308         .export = safexcel_ahash_export,
2309         .import = safexcel_ahash_import,
2310         .halg = {
2311             .digestsize = SM3_DIGEST_SIZE,
2312             .statesize = sizeof(struct safexcel_ahash_export_state),
2313             .base = {
2314                 .cra_name = "sm3",
2315                 .cra_driver_name = "safexcel-sm3",
2316                 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2317                 .cra_flags = CRYPTO_ALG_ASYNC |
2318                          CRYPTO_ALG_ALLOCATES_MEMORY |
2319                          CRYPTO_ALG_KERN_DRIVER_ONLY,
2320                 .cra_blocksize = SM3_BLOCK_SIZE,
2321                 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2322                 .cra_init = safexcel_ahash_cra_init,
2323                 .cra_exit = safexcel_ahash_cra_exit,
2324                 .cra_module = THIS_MODULE,
2325             },
2326         },
2327     },
2328 };
2329 
2330 static int safexcel_hmac_sm3_setkey(struct crypto_ahash *tfm, const u8 *key,
2331                     unsigned int keylen)
2332 {
2333     return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sm3",
2334                     SM3_DIGEST_SIZE);
2335 }
2336 
2337 static int safexcel_hmac_sm3_init(struct ahash_request *areq)
2338 {
2339     struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
2340     struct safexcel_ahash_req *req = ahash_request_ctx(areq);
2341 
2342     memset(req, 0, sizeof(*req));
2343 
2344     /* Start from ipad precompute */
2345     memcpy(req->state, &ctx->base.ipad, SM3_DIGEST_SIZE);
2346     /* Already processed the key^ipad part now! */
2347     req->len    = SM3_BLOCK_SIZE;
2348     req->processed  = SM3_BLOCK_SIZE;
2349 
2350     ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SM3;
2351     req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
2352     req->state_sz = SM3_DIGEST_SIZE;
2353     req->digest_sz = SM3_DIGEST_SIZE;
2354     req->block_sz = SM3_BLOCK_SIZE;
2355     req->hmac = true;
2356 
2357     return 0;
2358 }
2359 
2360 static int safexcel_hmac_sm3_digest(struct ahash_request *areq)
2361 {
2362     int ret = safexcel_hmac_sm3_init(areq);
2363 
2364     if (ret)
2365         return ret;
2366 
2367     return safexcel_ahash_finup(areq);
2368 }
2369 
2370 struct safexcel_alg_template safexcel_alg_hmac_sm3 = {
2371     .type = SAFEXCEL_ALG_TYPE_AHASH,
2372     .algo_mask = SAFEXCEL_ALG_SM3,
2373     .alg.ahash = {
2374         .init = safexcel_hmac_sm3_init,
2375         .update = safexcel_ahash_update,
2376         .final = safexcel_ahash_final,
2377         .finup = safexcel_ahash_finup,
2378         .digest = safexcel_hmac_sm3_digest,
2379         .setkey = safexcel_hmac_sm3_setkey,
2380         .export = safexcel_ahash_export,
2381         .import = safexcel_ahash_import,
2382         .halg = {
2383             .digestsize = SM3_DIGEST_SIZE,
2384             .statesize = sizeof(struct safexcel_ahash_export_state),
2385             .base = {
2386                 .cra_name = "hmac(sm3)",
2387                 .cra_driver_name = "safexcel-hmac-sm3",
2388                 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2389                 .cra_flags = CRYPTO_ALG_ASYNC |
2390                          CRYPTO_ALG_ALLOCATES_MEMORY |
2391                          CRYPTO_ALG_KERN_DRIVER_ONLY,
2392                 .cra_blocksize = SM3_BLOCK_SIZE,
2393                 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2394                 .cra_init = safexcel_ahash_cra_init,
2395                 .cra_exit = safexcel_ahash_cra_exit,
2396                 .cra_module = THIS_MODULE,
2397             },
2398         },
2399     },
2400 };
2401 
2402 static int safexcel_sha3_224_init(struct ahash_request *areq)
2403 {
2404     struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
2405     struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2406     struct safexcel_ahash_req *req = ahash_request_ctx(areq);
2407 
2408     memset(req, 0, sizeof(*req));
2409 
2410     ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_224;
2411     req->digest = CONTEXT_CONTROL_DIGEST_INITIAL;
2412     req->state_sz = SHA3_224_DIGEST_SIZE;
2413     req->digest_sz = SHA3_224_DIGEST_SIZE;
2414     req->block_sz = SHA3_224_BLOCK_SIZE;
2415     ctx->do_fallback = false;
2416     ctx->fb_init_done = false;
2417     return 0;
2418 }
2419 
2420 static int safexcel_sha3_fbcheck(struct ahash_request *req)
2421 {
2422     struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
2423     struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2424     struct ahash_request *subreq = ahash_request_ctx(req);
2425     int ret = 0;
2426 
2427     if (ctx->do_fallback) {
2428         ahash_request_set_tfm(subreq, ctx->fback);
2429         ahash_request_set_callback(subreq, req->base.flags,
2430                        req->base.complete, req->base.data);
2431         ahash_request_set_crypt(subreq, req->src, req->result,
2432                     req->nbytes);
2433         if (!ctx->fb_init_done) {
2434             if (ctx->fb_do_setkey) {
2435                 /* Set the fallback hash's HMAC key */
2436                 u8 key[SHA3_224_BLOCK_SIZE];
2437 
2438                 memcpy(key, &ctx->base.ipad,
2439                        crypto_ahash_blocksize(ctx->fback) / 2);
2440                 memcpy(key +
2441                        crypto_ahash_blocksize(ctx->fback) / 2,
2442                        &ctx->base.opad,
2443                        crypto_ahash_blocksize(ctx->fback) / 2);
2444                 ret = crypto_ahash_setkey(ctx->fback, key,
2445                     crypto_ahash_blocksize(ctx->fback));
2446                 memzero_explicit(key,
2447                     crypto_ahash_blocksize(ctx->fback));
2448                 ctx->fb_do_setkey = false;
2449             }
2450             ret = ret ?: crypto_ahash_init(subreq);
2451             ctx->fb_init_done = true;
2452         }
2453     }
2454     return ret;
2455 }
2456 
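/*
 * Editorial note: the fallback transform is configured lazily, at the
 * first request that actually needs it. fb_do_setkey replays the key
 * material captured by the most recent setkey() into the fallback ahash
 * (ipad and opad each hold half a block, see the HMAC setkey below), and
 * fb_init_done makes sure crypto_ahash_init() runs exactly once per
 * request before any update/final/finup is issued on the subrequest.
 */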
2457 static int safexcel_sha3_update(struct ahash_request *req)
2458 {
2459     struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
2460     struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2461     struct ahash_request *subreq = ahash_request_ctx(req);
2462 
2463     ctx->do_fallback = true;
2464     return safexcel_sha3_fbcheck(req) ?: crypto_ahash_update(subreq);
2465 }
2466 
2467 static int safexcel_sha3_final(struct ahash_request *req)
2468 {
2469     struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
2470     struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2471     struct ahash_request *subreq = ahash_request_ctx(req);
2472 
2473     ctx->do_fallback = true;
2474     return safexcel_sha3_fbcheck(req) ?: crypto_ahash_final(subreq);
2475 }
2476 
2477 static int safexcel_sha3_finup(struct ahash_request *req)
2478 {
2479     struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
2480     struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2481     struct ahash_request *subreq = ahash_request_ctx(req);
2482 
2483     ctx->do_fallback |= !req->nbytes;
2484     if (ctx->do_fallback)
2485         /* An update or export/import happened, or zero length: cannot use the HW */
2486         return safexcel_sha3_fbcheck(req) ?:
2487                crypto_ahash_finup(subreq);
2488     else
2489         return safexcel_ahash_finup(req);
2490 }
2491 
2492 static int safexcel_sha3_digest_fallback(struct ahash_request *req)
2493 {
2494     struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
2495     struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2496     struct ahash_request *subreq = ahash_request_ctx(req);
2497 
2498     ctx->do_fallback = true;
2499     ctx->fb_init_done = false;
2500     return safexcel_sha3_fbcheck(req) ?: crypto_ahash_finup(subreq);
2501 }
2502 
2503 static int safexcel_sha3_224_digest(struct ahash_request *req)
2504 {
2505     if (req->nbytes)
2506         return safexcel_sha3_224_init(req) ?: safexcel_ahash_finup(req);
2507 
2508     /* HW cannot do zero length hash, use fallback instead */
2509     return safexcel_sha3_digest_fallback(req);
2510 }
2511 
2512 static int safexcel_sha3_export(struct ahash_request *req, void *out)
2513 {
2514     struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
2515     struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2516     struct ahash_request *subreq = ahash_request_ctx(req);
2517 
2518     ctx->do_fallback = true;
2519     return safexcel_sha3_fbcheck(req) ?: crypto_ahash_export(subreq, out);
2520 }
2521 
2522 static int safexcel_sha3_import(struct ahash_request *req, const void *in)
2523 {
2524     struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
2525     struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2526     struct ahash_request *subreq = ahash_request_ctx(req);
2527 
2528     ctx->do_fallback = true;
2529     return safexcel_sha3_fbcheck(req) ?: crypto_ahash_import(subreq, in);
2531 }
2532 
2533 static int safexcel_sha3_cra_init(struct crypto_tfm *tfm)
2534 {
2535     struct crypto_ahash *ahash = __crypto_ahash_cast(tfm);
2536     struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
2537 
2538     safexcel_ahash_cra_init(tfm);
2539 
2540     /* Allocate fallback implementation */
2541     ctx->fback = crypto_alloc_ahash(crypto_tfm_alg_name(tfm), 0,
2542                     CRYPTO_ALG_ASYNC |
2543                     CRYPTO_ALG_NEED_FALLBACK);
2544     if (IS_ERR(ctx->fback))
2545         return PTR_ERR(ctx->fback);
2546 
2547     /* Update statesize from fallback algorithm! */
2548     crypto_hash_alg_common(ahash)->statesize =
2549         crypto_ahash_statesize(ctx->fback);
2550     crypto_ahash_set_reqsize(ahash, max(sizeof(struct safexcel_ahash_req),
2551                         sizeof(struct ahash_request) +
2552                         crypto_ahash_reqsize(ctx->fback)));
2553     return 0;
2554 }
2555 
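/*
 * Editorial note: export() and import() for SHA-3 always go through the
 * fallback (see safexcel_sha3_export/import above), so the advertised
 * statesize must match the fallback's, and the request context must be
 * large enough for either a native safexcel request or a full fallback
 * subrequest; hence both are derived from the freshly allocated fallback
 * tfm here.
 */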
2556 static void safexcel_sha3_cra_exit(struct crypto_tfm *tfm)
2557 {
2558     struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
2559 
2560     crypto_free_ahash(ctx->fback);
2561     safexcel_ahash_cra_exit(tfm);
2562 }
2563 
2564 struct safexcel_alg_template safexcel_alg_sha3_224 = {
2565     .type = SAFEXCEL_ALG_TYPE_AHASH,
2566     .algo_mask = SAFEXCEL_ALG_SHA3,
2567     .alg.ahash = {
2568         .init = safexcel_sha3_224_init,
2569         .update = safexcel_sha3_update,
2570         .final = safexcel_sha3_final,
2571         .finup = safexcel_sha3_finup,
2572         .digest = safexcel_sha3_224_digest,
2573         .export = safexcel_sha3_export,
2574         .import = safexcel_sha3_import,
2575         .halg = {
2576             .digestsize = SHA3_224_DIGEST_SIZE,
2577             .statesize = sizeof(struct safexcel_ahash_export_state),
2578             .base = {
2579                 .cra_name = "sha3-224",
2580                 .cra_driver_name = "safexcel-sha3-224",
2581                 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2582                 .cra_flags = CRYPTO_ALG_ASYNC |
2583                          CRYPTO_ALG_KERN_DRIVER_ONLY |
2584                          CRYPTO_ALG_NEED_FALLBACK,
2585                 .cra_blocksize = SHA3_224_BLOCK_SIZE,
2586                 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2587                 .cra_init = safexcel_sha3_cra_init,
2588                 .cra_exit = safexcel_sha3_cra_exit,
2589                 .cra_module = THIS_MODULE,
2590             },
2591         },
2592     },
2593 };
2594 
2595 static int safexcel_sha3_256_init(struct ahash_request *areq)
2596 {
2597     struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
2598     struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2599     struct safexcel_ahash_req *req = ahash_request_ctx(areq);
2600 
2601     memset(req, 0, sizeof(*req));
2602 
2603     ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_256;
2604     req->digest = CONTEXT_CONTROL_DIGEST_INITIAL;
2605     req->state_sz = SHA3_256_DIGEST_SIZE;
2606     req->digest_sz = SHA3_256_DIGEST_SIZE;
2607     req->block_sz = SHA3_256_BLOCK_SIZE;
2608     ctx->do_fallback = false;
2609     ctx->fb_init_done = false;
2610     return 0;
2611 }
2612 
2613 static int safexcel_sha3_256_digest(struct ahash_request *req)
2614 {
2615     if (req->nbytes)
2616         return safexcel_sha3_256_init(req) ?: safexcel_ahash_finup(req);
2617 
2618     /* HW cannot do zero length hash, use fallback instead */
2619     return safexcel_sha3_digest_fallback(req);
2620 }
2621 
2622 struct safexcel_alg_template safexcel_alg_sha3_256 = {
2623     .type = SAFEXCEL_ALG_TYPE_AHASH,
2624     .algo_mask = SAFEXCEL_ALG_SHA3,
2625     .alg.ahash = {
2626         .init = safexcel_sha3_256_init,
2627         .update = safexcel_sha3_update,
2628         .final = safexcel_sha3_final,
2629         .finup = safexcel_sha3_finup,
2630         .digest = safexcel_sha3_256_digest,
2631         .export = safexcel_sha3_export,
2632         .import = safexcel_sha3_import,
2633         .halg = {
2634             .digestsize = SHA3_256_DIGEST_SIZE,
2635             .statesize = sizeof(struct safexcel_ahash_export_state),
2636             .base = {
2637                 .cra_name = "sha3-256",
2638                 .cra_driver_name = "safexcel-sha3-256",
2639                 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2640                 .cra_flags = CRYPTO_ALG_ASYNC |
2641                          CRYPTO_ALG_KERN_DRIVER_ONLY |
2642                          CRYPTO_ALG_NEED_FALLBACK,
2643                 .cra_blocksize = SHA3_256_BLOCK_SIZE,
2644                 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2645                 .cra_init = safexcel_sha3_cra_init,
2646                 .cra_exit = safexcel_sha3_cra_exit,
2647                 .cra_module = THIS_MODULE,
2648             },
2649         },
2650     },
2651 };
2652 
2653 static int safexcel_sha3_384_init(struct ahash_request *areq)
2654 {
2655     struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
2656     struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2657     struct safexcel_ahash_req *req = ahash_request_ctx(areq);
2658 
2659     memset(req, 0, sizeof(*req));
2660 
2661     ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_384;
2662     req->digest = CONTEXT_CONTROL_DIGEST_INITIAL;
2663     req->state_sz = SHA3_384_DIGEST_SIZE;
2664     req->digest_sz = SHA3_384_DIGEST_SIZE;
2665     req->block_sz = SHA3_384_BLOCK_SIZE;
2666     ctx->do_fallback = false;
2667     ctx->fb_init_done = false;
2668     return 0;
2669 }
2670 
2671 static int safexcel_sha3_384_digest(struct ahash_request *req)
2672 {
2673     if (req->nbytes)
2674         return safexcel_sha3_384_init(req) ?: safexcel_ahash_finup(req);
2675 
2676     /* HW cannot do zero length hash, use fallback instead */
2677     return safexcel_sha3_digest_fallback(req);
2678 }
2679 
2680 struct safexcel_alg_template safexcel_alg_sha3_384 = {
2681     .type = SAFEXCEL_ALG_TYPE_AHASH,
2682     .algo_mask = SAFEXCEL_ALG_SHA3,
2683     .alg.ahash = {
2684         .init = safexcel_sha3_384_init,
2685         .update = safexcel_sha3_update,
2686         .final = safexcel_sha3_final,
2687         .finup = safexcel_sha3_finup,
2688         .digest = safexcel_sha3_384_digest,
2689         .export = safexcel_sha3_export,
2690         .import = safexcel_sha3_import,
2691         .halg = {
2692             .digestsize = SHA3_384_DIGEST_SIZE,
2693             .statesize = sizeof(struct safexcel_ahash_export_state),
2694             .base = {
2695                 .cra_name = "sha3-384",
2696                 .cra_driver_name = "safexcel-sha3-384",
2697                 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2698                 .cra_flags = CRYPTO_ALG_ASYNC |
2699                          CRYPTO_ALG_KERN_DRIVER_ONLY |
2700                          CRYPTO_ALG_NEED_FALLBACK,
2701                 .cra_blocksize = SHA3_384_BLOCK_SIZE,
2702                 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2703                 .cra_init = safexcel_sha3_cra_init,
2704                 .cra_exit = safexcel_sha3_cra_exit,
2705                 .cra_module = THIS_MODULE,
2706             },
2707         },
2708     },
2709 };
2710 
2711 static int safexcel_sha3_512_init(struct ahash_request *areq)
2712 {
2713     struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
2714     struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2715     struct safexcel_ahash_req *req = ahash_request_ctx(areq);
2716 
2717     memset(req, 0, sizeof(*req));
2718 
2719     ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_512;
2720     req->digest = CONTEXT_CONTROL_DIGEST_INITIAL;
2721     req->state_sz = SHA3_512_DIGEST_SIZE;
2722     req->digest_sz = SHA3_512_DIGEST_SIZE;
2723     req->block_sz = SHA3_512_BLOCK_SIZE;
2724     ctx->do_fallback = false;
2725     ctx->fb_init_done = false;
2726     return 0;
2727 }
2728 
2729 static int safexcel_sha3_512_digest(struct ahash_request *req)
2730 {
2731     if (req->nbytes)
2732         return safexcel_sha3_512_init(req) ?: safexcel_ahash_finup(req);
2733 
2734     /* HW cannot do zero length hash, use fallback instead */
2735     return safexcel_sha3_digest_fallback(req);
2736 }
2737 
2738 struct safexcel_alg_template safexcel_alg_sha3_512 = {
2739     .type = SAFEXCEL_ALG_TYPE_AHASH,
2740     .algo_mask = SAFEXCEL_ALG_SHA3,
2741     .alg.ahash = {
2742         .init = safexcel_sha3_512_init,
2743         .update = safexcel_sha3_update,
2744         .final = safexcel_sha3_final,
2745         .finup = safexcel_sha3_finup,
2746         .digest = safexcel_sha3_512_digest,
2747         .export = safexcel_sha3_export,
2748         .import = safexcel_sha3_import,
2749         .halg = {
2750             .digestsize = SHA3_512_DIGEST_SIZE,
2751             .statesize = sizeof(struct safexcel_ahash_export_state),
2752             .base = {
2753                 .cra_name = "sha3-512",
2754                 .cra_driver_name = "safexcel-sha3-512",
2755                 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2756                 .cra_flags = CRYPTO_ALG_ASYNC |
2757                          CRYPTO_ALG_KERN_DRIVER_ONLY |
2758                          CRYPTO_ALG_NEED_FALLBACK,
2759                 .cra_blocksize = SHA3_512_BLOCK_SIZE,
2760                 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2761                 .cra_init = safexcel_sha3_cra_init,
2762                 .cra_exit = safexcel_sha3_cra_exit,
2763                 .cra_module = THIS_MODULE,
2764             },
2765         },
2766     },
2767 };
2768 
2769 static int safexcel_hmac_sha3_cra_init(struct crypto_tfm *tfm, const char *alg)
2770 {
2771     struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
2772     int ret;
2773 
2774     ret = safexcel_sha3_cra_init(tfm);
2775     if (ret)
2776         return ret;
2777 
2778     /* Allocate precalc basic digest implementation */
2779     ctx->shpre = crypto_alloc_shash(alg, 0, CRYPTO_ALG_NEED_FALLBACK);
2780     if (IS_ERR(ctx->shpre))
2781         return PTR_ERR(ctx->shpre);
2782 
2783     ctx->shdesc = kmalloc(sizeof(*ctx->shdesc) +
2784                   crypto_shash_descsize(ctx->shpre), GFP_KERNEL);
2785     if (!ctx->shdesc) {
2786         crypto_free_shash(ctx->shpre);
2787         return -ENOMEM;
2788     }
2789     ctx->shdesc->tfm = ctx->shpre;
2790     return 0;
2791 }
2792 
2793 static void safexcel_hmac_sha3_cra_exit(struct crypto_tfm *tfm)
2794 {
2795     struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
2796 
2797     crypto_free_ahash(ctx->fback);
2798     crypto_free_shash(ctx->shpre);
2799     kfree(ctx->shdesc);
2800     safexcel_ahash_cra_exit(tfm);
2801 }
2802 
2803 static int safexcel_hmac_sha3_setkey(struct crypto_ahash *tfm, const u8 *key,
2804                      unsigned int keylen)
2805 {
2806     struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2807     int ret = 0;
2808 
2809     if (keylen > crypto_ahash_blocksize(tfm)) {
2810         /*
2811          * If the key is larger than the blocksize, then hash it
2812          * first using the precalc basic digest (ctx->shpre)
2813          */
2814         ret = crypto_shash_digest(ctx->shdesc, key, keylen,
2815                       ctx->base.ipad.byte);
2816         keylen = crypto_shash_digestsize(ctx->shpre);
2817 
2818         /*
2819          * If the digest is larger than half the blocksize, we need to
2820          * move the rest to opad due to the way our HMAC infra works.
2821          */
2822         if (keylen > crypto_ahash_blocksize(tfm) / 2)
2823             /* Buffers overlap, need to use memmove instead of memcpy! */
2824             memmove(&ctx->base.opad,
2825                 ctx->base.ipad.byte +
2826                     crypto_ahash_blocksize(tfm) / 2,
2827                 keylen - crypto_ahash_blocksize(tfm) / 2);
2828     } else {
2829         /*
2830          * Copy the key to our ipad & opad buffers
2831          * Note that ipad and opad each contain one half of the key,
2832          * to match the existing HMAC driver infrastructure.
2833          */
2834         if (keylen <= crypto_ahash_blocksize(tfm) / 2) {
2835             memcpy(&ctx->base.ipad, key, keylen);
2836         } else {
2837             memcpy(&ctx->base.ipad, key,
2838                    crypto_ahash_blocksize(tfm) / 2);
2839             memcpy(&ctx->base.opad,
2840                    key + crypto_ahash_blocksize(tfm) / 2,
2841                    keylen - crypto_ahash_blocksize(tfm) / 2);
2842         }
2843     }
2844 
2845     /* Pad key with zeroes */
2846     if (keylen <= crypto_ahash_blocksize(tfm) / 2) {
2847         memset(ctx->base.ipad.byte + keylen, 0,
2848                crypto_ahash_blocksize(tfm) / 2 - keylen);
2849         memset(&ctx->base.opad, 0, crypto_ahash_blocksize(tfm) / 2);
2850     } else {
2851         memset(ctx->base.opad.byte + keylen -
2852                crypto_ahash_blocksize(tfm) / 2, 0,
2853                crypto_ahash_blocksize(tfm) - keylen);
2854     }
2855 
2856     /* If doing fallback, still need to set the new key! */
2857     ctx->fb_do_setkey = true;
2858     return ret;
2859 }
2860 
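/*
 * Editorial note: the setkey above follows the generic HMAC key rule;
 * keys longer than one block are replaced by their digest, shorter keys
 * are zero-padded to the block size:
 *
 *     if (keylen > blocksize)
 *         key = H(key);              (keylen becomes digestsize)
 *     key = key || 0x00 ... 0x00     (zero-pad to blocksize)
 *
 * The first half block then lives in ipad and the second half in opad,
 * only because the driver's context record keeps two half-block slots;
 * no ipad/opad XOR happens here, as the engine (or the fallback, which
 * concatenates the two halves back into one key) performs the actual
 * HMAC construction.
 */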
2861 static int safexcel_hmac_sha3_224_init(struct ahash_request *areq)
2862 {
2863     struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
2864     struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2865     struct safexcel_ahash_req *req = ahash_request_ctx(areq);
2866 
2867     memset(req, 0, sizeof(*req));
2868 
2869     /* Copy (half of) the key */
2870     memcpy(req->state, &ctx->base.ipad, SHA3_224_BLOCK_SIZE / 2);
2871     /* Start of HMAC should have len == processed == blocksize */
2872     req->len    = SHA3_224_BLOCK_SIZE;
2873     req->processed  = SHA3_224_BLOCK_SIZE;
2874     ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_224;
2875     req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
2876     req->state_sz = SHA3_224_BLOCK_SIZE / 2;
2877     req->digest_sz = SHA3_224_DIGEST_SIZE;
2878     req->block_sz = SHA3_224_BLOCK_SIZE;
2879     req->hmac = true;
2880     ctx->do_fallback = false;
2881     ctx->fb_init_done = false;
2882     return 0;
2883 }
2884 
2885 static int safexcel_hmac_sha3_224_digest(struct ahash_request *req)
2886 {
2887     if (req->nbytes)
2888         return safexcel_hmac_sha3_224_init(req) ?:
2889                safexcel_ahash_finup(req);
2890 
2891     /* HW cannot do zero length HMAC, use fallback instead */
2892     return safexcel_sha3_digest_fallback(req);
2893 }
2894 
2895 static int safexcel_hmac_sha3_224_cra_init(struct crypto_tfm *tfm)
2896 {
2897     return safexcel_hmac_sha3_cra_init(tfm, "sha3-224");
2898 }
2899 
2900 struct safexcel_alg_template safexcel_alg_hmac_sha3_224 = {
2901     .type = SAFEXCEL_ALG_TYPE_AHASH,
2902     .algo_mask = SAFEXCEL_ALG_SHA3,
2903     .alg.ahash = {
2904         .init = safexcel_hmac_sha3_224_init,
2905         .update = safexcel_sha3_update,
2906         .final = safexcel_sha3_final,
2907         .finup = safexcel_sha3_finup,
2908         .digest = safexcel_hmac_sha3_224_digest,
2909         .setkey = safexcel_hmac_sha3_setkey,
2910         .export = safexcel_sha3_export,
2911         .import = safexcel_sha3_import,
2912         .halg = {
2913             .digestsize = SHA3_224_DIGEST_SIZE,
2914             .statesize = sizeof(struct safexcel_ahash_export_state),
2915             .base = {
2916                 .cra_name = "hmac(sha3-224)",
2917                 .cra_driver_name = "safexcel-hmac-sha3-224",
2918                 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2919                 .cra_flags = CRYPTO_ALG_ASYNC |
2920                          CRYPTO_ALG_KERN_DRIVER_ONLY |
2921                          CRYPTO_ALG_NEED_FALLBACK,
2922                 .cra_blocksize = SHA3_224_BLOCK_SIZE,
2923                 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2924                 .cra_init = safexcel_hmac_sha3_224_cra_init,
2925                 .cra_exit = safexcel_hmac_sha3_cra_exit,
2926                 .cra_module = THIS_MODULE,
2927             },
2928         },
2929     },
2930 };
2931 
2932 static int safexcel_hmac_sha3_256_init(struct ahash_request *areq)
2933 {
2934     struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
2935     struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2936     struct safexcel_ahash_req *req = ahash_request_ctx(areq);
2937 
2938     memset(req, 0, sizeof(*req));
2939 
2940     /* Copy (half of) the key */
2941     memcpy(req->state, &ctx->base.ipad, SHA3_256_BLOCK_SIZE / 2);
2942     /* Start of HMAC should have len == processed == blocksize */
2943     req->len    = SHA3_256_BLOCK_SIZE;
2944     req->processed  = SHA3_256_BLOCK_SIZE;
2945     ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_256;
2946     req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
2947     req->state_sz = SHA3_256_BLOCK_SIZE / 2;
2948     req->digest_sz = SHA3_256_DIGEST_SIZE;
2949     req->block_sz = SHA3_256_BLOCK_SIZE;
2950     req->hmac = true;
2951     ctx->do_fallback = false;
2952     ctx->fb_init_done = false;
2953     return 0;
2954 }
2955 
2956 static int safexcel_hmac_sha3_256_digest(struct ahash_request *req)
2957 {
2958     if (req->nbytes)
2959         return safexcel_hmac_sha3_256_init(req) ?:
2960                safexcel_ahash_finup(req);
2961 
2962     /* HW cannot do zero length HMAC, use fallback instead */
2963     return safexcel_sha3_digest_fallback(req);
2964 }

static int safexcel_hmac_sha3_256_cra_init(struct crypto_tfm *tfm)
{
    return safexcel_hmac_sha3_cra_init(tfm, "sha3-256");
}

struct safexcel_alg_template safexcel_alg_hmac_sha3_256 = {
    .type = SAFEXCEL_ALG_TYPE_AHASH,
    .algo_mask = SAFEXCEL_ALG_SHA3,
    .alg.ahash = {
        .init = safexcel_hmac_sha3_256_init,
        .update = safexcel_sha3_update,
        .final = safexcel_sha3_final,
        .finup = safexcel_sha3_finup,
        .digest = safexcel_hmac_sha3_256_digest,
        .setkey = safexcel_hmac_sha3_setkey,
        .export = safexcel_sha3_export,
        .import = safexcel_sha3_import,
        .halg = {
            .digestsize = SHA3_256_DIGEST_SIZE,
            .statesize = sizeof(struct safexcel_ahash_export_state),
            .base = {
                .cra_name = "hmac(sha3-256)",
                .cra_driver_name = "safexcel-hmac-sha3-256",
                .cra_priority = SAFEXCEL_CRA_PRIORITY,
                .cra_flags = CRYPTO_ALG_ASYNC |
                         CRYPTO_ALG_KERN_DRIVER_ONLY |
                         CRYPTO_ALG_NEED_FALLBACK,
                .cra_blocksize = SHA3_256_BLOCK_SIZE,
                .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
                .cra_init = safexcel_hmac_sha3_256_cra_init,
                .cra_exit = safexcel_hmac_sha3_cra_exit,
                .cra_module = THIS_MODULE,
            },
        },
    },
};
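
/*
 * Aside: every SHA3 HMAC template in this section sets
 * CRYPTO_ALG_NEED_FALLBACK because the engine cannot process
 * zero-length HMAC input; the _digest() entry points route that case
 * to safexcel_sha3_digest_fallback(), which hands the request to the
 * software ahash kept in ctx->fback (set up by the cra_init path
 * earlier in this file).
 */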

static int safexcel_hmac_sha3_384_init(struct ahash_request *areq)
{
    struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
    struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
    struct safexcel_ahash_req *req = ahash_request_ctx(areq);

    memset(req, 0, sizeof(*req));

    /* Copy (half of) the key */
    memcpy(req->state, &ctx->base.ipad, SHA3_384_BLOCK_SIZE / 2);
    /* Start of HMAC should have len == processed == blocksize */
    req->len    = SHA3_384_BLOCK_SIZE;
    req->processed  = SHA3_384_BLOCK_SIZE;
    ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_384;
    req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
    req->state_sz = SHA3_384_BLOCK_SIZE / 2;
    req->digest_sz = SHA3_384_DIGEST_SIZE;
    req->block_sz = SHA3_384_BLOCK_SIZE;
    req->hmac = true;
    ctx->do_fallback = false;
    ctx->fb_init_done = false;
    return 0;
}

static int safexcel_hmac_sha3_384_digest(struct ahash_request *req)
{
    if (req->nbytes)
        return safexcel_hmac_sha3_384_init(req) ?:
               safexcel_ahash_finup(req);

    /* HW cannot do zero length HMAC, use fallback instead */
    return safexcel_sha3_digest_fallback(req);
}

static int safexcel_hmac_sha3_384_cra_init(struct crypto_tfm *tfm)
{
    return safexcel_hmac_sha3_cra_init(tfm, "sha3-384");
}

struct safexcel_alg_template safexcel_alg_hmac_sha3_384 = {
    .type = SAFEXCEL_ALG_TYPE_AHASH,
    .algo_mask = SAFEXCEL_ALG_SHA3,
    .alg.ahash = {
        .init = safexcel_hmac_sha3_384_init,
        .update = safexcel_sha3_update,
        .final = safexcel_sha3_final,
        .finup = safexcel_sha3_finup,
        .digest = safexcel_hmac_sha3_384_digest,
        .setkey = safexcel_hmac_sha3_setkey,
        .export = safexcel_sha3_export,
        .import = safexcel_sha3_import,
        .halg = {
            .digestsize = SHA3_384_DIGEST_SIZE,
            .statesize = sizeof(struct safexcel_ahash_export_state),
            .base = {
                .cra_name = "hmac(sha3-384)",
                .cra_driver_name = "safexcel-hmac-sha3-384",
                .cra_priority = SAFEXCEL_CRA_PRIORITY,
                .cra_flags = CRYPTO_ALG_ASYNC |
                         CRYPTO_ALG_KERN_DRIVER_ONLY |
                         CRYPTO_ALG_NEED_FALLBACK,
                .cra_blocksize = SHA3_384_BLOCK_SIZE,
                .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
                .cra_init = safexcel_hmac_sha3_384_cra_init,
                .cra_exit = safexcel_hmac_sha3_cra_exit,
                .cra_module = THIS_MODULE,
            },
        },
    },
};

static int safexcel_hmac_sha3_512_init(struct ahash_request *areq)
{
    struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
    struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
    struct safexcel_ahash_req *req = ahash_request_ctx(areq);

    memset(req, 0, sizeof(*req));

    /* Copy (half of) the key */
    memcpy(req->state, &ctx->base.ipad, SHA3_512_BLOCK_SIZE / 2);
    /* Start of HMAC should have len == processed == blocksize */
    req->len    = SHA3_512_BLOCK_SIZE;
    req->processed  = SHA3_512_BLOCK_SIZE;
    ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_512;
    req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
    req->state_sz = SHA3_512_BLOCK_SIZE / 2;
    req->digest_sz = SHA3_512_DIGEST_SIZE;
    req->block_sz = SHA3_512_BLOCK_SIZE;
    req->hmac = true;
    ctx->do_fallback = false;
    ctx->fb_init_done = false;
    return 0;
}

static int safexcel_hmac_sha3_512_digest(struct ahash_request *req)
{
    if (req->nbytes)
        return safexcel_hmac_sha3_512_init(req) ?:
               safexcel_ahash_finup(req);

    /* HW cannot do zero length HMAC, use fallback instead */
    return safexcel_sha3_digest_fallback(req);
}

static int safexcel_hmac_sha3_512_cra_init(struct crypto_tfm *tfm)
{
    return safexcel_hmac_sha3_cra_init(tfm, "sha3-512");
}

struct safexcel_alg_template safexcel_alg_hmac_sha3_512 = {
    .type = SAFEXCEL_ALG_TYPE_AHASH,
    .algo_mask = SAFEXCEL_ALG_SHA3,
    .alg.ahash = {
        .init = safexcel_hmac_sha3_512_init,
        .update = safexcel_sha3_update,
        .final = safexcel_sha3_final,
        .finup = safexcel_sha3_finup,
        .digest = safexcel_hmac_sha3_512_digest,
        .setkey = safexcel_hmac_sha3_setkey,
        .export = safexcel_sha3_export,
        .import = safexcel_sha3_import,
        .halg = {
            .digestsize = SHA3_512_DIGEST_SIZE,
            .statesize = sizeof(struct safexcel_ahash_export_state),
            .base = {
                .cra_name = "hmac(sha3-512)",
                .cra_driver_name = "safexcel-hmac-sha3-512",
                .cra_priority = SAFEXCEL_CRA_PRIORITY,
                .cra_flags = CRYPTO_ALG_ASYNC |
                         CRYPTO_ALG_KERN_DRIVER_ONLY |
                         CRYPTO_ALG_NEED_FALLBACK,
                .cra_blocksize = SHA3_512_BLOCK_SIZE,
                .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
                .cra_init = safexcel_hmac_sha3_512_cra_init,
                .cra_exit = safexcel_hmac_sha3_cra_exit,
                .cra_module = THIS_MODULE,
            },
        },
    },
};
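
/*
 * Note: none of the templates in this file registers itself. As far as
 * this driver is structured, the core (safexcel.c) keeps a table of all
 * safexcel_alg_template pointers, including the hmac(sha3-*) entries
 * above, and at probe time registers only those whose algo_mask
 * (SAFEXCEL_ALG_SHA3 here) is covered by the algorithm set the hardware
 * advertises.
 */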