// SPDX-License-Identifier: GPL-2.0-only
/*
 * AES CTR routines supporting VMX instructions on the Power 8
 *
 * Copyright (C) 2015 International Business Machines Inc.
 *
 * Author: Marcelo Henrique Cerri <mhcerri@br.ibm.com>
 */

#include <asm/simd.h>
#include <asm/switch_to.h>
#include <crypto/aes.h>
#include <crypto/internal/simd.h>
#include <crypto/internal/skcipher.h>

#include "aesp8-ppc.h"

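/*
 * Per-transform context: the AES key expanded for the P8 assembly
 * routines, plus a synchronous ctr(aes) fallback tfm used whenever the
 * SIMD/VSX unit cannot be used.
 */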
struct p8_aes_ctr_ctx {
    struct crypto_skcipher *fallback;
    struct aes_key enc_key;
};

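/*
 * Allocate a synchronous fallback (CRYPTO_ALG_ASYNC in the mask selects
 * a sync implementation) and reserve room in the request context for a
 * fallback sub-request.
 */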
static int p8_aes_ctr_init(struct crypto_skcipher *tfm)
{
    struct p8_aes_ctr_ctx *ctx = crypto_skcipher_ctx(tfm);
    struct crypto_skcipher *fallback;

    fallback = crypto_alloc_skcipher("ctr(aes)", 0,
                     CRYPTO_ALG_NEED_FALLBACK |
                     CRYPTO_ALG_ASYNC);
    if (IS_ERR(fallback)) {
        pr_err("Failed to allocate ctr(aes) fallback: %ld\n",
               PTR_ERR(fallback));
        return PTR_ERR(fallback);
    }

    crypto_skcipher_set_reqsize(tfm, sizeof(struct skcipher_request) +
                    crypto_skcipher_reqsize(fallback));
    ctx->fallback = fallback;
    return 0;
}

static void p8_aes_ctr_exit(struct crypto_skcipher *tfm)
{
    struct p8_aes_ctr_ctx *ctx = crypto_skcipher_ctx(tfm);

    crypto_free_skcipher(ctx->fallback);
}

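/*
 * Expand the key for the P8 routines and also key the fallback tfm.
 * VSX state may only be touched with preemption and page faults
 * disabled, hence the bracketing around aes_p8_set_encrypt_key().
 */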
static int p8_aes_ctr_setkey(struct crypto_skcipher *tfm, const u8 *key,
                 unsigned int keylen)
{
    struct p8_aes_ctr_ctx *ctx = crypto_skcipher_ctx(tfm);
    int ret;

    preempt_disable();
    pagefault_disable();
    enable_kernel_vsx();
    ret = aes_p8_set_encrypt_key(key, keylen * 8, &ctx->enc_key);
    disable_kernel_vsx();
    pagefault_enable();
    preempt_enable();

    ret |= crypto_skcipher_setkey(ctx->fallback, key, keylen);

    return ret ? -EINVAL : 0;
}

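/*
 * Handle the final partial block: encrypt the counter to produce one
 * keystream block, XOR only the remaining bytes into the destination,
 * and advance the counter.
 */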
static void p8_aes_ctr_final(const struct p8_aes_ctr_ctx *ctx,
                 struct skcipher_walk *walk)
{
    u8 *ctrblk = walk->iv;
    u8 keystream[AES_BLOCK_SIZE];
    u8 *src = walk->src.virt.addr;
    u8 *dst = walk->dst.virt.addr;
    unsigned int nbytes = walk->nbytes;

    preempt_disable();
    pagefault_disable();
    enable_kernel_vsx();
    aes_p8_encrypt(ctrblk, keystream, &ctx->enc_key);
    disable_kernel_vsx();
    pagefault_enable();
    preempt_enable();

    crypto_xor_cpy(dst, keystream, src, nbytes);
    crypto_inc(ctrblk, AES_BLOCK_SIZE);
}

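/* CTR encryption and decryption are identical, so one routine serves both. */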
static int p8_aes_ctr_crypt(struct skcipher_request *req)
{
    struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
    const struct p8_aes_ctr_ctx *ctx = crypto_skcipher_ctx(tfm);
    struct skcipher_walk walk;
    unsigned int nbytes;
    int ret;

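    /*
     * If the SIMD/VSX unit is not usable in this context, hand the
     * whole request to the synchronous fallback tfm.
     */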
    if (!crypto_simd_usable()) {
        struct skcipher_request *subreq = skcipher_request_ctx(req);

        *subreq = *req;
        skcipher_request_set_tfm(subreq, ctx->fallback);
        return crypto_skcipher_encrypt(subreq);
    }

    ret = skcipher_walk_virt(&walk, req, false);
    while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) {
        preempt_disable();
        pagefault_disable();
        enable_kernel_vsx();
        aes_p8_ctr32_encrypt_blocks(walk.src.virt.addr,
                        walk.dst.virt.addr,
                        nbytes / AES_BLOCK_SIZE,
                        &ctx->enc_key, walk.iv);
        disable_kernel_vsx();
        pagefault_enable();
        preempt_enable();

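        /*
         * aes_p8_ctr32_encrypt_blocks() only advances a 32-bit
         * counter, so step the full 128-bit counter in C once per
         * block to keep walk.iv correct across low-word overflow.
         */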
        do {
            crypto_inc(walk.iv, AES_BLOCK_SIZE);
        } while ((nbytes -= AES_BLOCK_SIZE) >= AES_BLOCK_SIZE);

        ret = skcipher_walk_done(&walk, nbytes);
    }
    if (nbytes) {
        p8_aes_ctr_final(ctx, &walk);
        ret = skcipher_walk_done(&walk, 0);
    }
    return ret;
}

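/*
 * cra_priority 2000 ranks this implementation above the generic C
 * ctr(aes); cra_blocksize is 1 because CTR is a stream cipher, while
 * chunksize advertises the 16-byte granularity of the underlying
 * block cipher.
 */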
struct skcipher_alg p8_aes_ctr_alg = {
    .base.cra_name = "ctr(aes)",
    .base.cra_driver_name = "p8_aes_ctr",
    .base.cra_module = THIS_MODULE,
    .base.cra_priority = 2000,
    .base.cra_flags = CRYPTO_ALG_NEED_FALLBACK,
    .base.cra_blocksize = 1,
    .base.cra_ctxsize = sizeof(struct p8_aes_ctr_ctx),
    .setkey = p8_aes_ctr_setkey,
    .encrypt = p8_aes_ctr_crypt,
    .decrypt = p8_aes_ctr_crypt,
    .init = p8_aes_ctr_init,
    .exit = p8_aes_ctr_exit,
    .min_keysize = AES_MIN_KEY_SIZE,
    .max_keysize = AES_MAX_KEY_SIZE,
    .ivsize = AES_BLOCK_SIZE,
    .chunksize = AES_BLOCK_SIZE,
};
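
/*
 * Note: p8_aes_ctr_alg is registered elsewhere in the vmx driver,
 * presumably via crypto_register_skcipher(&p8_aes_ctr_alg); that code
 * is not part of this file.
 */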