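/*
 * Glue code exposing the POWER8 VSX AES-CBC routines from aesp8-ppc as a
 * "cbc(aes)" skcipher, with a generic software fallback for contexts in
 * which the vector unit cannot be used.
 */
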
#include <asm/simd.h>
#include <asm/switch_to.h>
#include <crypto/aes.h>
#include <crypto/internal/simd.h>
#include <crypto/internal/skcipher.h>

#include "aesp8-ppc.h"

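/*
 * Per-transform context: the software cbc(aes) fallback plus the expanded
 * encryption and decryption keys for the P8 assembly routines.
 */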
struct p8_aes_cbc_ctx {
	struct crypto_skcipher *fallback;
	struct aes_key enc_key;
	struct aes_key dec_key;
};

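/*
 * Allocate the generic cbc(aes) fallback and size the request context so
 * that a fallback sub-request can be carved out of our own request.
 */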
static int p8_aes_cbc_init(struct crypto_skcipher *tfm)
{
	struct p8_aes_cbc_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_skcipher *fallback;

	fallback = crypto_alloc_skcipher("cbc(aes)", 0,
					 CRYPTO_ALG_NEED_FALLBACK |
					 CRYPTO_ALG_ASYNC);
	if (IS_ERR(fallback)) {
		pr_err("Failed to allocate cbc(aes) fallback: %ld\n",
		       PTR_ERR(fallback));
		return PTR_ERR(fallback);
	}

	crypto_skcipher_set_reqsize(tfm, sizeof(struct skcipher_request) +
				    crypto_skcipher_reqsize(fallback));
	ctx->fallback = fallback;
	return 0;
}

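/* Release the fallback transform allocated in p8_aes_cbc_init(). */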
static void p8_aes_cbc_exit(struct crypto_skcipher *tfm)
{
	struct p8_aes_cbc_ctx *ctx = crypto_skcipher_ctx(tfm);

	crypto_free_skcipher(ctx->fallback);
}

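/*
 * Expand the key for both directions using the P8 routines (VSX is only
 * usable with preemption and page faults disabled), and also key the
 * fallback so it can take over whenever SIMD is not usable.
 */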
static int p8_aes_cbc_setkey(struct crypto_skcipher *tfm, const u8 *key,
			     unsigned int keylen)
{
	struct p8_aes_cbc_ctx *ctx = crypto_skcipher_ctx(tfm);
	int ret;

	preempt_disable();
	pagefault_disable();
	enable_kernel_vsx();
	ret = aes_p8_set_encrypt_key(key, keylen * 8, &ctx->enc_key);
	ret |= aes_p8_set_decrypt_key(key, keylen * 8, &ctx->dec_key);
	disable_kernel_vsx();
	pagefault_enable();
	preempt_enable();

	ret |= crypto_skcipher_setkey(ctx->fallback, key, keylen);

	return ret ? -EINVAL : 0;
}

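/*
 * Common encrypt/decrypt path.  If SIMD/VSX cannot be used in the current
 * context, the request is forwarded unchanged to the fallback.  Otherwise
 * the skcipher walk feeds whole AES blocks to aes_p8_cbc_encrypt() with
 * VSX enabled, and any partial tail is returned to skcipher_walk_done().
 */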
static int p8_aes_cbc_crypt(struct skcipher_request *req, int enc)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	const struct p8_aes_cbc_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	unsigned int nbytes;
	int ret;

	if (!crypto_simd_usable()) {
		struct skcipher_request *subreq = skcipher_request_ctx(req);

		*subreq = *req;
		skcipher_request_set_tfm(subreq, ctx->fallback);
		return enc ? crypto_skcipher_encrypt(subreq) :
			     crypto_skcipher_decrypt(subreq);
	}

	ret = skcipher_walk_virt(&walk, req, false);
	while ((nbytes = walk.nbytes) != 0) {
		preempt_disable();
		pagefault_disable();
		enable_kernel_vsx();
		aes_p8_cbc_encrypt(walk.src.virt.addr,
				   walk.dst.virt.addr,
				   round_down(nbytes, AES_BLOCK_SIZE),
				   enc ? &ctx->enc_key : &ctx->dec_key,
				   walk.iv, enc);
		disable_kernel_vsx();
		pagefault_enable();
		preempt_enable();

		ret = skcipher_walk_done(&walk, nbytes % AES_BLOCK_SIZE);
	}
	return ret;
}

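/* Thin wrappers selecting the direction for p8_aes_cbc_crypt(). */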
static int p8_aes_cbc_encrypt(struct skcipher_request *req)
{
	return p8_aes_cbc_crypt(req, 1);
}

static int p8_aes_cbc_decrypt(struct skcipher_request *req)
{
	return p8_aes_cbc_crypt(req, 0);
}

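/*
 * Exported (non-static) for registration by the driver's init code.  The
 * cra_priority of 2000 makes this implementation preferred over the
 * generic cbc(aes), and CRYPTO_ALG_NEED_FALLBACK advertises that a
 * fallback transform is required.
 */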
struct skcipher_alg p8_aes_cbc_alg = {
	.base.cra_name = "cbc(aes)",
	.base.cra_driver_name = "p8_aes_cbc",
	.base.cra_module = THIS_MODULE,
	.base.cra_priority = 2000,
	.base.cra_flags = CRYPTO_ALG_NEED_FALLBACK,
	.base.cra_blocksize = AES_BLOCK_SIZE,
	.base.cra_ctxsize = sizeof(struct p8_aes_cbc_ctx),
	.setkey = p8_aes_cbc_setkey,
	.encrypt = p8_aes_cbc_encrypt,
	.decrypt = p8_aes_cbc_decrypt,
	.init = p8_aes_cbc_init,
	.exit = p8_aes_cbc_exit,
	.min_keysize = AES_MIN_KEY_SIZE,
	.max_keysize = AES_MAX_KEY_SIZE,
	.ivsize = AES_BLOCK_SIZE,
};