// SPDX-License-Identifier: GPL-2.0-only
/*
 * AES XTS routines supporting VMX In-core instructions on Power 8
 *
 * Copyright (C) 2015 International Business Machines Inc.
 *
 * Author: Leonidas S. Barbosa <leosilva@linux.vnet.ibm.com>
 */

#include <asm/simd.h>
#include <asm/switch_to.h>
#include <crypto/aes.h>
#include <crypto/internal/simd.h>
#include <crypto/internal/skcipher.h>
#include <crypto/xts.h>

#include "aesp8-ppc.h"

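/*
 * Per-transform context: VMX round keys for the data cipher and the
 * tweak cipher, plus a software xts(aes) fallback skcipher used when
 * VSX cannot be used for a request.
 */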
struct p8_aes_xts_ctx {
    struct crypto_skcipher *fallback;
    struct aes_key enc_key;
    struct aes_key dec_key;
    struct aes_key tweak_key;
};

static int p8_aes_xts_init(struct crypto_skcipher *tfm)
{
    struct p8_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
    struct crypto_skcipher *fallback;

    fallback = crypto_alloc_skcipher("xts(aes)", 0,
                     CRYPTO_ALG_NEED_FALLBACK |
                     CRYPTO_ALG_ASYNC);
    if (IS_ERR(fallback)) {
        pr_err("Failed to allocate xts(aes) fallback: %ld\n",
               PTR_ERR(fallback));
        return PTR_ERR(fallback);
    }

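    /*
     * Reserve room in our request context for the fallback's
     * subrequest; p8_aes_xts_crypt() builds it there when it has to
     * defer to the software implementation.
     */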
    crypto_skcipher_set_reqsize(tfm, sizeof(struct skcipher_request) +
                    crypto_skcipher_reqsize(fallback));
    ctx->fallback = fallback;
    return 0;
}

static void p8_aes_xts_exit(struct crypto_skcipher *tfm)
{
    struct p8_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);

    crypto_free_skcipher(ctx->fallback);
}

static int p8_aes_xts_setkey(struct crypto_skcipher *tfm, const u8 *key,
                 unsigned int keylen)
{
    struct p8_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
    int ret;

    ret = xts_verify_key(tfm, key, keylen);
    if (ret)
        return ret;

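    /*
     * An XTS key is two AES keys back to back: the first half keys the
     * data cipher, the second half keys the tweak cipher. The VSX key
     * expansion must run with preemption and page faults disabled.
     */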
    preempt_disable();
    pagefault_disable();
    enable_kernel_vsx();
    ret = aes_p8_set_encrypt_key(key + keylen/2, (keylen/2) * 8, &ctx->tweak_key);
    ret |= aes_p8_set_encrypt_key(key, (keylen/2) * 8, &ctx->enc_key);
    ret |= aes_p8_set_decrypt_key(key, (keylen/2) * 8, &ctx->dec_key);
    disable_kernel_vsx();
    pagefault_enable();
    preempt_enable();

    ret |= crypto_skcipher_setkey(ctx->fallback, key, keylen);

    return ret ? -EINVAL : 0;
}

static int p8_aes_xts_crypt(struct skcipher_request *req, int enc)
{
    struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
    const struct p8_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
    struct skcipher_walk walk;
    unsigned int nbytes;
    u8 tweak[AES_BLOCK_SIZE];
    int ret;

    if (req->cryptlen < AES_BLOCK_SIZE)
        return -EINVAL;

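    /*
     * Defer to the software fallback when VSX state cannot be touched
     * (e.g. from interrupt context) or when the length is not a whole
     * number of blocks: the VSX loop below handles full blocks only,
     * leaving ciphertext stealing to the fallback.
     */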
    if (!crypto_simd_usable() || (req->cryptlen % XTS_BLOCK_SIZE) != 0) {
        struct skcipher_request *subreq = skcipher_request_ctx(req);

        *subreq = *req;
        skcipher_request_set_tfm(subreq, ctx->fallback);
        return enc ? crypto_skcipher_encrypt(subreq) :
                 crypto_skcipher_decrypt(subreq);
    }

    ret = skcipher_walk_virt(&walk, req, false);
    if (ret)
        return ret;

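    /* Encrypt the IV with the tweak key to derive the initial tweak. */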
    preempt_disable();
    pagefault_disable();
    enable_kernel_vsx();

    aes_p8_encrypt(walk.iv, tweak, &ctx->tweak_key);

    disable_kernel_vsx();
    pagefault_enable();
    preempt_enable();

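    /*
     * Walk the request one chunk at a time; the assembly routines
     * advance 'tweak' in place, so it carries across iterations.
     */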
    while ((nbytes = walk.nbytes) != 0) {
        preempt_disable();
        pagefault_disable();
        enable_kernel_vsx();
        if (enc)
            aes_p8_xts_encrypt(walk.src.virt.addr,
                       walk.dst.virt.addr,
                       round_down(nbytes, AES_BLOCK_SIZE),
                       &ctx->enc_key, NULL, tweak);
        else
            aes_p8_xts_decrypt(walk.src.virt.addr,
                       walk.dst.virt.addr,
                       round_down(nbytes, AES_BLOCK_SIZE),
                       &ctx->dec_key, NULL, tweak);
        disable_kernel_vsx();
        pagefault_enable();
        preempt_enable();

        ret = skcipher_walk_done(&walk, nbytes % AES_BLOCK_SIZE);
    }
    return ret;
}

static int p8_aes_xts_encrypt(struct skcipher_request *req)
{
    return p8_aes_xts_crypt(req, 1);
}

static int p8_aes_xts_decrypt(struct skcipher_request *req)
{
    return p8_aes_xts_crypt(req, 0);
}

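/*
 * The high cra_priority (2000) makes this driver preferred over the
 * generic software xts(aes); min/max keysize are doubled because an
 * XTS key holds two AES keys.
 */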
struct skcipher_alg p8_aes_xts_alg = {
    .base.cra_name = "xts(aes)",
    .base.cra_driver_name = "p8_aes_xts",
    .base.cra_module = THIS_MODULE,
    .base.cra_priority = 2000,
    .base.cra_flags = CRYPTO_ALG_NEED_FALLBACK,
    .base.cra_blocksize = AES_BLOCK_SIZE,
    .base.cra_ctxsize = sizeof(struct p8_aes_xts_ctx),
    .setkey = p8_aes_xts_setkey,
    .encrypt = p8_aes_xts_encrypt,
    .decrypt = p8_aes_xts_decrypt,
    .init = p8_aes_xts_init,
    .exit = p8_aes_xts_exit,
    .min_keysize = 2 * AES_MIN_KEY_SIZE,
    .max_keysize = 2 * AES_MAX_KEY_SIZE,
    .ivsize = AES_BLOCK_SIZE,
};