// SPDX-License-Identifier: GPL-2.0-only
/*
 * aes-ce-ccm-glue.c - AES-CCM transform for ARMv8 with Crypto Extensions
 *
 * Copyright (C) 2013 - 2017 Linaro Ltd <ard.biesheuvel@linaro.org>
 */

#include <asm/neon.h>
#include <asm/unaligned.h>
#include <crypto/aes.h>
#include <crypto/scatterwalk.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/skcipher.h>
#include <linux/module.h>

#include "aes-ce-setkey.h"

static int num_rounds(struct crypto_aes_ctx *ctx)
{
    /*
     * # of rounds specified by AES:
     * 128 bit key      10 rounds
     * 192 bit key      12 rounds
     * 256 bit key      14 rounds
     * => n byte key    => 6 + (n/4) rounds
     */
    return 6 + ctx->key_length / 4;
}

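/*
 * Low-level CCM primitives, implemented in NEON assembly in the
 * accompanying aes-ce-ccm-core.S: CBC-MAC accumulation over the
 * (length-tagged) AAD, combined CTR encryption/decryption with MAC
 * update over the payload, and the final encryption of the MAC with
 * the first counter block.
 */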
asmlinkage u32 ce_aes_ccm_auth_data(u8 mac[], u8 const in[], u32 abytes,
                    u32 macp, u32 const rk[], u32 rounds);

asmlinkage void ce_aes_ccm_encrypt(u8 out[], u8 const in[], u32 cbytes,
                   u32 const rk[], u32 rounds, u8 mac[],
                   u8 ctr[]);

asmlinkage void ce_aes_ccm_decrypt(u8 out[], u8 const in[], u32 cbytes,
                   u32 const rk[], u32 rounds, u8 mac[],
                   u8 ctr[]);

asmlinkage void ce_aes_ccm_final(u8 mac[], u8 const ctr[], u32 const rk[],
                 u32 rounds);

static int ccm_setkey(struct crypto_aead *tfm, const u8 *in_key,
              unsigned int key_len)
{
    struct crypto_aes_ctx *ctx = crypto_aead_ctx(tfm);

    return ce_aes_expandkey(ctx, in_key, key_len);
}

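/*
 * NIST SP 800-38C only defines even tag lengths between 4 and 16 bytes.
 * Odd or too-short values are rejected here; the 16 byte upper bound is
 * enforced by the crypto core via .maxauthsize below.
 */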
static int ccm_setauthsize(struct crypto_aead *tfm, unsigned int authsize)
{
    if ((authsize & 1) || authsize < 4)
        return -EINVAL;
    return 0;
}

static int ccm_init_mac(struct aead_request *req, u8 maciv[], u32 msglen)
{
    struct crypto_aead *aead = crypto_aead_reqtfm(req);
    __be32 *n = (__be32 *)&maciv[AES_BLOCK_SIZE - 8];
    u32 l = req->iv[0] + 1;

    /* verify that CCM dimension 'L' is set correctly in the IV */
    if (l < 2 || l > 8)
        return -EINVAL;

    /* verify that msglen can in fact be represented in L bytes */
    if (l < 4 && msglen >> (8 * l))
        return -EOVERFLOW;

    /*
     * Even if the CCM spec allows L values of up to 8, the Linux cryptoapi
     * uses a u32 type to represent msglen so the top 4 bytes are always 0.
     */
    n[0] = 0;
    n[1] = cpu_to_be32(msglen);

    memcpy(maciv, req->iv, AES_BLOCK_SIZE - l);

    /*
     * Meaning of byte 0 according to CCM spec (RFC 3610/NIST 800-38C)
     * - bits 0..2  : max # of bytes required to represent msglen, minus 1
     *                (already set by caller)
     * - bits 3..5  : size of auth tag (1 => 4 bytes, 2 => 6 bytes, etc)
     * - bit 6  : indicates presence of authenticate-only data
     */
    maciv[0] |= (crypto_aead_authsize(aead) - 2) << 2;
    if (req->assoclen)
        maciv[0] |= 0x40;
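    /*
     * Worked example: with iv[0] = 3 (i.e. L = 4), an 8 byte auth tag
     * and non-empty AAD, the flags byte becomes
     * 0x03 | ((8 - 2) << 2) | 0x40 = 0x5b.
     */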

    memset(&req->iv[AES_BLOCK_SIZE - l], 0, l);
    return 0;
}

static void ccm_calculate_auth_mac(struct aead_request *req, u8 mac[])
{
    struct crypto_aead *aead = crypto_aead_reqtfm(req);
    struct crypto_aes_ctx *ctx = crypto_aead_ctx(aead);
    struct __packed { __be16 l; __be32 h; u16 len; } ltag;
    struct scatter_walk walk;
    u32 len = req->assoclen;
    u32 macp = 0;

    /* prepend the AAD with a length tag */
    if (len < 0xff00) {
        ltag.l = cpu_to_be16(len);
        ltag.len = 2;
    } else {
        ltag.l = cpu_to_be16(0xfffe);
        put_unaligned_be32(len, &ltag.h);
        ltag.len = 6;
    }
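    /*
     * Per RFC 3610, an AAD length below 0xff00 is encoded as two
     * big-endian bytes, while larger lengths get the 0xff 0xfe marker
     * followed by four length bytes; e.g. len = 0x10000 is fed to the
     * MAC as ff fe 00 01 00 00.
     */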

    macp = ce_aes_ccm_auth_data(mac, (u8 *)&ltag, ltag.len, macp,
                    ctx->key_enc, num_rounds(ctx));
    scatterwalk_start(&walk, req->src);

    do {
        u32 n = scatterwalk_clamp(&walk, len);
        u8 *p;

        if (!n) {
            scatterwalk_start(&walk, sg_next(walk.sg));
            n = scatterwalk_clamp(&walk, len);
        }
        n = min_t(u32, n, SZ_4K); /* yield NEON at least every 4k */
        p = scatterwalk_map(&walk);

        macp = ce_aes_ccm_auth_data(mac, p, n, macp, ctx->key_enc,
                        num_rounds(ctx));

        if (len / SZ_4K > (len - n) / SZ_4K) {
            kernel_neon_end();
            kernel_neon_begin();
        }
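        /*
         * The test above fires whenever this chunk crosses a 4 KiB
         * boundary of the remaining AAD, so the NEON unit is released
         * and re-acquired at least once every 4 KiB to keep preemption
         * latency bounded.
         */
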
        len -= n;

        scatterwalk_unmap(p);
        scatterwalk_advance(&walk, n);
        scatterwalk_done(&walk, 0, len);
    } while (len);
}

static int ccm_encrypt(struct aead_request *req)
{
    struct crypto_aead *aead = crypto_aead_reqtfm(req);
    struct crypto_aes_ctx *ctx = crypto_aead_ctx(aead);
    struct skcipher_walk walk;
    u8 __aligned(8) mac[AES_BLOCK_SIZE];
    u8 buf[AES_BLOCK_SIZE];
    u32 len = req->cryptlen;
    int err;

    err = ccm_init_mac(req, mac, len);
    if (err)
        return err;

    /* preserve the original iv for the final round */
    memcpy(buf, req->iv, AES_BLOCK_SIZE);
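    /*
     * walk.iv is advanced as the payload is processed, but
     * ce_aes_ccm_final() needs the initial counter block (A_0 in RFC
     * 3610 terms, with its counter field zeroed by ccm_init_mac()) to
     * compute S_0 and encrypt the MAC into the auth tag.
     */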

    err = skcipher_walk_aead_encrypt(&walk, req, false);
    if (unlikely(err))
        return err;

    kernel_neon_begin();

    if (req->assoclen)
        ccm_calculate_auth_mac(req, mac);

    do {
        u32 tail = walk.nbytes % AES_BLOCK_SIZE;

        if (walk.nbytes == walk.total)
            tail = 0;
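        /*
         * On intermediate walk steps, hold back any partial final
         * block so the core routine only ever sees whole blocks; once
         * the walk covers the entire request, the CTR-based core
         * handles the partial tail itself.
         */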

        ce_aes_ccm_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
                   walk.nbytes - tail, ctx->key_enc,
                   num_rounds(ctx), mac, walk.iv);

        if (walk.nbytes == walk.total)
            ce_aes_ccm_final(mac, buf, ctx->key_enc, num_rounds(ctx));

        kernel_neon_end();

        if (walk.nbytes) {
            err = skcipher_walk_done(&walk, tail);
            if (unlikely(err))
                return err;
            if (unlikely(walk.nbytes))
                kernel_neon_begin();
        }
    } while (walk.nbytes);

    /* copy authtag to end of dst */
    scatterwalk_map_and_copy(mac, req->dst, req->assoclen + req->cryptlen,
                 crypto_aead_authsize(aead), 1);

    return 0;
}

static int ccm_decrypt(struct aead_request *req)
{
    struct crypto_aead *aead = crypto_aead_reqtfm(req);
    struct crypto_aes_ctx *ctx = crypto_aead_ctx(aead);
    unsigned int authsize = crypto_aead_authsize(aead);
    struct skcipher_walk walk;
    u8 __aligned(8) mac[AES_BLOCK_SIZE];
    u8 buf[AES_BLOCK_SIZE];
    u32 len = req->cryptlen - authsize;
    int err;

    err = ccm_init_mac(req, mac, len);
    if (err)
        return err;

    /* preserve the original iv for the final round */
    memcpy(buf, req->iv, AES_BLOCK_SIZE);

    err = skcipher_walk_aead_decrypt(&walk, req, false);
    if (unlikely(err))
        return err;

    kernel_neon_begin();

    if (req->assoclen)
        ccm_calculate_auth_mac(req, mac);

    do {
        u32 tail = walk.nbytes % AES_BLOCK_SIZE;

        if (walk.nbytes == walk.total)
            tail = 0;

        ce_aes_ccm_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
                   walk.nbytes - tail, ctx->key_enc,
                   num_rounds(ctx), mac, walk.iv);

        if (walk.nbytes == walk.total)
            ce_aes_ccm_final(mac, buf, ctx->key_enc, num_rounds(ctx));

        kernel_neon_end();

        if (walk.nbytes) {
            err = skcipher_walk_done(&walk, tail);
            if (unlikely(err))
                return err;
            if (unlikely(walk.nbytes))
                kernel_neon_begin();
        }
    } while (walk.nbytes);

    /* compare calculated auth tag with the stored one */
    scatterwalk_map_and_copy(buf, req->src,
                 req->assoclen + req->cryptlen - authsize,
                 authsize, 0);

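    /*
     * crypto_memneq() compares the tags in constant time, so a mismatch
     * does not leak how many bytes were correct.
     */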
    if (crypto_memneq(mac, buf, authsize))
        return -EBADMSG;
    return 0;
}

static struct aead_alg ccm_aes_alg = {
    .base = {
        .cra_name       = "ccm(aes)",
        .cra_driver_name    = "ccm-aes-ce",
        .cra_priority       = 300,
        .cra_blocksize      = 1,
        .cra_ctxsize        = sizeof(struct crypto_aes_ctx),
        .cra_module     = THIS_MODULE,
    },
    .ivsize     = AES_BLOCK_SIZE,
    .chunksize  = AES_BLOCK_SIZE,
    .maxauthsize    = AES_BLOCK_SIZE,
    .setkey     = ccm_setkey,
    .setauthsize    = ccm_setauthsize,
    .encrypt    = ccm_encrypt,
    .decrypt    = ccm_decrypt,
};
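/*
 * cra_blocksize is 1 because CTR mode turns AES into a stream cipher,
 * and cra_priority 300 lets this Crypto Extensions implementation take
 * precedence over lower-priority ccm(aes) implementations, such as the
 * generic template instantiation.
 */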

static int __init aes_mod_init(void)
{
    if (!cpu_have_named_feature(AES))
        return -ENODEV;
    return crypto_register_aead(&ccm_aes_alg);
}

static void __exit aes_mod_exit(void)
{
    crypto_unregister_aead(&ccm_aes_alg);
}

module_init(aes_mod_init);
module_exit(aes_mod_exit);

MODULE_DESCRIPTION("Synchronous AES in CCM mode using ARMv8 Crypto Extensions");
MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL v2");
MODULE_ALIAS_CRYPTO("ccm(aes)");
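
/*
 * Illustrative sketch (not part of this file) of how a kernel caller
 * might drive this transform through the AEAD API. The buffer and
 * length names (key, buf, assoclen, ptlen) are hypothetical and error
 * handling is abbreviated.
 *
 *    #include <crypto/aead.h>
 *    #include <linux/scatterlist.h>
 *
 *    struct crypto_aead *tfm = crypto_alloc_aead("ccm(aes)", 0, 0);
 *    struct aead_request *req;
 *    struct scatterlist sg;
 *    u8 iv[16] = { 3 };                  // iv[0] = L - 1, iv[1..11] = nonce
 *
 *    crypto_aead_setkey(tfm, key, 16);   // AES-128
 *    crypto_aead_setauthsize(tfm, 8);    // 8 byte tag
 *
 *    // buf holds assoclen bytes of AAD followed by ptlen bytes of
 *    // plaintext, with 8 spare bytes for the tag that ccm_encrypt()
 *    // appends in place
 *    sg_init_one(&sg, buf, assoclen + ptlen + 8);
 *    req = aead_request_alloc(tfm, GFP_KERNEL);
 *    aead_request_set_callback(req, 0, NULL, NULL);
 *    aead_request_set_ad(req, assoclen);
 *    aead_request_set_crypt(req, &sg, &sg, ptlen, iv);
 *    err = crypto_aead_encrypt(req);     // returns 0 on success
 */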