// SPDX-License-Identifier: GPL-2.0
/*
 * XCTR: XOR Counter mode - Adapted from ctr.c
 *
 * XCTR is a blockcipher mode of operation used to implement HCTR2. It is
 * closely related to CTR mode; the main difference is that CTR generates the
 * keystream using E(IV + CTR) whereas XCTR generates it using E(IV ^ CTR).
 * This allows implementations to avoid dealing with multi-limb integer
 * arithmetic. XCTR is also specified using little-endian arithmetic, which
 * makes it slightly faster than big-endian CTR.
 *
 * See the HCTR2 paper for more details:
 *	"Length-preserving encryption with HCTR2"
 *	(https://eprint.iacr.org/2021/1441.pdf)
 */
#include <crypto/algapi.h>
#include <crypto/internal/cipher.h>
#include <crypto/internal/skcipher.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>

/* For now this implementation is limited to 16-byte blocks for simplicity */
#define XCTR_BLOCKSIZE 16

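/*
 * Handle the final, possibly partial, block: generate one full block of
 * keystream from E(IV ^ CTR) and XOR only the remaining nbytes of it with
 * the source data. The counter is XORed into walk->iv in place and XORed
 * back out afterwards, leaving the IV unchanged for the caller.
 */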
static void crypto_xctr_crypt_final(struct skcipher_walk *walk,
				    struct crypto_cipher *tfm, u32 byte_ctr)
{
	u8 keystream[XCTR_BLOCKSIZE];
	const u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	unsigned int nbytes = walk->nbytes;
	__le32 ctr32 = cpu_to_le32(byte_ctr / XCTR_BLOCKSIZE + 1);

	/* Form IV ^ CTR in place, encrypt it, then undo the XOR on the IV. */
	crypto_xor(walk->iv, (u8 *)&ctr32, sizeof(ctr32));
	crypto_cipher_encrypt_one(tfm, keystream, walk->iv);
	crypto_xor_cpy(dst, keystream, src, nbytes);
	crypto_xor(walk->iv, (u8 *)&ctr32, sizeof(ctr32));
}

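/*
 * Process full blocks when the source and destination buffers are distinct.
 * Returns the number of leftover bytes (less than XCTR_BLOCKSIZE), which the
 * caller passes back to the skcipher walk.
 */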
static int crypto_xctr_crypt_segment(struct skcipher_walk *walk,
				     struct crypto_cipher *tfm, u32 byte_ctr)
{
	void (*fn)(struct crypto_tfm *, u8 *, const u8 *) =
		crypto_cipher_alg(tfm)->cia_encrypt;
	const u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	unsigned int nbytes = walk->nbytes;
	__le32 ctr32 = cpu_to_le32(byte_ctr / XCTR_BLOCKSIZE + 1);

	do {
		/* Encrypt IV ^ CTR directly into dst, then XOR in src. */
		crypto_xor(walk->iv, (u8 *)&ctr32, sizeof(ctr32));
		fn(crypto_cipher_tfm(tfm), dst, walk->iv);
		crypto_xor(dst, src, XCTR_BLOCKSIZE);
		crypto_xor(walk->iv, (u8 *)&ctr32, sizeof(ctr32));

		le32_add_cpu(&ctr32, 1);

		src += XCTR_BLOCKSIZE;
		dst += XCTR_BLOCKSIZE;
	} while ((nbytes -= XCTR_BLOCKSIZE) >= XCTR_BLOCKSIZE);

	return nbytes;
}

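/*
 * Process full blocks in place (source == destination). The keystream cannot
 * be encrypted directly into the data buffer, since that would overwrite the
 * plaintext before it is XORed, so it goes through a stack buffer aligned
 * for the underlying cipher.
 */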
static int crypto_xctr_crypt_inplace(struct skcipher_walk *walk,
				     struct crypto_cipher *tfm, u32 byte_ctr)
{
	void (*fn)(struct crypto_tfm *, u8 *, const u8 *) =
		crypto_cipher_alg(tfm)->cia_encrypt;
	unsigned long alignmask = crypto_cipher_alignmask(tfm);
	unsigned int nbytes = walk->nbytes;
	u8 *data = walk->src.virt.addr;
	/* Keystream buffer, aligned to the underlying cipher's alignmask */
	u8 tmp[XCTR_BLOCKSIZE + MAX_CIPHER_ALIGNMASK];
	u8 *keystream = PTR_ALIGN(tmp + 0, alignmask + 1);
	__le32 ctr32 = cpu_to_le32(byte_ctr / XCTR_BLOCKSIZE + 1);

	do {
		crypto_xor(walk->iv, (u8 *)&ctr32, sizeof(ctr32));
		fn(crypto_cipher_tfm(tfm), keystream, walk->iv);
		crypto_xor(data, keystream, XCTR_BLOCKSIZE);
		crypto_xor(walk->iv, (u8 *)&ctr32, sizeof(ctr32));

		le32_add_cpu(&ctr32, 1);

		data += XCTR_BLOCKSIZE;
	} while ((nbytes -= XCTR_BLOCKSIZE) >= XCTR_BLOCKSIZE);

	return nbytes;
}

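/*
 * Top-level XCTR en/decryption. The keystream for block number i (1-based)
 * is E(IV ^ i), with i encoded as a 32-bit little-endian value XORed into
 * the start of the IV. Since the keystream is simply XORed with the data,
 * encryption and decryption are the same operation. byte_ctr tracks how
 * many bytes have been processed so that the block counter stays correct
 * across skcipher walk steps.
 */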
static int crypto_xctr_crypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_cipher *cipher = skcipher_cipher_simple(tfm);
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;
	u32 byte_ctr = 0;

	err = skcipher_walk_virt(&walk, req, false);

	while (walk.nbytes >= XCTR_BLOCKSIZE) {
		if (walk.src.virt.addr == walk.dst.virt.addr)
			nbytes = crypto_xctr_crypt_inplace(&walk, cipher,
							   byte_ctr);
		else
			nbytes = crypto_xctr_crypt_segment(&walk, cipher,
							   byte_ctr);

		byte_ctr += walk.nbytes - nbytes;
		err = skcipher_walk_done(&walk, nbytes);
	}

	/* Handle a final partial block, if any. */
	if (walk.nbytes) {
		crypto_xctr_crypt_final(&walk, cipher, byte_ctr);
		err = skcipher_walk_done(&walk, 0);
	}

	return err;
}

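/*
 * Instantiate "xctr(cipher)" as a simple skcipher template wrapped around a
 * single underlying block cipher. A user would typically allocate an
 * instance with, e.g., crypto_alloc_skcipher("xctr(aes)", 0, 0).
 */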
static int crypto_xctr_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct skcipher_instance *inst;
	struct crypto_alg *alg;
	int err;

	inst = skcipher_alloc_instance_simple(tmpl, tb);
	if (IS_ERR(inst))
		return PTR_ERR(inst);

	alg = skcipher_ialg_simple(inst);

	/* The underlying cipher's block size must be 16 bytes. */
	err = -EINVAL;
	if (alg->cra_blocksize != XCTR_BLOCKSIZE)
		goto out_free_inst;

	/* XCTR mode is a stream cipher, so the instance block size is 1. */
	inst->alg.base.cra_blocksize = 1;

	/*
	 * Keep the walk chunksize equal to the underlying block size so that
	 * the walk returns data in multiples of XCTR_BLOCKSIZE, except
	 * possibly for a final partial chunk.
	 */
	inst->alg.chunksize = alg->cra_blocksize;

	/* XCTR XORs keystream into data, so decryption equals encryption. */
	inst->alg.encrypt = crypto_xctr_crypt;
	inst->alg.decrypt = crypto_xctr_crypt;

	err = skcipher_register_instance(tmpl, inst);
	if (err) {
out_free_inst:
		inst->free(inst);
	}

	return err;
}

static struct crypto_template crypto_xctr_tmpl = {
	.name = "xctr",
	.create = crypto_xctr_create,
	.module = THIS_MODULE,
};

static int __init crypto_xctr_module_init(void)
{
	return crypto_register_template(&crypto_xctr_tmpl);
}

static void __exit crypto_xctr_module_exit(void)
{
	crypto_unregister_template(&crypto_xctr_tmpl);
}

subsys_initcall(crypto_xctr_module_init);
module_exit(crypto_xctr_module_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("XCTR block cipher mode of operation");
MODULE_ALIAS_CRYPTO("xctr");
MODULE_IMPORT_NS(CRYPTO_INTERNAL);