// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * CBC: Cipher Block Chaining mode of operation
 */
0008 #include <crypto/algapi.h>
0009 #include <crypto/internal/cipher.h>
0010 #include <crypto/internal/skcipher.h>
0011 #include <linux/err.h>
0012 #include <linux/init.h>
0013 #include <linux/kernel.h>
0014 #include <linux/log2.h>
0015 #include <linux/module.h>
0016
0017 static int crypto_cbc_encrypt_segment(struct skcipher_walk *walk,
0018 struct crypto_skcipher *skcipher)
0019 {
0020 unsigned int bsize = crypto_skcipher_blocksize(skcipher);
0021 void (*fn)(struct crypto_tfm *, u8 *, const u8 *);
0022 unsigned int nbytes = walk->nbytes;
0023 u8 *src = walk->src.virt.addr;
0024 u8 *dst = walk->dst.virt.addr;
0025 struct crypto_cipher *cipher;
0026 struct crypto_tfm *tfm;
0027 u8 *iv = walk->iv;
0028
0029 cipher = skcipher_cipher_simple(skcipher);
0030 tfm = crypto_cipher_tfm(cipher);
0031 fn = crypto_cipher_alg(cipher)->cia_encrypt;
0032
0033 do {
0034 crypto_xor(iv, src, bsize);
0035 fn(tfm, dst, iv);
0036 memcpy(iv, dst, bsize);
0037
0038 src += bsize;
0039 dst += bsize;
0040 } while ((nbytes -= bsize) >= bsize);
0041
0042 return nbytes;
0043 }
0044
0045 static int crypto_cbc_encrypt_inplace(struct skcipher_walk *walk,
0046 struct crypto_skcipher *skcipher)
0047 {
0048 unsigned int bsize = crypto_skcipher_blocksize(skcipher);
0049 void (*fn)(struct crypto_tfm *, u8 *, const u8 *);
0050 unsigned int nbytes = walk->nbytes;
0051 u8 *src = walk->src.virt.addr;
0052 struct crypto_cipher *cipher;
0053 struct crypto_tfm *tfm;
0054 u8 *iv = walk->iv;
0055
0056 cipher = skcipher_cipher_simple(skcipher);
0057 tfm = crypto_cipher_tfm(cipher);
0058 fn = crypto_cipher_alg(cipher)->cia_encrypt;
0059
0060 do {
0061 crypto_xor(src, iv, bsize);
0062 fn(tfm, src, src);
0063 iv = src;
0064
0065 src += bsize;
0066 } while ((nbytes -= bsize) >= bsize);
0067
0068 memcpy(walk->iv, iv, bsize);
0069
0070 return nbytes;
0071 }
0072
0073 static int crypto_cbc_encrypt(struct skcipher_request *req)
0074 {
0075 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
0076 struct skcipher_walk walk;
0077 int err;
0078
0079 err = skcipher_walk_virt(&walk, req, false);
0080
0081 while (walk.nbytes) {
0082 if (walk.src.virt.addr == walk.dst.virt.addr)
0083 err = crypto_cbc_encrypt_inplace(&walk, skcipher);
0084 else
0085 err = crypto_cbc_encrypt_segment(&walk, skcipher);
0086 err = skcipher_walk_done(&walk, err);
0087 }
0088
0089 return err;
0090 }
0091
0092 static int crypto_cbc_decrypt_segment(struct skcipher_walk *walk,
0093 struct crypto_skcipher *skcipher)
0094 {
0095 unsigned int bsize = crypto_skcipher_blocksize(skcipher);
0096 void (*fn)(struct crypto_tfm *, u8 *, const u8 *);
0097 unsigned int nbytes = walk->nbytes;
0098 u8 *src = walk->src.virt.addr;
0099 u8 *dst = walk->dst.virt.addr;
0100 struct crypto_cipher *cipher;
0101 struct crypto_tfm *tfm;
0102 u8 *iv = walk->iv;
0103
0104 cipher = skcipher_cipher_simple(skcipher);
0105 tfm = crypto_cipher_tfm(cipher);
0106 fn = crypto_cipher_alg(cipher)->cia_decrypt;
0107
0108 do {
0109 fn(tfm, dst, src);
0110 crypto_xor(dst, iv, bsize);
0111 iv = src;
0112
0113 src += bsize;
0114 dst += bsize;
0115 } while ((nbytes -= bsize) >= bsize);
0116
0117 memcpy(walk->iv, iv, bsize);
0118
0119 return nbytes;
0120 }
0121
/*
 * CBC-decrypt one walk segment in place (src == dst).
 *
 * CBC decryption is P[i] = D(C[i]) ^ C[i-1]: each plaintext block needs
 * the ciphertext of the block *before* it.  Decrypting forwards in place
 * would overwrite that ciphertext, so this routine walks the blocks from
 * last to first instead, XORing each decrypted block with the still-intact
 * preceding ciphertext.
 *
 * Returns the leftover byte count (< one block) for the walker.
 */
static int crypto_cbc_decrypt_inplace(struct skcipher_walk *walk,
				      struct crypto_skcipher *skcipher)
{
	unsigned int bsize = crypto_skcipher_blocksize(skcipher);
	void (*fn)(struct crypto_tfm *, u8 *, const u8 *);
	unsigned int nbytes = walk->nbytes;
	u8 *src = walk->src.virt.addr;
	u8 last_iv[MAX_CIPHER_BLOCKSIZE];	/* saves the final ciphertext block */
	struct crypto_cipher *cipher;
	struct crypto_tfm *tfm;

	cipher = skcipher_cipher_simple(skcipher);
	tfm = crypto_cipher_tfm(cipher);
	fn = crypto_cipher_alg(cipher)->cia_decrypt;

	/*
	 * Point src at the start of the last *full* block (bsize is a power
	 * of two, enforced at instance creation) and stash that ciphertext:
	 * it becomes the chaining IV for the next segment.
	 */
	src += nbytes - (nbytes & (bsize - 1)) - bsize;
	memcpy(last_iv, src, bsize);

	for (;;) {
		fn(tfm, src, src);
		if ((nbytes -= bsize) < bsize)
			break;
		/* XOR with the preceding, not-yet-decrypted ciphertext block. */
		crypto_xor(src, src - bsize, bsize);
		src -= bsize;
	}

	/* The first block chains to the IV carried in by the walker. */
	crypto_xor(src, walk->iv, bsize);
	memcpy(walk->iv, last_iv, bsize);

	return nbytes;
}
0154
0155 static int crypto_cbc_decrypt(struct skcipher_request *req)
0156 {
0157 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
0158 struct skcipher_walk walk;
0159 int err;
0160
0161 err = skcipher_walk_virt(&walk, req, false);
0162
0163 while (walk.nbytes) {
0164 if (walk.src.virt.addr == walk.dst.virt.addr)
0165 err = crypto_cbc_decrypt_inplace(&walk, skcipher);
0166 else
0167 err = crypto_cbc_decrypt_segment(&walk, skcipher);
0168 err = skcipher_walk_done(&walk, err);
0169 }
0170
0171 return err;
0172 }
0173
0174 static int crypto_cbc_create(struct crypto_template *tmpl, struct rtattr **tb)
0175 {
0176 struct skcipher_instance *inst;
0177 struct crypto_alg *alg;
0178 int err;
0179
0180 inst = skcipher_alloc_instance_simple(tmpl, tb);
0181 if (IS_ERR(inst))
0182 return PTR_ERR(inst);
0183
0184 alg = skcipher_ialg_simple(inst);
0185
0186 err = -EINVAL;
0187 if (!is_power_of_2(alg->cra_blocksize))
0188 goto out_free_inst;
0189
0190 inst->alg.encrypt = crypto_cbc_encrypt;
0191 inst->alg.decrypt = crypto_cbc_decrypt;
0192
0193 err = skcipher_register_instance(tmpl, inst);
0194 if (err) {
0195 out_free_inst:
0196 inst->free(inst);
0197 }
0198
0199 return err;
0200 }
0201
/* The "cbc" template, instantiated by users as cbc(<blockcipher>). */
static struct crypto_template crypto_cbc_tmpl = {
	.name = "cbc",
	.create = crypto_cbc_create,
	.module = THIS_MODULE,
};
0207
/* Register the "cbc" template with the crypto core at boot/module load. */
static int __init crypto_cbc_module_init(void)
{
	return crypto_register_template(&crypto_cbc_tmpl);
}
0212
/* Unregister the "cbc" template on module unload. */
static void __exit crypto_cbc_module_exit(void)
{
	crypto_unregister_template(&crypto_cbc_tmpl);
}
0217
0218 subsys_initcall(crypto_cbc_module_init);
0219 module_exit(crypto_cbc_module_exit);
0220
0221 MODULE_LICENSE("GPL");
0222 MODULE_DESCRIPTION("CBC block cipher mode of operation");
0223 MODULE_ALIAS_CRYPTO("cbc");