// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * PCBC: Propagating Cipher Block Chaining mode
 *
 * Copyright (C) 2006 Red Hat, Inc. All Rights Reserved.
 * Written by David Howells (dhowells@redhat.com)
 *
 * Derived from cbc.c
 * - Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 */

0012 #include <crypto/algapi.h>
0013 #include <crypto/internal/cipher.h>
0014 #include <crypto/internal/skcipher.h>
0015 #include <linux/err.h>
0016 #include <linux/init.h>
0017 #include <linux/kernel.h>
0018 #include <linux/module.h>
0019
0020 static int crypto_pcbc_encrypt_segment(struct skcipher_request *req,
0021 struct skcipher_walk *walk,
0022 struct crypto_cipher *tfm)
0023 {
0024 int bsize = crypto_cipher_blocksize(tfm);
0025 unsigned int nbytes = walk->nbytes;
0026 u8 *src = walk->src.virt.addr;
0027 u8 *dst = walk->dst.virt.addr;
0028 u8 * const iv = walk->iv;
0029
0030 do {
0031 crypto_xor(iv, src, bsize);
0032 crypto_cipher_encrypt_one(tfm, dst, iv);
0033 crypto_xor_cpy(iv, dst, src, bsize);
0034
0035 src += bsize;
0036 dst += bsize;
0037 } while ((nbytes -= bsize) >= bsize);
0038
0039 return nbytes;
0040 }
0041
0042 static int crypto_pcbc_encrypt_inplace(struct skcipher_request *req,
0043 struct skcipher_walk *walk,
0044 struct crypto_cipher *tfm)
0045 {
0046 int bsize = crypto_cipher_blocksize(tfm);
0047 unsigned int nbytes = walk->nbytes;
0048 u8 *src = walk->src.virt.addr;
0049 u8 * const iv = walk->iv;
0050 u8 tmpbuf[MAX_CIPHER_BLOCKSIZE];
0051
0052 do {
0053 memcpy(tmpbuf, src, bsize);
0054 crypto_xor(iv, src, bsize);
0055 crypto_cipher_encrypt_one(tfm, src, iv);
0056 crypto_xor_cpy(iv, tmpbuf, src, bsize);
0057
0058 src += bsize;
0059 } while ((nbytes -= bsize) >= bsize);
0060
0061 return nbytes;
0062 }
0063
0064 static int crypto_pcbc_encrypt(struct skcipher_request *req)
0065 {
0066 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
0067 struct crypto_cipher *cipher = skcipher_cipher_simple(tfm);
0068 struct skcipher_walk walk;
0069 unsigned int nbytes;
0070 int err;
0071
0072 err = skcipher_walk_virt(&walk, req, false);
0073
0074 while ((nbytes = walk.nbytes)) {
0075 if (walk.src.virt.addr == walk.dst.virt.addr)
0076 nbytes = crypto_pcbc_encrypt_inplace(req, &walk,
0077 cipher);
0078 else
0079 nbytes = crypto_pcbc_encrypt_segment(req, &walk,
0080 cipher);
0081 err = skcipher_walk_done(&walk, nbytes);
0082 }
0083
0084 return err;
0085 }
0086
0087 static int crypto_pcbc_decrypt_segment(struct skcipher_request *req,
0088 struct skcipher_walk *walk,
0089 struct crypto_cipher *tfm)
0090 {
0091 int bsize = crypto_cipher_blocksize(tfm);
0092 unsigned int nbytes = walk->nbytes;
0093 u8 *src = walk->src.virt.addr;
0094 u8 *dst = walk->dst.virt.addr;
0095 u8 * const iv = walk->iv;
0096
0097 do {
0098 crypto_cipher_decrypt_one(tfm, dst, src);
0099 crypto_xor(dst, iv, bsize);
0100 crypto_xor_cpy(iv, dst, src, bsize);
0101
0102 src += bsize;
0103 dst += bsize;
0104 } while ((nbytes -= bsize) >= bsize);
0105
0106 return nbytes;
0107 }
0108
0109 static int crypto_pcbc_decrypt_inplace(struct skcipher_request *req,
0110 struct skcipher_walk *walk,
0111 struct crypto_cipher *tfm)
0112 {
0113 int bsize = crypto_cipher_blocksize(tfm);
0114 unsigned int nbytes = walk->nbytes;
0115 u8 *src = walk->src.virt.addr;
0116 u8 * const iv = walk->iv;
0117 u8 tmpbuf[MAX_CIPHER_BLOCKSIZE] __aligned(__alignof__(u32));
0118
0119 do {
0120 memcpy(tmpbuf, src, bsize);
0121 crypto_cipher_decrypt_one(tfm, src, src);
0122 crypto_xor(src, iv, bsize);
0123 crypto_xor_cpy(iv, src, tmpbuf, bsize);
0124
0125 src += bsize;
0126 } while ((nbytes -= bsize) >= bsize);
0127
0128 return nbytes;
0129 }
0130
0131 static int crypto_pcbc_decrypt(struct skcipher_request *req)
0132 {
0133 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
0134 struct crypto_cipher *cipher = skcipher_cipher_simple(tfm);
0135 struct skcipher_walk walk;
0136 unsigned int nbytes;
0137 int err;
0138
0139 err = skcipher_walk_virt(&walk, req, false);
0140
0141 while ((nbytes = walk.nbytes)) {
0142 if (walk.src.virt.addr == walk.dst.virt.addr)
0143 nbytes = crypto_pcbc_decrypt_inplace(req, &walk,
0144 cipher);
0145 else
0146 nbytes = crypto_pcbc_decrypt_segment(req, &walk,
0147 cipher);
0148 err = skcipher_walk_done(&walk, nbytes);
0149 }
0150
0151 return err;
0152 }
0153
/*
 * Template .create callback: build a "pcbc(<cipher>)" skcipher instance
 * around the single-block cipher named in @tb and register it.
 *
 * skcipher_alloc_instance_simple() fills in everything except the
 * encrypt/decrypt handlers, which are set here.  On registration failure
 * the half-built instance must be freed via its own ->free() hook.
 * Returns 0 on success or a negative errno.
 */
static int crypto_pcbc_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct skcipher_instance *inst;
	int err;

	inst = skcipher_alloc_instance_simple(tmpl, tb);
	if (IS_ERR(inst))
		return PTR_ERR(inst);

	inst->alg.encrypt = crypto_pcbc_encrypt;
	inst->alg.decrypt = crypto_pcbc_decrypt;

	err = skcipher_register_instance(tmpl, inst);
	if (err)
		inst->free(inst);

	return err;
}
0172
/*
 * Template registered under the name "pcbc"; users instantiate it as
 * e.g. "pcbc(aes)" to wrap any single-block cipher.
 */
static struct crypto_template crypto_pcbc_tmpl = {
	.name = "pcbc",
	.create = crypto_pcbc_create,
	.module = THIS_MODULE,
};
0178
/* Register the "pcbc" template with the crypto API on module load. */
static int __init crypto_pcbc_module_init(void)
{
	return crypto_register_template(&crypto_pcbc_tmpl);
}
0183
/* Unregister the "pcbc" template on module unload. */
static void __exit crypto_pcbc_module_exit(void)
{
	crypto_unregister_template(&crypto_pcbc_tmpl);
}
0188
/*
 * Register early (subsys_initcall) so dependent crypto users can
 * instantiate "pcbc(...)" during boot.
 */
subsys_initcall(crypto_pcbc_module_init);
module_exit(crypto_pcbc_module_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("PCBC block cipher mode of operation");
MODULE_ALIAS_CRYPTO("pcbc");
MODULE_IMPORT_NS(CRYPTO_INTERNAL);