// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * CTR: Counter mode of operation for block ciphers, plus the
 * RFC 3686 "rfc3686(...)" wrapper template.
 *
 * NOTE(review): the original file header was lost; the license line is
 * inferred from MODULE_LICENSE("GPL") below — confirm against history.
 */
0008 #include <crypto/algapi.h>
0009 #include <crypto/ctr.h>
0010 #include <crypto/internal/cipher.h>
0011 #include <crypto/internal/skcipher.h>
0012 #include <linux/err.h>
0013 #include <linux/init.h>
0014 #include <linux/kernel.h>
0015 #include <linux/module.h>
0016 #include <linux/slab.h>
0017
/* Per-transform state for the rfc3686 template. */
struct crypto_rfc3686_ctx {
	struct crypto_skcipher *child;		/* underlying "ctr(...)" transform */
	u8 nonce[CTR_RFC3686_NONCE_SIZE];	/* nonce taken from the tail of the key */
};
0022
/*
 * Per-request state for the rfc3686 template: the full counter block
 * handed to the child, followed by the child's own request.  The
 * sub-request must be suitably aligned, hence CRYPTO_MINALIGN_ATTR.
 */
struct crypto_rfc3686_req_ctx {
	u8 iv[CTR_RFC3686_BLOCK_SIZE];	/* nonce || per-request IV || counter */
	struct skcipher_request subreq CRYPTO_MINALIGN_ATTR;
};
0027
/*
 * Handle the final, partial block of the walk (walk->nbytes < blocksize).
 *
 * Keystream generation always needs a whole cipher block, so the counter
 * block is encrypted into an aligned stack buffer and only the first
 * walk->nbytes bytes are XOR-copied into the destination.
 */
static void crypto_ctr_crypt_final(struct skcipher_walk *walk,
				   struct crypto_cipher *tfm)
{
	unsigned int bsize = crypto_cipher_blocksize(tfm);
	unsigned long alignmask = crypto_cipher_alignmask(tfm);
	u8 *ctrblk = walk->iv;
	/* scratch sized for the worst-case block size plus alignment slop */
	u8 tmp[MAX_CIPHER_BLOCKSIZE + MAX_CIPHER_ALIGNMASK];
	u8 *keystream = PTR_ALIGN(tmp + 0, alignmask + 1);
	u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	unsigned int nbytes = walk->nbytes;

	/* keystream = E_k(counter); dst = src ^ keystream (nbytes only) */
	crypto_cipher_encrypt_one(tfm, keystream, ctrblk);
	crypto_xor_cpy(dst, keystream, src, nbytes);

	crypto_inc(ctrblk, bsize);
}
0045
/*
 * Process full blocks when the source and destination buffers differ.
 *
 * The keystream is generated directly into the destination and then
 * XORed with the source.  Returns the number of leftover bytes
 * (< bsize), which the caller passes to skcipher_walk_done().
 */
static int crypto_ctr_crypt_segment(struct skcipher_walk *walk,
				    struct crypto_cipher *tfm)
{
	void (*fn)(struct crypto_tfm *, u8 *, const u8 *) =
		   crypto_cipher_alg(tfm)->cia_encrypt;
	unsigned int bsize = crypto_cipher_blocksize(tfm);
	u8 *ctrblk = walk->iv;
	u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	unsigned int nbytes = walk->nbytes;

	do {
		/* create keystream */
		fn(crypto_cipher_tfm(tfm), dst, ctrblk);
		crypto_xor(dst, src, bsize);

		/* increment counter in counterblock */
		crypto_inc(ctrblk, bsize);

		src += bsize;
		dst += bsize;
	} while ((nbytes -= bsize) >= bsize);

	return nbytes;
}
0071
/*
 * Process full blocks when source and destination are the same buffer.
 *
 * Because src == dst, the keystream cannot be generated into the
 * destination; it goes into an aligned stack buffer and is XORed into
 * the data in place.  Returns the leftover byte count (< bsize).
 */
static int crypto_ctr_crypt_inplace(struct skcipher_walk *walk,
				    struct crypto_cipher *tfm)
{
	void (*fn)(struct crypto_tfm *, u8 *, const u8 *) =
		   crypto_cipher_alg(tfm)->cia_encrypt;
	unsigned int bsize = crypto_cipher_blocksize(tfm);
	unsigned long alignmask = crypto_cipher_alignmask(tfm);
	unsigned int nbytes = walk->nbytes;
	u8 *ctrblk = walk->iv;
	u8 *src = walk->src.virt.addr;
	u8 tmp[MAX_CIPHER_BLOCKSIZE + MAX_CIPHER_ALIGNMASK];
	u8 *keystream = PTR_ALIGN(tmp + 0, alignmask + 1);

	do {
		/* create keystream */
		fn(crypto_cipher_tfm(tfm), keystream, ctrblk);
		crypto_xor(src, keystream, bsize);

		/* increment counter in counterblock */
		crypto_inc(ctrblk, bsize);

		src += bsize;
	} while ((nbytes -= bsize) >= bsize);

	return nbytes;
}
0098
/*
 * Top-level CTR transform.  Encryption and decryption are the same
 * operation in counter mode, so this serves as both callbacks.
 *
 * Walks the request, dispatching full blocks to the in-place or
 * out-of-place helper, then handles any trailing partial block.
 */
static int crypto_ctr_crypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_cipher *cipher = skcipher_cipher_simple(tfm);
	const unsigned int bsize = crypto_cipher_blocksize(cipher);
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while (walk.nbytes >= bsize) {
		if (walk.src.virt.addr == walk.dst.virt.addr)
			nbytes = crypto_ctr_crypt_inplace(&walk, cipher);
		else
			nbytes = crypto_ctr_crypt_segment(&walk, cipher);

		err = skcipher_walk_done(&walk, nbytes);
	}

	/* a partial final block can only occur at the very end of the walk */
	if (walk.nbytes) {
		crypto_ctr_crypt_final(&walk, cipher);
		err = skcipher_walk_done(&walk, 0);
	}

	return err;
}
0126
0127 static int crypto_ctr_create(struct crypto_template *tmpl, struct rtattr **tb)
0128 {
0129 struct skcipher_instance *inst;
0130 struct crypto_alg *alg;
0131 int err;
0132
0133 inst = skcipher_alloc_instance_simple(tmpl, tb);
0134 if (IS_ERR(inst))
0135 return PTR_ERR(inst);
0136
0137 alg = skcipher_ialg_simple(inst);
0138
0139
0140 err = -EINVAL;
0141 if (alg->cra_blocksize < 4)
0142 goto out_free_inst;
0143
0144
0145 if (alg->cra_blocksize % 4)
0146 goto out_free_inst;
0147
0148
0149 inst->alg.base.cra_blocksize = 1;
0150
0151
0152
0153
0154
0155 inst->alg.chunksize = alg->cra_blocksize;
0156
0157 inst->alg.encrypt = crypto_ctr_crypt;
0158 inst->alg.decrypt = crypto_ctr_crypt;
0159
0160 err = skcipher_register_instance(tmpl, inst);
0161 if (err) {
0162 out_free_inst:
0163 inst->free(inst);
0164 }
0165
0166 return err;
0167 }
0168
0169 static int crypto_rfc3686_setkey(struct crypto_skcipher *parent,
0170 const u8 *key, unsigned int keylen)
0171 {
0172 struct crypto_rfc3686_ctx *ctx = crypto_skcipher_ctx(parent);
0173 struct crypto_skcipher *child = ctx->child;
0174
0175
0176 if (keylen < CTR_RFC3686_NONCE_SIZE)
0177 return -EINVAL;
0178
0179 memcpy(ctx->nonce, key + (keylen - CTR_RFC3686_NONCE_SIZE),
0180 CTR_RFC3686_NONCE_SIZE);
0181
0182 keylen -= CTR_RFC3686_NONCE_SIZE;
0183
0184 crypto_skcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
0185 crypto_skcipher_set_flags(child, crypto_skcipher_get_flags(parent) &
0186 CRYPTO_TFM_REQ_MASK);
0187 return crypto_skcipher_setkey(child, key, keylen);
0188 }
0189
/*
 * Encrypt/decrypt an rfc3686 request by building the full counter
 * block (nonce || IV || counter=1) and forwarding to the child CTR
 * transform.  Serves as both .encrypt and .decrypt.
 */
static int crypto_rfc3686_crypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_rfc3686_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_skcipher *child = ctx->child;
	unsigned long align = crypto_skcipher_alignmask(tfm);
	/* align the request context up to the transform's alignment mask */
	struct crypto_rfc3686_req_ctx *rctx =
		(void *)PTR_ALIGN((u8 *)skcipher_request_ctx(req), align + 1);
	struct skcipher_request *subreq = &rctx->subreq;
	u8 *iv = rctx->iv;

	/* set up counter block: nonce (4 bytes) || IV (8 bytes) */
	memcpy(iv, ctx->nonce, CTR_RFC3686_NONCE_SIZE);
	memcpy(iv + CTR_RFC3686_NONCE_SIZE, req->iv, CTR_RFC3686_IV_SIZE);

	/* initialize counter portion of counter block to 1 */
	*(__be32 *)(iv + CTR_RFC3686_NONCE_SIZE + CTR_RFC3686_IV_SIZE) =
		cpu_to_be32(1);

	skcipher_request_set_tfm(subreq, child);
	skcipher_request_set_callback(subreq, req->base.flags,
				      req->base.complete, req->base.data);
	skcipher_request_set_crypt(subreq, req->src, req->dst,
				   req->cryptlen, iv);

	return crypto_skcipher_encrypt(subreq);
}
0217
/*
 * Instantiate the child CTR transform and size the per-request context
 * to hold crypto_rfc3686_req_ctx plus the child's request, with room
 * for any extra alignment crypto_rfc3686_crypt() will apply.
 */
static int crypto_rfc3686_init_tfm(struct crypto_skcipher *tfm)
{
	struct skcipher_instance *inst = skcipher_alg_instance(tfm);
	struct crypto_skcipher_spawn *spawn = skcipher_instance_ctx(inst);
	struct crypto_rfc3686_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_skcipher *cipher;
	unsigned long align;
	unsigned int reqsize;

	cipher = crypto_spawn_skcipher(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = cipher;

	/*
	 * The request context is already aligned to
	 * crypto_tfm_ctx_alignment(), so only pad for alignment beyond that.
	 */
	align = crypto_skcipher_alignmask(tfm);
	align &= ~(crypto_tfm_ctx_alignment() - 1);
	reqsize = align + sizeof(struct crypto_rfc3686_req_ctx) +
		  crypto_skcipher_reqsize(cipher);
	crypto_skcipher_set_reqsize(tfm, reqsize);

	return 0;
}
0241
0242 static void crypto_rfc3686_exit_tfm(struct crypto_skcipher *tfm)
0243 {
0244 struct crypto_rfc3686_ctx *ctx = crypto_skcipher_ctx(tfm);
0245
0246 crypto_free_skcipher(ctx->child);
0247 }
0248
/* Drop the spawned child algorithm and free the instance itself. */
static void crypto_rfc3686_free(struct skcipher_instance *inst)
{
	crypto_drop_skcipher(skcipher_instance_ctx(inst));
	kfree(inst);
}
0256
0257 static int crypto_rfc3686_create(struct crypto_template *tmpl,
0258 struct rtattr **tb)
0259 {
0260 struct skcipher_instance *inst;
0261 struct skcipher_alg *alg;
0262 struct crypto_skcipher_spawn *spawn;
0263 u32 mask;
0264 int err;
0265
0266 err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_SKCIPHER, &mask);
0267 if (err)
0268 return err;
0269
0270 inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
0271 if (!inst)
0272 return -ENOMEM;
0273
0274 spawn = skcipher_instance_ctx(inst);
0275
0276 err = crypto_grab_skcipher(spawn, skcipher_crypto_instance(inst),
0277 crypto_attr_alg_name(tb[1]), 0, mask);
0278 if (err)
0279 goto err_free_inst;
0280
0281 alg = crypto_spawn_skcipher_alg(spawn);
0282
0283
0284 err = -EINVAL;
0285 if (crypto_skcipher_alg_ivsize(alg) != CTR_RFC3686_BLOCK_SIZE)
0286 goto err_free_inst;
0287
0288
0289 if (alg->base.cra_blocksize != 1)
0290 goto err_free_inst;
0291
0292 err = -ENAMETOOLONG;
0293 if (snprintf(inst->alg.base.cra_name, CRYPTO_MAX_ALG_NAME,
0294 "rfc3686(%s)", alg->base.cra_name) >= CRYPTO_MAX_ALG_NAME)
0295 goto err_free_inst;
0296 if (snprintf(inst->alg.base.cra_driver_name, CRYPTO_MAX_ALG_NAME,
0297 "rfc3686(%s)", alg->base.cra_driver_name) >=
0298 CRYPTO_MAX_ALG_NAME)
0299 goto err_free_inst;
0300
0301 inst->alg.base.cra_priority = alg->base.cra_priority;
0302 inst->alg.base.cra_blocksize = 1;
0303 inst->alg.base.cra_alignmask = alg->base.cra_alignmask;
0304
0305 inst->alg.ivsize = CTR_RFC3686_IV_SIZE;
0306 inst->alg.chunksize = crypto_skcipher_alg_chunksize(alg);
0307 inst->alg.min_keysize = crypto_skcipher_alg_min_keysize(alg) +
0308 CTR_RFC3686_NONCE_SIZE;
0309 inst->alg.max_keysize = crypto_skcipher_alg_max_keysize(alg) +
0310 CTR_RFC3686_NONCE_SIZE;
0311
0312 inst->alg.setkey = crypto_rfc3686_setkey;
0313 inst->alg.encrypt = crypto_rfc3686_crypt;
0314 inst->alg.decrypt = crypto_rfc3686_crypt;
0315
0316 inst->alg.base.cra_ctxsize = sizeof(struct crypto_rfc3686_ctx);
0317
0318 inst->alg.init = crypto_rfc3686_init_tfm;
0319 inst->alg.exit = crypto_rfc3686_exit_tfm;
0320
0321 inst->free = crypto_rfc3686_free;
0322
0323 err = skcipher_register_instance(tmpl, inst);
0324 if (err) {
0325 err_free_inst:
0326 crypto_rfc3686_free(inst);
0327 }
0328 return err;
0329 }
0330
/* Templates registered by this module: plain CTR and the RFC 3686 wrapper. */
static struct crypto_template crypto_ctr_tmpls[] = {
	{
		.name = "ctr",
		.create = crypto_ctr_create,
		.module = THIS_MODULE,
	}, {
		.name = "rfc3686",
		.create = crypto_rfc3686_create,
		.module = THIS_MODULE,
	},
};
0342
0343 static int __init crypto_ctr_module_init(void)
0344 {
0345 return crypto_register_templates(crypto_ctr_tmpls,
0346 ARRAY_SIZE(crypto_ctr_tmpls));
0347 }
0348
0349 static void __exit crypto_ctr_module_exit(void)
0350 {
0351 crypto_unregister_templates(crypto_ctr_tmpls,
0352 ARRAY_SIZE(crypto_ctr_tmpls));
0353 }
0354
/* Registered early (subsys_initcall) so dependent algorithms can find us. */
subsys_initcall(crypto_ctr_module_init);
module_exit(crypto_ctr_module_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("CTR block cipher mode of operation");
MODULE_ALIAS_CRYPTO("rfc3686");
MODULE_ALIAS_CRYPTO("ctr");
MODULE_IMPORT_NS(CRYPTO_INTERNAL);