/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * Symmetric key ciphers.
 *
 * Copyright (c) 2007 Herbert Xu <herbert@gondor.apana.org.au>
 */

#ifndef _CRYPTO_INTERNAL_SKCIPHER_H
#define _CRYPTO_INTERNAL_SKCIPHER_H

#include <crypto/algapi.h>
#include <crypto/internal/cipher.h>
#include <crypto/skcipher.h>
#include <linux/list.h>
#include <linux/types.h>

struct aead_request;
struct rtattr;

struct skcipher_instance {
	void (*free)(struct skcipher_instance *inst);
	union {
		struct {
			char head[offsetof(struct skcipher_alg, base)];
			struct crypto_instance base;
		} s;
		struct skcipher_alg alg;
	};
};

struct crypto_skcipher_spawn {
	struct crypto_spawn base;
};

struct skcipher_walk {
	union {
		struct {
			struct page *page;
			unsigned long offset;
		} phys;

		struct {
			u8 *page;
			void *addr;
		} virt;
	} src, dst;

	struct scatter_walk in;
	unsigned int nbytes;

	struct scatter_walk out;
	unsigned int total;

	struct list_head buffers;

	u8 *page;
	u8 *buffer;
	u8 *oiv;
	void *iv;

	unsigned int ivsize;

	int flags;
	unsigned int blocksize;
	unsigned int stride;
	unsigned int alignmask;
};

static inline struct crypto_instance *skcipher_crypto_instance(
	struct skcipher_instance *inst)
{
	return &inst->s.base;
}

static inline struct skcipher_instance *skcipher_alg_instance(
	struct crypto_skcipher *skcipher)
{
	return container_of(crypto_skcipher_alg(skcipher),
			    struct skcipher_instance, alg);
}

static inline void *skcipher_instance_ctx(struct skcipher_instance *inst)
{
	return crypto_instance_ctx(skcipher_crypto_instance(inst));
}

static inline void skcipher_request_complete(struct skcipher_request *req, int err)
{
	req->base.complete(&req->base, err);
}

int crypto_grab_skcipher(struct crypto_skcipher_spawn *spawn,
			 struct crypto_instance *inst,
			 const char *name, u32 type, u32 mask);

static inline void crypto_drop_skcipher(struct crypto_skcipher_spawn *spawn)
{
	crypto_drop_spawn(&spawn->base);
}
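
/*
 * Usage sketch (illustrative only, not part of this header): a template's
 * ->create() typically embeds a crypto_skcipher_spawn in its instance
 * context and grabs the underlying algorithm by name. All "example" names
 * below are hypothetical; crypto_attr_alg_name() is from <crypto/algapi.h>,
 * and type/mask handling is elided.
 *
 *	static int example_create(struct crypto_template *tmpl,
 *				  struct rtattr **tb)
 *	{
 *		struct crypto_skcipher_spawn *spawn;
 *		struct skcipher_instance *inst;
 *		int err;
 *
 *		inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
 *		if (!inst)
 *			return -ENOMEM;
 *		spawn = skcipher_instance_ctx(inst);
 *
 *		err = crypto_grab_skcipher(spawn, skcipher_crypto_instance(inst),
 *					   crypto_attr_alg_name(tb[1]), 0, 0);
 *		if (err) {
 *			kfree(inst);
 *			return err;
 *		}
 *		...
 *	}
 */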

static inline struct skcipher_alg *crypto_skcipher_spawn_alg(
	struct crypto_skcipher_spawn *spawn)
{
	return container_of(spawn->base.alg, struct skcipher_alg, base);
}

static inline struct skcipher_alg *crypto_spawn_skcipher_alg(
	struct crypto_skcipher_spawn *spawn)
{
	return crypto_skcipher_spawn_alg(spawn);
}

static inline struct crypto_skcipher *crypto_spawn_skcipher(
	struct crypto_skcipher_spawn *spawn)
{
	return crypto_spawn_tfm2(&spawn->base);
}

static inline void crypto_skcipher_set_reqsize(
	struct crypto_skcipher *skcipher, unsigned int reqsize)
{
	skcipher->reqsize = reqsize;
}
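
/*
 * Usage sketch (illustrative only): an instance's ->init() commonly spawns
 * the child transform and sizes the request context so a child subrequest
 * can live at the end of the outer request. The "example" names (including
 * struct example_ctx, assumed to hold a child pointer) are hypothetical;
 * crypto_skcipher_reqsize() is from <crypto/skcipher.h>.
 *
 *	static int example_init_tfm(struct crypto_skcipher *tfm)
 *	{
 *		struct skcipher_instance *inst = skcipher_alg_instance(tfm);
 *		struct crypto_skcipher_spawn *spawn = skcipher_instance_ctx(inst);
 *		struct example_ctx *ctx = crypto_skcipher_ctx(tfm);
 *		struct crypto_skcipher *child;
 *
 *		child = crypto_spawn_skcipher(spawn);
 *		if (IS_ERR(child))
 *			return PTR_ERR(child);
 *		ctx->child = child;
 *
 *		crypto_skcipher_set_reqsize(tfm,
 *					    sizeof(struct skcipher_request) +
 *					    crypto_skcipher_reqsize(child));
 *		return 0;
 *	}
 */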

int crypto_register_skcipher(struct skcipher_alg *alg);
void crypto_unregister_skcipher(struct skcipher_alg *alg);
int crypto_register_skciphers(struct skcipher_alg *algs, int count);
void crypto_unregister_skciphers(struct skcipher_alg *algs, int count);
int skcipher_register_instance(struct crypto_template *tmpl,
			       struct skcipher_instance *inst);
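
/*
 * Usage sketch (illustrative only): a standalone implementation registers a
 * struct skcipher_alg from module init. All names and values shown are
 * placeholders, not requirements of this API.
 *
 *	static struct skcipher_alg example_alg = {
 *		.base.cra_name		= "example",
 *		.base.cra_driver_name	= "example-generic",
 *		.base.cra_priority	= 100,
 *		.base.cra_blocksize	= 16,
 *		.base.cra_ctxsize	= sizeof(struct example_ctx),
 *		.base.cra_module	= THIS_MODULE,
 *		.min_keysize		= 16,
 *		.max_keysize		= 32,
 *		.setkey			= example_setkey,
 *		.encrypt		= example_encrypt,
 *		.decrypt		= example_decrypt,
 *	};
 *
 *	static int __init example_mod_init(void)
 *	{
 *		return crypto_register_skcipher(&example_alg);
 *	}
 */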

int skcipher_walk_done(struct skcipher_walk *walk, int err);
int skcipher_walk_virt(struct skcipher_walk *walk,
		       struct skcipher_request *req,
		       bool atomic);
int skcipher_walk_async(struct skcipher_walk *walk,
			struct skcipher_request *req);
int skcipher_walk_aead_encrypt(struct skcipher_walk *walk,
			       struct aead_request *req, bool atomic);
int skcipher_walk_aead_decrypt(struct skcipher_walk *walk,
			       struct aead_request *req, bool atomic);
void skcipher_walk_complete(struct skcipher_walk *walk, int err);

static inline void skcipher_walk_abort(struct skcipher_walk *walk)
{
	skcipher_walk_done(walk, -ECANCELED);
}
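
/*
 * Usage sketch (illustrative only): the canonical walk loop in an skcipher
 * ->encrypt() for a byte-granular (stream-style) cipher. skcipher_walk_virt()
 * maps the request's scatterlists chunk by chunk; the second argument to
 * skcipher_walk_done() is the number of bytes left unprocessed in the
 * current chunk (0 here, since each chunk is consumed fully).
 * example_do_crypt() is a hypothetical per-chunk helper.
 *
 *	static int example_encrypt(struct skcipher_request *req)
 *	{
 *		struct skcipher_walk walk;
 *		int err;
 *
 *		err = skcipher_walk_virt(&walk, req, false);
 *		while (walk.nbytes) {
 *			example_do_crypt(walk.dst.virt.addr, walk.src.virt.addr,
 *					 walk.nbytes, walk.iv);
 *			err = skcipher_walk_done(&walk, 0);
 *		}
 *		return err;
 *	}
 */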

static inline void *crypto_skcipher_ctx(struct crypto_skcipher *tfm)
{
	return crypto_tfm_ctx(&tfm->base);
}

static inline void *skcipher_request_ctx(struct skcipher_request *req)
{
	return req->__ctx;
}

static inline u32 skcipher_request_flags(struct skcipher_request *req)
{
	return req->base.flags;
}

static inline unsigned int crypto_skcipher_alg_min_keysize(
	struct skcipher_alg *alg)
{
	return alg->min_keysize;
}

static inline unsigned int crypto_skcipher_alg_max_keysize(
	struct skcipher_alg *alg)
{
	return alg->max_keysize;
}

static inline unsigned int crypto_skcipher_alg_walksize(
	struct skcipher_alg *alg)
{
	return alg->walksize;
}

/**
 * crypto_skcipher_walksize() - obtain walk size
 * @tfm: cipher handle
 *
 * In some cases, algorithms can only perform optimally when operating on
 * multiple blocks in parallel. This is reflected by the walksize, which
 * must be a multiple of the chunksize (or equal if the concern does not
 * apply).
 *
 * Return: walk size in bytes
 */
static inline unsigned int crypto_skcipher_walksize(
	struct crypto_skcipher *tfm)
{
	return crypto_skcipher_alg_walksize(crypto_skcipher_alg(tfm));
}

/* Helpers for simple block cipher modes of operation */
struct skcipher_ctx_simple {
	struct crypto_cipher *cipher;	/* underlying block cipher */
};
static inline struct crypto_cipher *
skcipher_cipher_simple(struct crypto_skcipher *tfm)
{
	struct skcipher_ctx_simple *ctx = crypto_skcipher_ctx(tfm);

	return ctx->cipher;
}
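
/*
 * For example (illustrative only), a simple mode's per-block step can call
 * the underlying block cipher directly via <crypto/internal/cipher.h>:
 *
 *	struct crypto_cipher *cipher = skcipher_cipher_simple(tfm);
 *
 *	crypto_cipher_encrypt_one(cipher, dst, src);
 */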

struct skcipher_instance *skcipher_alloc_instance_simple(
	struct crypto_template *tmpl, struct rtattr **tb);

static inline struct crypto_alg *skcipher_ialg_simple(
	struct skcipher_instance *inst)
{
	struct crypto_cipher_spawn *spawn = skcipher_instance_ctx(inst);

	return crypto_spawn_cipher_alg(spawn);
}
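
/*
 * Usage sketch (illustrative only): a simple single-block-cipher template
 * (modelled on modes such as ECB) can build its whole instance with
 * skcipher_alloc_instance_simple(). The "example" names are hypothetical.
 *
 *	static int example_create(struct crypto_template *tmpl,
 *				  struct rtattr **tb)
 *	{
 *		struct skcipher_instance *inst;
 *		int err;
 *
 *		inst = skcipher_alloc_instance_simple(tmpl, tb);
 *		if (IS_ERR(inst))
 *			return PTR_ERR(inst);
 *
 *		inst->alg.ivsize = 0;	// an IV-less mode clears the default
 *
 *		err = skcipher_register_instance(tmpl, inst);
 *		if (err)
 *			inst->free(inst);
 *		return err;
 *	}
 */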

#endif	/* _CRYPTO_INTERNAL_SKCIPHER_H */