Back to home page

OSCL-LXR

 
 

    


0001 /* SPDX-License-Identifier: GPL-2.0-or-later */
0002 /*
0003  * AEAD: Authenticated Encryption with Associated Data
0004  * 
0005  * Copyright (c) 2007-2015 Herbert Xu <herbert@gondor.apana.org.au>
0006  */
0007 
0008 #ifndef _CRYPTO_INTERNAL_AEAD_H
0009 #define _CRYPTO_INTERNAL_AEAD_H
0010 
0011 #include <crypto/aead.h>
0012 #include <crypto/algapi.h>
0013 #include <linux/stddef.h>
0014 #include <linux/types.h>
0015 
0016 struct rtattr;
0017 
/*
 * Template instance of an AEAD algorithm.  The union overlays the generic
 * crypto_instance bookkeeping on top of the aead_alg so that the same
 * object can be viewed either way.
 */
struct aead_instance {
    /* Destructor called when the instance is torn down. */
    void (*free)(struct aead_instance *inst);
    union {
        struct {
            /*
             * Padding sized so that s.base lines up exactly with
             * alg.base (the crypto_alg embedded in aead_alg).
             */
            char head[offsetof(struct aead_alg, base)];
            struct crypto_instance base;
        } s;
        struct aead_alg alg;
    };
};
0028 
/* Reference from a template instance to an underlying AEAD algorithm. */
struct crypto_aead_spawn {
    struct crypto_spawn base;
};
0032 
/* Request queue specialised for AEAD requests (wraps crypto_queue). */
struct aead_queue {
    struct crypto_queue base;
};
0036 
0037 static inline void *crypto_aead_ctx(struct crypto_aead *tfm)
0038 {
0039     return crypto_tfm_ctx(&tfm->base);
0040 }
0041 
0042 static inline struct crypto_instance *aead_crypto_instance(
0043     struct aead_instance *inst)
0044 {
0045     return container_of(&inst->alg.base, struct crypto_instance, alg);
0046 }
0047 
0048 static inline struct aead_instance *aead_instance(struct crypto_instance *inst)
0049 {
0050     return container_of(&inst->alg, struct aead_instance, alg.base);
0051 }
0052 
0053 static inline struct aead_instance *aead_alg_instance(struct crypto_aead *aead)
0054 {
0055     return aead_instance(crypto_tfm_alg_instance(&aead->base));
0056 }
0057 
/* Return the private context area of a template instance. */
static inline void *aead_instance_ctx(struct aead_instance *inst)
{
	struct crypto_instance *generic = aead_crypto_instance(inst);

	return crypto_instance_ctx(generic);
}
0062 
0063 static inline void *aead_request_ctx(struct aead_request *req)
0064 {
0065     return req->__ctx;
0066 }
0067 
0068 static inline void aead_request_complete(struct aead_request *req, int err)
0069 {
0070     req->base.complete(&req->base, err);
0071 }
0072 
0073 static inline u32 aead_request_flags(struct aead_request *req)
0074 {
0075     return req->base.flags;
0076 }
0077 
0078 static inline struct aead_request *aead_request_cast(
0079     struct crypto_async_request *req)
0080 {
0081     return container_of(req, struct aead_request, base);
0082 }
0083 
/*
 * Look up the AEAD algorithm @name (filtered by @type/@mask) and bind it
 * to @spawn on behalf of template instance @inst.  Returns 0 on success
 * or a negative errno.  Release with crypto_drop_aead().
 */
int crypto_grab_aead(struct crypto_aead_spawn *spawn,
             struct crypto_instance *inst,
             const char *name, u32 type, u32 mask);
0087 
0088 static inline void crypto_drop_aead(struct crypto_aead_spawn *spawn)
0089 {
0090     crypto_drop_spawn(&spawn->base);
0091 }
0092 
0093 static inline struct aead_alg *crypto_spawn_aead_alg(
0094     struct crypto_aead_spawn *spawn)
0095 {
0096     return container_of(spawn->base.alg, struct aead_alg, base);
0097 }
0098 
0099 static inline struct crypto_aead *crypto_spawn_aead(
0100     struct crypto_aead_spawn *spawn)
0101 {
0102     return crypto_spawn_tfm2(&spawn->base);
0103 }
0104 
/*
 * Set the size of the per-request context area that aead_request_ctx()
 * hands out for this transform.  Called by implementations at init time.
 */
static inline void crypto_aead_set_reqsize(struct crypto_aead *aead,
                       unsigned int reqsize)
{
    aead->reqsize = reqsize;
}
0110 
0111 static inline void aead_init_queue(struct aead_queue *queue,
0112                    unsigned int max_qlen)
0113 {
0114     crypto_init_queue(&queue->base, max_qlen);
0115 }
0116 
0117 static inline int aead_enqueue_request(struct aead_queue *queue,
0118                        struct aead_request *request)
0119 {
0120     return crypto_enqueue_request(&queue->base, &request->base);
0121 }
0122 
0123 static inline struct aead_request *aead_dequeue_request(
0124     struct aead_queue *queue)
0125 {
0126     struct crypto_async_request *req;
0127 
0128     req = crypto_dequeue_request(&queue->base);
0129 
0130     return req ? container_of(req, struct aead_request, base) : NULL;
0131 }
0132 
0133 static inline struct aead_request *aead_get_backlog(struct aead_queue *queue)
0134 {
0135     struct crypto_async_request *req;
0136 
0137     req = crypto_get_backlog(&queue->base);
0138 
0139     return req ? container_of(req, struct aead_request, base) : NULL;
0140 }
0141 
/* Return the chunk size advertised by an AEAD algorithm descriptor. */
static inline unsigned int crypto_aead_alg_chunksize(struct aead_alg *alg)
{
    return alg->chunksize;
}
0146 
/**
 * crypto_aead_chunksize() - obtain chunk size
 * @tfm: cipher handle
 *
 * Ciphers such as CCM report a block size of one, yet incremental
 * updates must still be supplied in multiples of the underlying block
 * size because the IV cannot advance with sub-block granularity.  That
 * underlying granularity is what this API calls the chunk size.
 *
 * Return: chunk size in bytes
 */
static inline unsigned int crypto_aead_chunksize(struct crypto_aead *tfm)
{
	struct aead_alg *alg = crypto_aead_alg(tfm);

	return crypto_aead_alg_chunksize(alg);
}
0162 
/* Register/unregister one AEAD algorithm; int returns are 0 or -errno. */
int crypto_register_aead(struct aead_alg *alg);
void crypto_unregister_aead(struct aead_alg *alg);
/* Batch variants operating on an array of @count algorithms. */
int crypto_register_aeads(struct aead_alg *algs, int count);
void crypto_unregister_aeads(struct aead_alg *algs, int count);
/* Register a template-generated AEAD instance under @tmpl. */
int aead_register_instance(struct crypto_template *tmpl,
               struct aead_instance *inst);
0169 
0170 #endif  /* _CRYPTO_INTERNAL_AEAD_H */
0171