0001 /* SPDX-License-Identifier: GPL-2.0-or-later */
0002 /*
0003  * Scatterlist Cryptographic API.
0004  *
0005  * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
0006  * Copyright (c) 2002 David S. Miller (davem@redhat.com)
0007  * Copyright (c) 2005 Herbert Xu <herbert@gondor.apana.org.au>
0008  *
0009  * Portions derived from Cryptoapi, by Alexander Kjeldaas <astor@fast.no>
0010  * and Nettle, by Niels Möller.
0011  */
0012 #ifndef _LINUX_CRYPTO_H
0013 #define _LINUX_CRYPTO_H
0014 
0015 #include <linux/atomic.h>
0016 #include <linux/kernel.h>
0017 #include <linux/list.h>
0018 #include <linux/bug.h>
0019 #include <linux/refcount.h>
0020 #include <linux/slab.h>
0021 #include <linux/completion.h>
0022 
0023 /*
0024  * Autoloaded crypto modules should only use a prefixed name to avoid allowing
0025  * arbitrary modules to be loaded. Loading from userspace may still need the
0026  * unprefixed names, so retains those aliases as well.
0027  * This uses __MODULE_INFO directly instead of MODULE_ALIAS because pre-4.3
0028  * gcc (e.g. avr32 toolchain) uses __LINE__ for uniqueness, and this macro
0029  * expands twice on the same line. Instead, use a separate base name for the
0030  * alias.
0031  */
0032 #define MODULE_ALIAS_CRYPTO(name)   \
0033         __MODULE_INFO(alias, alias_userspace, name);    \
0034         __MODULE_INFO(alias, alias_crypto, "crypto-" name)
0035 
0036 /*
0037  * Algorithm masks and types.
0038  */
0039 #define CRYPTO_ALG_TYPE_MASK        0x0000000f
0040 #define CRYPTO_ALG_TYPE_CIPHER      0x00000001
0041 #define CRYPTO_ALG_TYPE_COMPRESS    0x00000002
0042 #define CRYPTO_ALG_TYPE_AEAD        0x00000003
0043 #define CRYPTO_ALG_TYPE_SKCIPHER    0x00000005
0044 #define CRYPTO_ALG_TYPE_KPP     0x00000008
0045 #define CRYPTO_ALG_TYPE_ACOMPRESS   0x0000000a
0046 #define CRYPTO_ALG_TYPE_SCOMPRESS   0x0000000b
0047 #define CRYPTO_ALG_TYPE_RNG     0x0000000c
0048 #define CRYPTO_ALG_TYPE_AKCIPHER    0x0000000d
0049 #define CRYPTO_ALG_TYPE_HASH        0x0000000e
0050 #define CRYPTO_ALG_TYPE_SHASH       0x0000000e
0051 #define CRYPTO_ALG_TYPE_AHASH       0x0000000f
0052 
0053 #define CRYPTO_ALG_TYPE_HASH_MASK   0x0000000e
0054 #define CRYPTO_ALG_TYPE_AHASH_MASK  0x0000000e
0055 #define CRYPTO_ALG_TYPE_ACOMPRESS_MASK  0x0000000e
0056 
0057 #define CRYPTO_ALG_LARVAL       0x00000010
0058 #define CRYPTO_ALG_DEAD         0x00000020
0059 #define CRYPTO_ALG_DYING        0x00000040
0060 #define CRYPTO_ALG_ASYNC        0x00000080
0061 
0062 /*
0063  * Set if the algorithm (or an algorithm which it uses) requires another
0064  * algorithm of the same type to handle corner cases.
0065  */
0066 #define CRYPTO_ALG_NEED_FALLBACK    0x00000100
0067 
0068 /*
0069  * Set if the algorithm has passed automated run-time testing.  Note that
0070  * if there is no run-time testing for a given algorithm it is considered
0071  * to have passed.
0072  */
0073 
0074 #define CRYPTO_ALG_TESTED       0x00000400
0075 
0076 /*
0077  * Set if the algorithm is an instance that is built from templates.
0078  */
0079 #define CRYPTO_ALG_INSTANCE     0x00000800
0080 
0081 /* Set this bit if the algorithm provided is hardware accelerated but
0082  * not available to userspace via instruction set or so.
0083  */
0084 #define CRYPTO_ALG_KERN_DRIVER_ONLY 0x00001000
0085 
0086 /*
0087  * Mark a cipher as a service implementation only usable by another
0088  * cipher and never by a normal user of the kernel crypto API
0089  */
0090 #define CRYPTO_ALG_INTERNAL     0x00002000
0091 
0092 /*
0093  * Set if the algorithm has a ->setkey() method but can be used without
0094  * calling it first, i.e. there is a default key.
0095  */
0096 #define CRYPTO_ALG_OPTIONAL_KEY     0x00004000
0097 
0098 /*
0099  * Don't trigger module loading
0100  */
0101 #define CRYPTO_NOLOAD           0x00008000
0102 
0103 /*
0104  * The algorithm may allocate memory during request processing, i.e. during
0105  * encryption, decryption, or hashing.  Users can request an algorithm with this
0106  * flag unset if they can't handle memory allocation failures.
0107  *
0108  * This flag is currently only implemented for algorithms of type "skcipher",
0109  * "aead", "ahash", "shash", and "cipher".  Algorithms of other types might not
0110  * have this flag set even if they allocate memory.
0111  *
0112  * In some edge cases, algorithms can allocate memory regardless of this flag.
0113  * To avoid these cases, users must obey the following usage constraints:
0114  *    skcipher:
0115  *  - The IV buffer and all scatterlist elements must be aligned to the
0116  *    algorithm's alignmask.
0117  *  - If the data were to be divided into chunks of size
0118  *    crypto_skcipher_walksize() (with any remainder going at the end), no
0119  *    chunk can cross a page boundary or a scatterlist element boundary.
0120  *    aead:
0121  *  - The IV buffer and all scatterlist elements must be aligned to the
0122  *    algorithm's alignmask.
0123  *  - The first scatterlist element must contain all the associated data,
0124  *    and its pages must be !PageHighMem.
0125  *  - If the plaintext/ciphertext were to be divided into chunks of size
0126  *    crypto_aead_walksize() (with the remainder going at the end), no chunk
0127  *    can cross a page boundary or a scatterlist element boundary.
0128  *    ahash:
0129  *  - The result buffer must be aligned to the algorithm's alignmask.
0130  *  - crypto_ahash_finup() must not be used unless the algorithm implements
0131  *    ->finup() natively.
0132  */
0133 #define CRYPTO_ALG_ALLOCATES_MEMORY 0x00010000
0134 
0135 /*
0136  * Mark an algorithm as a service implementation only usable by a
0137  * template and never by a normal user of the kernel crypto API.
0138  * This is intended to be used by algorithms that are themselves
0139  * not FIPS-approved but may instead be used to implement parts of
0140  * a FIPS-approved algorithm (e.g., dh vs. ffdhe2048(dh)).
0141  */
0142 #define CRYPTO_ALG_FIPS_INTERNAL    0x00020000
0143 
0144 /*
0145  * Transform masks and values (for crt_flags).
0146  */
0147 #define CRYPTO_TFM_NEED_KEY     0x00000001
0148 
0149 #define CRYPTO_TFM_REQ_MASK     0x000fff00
0150 #define CRYPTO_TFM_REQ_FORBID_WEAK_KEYS 0x00000100
0151 #define CRYPTO_TFM_REQ_MAY_SLEEP    0x00000200
0152 #define CRYPTO_TFM_REQ_MAY_BACKLOG  0x00000400
0153 
0154 /*
0155  * Miscellaneous stuff.
0156  */
0157 #define CRYPTO_MAX_ALG_NAME     128
0158 
0159 /*
0160  * The macro CRYPTO_MINALIGN_ATTR (along with the void * type in the actual
0161  * declaration) is used to ensure that the crypto_tfm context structure is
0162  * aligned correctly for the given architecture so that there are no alignment
0163  * faults for C data types.  On architectures that support non-cache coherent
0164  * DMA, such as ARM or arm64, it also takes into account the minimal alignment
0165  * that is required to ensure that the context struct member does not share any
0166  * cachelines with the rest of the struct. This is needed to ensure that cache
0167  * maintenance for non-coherent DMA (cache invalidation in particular) does not
0168  * affect data that may be accessed by the CPU concurrently.
0169  */
0170 #define CRYPTO_MINALIGN ARCH_KMALLOC_MINALIGN
0171 
0172 #define CRYPTO_MINALIGN_ATTR __attribute__ ((__aligned__(CRYPTO_MINALIGN)))
0173 
0174 struct scatterlist;
0175 struct crypto_async_request;
0176 struct crypto_tfm;
0177 struct crypto_type;
0178 
0179 typedef void (*crypto_completion_t)(struct crypto_async_request *req, int err);
0180 
0181 /**
0182  * DOC: Block Cipher Context Data Structures
0183  *
0184  * These data structures define the operating context for each block cipher
0185  * type.
0186  */
0187 
/**
 * struct crypto_async_request - common part of an async crypto request
 * @list: list linkage used internally by the API (e.g. for backlog queues)
 * @complete: callback invoked when the request finishes
 * @data: opaque pointer handed back to @complete
 * @tfm: transform object this request runs on
 * @flags: request flags (CRYPTO_TFM_REQ_* values)
 */
struct crypto_async_request {
    struct list_head list;
    crypto_completion_t complete;
    void *data;
    struct crypto_tfm *tfm;

    u32 flags;
};
0196 
0197 /**
0198  * DOC: Block Cipher Algorithm Definitions
0199  *
0200  * These data structures define modular crypto algorithm implementations,
0201  * managed via crypto_register_alg() and crypto_unregister_alg().
0202  */
0203 
0204 /**
0205  * struct cipher_alg - single-block symmetric ciphers definition
0206  * @cia_min_keysize: Minimum key size supported by the transformation. This is
0207  *           the smallest key length supported by this transformation
0208  *           algorithm. This must be set to one of the pre-defined
0209  *           values as this is not hardware specific. Possible values
0210  *           for this field can be found via git grep "_MIN_KEY_SIZE"
0211  *           include/crypto/
0212  * @cia_max_keysize: Maximum key size supported by the transformation. This is
0213  *          the largest key length supported by this transformation
0214  *          algorithm. This must be set to one of the pre-defined values
0215  *          as this is not hardware specific. Possible values for this
0216  *          field can be found via git grep "_MAX_KEY_SIZE"
0217  *          include/crypto/
0218  * @cia_setkey: Set key for the transformation. This function is used to either
0219  *          program a supplied key into the hardware or store the key in the
0220  *          transformation context for programming it later. Note that this
0221  *          function does modify the transformation context. This function
0222  *          can be called multiple times during the existence of the
0223  *          transformation object, so one must make sure the key is properly
0224  *          reprogrammed into the hardware. This function is also
0225  *          responsible for checking the key length for validity.
0226  * @cia_encrypt: Encrypt a single block. This function is used to encrypt a
0227  *       single block of data, which must be @cra_blocksize big. This
0228  *       always operates on a full @cra_blocksize and it is not possible
0229  *       to encrypt a block of smaller size. The supplied buffers must
0230  *       therefore also be at least of @cra_blocksize size. Both the
0231  *       input and output buffers are always aligned to @cra_alignmask.
0232  *       In case either of the input or output buffer supplied by user
0233  *       of the crypto API is not aligned to @cra_alignmask, the crypto
0234  *       API will re-align the buffers. The re-alignment means that a
0235  *       new buffer will be allocated, the data will be copied into the
0236  *       new buffer, then the processing will happen on the new buffer,
0237  *       then the data will be copied back into the original buffer and
0238  *       finally the new buffer will be freed. In case a software
0239  *       fallback was put in place in the @cra_init call, this function
0240  *       might need to use the fallback if the algorithm doesn't support
0241  *       all of the key sizes. In case the key was stored in
0242  *       transformation context, the key might need to be re-programmed
0243  *       into the hardware in this function. This function shall not
0244  *       modify the transformation context, as this function may be
0245  *       called in parallel with the same transformation object.
0246  * @cia_decrypt: Decrypt a single block. This is a reverse counterpart to
0247  *       @cia_encrypt, and the conditions are exactly the same.
0248  *
0249  * All fields are mandatory and must be filled.
0250  */
struct cipher_alg {
    unsigned int cia_min_keysize;   /* smallest supported key length (bytes) */
    unsigned int cia_max_keysize;   /* largest supported key length (bytes) */
    int (*cia_setkey)(struct crypto_tfm *tfm, const u8 *key,
                      unsigned int keylen);
    /* single-block operations; see the kernel-doc above for the contract */
    void (*cia_encrypt)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
    void (*cia_decrypt)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
};
0259 
0260 /**
0261  * struct compress_alg - compression/decompression algorithm
0262  * @coa_compress: Compress a buffer of specified length, storing the resulting
0263  *        data in the specified buffer. Return the length of the
0264  *        compressed data in dlen.
0265  * @coa_decompress: Decompress the source buffer, storing the uncompressed
0266  *          data in the specified buffer. The length of the data is
0267  *          returned in dlen.
0268  *
0269  * All fields are mandatory.
0270  */
struct compress_alg {
    /* compress slen bytes of src into dst; *dlen is in/out buffer size */
    int (*coa_compress)(struct crypto_tfm *tfm, const u8 *src,
                unsigned int slen, u8 *dst, unsigned int *dlen);
    /* decompress slen bytes of src into dst; *dlen is in/out buffer size */
    int (*coa_decompress)(struct crypto_tfm *tfm, const u8 *src,
                  unsigned int slen, u8 *dst, unsigned int *dlen);
};
0277 
0278 #ifdef CONFIG_CRYPTO_STATS
/*
 * struct crypto_istat_aead - statistics for AEAD algorithm
 * @encrypt_cnt:    number of encrypt requests
 * @encrypt_tlen:   total data size handled by encrypt requests
 * @decrypt_cnt:    number of decrypt requests
 * @decrypt_tlen:   total data size handled by decrypt requests
 * @err_cnt:        number of errors for AEAD requests
 */
struct crypto_istat_aead {
    atomic64_t encrypt_cnt;
    atomic64_t encrypt_tlen;
    atomic64_t decrypt_cnt;
    atomic64_t decrypt_tlen;
    atomic64_t err_cnt;
};
0294 
/*
 * struct crypto_istat_akcipher - statistics for akcipher algorithm
 * @encrypt_cnt:    number of encrypt requests
 * @encrypt_tlen:   total data size handled by encrypt requests
 * @decrypt_cnt:    number of decrypt requests
 * @decrypt_tlen:   total data size handled by decrypt requests
 * @verify_cnt:     number of verify operations
 * @sign_cnt:       number of sign requests
 * @err_cnt:        number of errors for akcipher requests
 */
struct crypto_istat_akcipher {
    atomic64_t encrypt_cnt;
    atomic64_t encrypt_tlen;
    atomic64_t decrypt_cnt;
    atomic64_t decrypt_tlen;
    atomic64_t verify_cnt;
    atomic64_t sign_cnt;
    atomic64_t err_cnt;
};
0314 
/*
 * struct crypto_istat_cipher - statistics for cipher algorithm
 * @encrypt_cnt:    number of encrypt requests
 * @encrypt_tlen:   total data size handled by encrypt requests
 * @decrypt_cnt:    number of decrypt requests
 * @decrypt_tlen:   total data size handled by decrypt requests
 * @err_cnt:        number of errors for cipher requests
 */
struct crypto_istat_cipher {
    atomic64_t encrypt_cnt;
    atomic64_t encrypt_tlen;
    atomic64_t decrypt_cnt;
    atomic64_t decrypt_tlen;
    atomic64_t err_cnt;
};
0330 
/*
 * struct crypto_istat_compress - statistics for compress algorithm
 * @compress_cnt:   number of compress requests
 * @compress_tlen:  total data size handled by compress requests
 * @decompress_cnt: number of decompress requests
 * @decompress_tlen:    total data size handled by decompress requests
 * @err_cnt:        number of errors for compress requests
 */
struct crypto_istat_compress {
    atomic64_t compress_cnt;
    atomic64_t compress_tlen;
    atomic64_t decompress_cnt;
    atomic64_t decompress_tlen;
    atomic64_t err_cnt;
};
0346 
/*
 * struct crypto_istat_hash - statistics for hash algorithm
 * @hash_cnt:       number of hash requests
 * @hash_tlen:      total data size hashed
 * @err_cnt:        number of errors for hash requests
 */
struct crypto_istat_hash {
    atomic64_t hash_cnt;
    atomic64_t hash_tlen;
    atomic64_t err_cnt;
};
0358 
/*
 * struct crypto_istat_kpp - statistics for KPP algorithm
 * @setsecret_cnt:      number of setsecret operations
 * @generate_public_key_cnt:    number of generate_public_key operations
 * @compute_shared_secret_cnt:  number of compute_shared_secret operations
 * @err_cnt:            number of errors for KPP requests
 */
struct crypto_istat_kpp {
    atomic64_t setsecret_cnt;
    atomic64_t generate_public_key_cnt;
    atomic64_t compute_shared_secret_cnt;
    atomic64_t err_cnt;
};
0372 
/*
 * struct crypto_istat_rng - statistics for RNG algorithm
 * @generate_cnt:   number of RNG generate requests
 * @generate_tlen:  total data size of generated data by the RNG
 * @seed_cnt:       number of times the RNG was seeded
 * @err_cnt:        number of errors for RNG requests
 */
struct crypto_istat_rng {
    atomic64_t generate_cnt;
    atomic64_t generate_tlen;
    atomic64_t seed_cnt;
    atomic64_t err_cnt;
};
0386 #endif /* CONFIG_CRYPTO_STATS */
0387 
0388 #define cra_cipher  cra_u.cipher
0389 #define cra_compress    cra_u.compress
0390 
0391 /**
 * struct crypto_alg - definition of a cryptographic cipher algorithm
0393  * @cra_flags: Flags describing this transformation. See include/linux/crypto.h
0394  *         CRYPTO_ALG_* flags for the flags which go in here. Those are
0395  *         used for fine-tuning the description of the transformation
0396  *         algorithm.
0397  * @cra_blocksize: Minimum block size of this transformation. The size in bytes
0398  *         of the smallest possible unit which can be transformed with
0399  *         this algorithm. The users must respect this value.
0400  *         In case of HASH transformation, it is possible for a smaller
0401  *         block than @cra_blocksize to be passed to the crypto API for
0402  *         transformation, in case of any other transformation type, an
0403  *         error will be returned upon any attempt to transform smaller
0404  *         than @cra_blocksize chunks.
0405  * @cra_ctxsize: Size of the operational context of the transformation. This
0406  *       value informs the kernel crypto API about the memory size
0407  *       needed to be allocated for the transformation context.
0408  * @cra_alignmask: Alignment mask for the input and output data buffer. The data
0409  *         buffer containing the input data for the algorithm must be
0410  *         aligned to this alignment mask. The data buffer for the
0411  *         output data must be aligned to this alignment mask. Note that
0412  *         the Crypto API will do the re-alignment in software, but
0413  *         only under special conditions and there is a performance hit.
0414  *         The re-alignment happens at these occasions for different
0415  *         @cra_u types: cipher -- For both input data and output data
0416  *         buffer; ahash -- For output hash destination buf; shash --
0417  *         For output hash destination buf.
0418  *         This is needed on hardware which is flawed by design and
0419  *         cannot pick data from arbitrary addresses.
0420  * @cra_priority: Priority of this transformation implementation. In case
0421  *        multiple transformations with same @cra_name are available to
0422  *        the Crypto API, the kernel will use the one with highest
0423  *        @cra_priority.
0424  * @cra_name: Generic name (usable by multiple implementations) of the
0425  *        transformation algorithm. This is the name of the transformation
0426  *        itself. This field is used by the kernel when looking up the
0427  *        providers of particular transformation.
0428  * @cra_driver_name: Unique name of the transformation provider. This is the
0429  *           name of the provider of the transformation. This can be any
0430  *           arbitrary value, but in the usual case, this contains the
0431  *           name of the chip or provider and the name of the
0432  *           transformation algorithm.
0433  * @cra_type: Type of the cryptographic transformation. This is a pointer to
0434  *        struct crypto_type, which implements callbacks common for all
0435  *        transformation types. There are multiple options, such as
0436  *        &crypto_skcipher_type, &crypto_ahash_type, &crypto_rng_type.
0437  *        This field might be empty. In that case, there are no common
0438  *        callbacks. This is the case for: cipher, compress, shash.
0439  * @cra_u: Callbacks implementing the transformation. This is a union of
0440  *     multiple structures. Depending on the type of transformation selected
0441  *     by @cra_type and @cra_flags above, the associated structure must be
0442  *     filled with callbacks. This field might be empty. This is the case
0443  *     for ahash, shash.
0444  * @cra_init: Initialize the cryptographic transformation object. This function
0445  *        is used to initialize the cryptographic transformation object.
0446  *        This function is called only once at the instantiation time, right
0447  *        after the transformation context was allocated. In case the
0448  *        cryptographic hardware has some special requirements which need to
0449  *        be handled by software, this function shall check for the precise
0450  *        requirement of the transformation and put any software fallbacks
0451  *        in place.
0452  * @cra_exit: Deinitialize the cryptographic transformation object. This is a
0453  *        counterpart to @cra_init, used to remove various changes set in
0454  *        @cra_init.
0455  * @cra_u.cipher: Union member which contains a single-block symmetric cipher
0456  *        definition. See @struct @cipher_alg.
0457  * @cra_u.compress: Union member which contains a (de)compression algorithm.
0458  *          See @struct @compress_alg.
0459  * @cra_module: Owner of this transformation implementation. Set to THIS_MODULE
0460  * @cra_list: internally used
0461  * @cra_users: internally used
0462  * @cra_refcnt: internally used
0463  * @cra_destroy: internally used
0464  *
0465  * @stats: union of all possible crypto_istat_xxx structures
0466  * @stats.aead:     statistics for AEAD algorithm
0467  * @stats.akcipher: statistics for akcipher algorithm
0468  * @stats.cipher:   statistics for cipher algorithm
0469  * @stats.compress: statistics for compress algorithm
0470  * @stats.hash:     statistics for hash algorithm
0471  * @stats.rng:      statistics for rng algorithm
0472  * @stats.kpp:      statistics for KPP algorithm
0473  *
0474  * The struct crypto_alg describes a generic Crypto API algorithm and is common
0475  * for all of the transformations. Any variable not documented here shall not
0476  * be used by a cipher implementation as it is internal to the Crypto API.
0477  */
struct crypto_alg {
    struct list_head cra_list;      /* internal: registry list linkage */
    struct list_head cra_users;     /* internal: algorithms using this one */

    u32 cra_flags;
    unsigned int cra_blocksize;
    unsigned int cra_ctxsize;
    unsigned int cra_alignmask;

    int cra_priority;
    refcount_t cra_refcnt;          /* internal: reference count */

    char cra_name[CRYPTO_MAX_ALG_NAME];
    char cra_driver_name[CRYPTO_MAX_ALG_NAME];

    const struct crypto_type *cra_type;

    /* Per-type callbacks; the valid member is selected by cra_flags /
     * cra_type (see the kernel-doc above).  Accessed via the cra_cipher /
     * cra_compress convenience macros. */
    union {
        struct cipher_alg cipher;
        struct compress_alg compress;
    } cra_u;

    int (*cra_init)(struct crypto_tfm *tfm);
    void (*cra_exit)(struct crypto_tfm *tfm);
    void (*cra_destroy)(struct crypto_alg *alg);
    
    struct module *cra_module;

#ifdef CONFIG_CRYPTO_STATS
    /* Per-algorithm operation counters; see struct crypto_istat_* above. */
    union {
        struct crypto_istat_aead aead;
        struct crypto_istat_akcipher akcipher;
        struct crypto_istat_cipher cipher;
        struct crypto_istat_compress compress;
        struct crypto_istat_hash hash;
        struct crypto_istat_rng rng;
        struct crypto_istat_kpp kpp;
    } stats;
#endif /* CONFIG_CRYPTO_STATS */

} CRYPTO_MINALIGN_ATTR;
0519 
0520 #ifdef CONFIG_CRYPTO_STATS
0521 void crypto_stats_init(struct crypto_alg *alg);
0522 void crypto_stats_get(struct crypto_alg *alg);
0523 void crypto_stats_aead_encrypt(unsigned int cryptlen, struct crypto_alg *alg, int ret);
0524 void crypto_stats_aead_decrypt(unsigned int cryptlen, struct crypto_alg *alg, int ret);
0525 void crypto_stats_ahash_update(unsigned int nbytes, int ret, struct crypto_alg *alg);
0526 void crypto_stats_ahash_final(unsigned int nbytes, int ret, struct crypto_alg *alg);
0527 void crypto_stats_akcipher_encrypt(unsigned int src_len, int ret, struct crypto_alg *alg);
0528 void crypto_stats_akcipher_decrypt(unsigned int src_len, int ret, struct crypto_alg *alg);
0529 void crypto_stats_akcipher_sign(int ret, struct crypto_alg *alg);
0530 void crypto_stats_akcipher_verify(int ret, struct crypto_alg *alg);
0531 void crypto_stats_compress(unsigned int slen, int ret, struct crypto_alg *alg);
0532 void crypto_stats_decompress(unsigned int slen, int ret, struct crypto_alg *alg);
0533 void crypto_stats_kpp_set_secret(struct crypto_alg *alg, int ret);
0534 void crypto_stats_kpp_generate_public_key(struct crypto_alg *alg, int ret);
0535 void crypto_stats_kpp_compute_shared_secret(struct crypto_alg *alg, int ret);
0536 void crypto_stats_rng_seed(struct crypto_alg *alg, int ret);
0537 void crypto_stats_rng_generate(struct crypto_alg *alg, unsigned int dlen, int ret);
0538 void crypto_stats_skcipher_encrypt(unsigned int cryptlen, int ret, struct crypto_alg *alg);
0539 void crypto_stats_skcipher_decrypt(unsigned int cryptlen, int ret, struct crypto_alg *alg);
0540 #else
/*
 * CONFIG_CRYPTO_STATS is disabled: every statistics hook compiles to an
 * empty inline so callers need no #ifdef of their own.
 */
static inline void crypto_stats_init(struct crypto_alg *alg)
{}
static inline void crypto_stats_get(struct crypto_alg *alg)
{}
static inline void crypto_stats_aead_encrypt(unsigned int cryptlen, struct crypto_alg *alg, int ret)
{}
static inline void crypto_stats_aead_decrypt(unsigned int cryptlen, struct crypto_alg *alg, int ret)
{}
static inline void crypto_stats_ahash_update(unsigned int nbytes, int ret, struct crypto_alg *alg)
{}
static inline void crypto_stats_ahash_final(unsigned int nbytes, int ret, struct crypto_alg *alg)
{}
static inline void crypto_stats_akcipher_encrypt(unsigned int src_len, int ret, struct crypto_alg *alg)
{}
static inline void crypto_stats_akcipher_decrypt(unsigned int src_len, int ret, struct crypto_alg *alg)
{}
static inline void crypto_stats_akcipher_sign(int ret, struct crypto_alg *alg)
{}
static inline void crypto_stats_akcipher_verify(int ret, struct crypto_alg *alg)
{}
static inline void crypto_stats_compress(unsigned int slen, int ret, struct crypto_alg *alg)
{}
static inline void crypto_stats_decompress(unsigned int slen, int ret, struct crypto_alg *alg)
{}
static inline void crypto_stats_kpp_set_secret(struct crypto_alg *alg, int ret)
{}
static inline void crypto_stats_kpp_generate_public_key(struct crypto_alg *alg, int ret)
{}
static inline void crypto_stats_kpp_compute_shared_secret(struct crypto_alg *alg, int ret)
{}
static inline void crypto_stats_rng_seed(struct crypto_alg *alg, int ret)
{}
static inline void crypto_stats_rng_generate(struct crypto_alg *alg, unsigned int dlen, int ret)
{}
static inline void crypto_stats_skcipher_encrypt(unsigned int cryptlen, int ret, struct crypto_alg *alg)
{}
static inline void crypto_stats_skcipher_decrypt(unsigned int cryptlen, int ret, struct crypto_alg *alg)
{}
0579 #endif
0580 /*
0581  * A helper struct for waiting for completion of async crypto ops
0582  */
struct crypto_wait {
    struct completion completion;   /* completed when the async op finishes */
    int err;                        /* final status reported by the op */
};
0587 
0588 /*
0589  * Macro for declaring a crypto op async wait object on stack
0590  */
0591 #define DECLARE_CRYPTO_WAIT(_wait) \
0592     struct crypto_wait _wait = { \
0593         COMPLETION_INITIALIZER_ONSTACK((_wait).completion), 0 }
0594 
0595 /*
 * Async ops completion helper functions
0597  */
0598 void crypto_req_done(struct crypto_async_request *req, int err);
0599 
0600 static inline int crypto_wait_req(int err, struct crypto_wait *wait)
0601 {
0602     switch (err) {
0603     case -EINPROGRESS:
0604     case -EBUSY:
0605         wait_for_completion(&wait->completion);
0606         reinit_completion(&wait->completion);
0607         err = wait->err;
0608         break;
0609     }
0610 
0611     return err;
0612 }
0613 
/* Prepare a crypto_wait object before its first use. */
static inline void crypto_init_wait(struct crypto_wait *wait)
{
    init_completion(&wait->completion);
}
0618 
0619 /*
0620  * Algorithm registration interface.
0621  */
0622 int crypto_register_alg(struct crypto_alg *alg);
0623 void crypto_unregister_alg(struct crypto_alg *alg);
0624 int crypto_register_algs(struct crypto_alg *algs, int count);
0625 void crypto_unregister_algs(struct crypto_alg *algs, int count);
0626 
0627 /*
0628  * Algorithm query interface.
0629  */
0630 int crypto_has_alg(const char *name, u32 type, u32 mask);
0631 
0632 /*
0633  * Transforms: user-instantiated objects which encapsulate algorithms
0634  * and core processing logic.  Managed via crypto_alloc_*() and
0635  * crypto_free_*(), as well as the various helpers below.
0636  */
0637 
struct crypto_tfm {

    u32 crt_flags;          /* CRYPTO_TFM_NEED_KEY / CRYPTO_TFM_REQ_* flags */

    int node;               /* NUMA node hint — presumably set at alloc time;
                             * confirm against the allocation path */
    
    void (*exit)(struct crypto_tfm *tfm);   /* per-type teardown hook */
    
    struct crypto_alg *__crt_alg;   /* algorithm backing this transform */

    /* Per-tfm context area; void * plus CRYPTO_MINALIGN_ATTR guarantees
     * the alignment discussed above CRYPTO_MINALIGN. */
    void *__crt_ctx[] CRYPTO_MINALIGN_ATTR;
};
0650 
/* Compression transform handle; a thin wrapper around crypto_tfm. */
struct crypto_comp {
    struct crypto_tfm base;
};
0654 
0655 /* 
0656  * Transform user interface.
0657  */
0658  
0659 struct crypto_tfm *crypto_alloc_base(const char *alg_name, u32 type, u32 mask);
0660 void crypto_destroy_tfm(void *mem, struct crypto_tfm *tfm);
0661 
/*
 * crypto_free_tfm - release a transform allocated with crypto_alloc_base()
 * @tfm: transform to free; must not be used afterwards
 *
 * Fix: the original used "return crypto_destroy_tfm(...)" — returning an
 * expression from a void function is an ISO C constraint violation
 * (C11 6.8.6.4) and only compiles as a GNU extension.
 */
static inline void crypto_free_tfm(struct crypto_tfm *tfm)
{
    crypto_destroy_tfm(tfm, tfm);
}
0666 
0667 int alg_test(const char *driver, const char *alg, u32 type, u32 mask);
0668 
0669 /*
0670  * Transform helpers which query the underlying algorithm.
0671  */
/* Generic algorithm name (cra_name) of the tfm's algorithm. */
static inline const char *crypto_tfm_alg_name(struct crypto_tfm *tfm)
{
    return tfm->__crt_alg->cra_name;
}
0676 
/* Implementation-specific name (cra_driver_name) of the tfm's algorithm. */
static inline const char *crypto_tfm_alg_driver_name(struct crypto_tfm *tfm)
{
    return tfm->__crt_alg->cra_driver_name;
}
0681 
/* Priority (cra_priority) of the tfm's algorithm implementation. */
static inline int crypto_tfm_alg_priority(struct crypto_tfm *tfm)
{
    return tfm->__crt_alg->cra_priority;
}
0686 
/* Algorithm type: cra_flags masked down to the CRYPTO_ALG_TYPE_* value. */
static inline u32 crypto_tfm_alg_type(struct crypto_tfm *tfm)
{
    return tfm->__crt_alg->cra_flags & CRYPTO_ALG_TYPE_MASK;
}
0691 
/* Block size (cra_blocksize) of the tfm's algorithm, in bytes. */
static inline unsigned int crypto_tfm_alg_blocksize(struct crypto_tfm *tfm)
{
    return tfm->__crt_alg->cra_blocksize;
}
0696 
/* Alignment mask (cra_alignmask) for the algorithm's I/O buffers. */
static inline unsigned int crypto_tfm_alg_alignmask(struct crypto_tfm *tfm)
{
    return tfm->__crt_alg->cra_alignmask;
}
0701 
/* Current transform flags (CRYPTO_TFM_* values in crt_flags). */
static inline u32 crypto_tfm_get_flags(struct crypto_tfm *tfm)
{
    return tfm->crt_flags;
}
0706 
/* OR @flags into the transform's flag word; existing flags are kept. */
static inline void crypto_tfm_set_flags(struct crypto_tfm *tfm, u32 flags)
{
    tfm->crt_flags |= flags;
}
0711 
/* Clear the given bits from the transform's flag word. */
static inline void crypto_tfm_clear_flags(struct crypto_tfm *tfm, u32 flags)
{
    tfm->crt_flags &= ~flags;
}
0716 
/* Per-transform context area (sized by the algorithm's cra_ctxsize). */
static inline void *crypto_tfm_ctx(struct crypto_tfm *tfm)
{
    return tfm->__crt_ctx;
}
0721 
/*
 * Alignment of the tfm context area.  The local pointer is never
 * dereferenced: it only feeds __alignof__, which is evaluated at
 * compile time, so the uninitialized variable is harmless.
 */
static inline unsigned int crypto_tfm_ctx_alignment(void)
{
    struct crypto_tfm *tfm;
    return __alignof__(tfm->__crt_ctx);
}
0727 
/* Downcast a generic tfm to a crypto_comp (base is the first member). */
static inline struct crypto_comp *__crypto_comp_cast(struct crypto_tfm *tfm)
{
    return (struct crypto_comp *)tfm;
}
0732 
0733 static inline struct crypto_comp *crypto_alloc_comp(const char *alg_name,
0734                             u32 type, u32 mask)
0735 {
0736     type &= ~CRYPTO_ALG_TYPE_MASK;
0737     type |= CRYPTO_ALG_TYPE_COMPRESS;
0738     mask |= CRYPTO_ALG_TYPE_MASK;
0739 
0740     return __crypto_comp_cast(crypto_alloc_base(alg_name, type, mask));
0741 }
0742 
/* Upcast a crypto_comp handle to its underlying generic tfm. */
static inline struct crypto_tfm *crypto_comp_tfm(struct crypto_comp *tfm)
{
    return &tfm->base;
}
0747 
/* Free a compression transform allocated with crypto_alloc_comp(). */
static inline void crypto_free_comp(struct crypto_comp *tfm)
{
    crypto_free_tfm(crypto_comp_tfm(tfm));
}
0752 
0753 static inline int crypto_has_comp(const char *alg_name, u32 type, u32 mask)
0754 {
0755     type &= ~CRYPTO_ALG_TYPE_MASK;
0756     type |= CRYPTO_ALG_TYPE_COMPRESS;
0757     mask |= CRYPTO_ALG_TYPE_MASK;
0758 
0759     return crypto_has_alg(alg_name, type, mask);
0760 }
0761 
/* Generic algorithm name of a compression transform. */
static inline const char *crypto_comp_name(struct crypto_comp *tfm)
{
    return crypto_tfm_alg_name(crypto_comp_tfm(tfm));
}
0766 
0767 int crypto_comp_compress(struct crypto_comp *tfm,
0768              const u8 *src, unsigned int slen,
0769              u8 *dst, unsigned int *dlen);
0770 
0771 int crypto_comp_decompress(struct crypto_comp *tfm,
0772                const u8 *src, unsigned int slen,
0773                u8 *dst, unsigned int *dlen);
0774 
0775 #endif  /* _LINUX_CRYPTO_H */
0776