Back to home page

OSCL-LXR

 
 

    


0001 /* SPDX-License-Identifier: GPL-2.0-or-later */
0002 /*
0003  * Crypto engine API
0004  *
0005  * Copyright (c) 2016 Baolin Wang <baolin.wang@linaro.org>
0006  */
0007 #ifndef _CRYPTO_ENGINE_H
0008 #define _CRYPTO_ENGINE_H
0009 
0010 #include <linux/crypto.h>
0011 #include <linux/list.h>
0012 #include <linux/kthread.h>
0013 #include <linux/spinlock.h>
0014 #include <linux/types.h>
0015 
0016 #include <crypto/algapi.h>
0017 #include <crypto/aead.h>
0018 #include <crypto/akcipher.h>
0019 #include <crypto/hash.h>
0020 #include <crypto/skcipher.h>
0021 #include <crypto/kpp.h>
0022 
0023 struct device;
0024 
0025 #define ENGINE_NAME_LEN 30
/**
 * struct crypto_engine - crypto hardware engine
 * @name: the engine name
 * @idling: the engine is entering idle state
 * @busy: request pump is busy
 * @running: the engine is on working
 * @retry_support: indication that the hardware allows re-execution
 * of a failed backlog request; the request is put back on the
 * crypto-engine queue, in head position, to keep order
 * @list: link with the global crypto engine list
 * @queue_lock: spinlock to synchronise access to request queue
 * @queue: the crypto queue of the engine
 * @dev: the device owning this engine
 * @rt: whether this queue is set to run as a realtime task
 * @prepare_crypt_hardware: a request will soon arrive from the queue
 * so the subsystem requests the driver to prepare the hardware
 * by issuing this call
 * @unprepare_crypt_hardware: there are currently no more requests on the
 * queue so the subsystem notifies the driver that it may relax the
 * hardware by issuing this call
 * @do_batch_requests: execute a batch of requests. Depends on multiple
 * requests support.
 * @kworker: kthread worker struct for request pump
 * @pump_requests: work struct for scheduling work to the request pump
 * @priv_data: the engine private data
 * @cur_req: the current request which is on processing
 */
struct crypto_engine {
    char            name[ENGINE_NAME_LEN];
    bool            idling;
    bool            busy;
    bool            running;

    bool            retry_support;

    struct list_head    list;
    spinlock_t      queue_lock;
    struct crypto_queue queue;
    struct device       *dev;

    bool            rt;

    int (*prepare_crypt_hardware)(struct crypto_engine *engine);
    int (*unprepare_crypt_hardware)(struct crypto_engine *engine);
    int (*do_batch_requests)(struct crypto_engine *engine);


    struct kthread_worker           *kworker;
    struct kthread_work             pump_requests;

    void                *priv_data;
    struct crypto_async_request *cur_req;
};
0078 
/**
 * struct crypto_engine_op - crypto hardware engine operations
 * @prepare_request: do some preparation if needed before handling
 * the current request
 * @unprepare_request: undo any work done by prepare_request()
 * @do_one_request: do encryption for current request
 */
struct crypto_engine_op {
    int (*prepare_request)(struct crypto_engine *engine,
                   void *areq);
    int (*unprepare_request)(struct crypto_engine *engine,
                 void *areq);
    int (*do_one_request)(struct crypto_engine *engine,
                  void *areq);
};
0093 
/**
 * struct crypto_engine_ctx - crypto-engine operations attached to a
 * transform context
 * @op: the &struct crypto_engine_op callbacks the engine invokes for
 * requests on this transform
 *
 * NOTE(review): drivers appear to embed this at the start of their tfm
 * context so the engine can locate the callbacks — confirm against
 * crypto/crypto_engine.c before relying on the placement.
 */
struct crypto_engine_ctx {
    struct crypto_engine_op op;
};
0097 
/*
 * Transfer a request of the given algorithm type to the engine for
 * asynchronous processing. Return value semantics are not visible
 * here — see crypto/crypto_engine.c.
 */
int crypto_transfer_aead_request_to_engine(struct crypto_engine *engine,
                       struct aead_request *req);
int crypto_transfer_akcipher_request_to_engine(struct crypto_engine *engine,
                           struct akcipher_request *req);
int crypto_transfer_hash_request_to_engine(struct crypto_engine *engine,
                           struct ahash_request *req);
int crypto_transfer_kpp_request_to_engine(struct crypto_engine *engine,
                      struct kpp_request *req);
int crypto_transfer_skcipher_request_to_engine(struct crypto_engine *engine,
                           struct skcipher_request *req);
/*
 * Called by drivers when a request previously handed to the engine is
 * done, passing the result code @err back to the request's owner.
 */
void crypto_finalize_aead_request(struct crypto_engine *engine,
                  struct aead_request *req, int err);
void crypto_finalize_akcipher_request(struct crypto_engine *engine,
                      struct akcipher_request *req, int err);
void crypto_finalize_hash_request(struct crypto_engine *engine,
                  struct ahash_request *req, int err);
void crypto_finalize_kpp_request(struct crypto_engine *engine,
                 struct kpp_request *req, int err);
void crypto_finalize_skcipher_request(struct crypto_engine *engine,
                      struct skcipher_request *req, int err);
/* Engine lifecycle: start/stop the request pump, allocate, tear down. */
int crypto_engine_start(struct crypto_engine *engine);
int crypto_engine_stop(struct crypto_engine *engine);
struct crypto_engine *crypto_engine_alloc_init(struct device *dev, bool rt);
/*
 * Extended allocator: @retry_support enables re-queueing of failed
 * backlog requests, @cbk_do_batch installs the batch callback, and
 * @qlen sets the crypto queue length.
 */
struct crypto_engine *crypto_engine_alloc_init_and_set(struct device *dev,
                               bool retry_support,
                               int (*cbk_do_batch)(struct crypto_engine *engine),
                               bool rt, int qlen);
int crypto_engine_exit(struct crypto_engine *engine);
0126 
0127 #endif /* _CRYPTO_ENGINE_H */