/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * Crypto engine API
 *
 * Copyright (c) 2016 Baolin Wang <baolin.wang@linaro.org>
 */
#ifndef _CRYPTO_ENGINE_H
#define _CRYPTO_ENGINE_H

#include <linux/crypto.h>
#include <linux/list.h>
#include <linux/kernel.h>
#include <linux/kthread.h>
#include <crypto/algapi.h>
#include <crypto/aead.h>
#include <crypto/akcipher.h>
#include <crypto/hash.h>
#include <crypto/skcipher.h>
#include <crypto/kpp.h>

#define ENGINE_NAME_LEN	30
/*
 * struct crypto_engine - crypto hardware engine
 * @name: the engine name
 * @idling: the engine is entering idle state
 * @busy: request pump is busy
 * @running: the engine is currently running
 * @retry_support: indication that the hardware allows re-execution of a
 *	failed backlog request in the crypto-engine, in head position to
 *	keep order
 * @list: link with the global crypto engine list
 * @queue_lock: spinlock to synchronise access to request queue
 * @queue: the crypto queue of the engine
 * @dev: the device this crypto engine is bound to
 * @rt: whether this queue is set to run as a realtime task
 * @prepare_crypt_hardware: a request will soon arrive from the queue
 *	so the subsystem requests the driver to prepare the hardware
 *	by issuing this call
 * @unprepare_crypt_hardware: there are currently no more requests on the
 *	queue so the subsystem notifies the driver that it may relax the
 *	hardware by issuing this call
 * @do_batch_requests: execute a batch of requests. Depends on multiple
 *	request support.
 * @kworker: kthread worker struct for request pump
 * @pump_requests: work struct for scheduling work to the request pump
 * @priv_data: the engine private data
 * @cur_req: the request currently being processed
 */
struct crypto_engine {
	char			name[ENGINE_NAME_LEN];
	bool			idling;
	bool			busy;
	bool			running;

	bool			retry_support;

	struct list_head	list;
	spinlock_t		queue_lock;
	struct crypto_queue	queue;
	struct device		*dev;

	bool			rt;

	int (*prepare_crypt_hardware)(struct crypto_engine *engine);
	int (*unprepare_crypt_hardware)(struct crypto_engine *engine);
	int (*do_batch_requests)(struct crypto_engine *engine);

	struct kthread_worker		*kworker;
	struct kthread_work		pump_requests;

	void				*priv_data;
	struct crypto_async_request	*cur_req;
};

/*
 * struct crypto_engine_op - crypto hardware engine operations
 * @prepare_request: do any preparation needed before handling the
 *	current request
 * @unprepare_request: undo any work done by prepare_request()
 * @do_one_request: do encryption for the current request
 */
struct crypto_engine_op {
	int (*prepare_request)(struct crypto_engine *engine,
			       void *areq);
	int (*unprepare_request)(struct crypto_engine *engine,
				 void *areq);
	int (*do_one_request)(struct crypto_engine *engine,
			      void *areq);
};

struct crypto_engine_ctx {
	struct crypto_engine_op op;
};

int crypto_transfer_aead_request_to_engine(struct crypto_engine *engine,
					   struct aead_request *req);
int crypto_transfer_akcipher_request_to_engine(struct crypto_engine *engine,
					       struct akcipher_request *req);
int crypto_transfer_hash_request_to_engine(struct crypto_engine *engine,
					   struct ahash_request *req);
int crypto_transfer_kpp_request_to_engine(struct crypto_engine *engine,
					  struct kpp_request *req);
int crypto_transfer_skcipher_request_to_engine(struct crypto_engine *engine,
					       struct skcipher_request *req);
void crypto_finalize_aead_request(struct crypto_engine *engine,
				  struct aead_request *req, int err);
void crypto_finalize_akcipher_request(struct crypto_engine *engine,
				      struct akcipher_request *req, int err);
void crypto_finalize_hash_request(struct crypto_engine *engine,
				  struct ahash_request *req, int err);
void crypto_finalize_kpp_request(struct crypto_engine *engine,
				 struct kpp_request *req, int err);
void crypto_finalize_skcipher_request(struct crypto_engine *engine,
				      struct skcipher_request *req, int err);
int crypto_engine_start(struct crypto_engine *engine);
int crypto_engine_stop(struct crypto_engine *engine);
struct crypto_engine *crypto_engine_alloc_init(struct device *dev, bool rt);
struct crypto_engine *crypto_engine_alloc_init_and_set(struct device *dev,
						       bool retry_support,
						       int (*cbk_do_batch)(struct crypto_engine *engine),
						       bool rt, int qlen);
int crypto_engine_exit(struct crypto_engine *engine);

#endif /* _CRYPTO_ENGINE_H */
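
/*
 * Example usage (illustrative sketch only, not part of this header): a
 * hypothetical driver allocates and starts an engine at probe time, hands
 * skcipher requests over to the engine instead of processing them inline,
 * and finalizes them once the hardware signals completion.  All my_* names
 * below (struct my_dev, my_probe, my_encrypt, my_irq, my_dev_from_req) are
 * made up for the example; only the crypto_engine_* and crypto_*_request
 * calls come from this API.
 *
 *	struct my_dev {
 *		struct crypto_engine *engine;
 *		struct skcipher_request *cur_req;
 *	};
 *
 *	static int my_probe(struct platform_device *pdev)
 *	{
 *		struct my_dev *mdev = platform_get_drvdata(pdev);
 *
 *		// Create the request pump; 'true' runs it as a realtime task
 *		mdev->engine = crypto_engine_alloc_init(&pdev->dev, true);
 *		if (!mdev->engine)
 *			return -ENOMEM;
 *
 *		return crypto_engine_start(mdev->engine);
 *	}
 *
 *	// .encrypt() hook of the algorithm: queue the request on the engine
 *	static int my_encrypt(struct skcipher_request *req)
 *	{
 *		struct my_dev *mdev = my_dev_from_req(req);
 *
 *		return crypto_transfer_skcipher_request_to_engine(mdev->engine,
 *								  req);
 *	}
 *
 *	// Completion interrupt: report the result and let the engine pump
 *	// the next queued request
 *	static irqreturn_t my_irq(int irq, void *data)
 *	{
 *		struct my_dev *mdev = data;
 *
 *		crypto_finalize_skcipher_request(mdev->engine, mdev->cur_req, 0);
 *		return IRQ_HANDLED;
 *	}
 */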