/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * Symmetric key ciphers.
 *
 * Copyright (c) 2007 Herbert Xu <herbert@gondor.apana.org.au>
 */

#ifndef _CRYPTO_INTERNAL_SKCIPHER_H
#define _CRYPTO_INTERNAL_SKCIPHER_H

#include <crypto/algapi.h>
#include <crypto/internal/cipher.h>
#include <crypto/skcipher.h>
#include <linux/list.h>
#include <linux/types.h>

struct aead_request;
struct rtattr;

struct skcipher_instance {
	void (*free)(struct skcipher_instance *inst);
	union {
		struct {
			char head[offsetof(struct skcipher_alg, base)];
			struct crypto_instance base;
		} s;
		struct skcipher_alg alg;
	};
};

struct crypto_skcipher_spawn {
	struct crypto_spawn base;
};

struct skcipher_walk {
	/*
	 * Current position in the source and destination, either as a
	 * page + offset (physical walks) or a mapped address (virtual
	 * walks).
	 */
	union {
		struct {
			struct page *page;
			unsigned long offset;
		} phys;

		struct {
			u8 *page;
			void *addr;
		} virt;
	} src, dst;

	struct scatter_walk in;		/* position in the source sglist */
	unsigned int nbytes;		/* bytes usable in this step */

	struct scatter_walk out;	/* position in the destination sglist */
	unsigned int total;		/* bytes remaining in the whole walk */

	struct list_head buffers;	/* buffers to copy back on completion */

	u8 *page;			/* scratch page */
	u8 *buffer;			/* bounce buffer */
	u8 *oiv;			/* original IV location */
	void *iv;			/* IV in use (may be an aligned copy) */

	unsigned int ivsize;

	int flags;
	unsigned int blocksize;
	unsigned int stride;		/* step granularity (walksize) */
	unsigned int alignmask;
};

static inline struct crypto_instance *skcipher_crypto_instance(
	struct skcipher_instance *inst)
{
	return &inst->s.base;
}

static inline struct skcipher_instance *skcipher_alg_instance(
	struct crypto_skcipher *skcipher)
{
	return container_of(crypto_skcipher_alg(skcipher),
			    struct skcipher_instance, alg);
}

static inline void *skcipher_instance_ctx(struct skcipher_instance *inst)
{
	return crypto_instance_ctx(skcipher_crypto_instance(inst));
}

static inline void skcipher_request_complete(struct skcipher_request *req, int err)
{
	req->base.complete(&req->base, err);
}

int crypto_grab_skcipher(struct crypto_skcipher_spawn *spawn,
			 struct crypto_instance *inst,
			 const char *name, u32 type, u32 mask);

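/*
 * Example (illustrative sketch, not part of the real API surface): a
 * template's ->create() callback typically allocates an instance with a
 * spawn in its context area and grabs the underlying skcipher by name.
 * The "example_" identifiers are hypothetical; error handling is
 * abbreviated.
 *
 *	static int example_create(struct crypto_template *tmpl,
 *				  struct rtattr **tb)
 *	{
 *		struct crypto_skcipher_spawn *spawn;
 *		struct skcipher_instance *inst;
 *		int err;
 *
 *		inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
 *		if (!inst)
 *			return -ENOMEM;
 *		spawn = skcipher_instance_ctx(inst);
 *
 *		err = crypto_grab_skcipher(spawn, skcipher_crypto_instance(inst),
 *					   crypto_attr_alg_name(tb[1]), 0, 0);
 *		if (err) {
 *			kfree(inst);
 *			return err;
 *		}
 *
 *		... fill in inst->alg from crypto_spawn_skcipher_alg(spawn),
 *	        set inst->free, then:
 *
 *		return skcipher_register_instance(tmpl, inst);
 *	}
 */
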
static inline void crypto_drop_skcipher(struct crypto_skcipher_spawn *spawn)
{
	crypto_drop_spawn(&spawn->base);
}

static inline struct skcipher_alg *crypto_skcipher_spawn_alg(
	struct crypto_skcipher_spawn *spawn)
{
	return container_of(spawn->base.alg, struct skcipher_alg, base);
}

static inline struct skcipher_alg *crypto_spawn_skcipher_alg(
	struct crypto_skcipher_spawn *spawn)
{
	return crypto_skcipher_spawn_alg(spawn);
}

static inline struct crypto_skcipher *crypto_spawn_skcipher(
	struct crypto_skcipher_spawn *spawn)
{
	return crypto_spawn_tfm2(&spawn->base);
}

static inline void crypto_skcipher_set_reqsize(
	struct crypto_skcipher *skcipher, unsigned int reqsize)
{
	skcipher->reqsize = reqsize;
}

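/*
 * Example (sketch): an instance's ->init() usually instantiates the child
 * transform from its spawn and sizes the request context to hold a child
 * request.  "example_ctx" and "example_init_tfm" are hypothetical names.
 *
 *	struct example_ctx {
 *		struct crypto_skcipher *child;
 *	};
 *
 *	static int example_init_tfm(struct crypto_skcipher *tfm)
 *	{
 *		struct skcipher_instance *inst = skcipher_alg_instance(tfm);
 *		struct crypto_skcipher_spawn *spawn = skcipher_instance_ctx(inst);
 *		struct example_ctx *ctx = crypto_skcipher_ctx(tfm);
 *		struct crypto_skcipher *child = crypto_spawn_skcipher(spawn);
 *
 *		if (IS_ERR(child))
 *			return PTR_ERR(child);
 *		ctx->child = child;
 *		crypto_skcipher_set_reqsize(tfm,
 *					    sizeof(struct skcipher_request) +
 *					    crypto_skcipher_reqsize(child));
 *		return 0;
 *	}
 */
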
int crypto_register_skcipher(struct skcipher_alg *alg);
void crypto_unregister_skcipher(struct skcipher_alg *alg);
int crypto_register_skciphers(struct skcipher_alg *algs, int count);
void crypto_unregister_skciphers(struct skcipher_alg *algs, int count);
int skcipher_register_instance(struct crypto_template *tmpl,
			       struct skcipher_instance *inst);

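/*
 * Example (sketch): drivers exposing several algorithms typically register
 * them as an array from module init.  "example_algs" stands for a
 * hypothetical array of filled-in skcipher_alg entries.
 *
 *	static int __init example_mod_init(void)
 *	{
 *		return crypto_register_skciphers(example_algs,
 *						 ARRAY_SIZE(example_algs));
 *	}
 *
 *	static void __exit example_mod_exit(void)
 *	{
 *		crypto_unregister_skciphers(example_algs,
 *					    ARRAY_SIZE(example_algs));
 *	}
 */
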
int skcipher_walk_done(struct skcipher_walk *walk, int err);
int skcipher_walk_virt(struct skcipher_walk *walk,
		       struct skcipher_request *req,
		       bool atomic);
int skcipher_walk_async(struct skcipher_walk *walk,
			struct skcipher_request *req);
int skcipher_walk_aead_encrypt(struct skcipher_walk *walk,
			       struct aead_request *req, bool atomic);
int skcipher_walk_aead_decrypt(struct skcipher_walk *walk,
			       struct aead_request *req, bool atomic);
void skcipher_walk_complete(struct skcipher_walk *walk, int err);

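/*
 * Example (sketch): the common pattern for a synchronous software
 * implementation is to walk the request virtually, process whole blocks,
 * and hand any unprocessed tail back to skcipher_walk_done().
 * "example_cipher_blocks" is a hypothetical block-processing helper.
 *
 *	static int example_encrypt(struct skcipher_request *req)
 *	{
 *		struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
 *		unsigned int bsize = crypto_skcipher_blocksize(tfm);
 *		struct skcipher_walk walk;
 *		int err;
 *
 *		err = skcipher_walk_virt(&walk, req, false);
 *		while (walk.nbytes) {
 *			unsigned int n = round_down(walk.nbytes, bsize);
 *
 *			example_cipher_blocks(walk.dst.virt.addr,
 *					      walk.src.virt.addr, n, walk.iv);
 *			err = skcipher_walk_done(&walk, walk.nbytes - n);
 *		}
 *		return err;
 *	}
 */
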
static inline void skcipher_walk_abort(struct skcipher_walk *walk)
{
	skcipher_walk_done(walk, -ECANCELED);
}

static inline void *crypto_skcipher_ctx(struct crypto_skcipher *tfm)
{
	return crypto_tfm_ctx(&tfm->base);
}

static inline void *skcipher_request_ctx(struct skcipher_request *req)
{
	return req->__ctx;
}

static inline u32 skcipher_request_flags(struct skcipher_request *req)
{
	return req->base.flags;
}

static inline unsigned int crypto_skcipher_alg_min_keysize(
	struct skcipher_alg *alg)
{
	return alg->min_keysize;
}

static inline unsigned int crypto_skcipher_alg_max_keysize(
	struct skcipher_alg *alg)
{
	return alg->max_keysize;
}

static inline unsigned int crypto_skcipher_alg_walksize(
	struct skcipher_alg *alg)
{
	return alg->walksize;
}

/**
 * crypto_skcipher_walksize() - obtain walk size
 * @tfm: cipher handle
 *
 * In some cases, algorithms can only perform optimally when operating on
 * multiple blocks in parallel. This is reflected by the walksize, which
 * must be a multiple of the chunksize (or equal if the concern does not
 * apply).
 *
 * Return: walk size in bytes
 */
static inline unsigned int crypto_skcipher_walksize(
	struct crypto_skcipher *tfm)
{
	return crypto_skcipher_alg_walksize(crypto_skcipher_alg(tfm));
}

/* Helpers for simple block cipher modes of operation */
struct skcipher_ctx_simple {
	struct crypto_cipher *cipher;	/* underlying block cipher */
};
static inline struct crypto_cipher *
skcipher_cipher_simple(struct crypto_skcipher *tfm)
{
	struct skcipher_ctx_simple *ctx = crypto_skcipher_ctx(tfm);

	return ctx->cipher;
}

struct skcipher_instance *skcipher_alloc_instance_simple(
	struct crypto_template *tmpl, struct rtattr **tb);

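/*
 * Example (sketch): a simple mode-of-operation template can let
 * skcipher_alloc_instance_simple() handle the boilerplate and only fill
 * in its own callbacks, as crypto/ecb.c does.  "example_encrypt" and
 * "example_decrypt" are hypothetical.
 *
 *	static int example_create(struct crypto_template *tmpl,
 *				  struct rtattr **tb)
 *	{
 *		struct skcipher_instance *inst;
 *		int err;
 *
 *		inst = skcipher_alloc_instance_simple(tmpl, tb);
 *		if (IS_ERR(inst))
 *			return PTR_ERR(inst);
 *
 *		inst->alg.encrypt = example_encrypt;
 *		inst->alg.decrypt = example_decrypt;
 *
 *		err = skcipher_register_instance(tmpl, inst);
 *		if (err)
 *			inst->free(inst);
 *		return err;
 *	}
 */
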
static inline struct crypto_alg *skcipher_ialg_simple(
	struct skcipher_instance *inst)
{
	struct crypto_cipher_spawn *spawn = skcipher_instance_ctx(inst);

	return crypto_spawn_cipher_alg(spawn);
}

#endif	/* _CRYPTO_INTERNAL_SKCIPHER_H */