/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * Symmetric key ciphers.
 *
 * Copyright (c) 2007 Herbert Xu <herbert@gondor.apana.org.au>
 */

#ifndef _CRYPTO_INTERNAL_SKCIPHER_H
#define _CRYPTO_INTERNAL_SKCIPHER_H

#include <crypto/algapi.h>
#include <crypto/internal/cipher.h>
#include <crypto/scatterwalk.h>
#include <crypto/skcipher.h>
#include <linux/types.h>

/*
 * Set this if your algorithm is sync but needs a reqsize larger
 * than MAX_SYNC_SKCIPHER_REQSIZE.
 *
 * Reuse bit that is specific to hash algorithms.
 */
#define CRYPTO_ALG_SKCIPHER_REQSIZE_LARGE CRYPTO_ALG_OPTIONAL_KEY

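/*
 * Illustrative sketch (the "foo" algorithm and every field other than the
 * flag are hypothetical): a synchronous algorithm whose request context
 * does not fit in MAX_SYNC_SKCIPHER_REQSIZE advertises that by setting
 * this bit in its cra_flags, so the sync-skcipher allocation path can
 * skip it:
 *
 *	static struct skcipher_alg foo_alg = {
 *		.base.cra_name	= "foo(aes)",
 *		.base.cra_flags	= CRYPTO_ALG_SKCIPHER_REQSIZE_LARGE,
 *		...
 *	};
 */
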
struct aead_request;
struct rtattr;

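/*
 * In the instance structures below, the anonymous union overlays
 * crypto_instance::alg on top of the algorithm's own ->base member: the
 * "head" padding is sized so that s.base lands exactly where alg.base
 * lives, letting the same object be passed around either as a
 * struct crypto_instance or as the embedded algorithm.
 */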
struct skcipher_instance {
	void (*free)(struct skcipher_instance *inst);
	union {
		struct {
			char head[offsetof(struct skcipher_alg, base)];
			struct crypto_instance base;
		} s;
		struct skcipher_alg alg;
	};
};

struct lskcipher_instance {
	void (*free)(struct lskcipher_instance *inst);
	union {
		struct {
			char head[offsetof(struct lskcipher_alg, co.base)];
			struct crypto_instance base;
		} s;
		struct lskcipher_alg alg;
	};
};

struct crypto_skcipher_spawn {
	struct crypto_spawn base;
};

struct crypto_lskcipher_spawn {
	struct crypto_spawn base;
};

static inline struct crypto_instance *skcipher_crypto_instance(
	struct skcipher_instance *inst)
{
	return &inst->s.base;
}

static inline struct crypto_instance *lskcipher_crypto_instance(
	struct lskcipher_instance *inst)
{
	return &inst->s.base;
}

static inline struct skcipher_instance *skcipher_alg_instance(
	struct crypto_skcipher *skcipher)
{
	return container_of(crypto_skcipher_alg(skcipher),
			    struct skcipher_instance, alg);
}

static inline struct lskcipher_instance *lskcipher_alg_instance(
	struct crypto_lskcipher *lskcipher)
{
	return container_of(crypto_lskcipher_alg(lskcipher),
			    struct lskcipher_instance, alg);
}

static inline void *skcipher_instance_ctx(struct skcipher_instance *inst)
{
	return crypto_instance_ctx(skcipher_crypto_instance(inst));
}

static inline void *lskcipher_instance_ctx(struct lskcipher_instance *inst)
{
	return crypto_instance_ctx(lskcipher_crypto_instance(inst));
}

static inline void skcipher_request_complete(struct skcipher_request *req, int err)
{
	crypto_request_complete(&req->base, err);
}

int crypto_grab_skcipher(struct crypto_skcipher_spawn *spawn,
			 struct crypto_instance *inst,
			 const char *name, u32 type, u32 mask);

int crypto_grab_lskcipher(struct crypto_lskcipher_spawn *spawn,
			  struct crypto_instance *inst,
			  const char *name, u32 type, u32 mask);

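/*
 * Illustrative sketch (the "foo" template, its instance layout and the
 * abbreviated error handling are hypothetical): a template's ->create()
 * callback typically grabs the underlying skcipher by the name supplied
 * in the template parameters, then registers the instance:
 *
 *	static int foo_create(struct crypto_template *tmpl, struct rtattr **tb)
 *	{
 *		struct crypto_skcipher_spawn *spawn;
 *		struct skcipher_instance *inst;
 *		u32 mask;
 *		int err;
 *
 *		err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_SKCIPHER, &mask);
 *		if (err)
 *			return err;
 *
 *		inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
 *		if (!inst)
 *			return -ENOMEM;
 *		spawn = skcipher_instance_ctx(inst);
 *
 *		err = crypto_grab_skcipher(spawn, skcipher_crypto_instance(inst),
 *					   crypto_attr_alg_name(tb[1]), 0, mask);
 *		if (err)
 *			goto err_free_inst;
 *
 *		// fill in inst->alg and inst->free from
 *		// crypto_spawn_skcipher_alg_common(spawn) here, then:
 *
 *		err = skcipher_register_instance(tmpl, inst);
 *		if (!err)
 *			return 0;
 *
 *	err_free_inst:
 *		crypto_drop_skcipher(spawn);
 *		kfree(inst);
 *		return err;
 *	}
 */
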
static inline void crypto_drop_skcipher(struct crypto_skcipher_spawn *spawn)
{
	crypto_drop_spawn(&spawn->base);
}

static inline void crypto_drop_lskcipher(struct crypto_lskcipher_spawn *spawn)
{
	crypto_drop_spawn(&spawn->base);
}

static inline struct lskcipher_alg *crypto_lskcipher_spawn_alg(
	struct crypto_lskcipher_spawn *spawn)
{
	return container_of(spawn->base.alg, struct lskcipher_alg, co.base);
}

static inline struct skcipher_alg_common *crypto_spawn_skcipher_alg_common(
	struct crypto_skcipher_spawn *spawn)
{
	return container_of(spawn->base.alg, struct skcipher_alg_common, base);
}

static inline struct lskcipher_alg *crypto_spawn_lskcipher_alg(
	struct crypto_lskcipher_spawn *spawn)
{
	return crypto_lskcipher_spawn_alg(spawn);
}

static inline struct crypto_skcipher *crypto_spawn_skcipher(
	struct crypto_skcipher_spawn *spawn)
{
	return crypto_spawn_tfm2(&spawn->base);
}

static inline struct crypto_lskcipher *crypto_spawn_lskcipher(
	struct crypto_lskcipher_spawn *spawn)
{
	return crypto_spawn_tfm2(&spawn->base);
}

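/*
 * Illustrative sketch (foo_tfm_ctx, foo_req_ctx and the "child" member
 * are hypothetical): an instance's ->init() callback turns the spawn
 * that was grabbed at ->create() time into a usable child transform and
 * reserves request space for it:
 *
 *	static int foo_init_tfm(struct crypto_skcipher *tfm)
 *	{
 *		struct skcipher_instance *inst = skcipher_alg_instance(tfm);
 *		struct crypto_skcipher_spawn *spawn = skcipher_instance_ctx(inst);
 *		struct foo_tfm_ctx *ctx = crypto_skcipher_ctx(tfm);
 *		struct crypto_skcipher *child;
 *
 *		child = crypto_spawn_skcipher(spawn);
 *		if (IS_ERR(child))
 *			return PTR_ERR(child);
 *
 *		ctx->child = child;
 *		crypto_skcipher_set_reqsize(tfm, sizeof(struct foo_req_ctx) +
 *						 crypto_skcipher_reqsize(child));
 *		return 0;
 *	}
 */
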
static inline void crypto_skcipher_set_reqsize(
	struct crypto_skcipher *skcipher, unsigned int reqsize)
{
	skcipher->reqsize = reqsize;
}

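/*
 * The _dma variant below adds enough headroom that the request context
 * can later be realigned to crypto_dma_align() at run time; it is meant
 * to be paired with skcipher_request_ctx_dma() further down.
 */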
static inline void crypto_skcipher_set_reqsize_dma(
	struct crypto_skcipher *skcipher, unsigned int reqsize)
{
	reqsize += crypto_dma_align() & ~(crypto_tfm_ctx_alignment() - 1);
	skcipher->reqsize = reqsize;
}

int crypto_register_skcipher(struct skcipher_alg *alg);
void crypto_unregister_skcipher(struct skcipher_alg *alg);
int crypto_register_skciphers(struct skcipher_alg *algs, int count);
void crypto_unregister_skciphers(struct skcipher_alg *algs, int count);
int skcipher_register_instance(struct crypto_template *tmpl,
			       struct skcipher_instance *inst);

int crypto_register_lskcipher(struct lskcipher_alg *alg);
void crypto_unregister_lskcipher(struct lskcipher_alg *alg);
int crypto_register_lskciphers(struct lskcipher_alg *algs, int count);
void crypto_unregister_lskciphers(struct lskcipher_alg *algs, int count);
int lskcipher_register_instance(struct crypto_template *tmpl,
				struct lskcipher_instance *inst);

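/*
 * Illustrative sketch (foo_algs is a hypothetical array of algorithm
 * definitions): drivers usually register and unregister their
 * algorithms in bulk from module init/exit:
 *
 *	static int __init foo_mod_init(void)
 *	{
 *		return crypto_register_skciphers(foo_algs, ARRAY_SIZE(foo_algs));
 *	}
 *
 *	static void __exit foo_mod_exit(void)
 *	{
 *		crypto_unregister_skciphers(foo_algs, ARRAY_SIZE(foo_algs));
 *	}
 */
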
int skcipher_walk_virt(struct skcipher_walk *__restrict walk,
		       struct skcipher_request *__restrict req,
		       bool atomic);
int skcipher_walk_aead_encrypt(struct skcipher_walk *__restrict walk,
			       struct aead_request *__restrict req,
			       bool atomic);
int skcipher_walk_aead_decrypt(struct skcipher_walk *__restrict walk,
			       struct aead_request *__restrict req,
			       bool atomic);

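/*
 * Illustrative sketch (foo_crypt_chunk() is hypothetical): the walk
 * helpers iterate over the request's scatterlists in kernel-virtual
 * chunks; each chunk is processed and then handed back with
 * skcipher_walk_done(), passing the number of bytes left unprocessed:
 *
 *	struct skcipher_walk walk;
 *	int err;
 *
 *	err = skcipher_walk_virt(&walk, req, false);
 *	while (walk.nbytes) {
 *		foo_crypt_chunk(ctx, walk.dst.virt.addr, walk.src.virt.addr,
 *				walk.nbytes, walk.iv);
 *		err = skcipher_walk_done(&walk, 0);
 *	}
 *	return err;
 */
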
static inline void *crypto_skcipher_ctx(struct crypto_skcipher *tfm)
{
	return crypto_tfm_ctx(&tfm->base);
}

static inline void *crypto_lskcipher_ctx(struct crypto_lskcipher *tfm)
{
	return crypto_tfm_ctx(&tfm->base);
}

static inline void *crypto_skcipher_ctx_dma(struct crypto_skcipher *tfm)
{
	return crypto_tfm_ctx_dma(&tfm->base);
}

static inline void *skcipher_request_ctx(struct skcipher_request *req)
{
	return req->__ctx;
}

static inline void *skcipher_request_ctx_dma(struct skcipher_request *req)
{
	unsigned int align = crypto_dma_align();

	if (align <= crypto_tfm_ctx_alignment())
		align = 1;

	return PTR_ALIGN(skcipher_request_ctx(req), align);
}

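/*
 * Illustrative sketch (foo_req_ctx is hypothetical): a driver that lets
 * the device DMA directly into its per-request state pairs
 * crypto_skcipher_set_reqsize_dma() at init time with
 * skcipher_request_ctx_dma() in the request handlers:
 *
 *	static int foo_init_tfm(struct crypto_skcipher *tfm)
 *	{
 *		crypto_skcipher_set_reqsize_dma(tfm, sizeof(struct foo_req_ctx));
 *		return 0;
 *	}
 *
 *	static int foo_encrypt(struct skcipher_request *req)
 *	{
 *		struct foo_req_ctx *rctx = skcipher_request_ctx_dma(req);
 *
 *		// rctx is now aligned to crypto_dma_align()
 *		...
 *	}
 */
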
static inline u32 skcipher_request_flags(struct skcipher_request *req)
{
	return req->base.flags;
}

/* Helpers for simple block cipher modes of operation */
struct skcipher_ctx_simple {
	struct crypto_cipher *cipher;	/* underlying block cipher */
};
static inline struct crypto_cipher *
skcipher_cipher_simple(struct crypto_skcipher *tfm)
{
	struct skcipher_ctx_simple *ctx = crypto_skcipher_ctx(tfm);

	return ctx->cipher;
}

struct skcipher_instance *skcipher_alloc_instance_simple(
	struct crypto_template *tmpl, struct rtattr **tb);

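/*
 * Illustrative sketch (the "bar" template and its encrypt/decrypt
 * handlers are hypothetical): a simple mode-of-operation template can
 * let skcipher_alloc_instance_simple() do the parsing, grabbing and
 * default wiring, and only plug in its own handlers:
 *
 *	static int bar_create(struct crypto_template *tmpl, struct rtattr **tb)
 *	{
 *		struct skcipher_instance *inst;
 *		int err;
 *
 *		inst = skcipher_alloc_instance_simple(tmpl, tb);
 *		if (IS_ERR(inst))
 *			return PTR_ERR(inst);
 *
 *		inst->alg.encrypt = bar_encrypt;
 *		inst->alg.decrypt = bar_decrypt;
 *
 *		err = skcipher_register_instance(tmpl, inst);
 *		if (err)
 *			inst->free(inst);
 *		return err;
 *	}
 */
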
static inline struct crypto_alg *skcipher_ialg_simple(
	struct skcipher_instance *inst)
{
	struct crypto_cipher_spawn *spawn = skcipher_instance_ctx(inst);

	return crypto_spawn_cipher_alg(spawn);
}

static inline struct crypto_lskcipher *lskcipher_cipher_simple(
	struct crypto_lskcipher *tfm)
{
	struct crypto_lskcipher **ctx = crypto_lskcipher_ctx(tfm);

	return *ctx;
}

struct lskcipher_instance *lskcipher_alloc_instance_simple(
	struct crypto_template *tmpl, struct rtattr **tb);

static inline struct lskcipher_alg *lskcipher_ialg_simple(
	struct lskcipher_instance *inst)
{
	struct crypto_lskcipher_spawn *spawn = lskcipher_instance_ctx(inst);

	return crypto_lskcipher_spawn_alg(spawn);
}

#endif	/* _CRYPTO_INTERNAL_SKCIPHER_H */