/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * Cryptographic API for algorithms (i.e., low-level API).
 *
 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 */
#ifndef _CRYPTO_ALGAPI_H
#define _CRYPTO_ALGAPI_H

#include <crypto/utils.h>
#include <linux/align.h>
#include <linux/cache.h>
#include <linux/crypto.h>
#include <linux/list.h>
#include <linux/types.h>
#include <linux/workqueue.h>

/*
 * Maximum values for blocksize and alignmask, used to allocate
 * static buffers that are big enough for any combination of
 * algs and architectures. Ciphers have a lower maximum size.
 */
#define MAX_ALGAPI_BLOCKSIZE		160
#define MAX_ALGAPI_ALIGNMASK		127
#define MAX_CIPHER_BLOCKSIZE		16
#define MAX_CIPHER_ALIGNMASK		15

#ifdef ARCH_DMA_MINALIGN
#define CRYPTO_DMA_ALIGN ARCH_DMA_MINALIGN
#else
#define CRYPTO_DMA_ALIGN CRYPTO_MINALIGN
#endif

#define CRYPTO_DMA_PADDING ((CRYPTO_DMA_ALIGN - 1) & ~(CRYPTO_MINALIGN - 1))
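
/*
 * Worked example (illustrative values only; the real ones depend on the
 * architecture): with ARCH_DMA_MINALIGN == 128 and CRYPTO_MINALIGN == 8,
 * CRYPTO_DMA_PADDING is (128 - 1) & ~(8 - 1) == 127 & ~7 == 120 bytes of
 * extra context space, enough to round a CRYPTO_MINALIGN-aligned context
 * up to a DMA-safe boundary.
 */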

/*
 * Autoloaded crypto modules should only use a prefixed name to avoid allowing
 * arbitrary modules to be loaded. Loading from userspace may still need the
 * unprefixed names, so those aliases are retained as well.
 * This uses __MODULE_INFO directly instead of MODULE_ALIAS because pre-4.3
 * gcc (e.g. the avr32 toolchain) uses __LINE__ for uniqueness, and this macro
 * expands twice on the same line. Instead, use a separate base name for each
 * alias.
 */
#define MODULE_ALIAS_CRYPTO(name)	\
		__MODULE_INFO(alias, alias_userspace, name);	\
		__MODULE_INFO(alias, alias_crypto, "crypto-" name)
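
/*
 * Usage example (a minimal sketch): a module implementing "aes" declares
 *
 *	MODULE_ALIAS_CRYPTO("aes");
 *
 * which emits both the unprefixed "aes" alias for userspace requests and
 * the "crypto-aes" alias used for prefixed autoloading.
 */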

struct crypto_aead;
struct crypto_instance;
struct module;
struct notifier_block;
struct rtattr;
struct scatterlist;
struct seq_file;
struct sk_buff;
union crypto_no_such_thing;

struct crypto_instance {
	struct crypto_alg alg;

	struct crypto_template *tmpl;

	union {
		/* Node in list of instances after registration. */
		struct hlist_node list;
		/* List of attached spawns before registration. */
		struct crypto_spawn *spawns;
	};

	void *__ctx[] CRYPTO_MINALIGN_ATTR;
};

struct crypto_template {
	struct list_head list;
	struct hlist_head instances;
	struct hlist_head dead;
	struct module *module;

	struct work_struct free_work;

	int (*create)(struct crypto_template *tmpl, struct rtattr **tb);

	char name[CRYPTO_MAX_ALG_NAME];
};

struct crypto_spawn {
	struct list_head list;
	struct crypto_alg *alg;
	union {
		/* Back pointer to instance after registration. */
		struct crypto_instance *inst;
		/* Spawn list pointer prior to registration. */
		struct crypto_spawn *next;
	};
	const struct crypto_type *frontend;
	u32 mask;
	bool dead;
	bool registered;
};

struct crypto_queue {
	struct list_head list;
	struct list_head *backlog;

	unsigned int qlen;
	unsigned int max_qlen;
};

struct crypto_attr_alg {
	char name[CRYPTO_MAX_ALG_NAME];
};

struct crypto_attr_type {
	u32 type;
	u32 mask;
};

/*
 * Algorithm registration interface.
 */
int crypto_register_alg(struct crypto_alg *alg);
void crypto_unregister_alg(struct crypto_alg *alg);
int crypto_register_algs(struct crypto_alg *algs, int count);
void crypto_unregister_algs(struct crypto_alg *algs, int count);
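
/*
 * Typical registration pattern (a minimal sketch; my_algs and the module
 * hooks are hypothetical): register an array of algorithms on module init
 * and unregister the same array on exit.
 *
 *	static struct crypto_alg my_algs[] = {
 *		// per-algorithm definitions
 *	};
 *
 *	static int __init my_module_init(void)
 *	{
 *		return crypto_register_algs(my_algs, ARRAY_SIZE(my_algs));
 *	}
 *
 *	static void __exit my_module_exit(void)
 *	{
 *		crypto_unregister_algs(my_algs, ARRAY_SIZE(my_algs));
 *	}
 */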

void crypto_mod_put(struct crypto_alg *alg);

int crypto_register_template(struct crypto_template *tmpl);
int crypto_register_templates(struct crypto_template *tmpls, int count);
void crypto_unregister_template(struct crypto_template *tmpl);
void crypto_unregister_templates(struct crypto_template *tmpls, int count);
struct crypto_template *crypto_lookup_template(const char *name);
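
/*
 * Sketch of a template definition (names are hypothetical): ->create()
 * parses the rtattr parameters, builds an instance and registers it.
 *
 *	static struct crypto_template my_tmpl = {
 *		.name = "mytmpl",
 *		.create = my_create,
 *		.module = THIS_MODULE,
 *	};
 *
 * The template is then installed with crypto_register_template(&my_tmpl)
 * on module init and removed with crypto_unregister_template(&my_tmpl).
 */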

int crypto_register_instance(struct crypto_template *tmpl,
			     struct crypto_instance *inst);
void crypto_unregister_instance(struct crypto_instance *inst);

int crypto_grab_spawn(struct crypto_spawn *spawn, struct crypto_instance *inst,
		      const char *name, u32 type, u32 mask);
void crypto_drop_spawn(struct crypto_spawn *spawn);
struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn, u32 type,
				    u32 mask);
void *crypto_spawn_tfm2(struct crypto_spawn *spawn);
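
/*
 * Inside a template's ->create(), the instance is typically bound to the
 * underlying algorithm named in the attributes (an illustrative sketch;
 * real templates usually go through type-specific wrappers, and the
 * context layout here is hypothetical):
 *
 *	struct crypto_spawn *spawn = crypto_instance_ctx(inst);
 *	int err;
 *
 *	err = crypto_grab_spawn(spawn, inst, crypto_attr_alg_name(tb[1]),
 *				0, mask);
 *	if (err)
 *		goto err_free_inst;	// teardown calls crypto_drop_spawn()
 */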

struct crypto_attr_type *crypto_get_attr_type(struct rtattr **tb);
int crypto_check_attr_type(struct rtattr **tb, u32 type, u32 *mask_ret);
const char *crypto_attr_alg_name(struct rtattr *rta);
int __crypto_inst_setname(struct crypto_instance *inst, const char *name,
			  const char *driver, struct crypto_alg *alg);

#define crypto_inst_setname(inst, name, ...) \
	CONCATENATE(crypto_inst_setname_, COUNT_ARGS(__VA_ARGS__))( \
		inst, name, ##__VA_ARGS__)
#define crypto_inst_setname_1(inst, name, alg) \
	__crypto_inst_setname(inst, name, name, alg)
#define crypto_inst_setname_2(inst, name, driver, alg) \
	__crypto_inst_setname(inst, name, driver, alg)
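
/*
 * Example (illustrative): wrapping an underlying "aes" algorithm in the
 * "xts" template with
 *
 *	err = crypto_inst_setname(inst, "xts", alg);
 *
 * sets the instance's cra_name to "xts(aes)", with cra_driver_name built
 * analogously from alg's cra_driver_name.
 */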

void crypto_init_queue(struct crypto_queue *queue, unsigned int max_qlen);
int crypto_enqueue_request(struct crypto_queue *queue,
			   struct crypto_async_request *request);
void crypto_enqueue_request_head(struct crypto_queue *queue,
				 struct crypto_async_request *request);
struct crypto_async_request *crypto_dequeue_request(struct crypto_queue *queue);
static inline unsigned int crypto_queue_len(struct crypto_queue *queue)
{
	return queue->qlen;
}

void crypto_inc(u8 *a, unsigned int size);
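
/*
 * crypto_inc() increments the byte string at @a in place, treating it as
 * a @size-byte big-endian counter, e.g. bumping a 16-byte CTR-mode
 * counter block (ctrblk is a hypothetical u8[16]):
 *
 *	crypto_inc(ctrblk, 16);
 */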

static inline void *crypto_tfm_ctx(struct crypto_tfm *tfm)
{
	return tfm->__crt_ctx;
}

static inline void *crypto_tfm_ctx_align(struct crypto_tfm *tfm,
					 unsigned int align)
{
	if (align <= crypto_tfm_ctx_alignment())
		align = 1;

	return PTR_ALIGN(crypto_tfm_ctx(tfm), align);
}

static inline unsigned int crypto_dma_align(void)
{
	return CRYPTO_DMA_ALIGN;
}

static inline unsigned int crypto_dma_padding(void)
{
	return (crypto_dma_align() - 1) & ~(crypto_tfm_ctx_alignment() - 1);
}

static inline void *crypto_tfm_ctx_dma(struct crypto_tfm *tfm)
{
	return crypto_tfm_ctx_align(tfm, crypto_dma_align());
}
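
/*
 * A driver that DMAs to or from its tfm context reserves the padding when
 * declaring the context size and then works on the DMA-aligned pointer
 * (a minimal sketch; struct my_ctx is hypothetical):
 *
 *	.cra_ctxsize = sizeof(struct my_ctx) + CRYPTO_DMA_PADDING,
 *
 * and at runtime:
 *
 *	struct my_ctx *ctx = crypto_tfm_ctx_dma(tfm);
 */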

static inline struct crypto_instance *crypto_tfm_alg_instance(
	struct crypto_tfm *tfm)
{
	return container_of(tfm->__crt_alg, struct crypto_instance, alg);
}

static inline void *crypto_instance_ctx(struct crypto_instance *inst)
{
	return inst->__ctx;
}

static inline struct crypto_async_request *crypto_get_backlog(
	struct crypto_queue *queue)
{
	return queue->backlog == &queue->list ? NULL :
	       container_of(queue->backlog, struct crypto_async_request, list);
}
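
/*
 * The usual dequeue pattern (a sketch; the caller is responsible for
 * locking): peek at the backlog before dequeuing, and once the new
 * request has been accepted, notify the formerly backlogged one that it
 * is now in progress.
 *
 *	struct crypto_async_request *backlog, *req;
 *
 *	backlog = crypto_get_backlog(&queue);
 *	req = crypto_dequeue_request(&queue);
 *	if (backlog)
 *		crypto_request_complete(backlog, -EINPROGRESS);
 */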

static inline u32 crypto_requires_off(struct crypto_attr_type *algt, u32 off)
{
	return (algt->type ^ off) & algt->mask & off;
}

/*
 * When an algorithm uses another algorithm (e.g., if it's an instance of a
 * template), these are the flags that should always be set on the "outer"
 * algorithm if any "inner" algorithm has them set.
 */
#define CRYPTO_ALG_INHERITED_FLAGS	\
	(CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK |	\
	 CRYPTO_ALG_ALLOCATES_MEMORY)

/*
 * Given the type and mask that specify the flag restrictions on a template
 * instance being created, return the mask that should be passed to
 * crypto_grab_*() (along with type=0) to honor any request the user made to
 * have any of the CRYPTO_ALG_INHERITED_FLAGS clear.
 */
static inline u32 crypto_algt_inherited_mask(struct crypto_attr_type *algt)
{
	return crypto_requires_off(algt, CRYPTO_ALG_INHERITED_FLAGS);
}
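
/*
 * Usage sketch for a template ->create() (error handling abbreviated):
 *
 *	struct crypto_attr_type *algt;
 *	u32 mask;
 *
 *	algt = crypto_get_attr_type(tb);
 *	if (IS_ERR(algt))
 *		return PTR_ERR(algt);
 *	mask = crypto_algt_inherited_mask(algt);
 *	// then pass type=0 and this mask to crypto_grab_spawn()
 */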

int crypto_register_notifier(struct notifier_block *nb);
int crypto_unregister_notifier(struct notifier_block *nb);

/* Crypto notification events. */
enum {
	CRYPTO_MSG_ALG_REQUEST,
	CRYPTO_MSG_ALG_REGISTER,
	CRYPTO_MSG_ALG_LOADED,
};

static inline void crypto_request_complete(struct crypto_async_request *req,
					   int err)
{
	req->complete(req->data, err);
}

static inline u32 crypto_tfm_alg_type(struct crypto_tfm *tfm)
{
	return tfm->__crt_alg->cra_flags & CRYPTO_ALG_TYPE_MASK;
}

static inline bool crypto_tfm_req_virt(struct crypto_tfm *tfm)
{
	return tfm->__crt_alg->cra_flags & CRYPTO_ALG_REQ_VIRT;
}

static inline u32 crypto_request_flags(struct crypto_async_request *req)
{
	return req->flags & ~CRYPTO_TFM_REQ_ON_STACK;
}

#endif	/* _CRYPTO_ALGAPI_H */