xref: /linux/include/crypto/skcipher.h (revision aec2f682d47c54ef434b2d440992626d80b1ebdc)
1 /* SPDX-License-Identifier: GPL-2.0-or-later */
2 /*
3  * Symmetric key ciphers.
4  *
5  * Copyright (c) 2007-2015 Herbert Xu <herbert@gondor.apana.org.au>
6  */
7 
8 #ifndef _CRYPTO_SKCIPHER_H
9 #define _CRYPTO_SKCIPHER_H
10 
11 #include <linux/atomic.h>
12 #include <linux/container_of.h>
13 #include <linux/crypto.h>
14 #include <linux/slab.h>
15 #include <linux/string.h>
16 #include <linux/types.h>
17 
18 /* Set this bit if the lskcipher operation is a continuation. */
19 #define CRYPTO_LSKCIPHER_FLAG_CONT	0x00000001
20 /* Set this bit if the lskcipher operation is final. */
21 #define CRYPTO_LSKCIPHER_FLAG_FINAL	0x00000002
22 /* The bit CRYPTO_TFM_REQ_MAY_SLEEP can also be set if needed. */
23 
24 /* Set this bit if the skcipher operation is a continuation. */
25 #define CRYPTO_SKCIPHER_REQ_CONT	0x00000001
26 /* Set this bit if the skcipher operation is not final. */
27 #define CRYPTO_SKCIPHER_REQ_NOTFINAL	0x00000002
28 
29 struct scatterlist;
30 
31 /**
32  *	struct skcipher_request - Symmetric key cipher request
33  *	@cryptlen: Number of bytes to encrypt or decrypt
34  *	@iv: Initialisation Vector
35  *	@src: Source SG list
36  *	@dst: Destination SG list
37  *	@base: Underlying async request
38  *	@__ctx: Start of private context data
39  */
struct skcipher_request {
	unsigned int cryptlen;

	u8 *iv;

	struct scatterlist *src;
	struct scatterlist *dst;

	struct crypto_async_request base;

	/* Per-request context for the implementation; minimum crypto alignment. */
	void *__ctx[] CRYPTO_MINALIGN_ATTR;
};
52 
struct crypto_skcipher {
	/* Size of the request context needed by this transform's requests. */
	unsigned int reqsize;

	struct crypto_tfm base;
};
58 
/*
 * Wrapper type for skcipher handles whose request size is bounded, so
 * requests may be placed on the stack (see SYNC_SKCIPHER_REQUEST_ON_STACK).
 */
struct crypto_sync_skcipher {
	struct crypto_skcipher base;
};
62 
/* Handle for a linear (flat-buffer) symmetric key cipher transform. */
struct crypto_lskcipher {
	struct crypto_tfm base;
};
66 
67 /*
68  * struct skcipher_alg_common - common properties of skcipher_alg
69  * @min_keysize: Minimum key size supported by the transformation. This is the
70  *		 smallest key length supported by this transformation algorithm.
71  *		 This must be set to one of the pre-defined values as this is
72  *		 not hardware specific. Possible values for this field can be
73  *		 found via git grep "_MIN_KEY_SIZE" include/crypto/
74  * @max_keysize: Maximum key size supported by the transformation. This is the
75  *		 largest key length supported by this transformation algorithm.
76  *		 This must be set to one of the pre-defined values as this is
77  *		 not hardware specific. Possible values for this field can be
78  *		 found via git grep "_MAX_KEY_SIZE" include/crypto/
79  * @ivsize: IV size applicable for transformation. The consumer must provide an
80  *	    IV of exactly that size to perform the encrypt or decrypt operation.
81  * @chunksize: Equal to the block size except for stream ciphers such as
82  *	       CTR where it is set to the underlying block size.
83  * @statesize: Size of the internal state for the algorithm.
84  * @base: Definition of a generic crypto algorithm.
85  */
/*
 * Defined as a macro so the identical member list can be instantiated
 * twice: as the named struct skcipher_alg_common below, and as the
 * anonymous struct inside the union in struct skcipher_alg.
 */
#define SKCIPHER_ALG_COMMON {		\
	unsigned int min_keysize;	\
	unsigned int max_keysize;	\
	unsigned int ivsize;		\
	unsigned int chunksize;		\
	unsigned int statesize;		\
					\
	struct crypto_alg base;		\
}
struct skcipher_alg_common SKCIPHER_ALG_COMMON;
96 
97 /**
98  * struct skcipher_alg - symmetric key cipher definition
99  * @setkey: Set key for the transformation. This function is used to either
100  *	    program a supplied key into the hardware or store the key in the
101  *	    transformation context for programming it later. Note that this
102  *	    function does modify the transformation context. This function can
103  *	    be called multiple times during the existence of the transformation
104  *	    object, so one must make sure the key is properly reprogrammed into
105  *	    the hardware. This function is also responsible for checking the key
106  *	    length for validity. In case a software fallback was put in place in
107  *	    the @cra_init call, this function might need to use the fallback if
108  *	    the algorithm doesn't support all of the key sizes.
109  * @encrypt: Encrypt a scatterlist of blocks. This function is used to encrypt
110  *	     the supplied scatterlist containing the blocks of data. The crypto
111  *	     API consumer is responsible for aligning the entries of the
112  *	     scatterlist properly and making sure the chunks are correctly
113  *	     sized. In case a software fallback was put in place in the
114  *	     @cra_init call, this function might need to use the fallback if
115  *	     the algorithm doesn't support all of the key sizes. In case the
116  *	     key was stored in transformation context, the key might need to be
117  *	     re-programmed into the hardware in this function. This function
118  *	     shall not modify the transformation context, as this function may
119  *	     be called in parallel with the same transformation object.
120  * @decrypt: Decrypt a single block. This is a reverse counterpart to @encrypt
121  *	     and the conditions are exactly the same.
122  * @export: Export partial state of the transformation. This function dumps the
123  *	    entire state of the ongoing transformation into a provided block of
124  *	    data so it can be @import 'ed back later on. This is useful in case
125  *	    you want to save partial result of the transformation after
126  *	    processing certain amount of data and reload this partial result
127  *	    multiple times later on for multiple re-use. No data processing
128  *	    happens at this point.
129  * @import: Import partial state of the transformation. This function loads the
130  *	    entire state of the ongoing transformation from a provided block of
131  *	    data so the transformation can continue from this point onward. No
132  *	    data processing happens at this point.
133  * @init: Initialize the cryptographic transformation object. This function
134  *	  is used to initialize the cryptographic transformation object.
135  *	  This function is called only once at the instantiation time, right
136  *	  after the transformation context was allocated. In case the
137  *	  cryptographic hardware has some special requirements which need to
138  *	  be handled by software, this function shall check for the precise
139  *	  requirement of the transformation and put any software fallbacks
140  *	  in place.
141  * @exit: Deinitialize the cryptographic transformation object. This is a
142  *	  counterpart to @init, used to remove various changes set in
143  *	  @init.
144  * @walksize: Equal to the chunk size except in cases where the algorithm is
145  * 	      considerably more efficient if it can operate on multiple chunks
146  * 	      in parallel. Should be a multiple of chunksize.
147  * @co: see struct skcipher_alg_common
148  * @SKCIPHER_ALG_COMMON: see struct skcipher_alg_common
149  *
150  * All fields except @ivsize are mandatory and must be filled.
151  */
struct skcipher_alg {
	int (*setkey)(struct crypto_skcipher *tfm, const u8 *key,
	              unsigned int keylen);
	int (*encrypt)(struct skcipher_request *req);
	int (*decrypt)(struct skcipher_request *req);
	int (*export)(struct skcipher_request *req, void *out);
	int (*import)(struct skcipher_request *req, const void *in);
	int (*init)(struct crypto_skcipher *tfm);
	void (*exit)(struct crypto_skcipher *tfm);

	unsigned int walksize;

	/*
	 * The common fields are visible both directly (anonymous struct)
	 * and via the @co member; the two views alias the same storage.
	 */
	union {
		struct SKCIPHER_ALG_COMMON;
		struct skcipher_alg_common co;
	};
};
169 
170 /**
171  * struct lskcipher_alg - linear symmetric key cipher definition
172  * @setkey: Set key for the transformation. This function is used to either
173  *	    program a supplied key into the hardware or store the key in the
174  *	    transformation context for programming it later. Note that this
175  *	    function does modify the transformation context. This function can
176  *	    be called multiple times during the existence of the transformation
177  *	    object, so one must make sure the key is properly reprogrammed into
178  *	    the hardware. This function is also responsible for checking the key
179  *	    length for validity. In case a software fallback was put in place in
180  *	    the @cra_init call, this function might need to use the fallback if
181  *	    the algorithm doesn't support all of the key sizes.
182  * @encrypt: Encrypt a number of bytes. This function is used to encrypt
183  *	     the supplied data.  This function shall not modify
184  *	     the transformation context, as this function may be called
185  *	     in parallel with the same transformation object.  Data
186  *	     may be left over if length is not a multiple of blocks
187  *	     and there is more to come (final == false).  The number of
188  *	     left-over bytes should be returned in case of success.
189  *	     The siv field shall be as long as ivsize + statesize with
190  *	     the IV placed at the front.  The state will be used by the
191  *	     algorithm internally.
192  * @decrypt: Decrypt a number of bytes. This is a reverse counterpart to
193  *	     @encrypt and the conditions are exactly the same.
194  * @init: Initialize the cryptographic transformation object. This function
195  *	  is used to initialize the cryptographic transformation object.
196  *	  This function is called only once at the instantiation time, right
197  *	  after the transformation context was allocated.
198  * @exit: Deinitialize the cryptographic transformation object. This is a
199  *	  counterpart to @init, used to remove various changes set in
200  *	  @init.
201  * @co: see struct skcipher_alg_common
202  */
struct lskcipher_alg {
	int (*setkey)(struct crypto_lskcipher *tfm, const u8 *key,
	              unsigned int keylen);
	int (*encrypt)(struct crypto_lskcipher *tfm, const u8 *src,
		       u8 *dst, unsigned len, u8 *siv, u32 flags);
	int (*decrypt)(struct crypto_lskcipher *tfm, const u8 *src,
		       u8 *dst, unsigned len, u8 *siv, u32 flags);
	int (*init)(struct crypto_lskcipher *tfm);
	void (*exit)(struct crypto_lskcipher *tfm);

	/* Properties shared with struct skcipher_alg. */
	struct skcipher_alg_common co;
};
215 
/* Upper bound on reqsize for sync skciphers; sizes the on-stack request. */
#define MAX_SYNC_SKCIPHER_REQSIZE      384
/*
 * This performs a type-check against the "_tfm" argument to make sure
 * all users have the correct skcipher tfm for doing on-stack requests.
 * The check works because crypto_sync_skcipher_tfm() only accepts a
 * struct crypto_sync_skcipher pointer; the comma expression assigns
 * base.tfm and then yields the request pointer itself.
 */
#define SYNC_SKCIPHER_REQUEST_ON_STACK(name, _tfm) \
	char __##name##_desc[sizeof(struct skcipher_request) + \
			     MAX_SYNC_SKCIPHER_REQSIZE \
			    ] CRYPTO_MINALIGN_ATTR; \
	struct skcipher_request *name = \
		(((struct skcipher_request *)__##name##_desc)->base.tfm = \
			crypto_sync_skcipher_tfm((_tfm)), \
		 (void *)__##name##_desc)
229 
230 /**
231  * DOC: Symmetric Key Cipher API
232  *
233  * Symmetric key cipher API is used with the ciphers of type
234  * CRYPTO_ALG_TYPE_SKCIPHER (listed as type "skcipher" in /proc/crypto).
235  *
236  * Asynchronous cipher operations imply that the function invocation for a
237  * cipher request returns immediately before the completion of the operation.
238  * The cipher request is scheduled as a separate kernel thread and therefore
239  * load-balanced on the different CPUs via the process scheduler. To allow
240  * the kernel crypto API to inform the caller about the completion of a cipher
241  * request, the caller must provide a callback function. That function is
242  * invoked with the cipher handle when the request completes.
243  *
 * To support asynchronous operation, more information than just the
 * cipher handle must be supplied to the kernel crypto API. That additional
 * information is given by filling in the skcipher_request data structure.
247  *
248  * For the symmetric key cipher API, the state is maintained with the tfm
249  * cipher handle. A single tfm can be used across multiple calls and in
250  * parallel. For asynchronous block cipher calls, context data supplied and
 * only used by the caller can be referenced in the request data structure in
252  * addition to the IV used for the cipher request. The maintenance of such
253  * state information would be important for a crypto driver implementer to
254  * have, because when calling the callback function upon completion of the
255  * cipher operation, that callback function may need some information about
 * which operation just finished if it invoked multiple operations in
 * parallel. This state information is unused by the kernel crypto API.
258  */
259 
__crypto_skcipher_cast(struct crypto_tfm * tfm)260 static inline struct crypto_skcipher *__crypto_skcipher_cast(
261 	struct crypto_tfm *tfm)
262 {
263 	return container_of(tfm, struct crypto_skcipher, base);
264 }
265 
266 /**
267  * crypto_alloc_skcipher() - allocate symmetric key cipher handle
268  * @alg_name: is the cra_name / name or cra_driver_name / driver name of the
269  *	      skcipher cipher
270  * @type: specifies the type of the cipher
271  * @mask: specifies the mask for the cipher
272  *
273  * Allocate a cipher handle for an skcipher. The returned struct
274  * crypto_skcipher is the cipher handle that is required for any subsequent
275  * API invocation for that skcipher.
276  *
277  * Return: allocated cipher handle in case of success; IS_ERR() is true in case
278  *	   of an error, PTR_ERR() returns the error code.
279  */
280 struct crypto_skcipher *crypto_alloc_skcipher(const char *alg_name,
281 					      u32 type, u32 mask);
282 
283 struct crypto_sync_skcipher *crypto_alloc_sync_skcipher(const char *alg_name,
284 					      u32 type, u32 mask);
285 
286 
287 /**
288  * crypto_alloc_lskcipher() - allocate linear symmetric key cipher handle
289  * @alg_name: is the cra_name / name or cra_driver_name / driver name of the
290  *	      lskcipher
291  * @type: specifies the type of the cipher
292  * @mask: specifies the mask for the cipher
293  *
294  * Allocate a cipher handle for an lskcipher. The returned struct
295  * crypto_lskcipher is the cipher handle that is required for any subsequent
296  * API invocation for that lskcipher.
297  *
298  * Return: allocated cipher handle in case of success; IS_ERR() is true in case
299  *	   of an error, PTR_ERR() returns the error code.
300  */
301 struct crypto_lskcipher *crypto_alloc_lskcipher(const char *alg_name,
302 						u32 type, u32 mask);
303 
crypto_skcipher_tfm(struct crypto_skcipher * tfm)304 static inline struct crypto_tfm *crypto_skcipher_tfm(
305 	struct crypto_skcipher *tfm)
306 {
307 	return &tfm->base;
308 }
309 
crypto_lskcipher_tfm(struct crypto_lskcipher * tfm)310 static inline struct crypto_tfm *crypto_lskcipher_tfm(
311 	struct crypto_lskcipher *tfm)
312 {
313 	return &tfm->base;
314 }
315 
crypto_sync_skcipher_tfm(struct crypto_sync_skcipher * tfm)316 static inline struct crypto_tfm *crypto_sync_skcipher_tfm(
317 	struct crypto_sync_skcipher *tfm)
318 {
319 	return crypto_skcipher_tfm(&tfm->base);
320 }
321 
322 /**
323  * crypto_free_skcipher() - zeroize and free cipher handle
324  * @tfm: cipher handle to be freed
325  *
326  * If @tfm is a NULL or error pointer, this function does nothing.
327  */
static inline void crypto_free_skcipher(struct crypto_skcipher *tfm)
{
	struct crypto_tfm *base = crypto_skcipher_tfm(tfm);

	crypto_destroy_tfm(tfm, base);
}
332 
crypto_free_sync_skcipher(struct crypto_sync_skcipher * tfm)333 static inline void crypto_free_sync_skcipher(struct crypto_sync_skcipher *tfm)
334 {
335 	crypto_free_skcipher(&tfm->base);
336 }
337 
338 /**
339  * crypto_free_lskcipher() - zeroize and free cipher handle
340  * @tfm: cipher handle to be freed
341  *
342  * If @tfm is a NULL or error pointer, this function does nothing.
343  */
static inline void crypto_free_lskcipher(struct crypto_lskcipher *tfm)
{
	struct crypto_tfm *base = crypto_lskcipher_tfm(tfm);

	crypto_destroy_tfm(tfm, base);
}
348 
349 /**
350  * crypto_has_skcipher() - Search for the availability of an skcipher.
351  * @alg_name: is the cra_name / name or cra_driver_name / driver name of the
352  *	      skcipher
353  * @type: specifies the type of the skcipher
354  * @mask: specifies the mask for the skcipher
355  *
356  * Return: true when the skcipher is known to the kernel crypto API; false
357  *	   otherwise
358  */
359 int crypto_has_skcipher(const char *alg_name, u32 type, u32 mask);
360 
/* Return the driver name (cra_driver_name) of the skcipher's algorithm. */
static inline const char *crypto_skcipher_driver_name(
	struct crypto_skcipher *tfm)
{
	struct crypto_tfm *base = crypto_skcipher_tfm(tfm);

	return crypto_tfm_alg_driver_name(base);
}
366 
/* Return the driver name (cra_driver_name) of the lskcipher's algorithm. */
static inline const char *crypto_lskcipher_driver_name(
	struct crypto_lskcipher *tfm)
{
	struct crypto_tfm *base = crypto_lskcipher_tfm(tfm);

	return crypto_tfm_alg_driver_name(base);
}
372 
crypto_skcipher_alg_common(struct crypto_skcipher * tfm)373 static inline struct skcipher_alg_common *crypto_skcipher_alg_common(
374 	struct crypto_skcipher *tfm)
375 {
376 	return container_of(crypto_skcipher_tfm(tfm)->__crt_alg,
377 			    struct skcipher_alg_common, base);
378 }
379 
crypto_skcipher_alg(struct crypto_skcipher * tfm)380 static inline struct skcipher_alg *crypto_skcipher_alg(
381 	struct crypto_skcipher *tfm)
382 {
383 	return container_of(crypto_skcipher_tfm(tfm)->__crt_alg,
384 			    struct skcipher_alg, base);
385 }
386 
crypto_lskcipher_alg(struct crypto_lskcipher * tfm)387 static inline struct lskcipher_alg *crypto_lskcipher_alg(
388 	struct crypto_lskcipher *tfm)
389 {
390 	return container_of(crypto_lskcipher_tfm(tfm)->__crt_alg,
391 			    struct lskcipher_alg, co.base);
392 }
393 
394 /**
395  * crypto_skcipher_ivsize() - obtain IV size
396  * @tfm: cipher handle
397  *
398  * The size of the IV for the skcipher referenced by the cipher handle is
399  * returned. This IV size may be zero if the cipher does not need an IV.
400  *
401  * Return: IV size in bytes
402  */
crypto_skcipher_ivsize(struct crypto_skcipher * tfm)403 static inline unsigned int crypto_skcipher_ivsize(struct crypto_skcipher *tfm)
404 {
405 	return crypto_skcipher_alg_common(tfm)->ivsize;
406 }
407 
crypto_sync_skcipher_ivsize(struct crypto_sync_skcipher * tfm)408 static inline unsigned int crypto_sync_skcipher_ivsize(
409 	struct crypto_sync_skcipher *tfm)
410 {
411 	return crypto_skcipher_ivsize(&tfm->base);
412 }
413 
414 /**
415  * crypto_lskcipher_ivsize() - obtain IV size
416  * @tfm: cipher handle
417  *
418  * The size of the IV for the lskcipher referenced by the cipher handle is
419  * returned. This IV size may be zero if the cipher does not need an IV.
420  *
421  * Return: IV size in bytes
422  */
crypto_lskcipher_ivsize(struct crypto_lskcipher * tfm)423 static inline unsigned int crypto_lskcipher_ivsize(
424 	struct crypto_lskcipher *tfm)
425 {
426 	return crypto_lskcipher_alg(tfm)->co.ivsize;
427 }
428 
429 /**
430  * crypto_skcipher_blocksize() - obtain block size of cipher
431  * @tfm: cipher handle
432  *
433  * The block size for the skcipher referenced with the cipher handle is
434  * returned. The caller may use that information to allocate appropriate
435  * memory for the data returned by the encryption or decryption operation
436  *
437  * Return: block size of cipher
438  */
static inline unsigned int crypto_skcipher_blocksize(
	struct crypto_skcipher *tfm)
{
	struct crypto_tfm *base = crypto_skcipher_tfm(tfm);

	return crypto_tfm_alg_blocksize(base);
}
444 
445 /**
446  * crypto_lskcipher_blocksize() - obtain block size of cipher
447  * @tfm: cipher handle
448  *
449  * The block size for the lskcipher referenced with the cipher handle is
450  * returned. The caller may use that information to allocate appropriate
451  * memory for the data returned by the encryption or decryption operation
452  *
453  * Return: block size of cipher
454  */
static inline unsigned int crypto_lskcipher_blocksize(
	struct crypto_lskcipher *tfm)
{
	struct crypto_tfm *base = crypto_lskcipher_tfm(tfm);

	return crypto_tfm_alg_blocksize(base);
}
460 
461 /**
462  * crypto_skcipher_chunksize() - obtain chunk size
463  * @tfm: cipher handle
464  *
465  * The block size is set to one for ciphers such as CTR.  However,
466  * you still need to provide incremental updates in multiples of
467  * the underlying block size as the IV does not have sub-block
468  * granularity.  This is known in this API as the chunk size.
469  *
470  * Return: chunk size in bytes
471  */
crypto_skcipher_chunksize(struct crypto_skcipher * tfm)472 static inline unsigned int crypto_skcipher_chunksize(
473 	struct crypto_skcipher *tfm)
474 {
475 	return crypto_skcipher_alg_common(tfm)->chunksize;
476 }
477 
478 /**
479  * crypto_lskcipher_chunksize() - obtain chunk size
480  * @tfm: cipher handle
481  *
482  * The block size is set to one for ciphers such as CTR.  However,
483  * you still need to provide incremental updates in multiples of
484  * the underlying block size as the IV does not have sub-block
485  * granularity.  This is known in this API as the chunk size.
486  *
487  * Return: chunk size in bytes
488  */
crypto_lskcipher_chunksize(struct crypto_lskcipher * tfm)489 static inline unsigned int crypto_lskcipher_chunksize(
490 	struct crypto_lskcipher *tfm)
491 {
492 	return crypto_lskcipher_alg(tfm)->co.chunksize;
493 }
494 
495 /**
496  * crypto_skcipher_statesize() - obtain state size
497  * @tfm: cipher handle
498  *
499  * Some algorithms cannot be chained with the IV alone.  They carry
500  * internal state which must be replicated if data is to be processed
501  * incrementally.  The size of that state can be obtained with this
502  * function.
503  *
504  * Return: state size in bytes
505  */
crypto_skcipher_statesize(struct crypto_skcipher * tfm)506 static inline unsigned int crypto_skcipher_statesize(
507 	struct crypto_skcipher *tfm)
508 {
509 	return crypto_skcipher_alg_common(tfm)->statesize;
510 }
511 
512 /**
513  * crypto_lskcipher_statesize() - obtain state size
514  * @tfm: cipher handle
515  *
516  * Some algorithms cannot be chained with the IV alone.  They carry
517  * internal state which must be replicated if data is to be processed
518  * incrementally.  The size of that state can be obtained with this
519  * function.
520  *
521  * Return: state size in bytes
522  */
crypto_lskcipher_statesize(struct crypto_lskcipher * tfm)523 static inline unsigned int crypto_lskcipher_statesize(
524 	struct crypto_lskcipher *tfm)
525 {
526 	return crypto_lskcipher_alg(tfm)->co.statesize;
527 }
528 
crypto_sync_skcipher_blocksize(struct crypto_sync_skcipher * tfm)529 static inline unsigned int crypto_sync_skcipher_blocksize(
530 	struct crypto_sync_skcipher *tfm)
531 {
532 	return crypto_skcipher_blocksize(&tfm->base);
533 }
534 
/* Alignment mask that skcipher input/output buffers must satisfy. */
static inline unsigned int crypto_skcipher_alignmask(
	struct crypto_skcipher *tfm)
{
	struct crypto_tfm *base = crypto_skcipher_tfm(tfm);

	return crypto_tfm_alg_alignmask(base);
}
540 
/* Alignment mask that lskcipher input/output buffers must satisfy. */
static inline unsigned int crypto_lskcipher_alignmask(
	struct crypto_lskcipher *tfm)
{
	struct crypto_tfm *base = crypto_lskcipher_tfm(tfm);

	return crypto_tfm_alg_alignmask(base);
}
546 
crypto_skcipher_get_flags(struct crypto_skcipher * tfm)547 static inline u32 crypto_skcipher_get_flags(struct crypto_skcipher *tfm)
548 {
549 	return crypto_tfm_get_flags(crypto_skcipher_tfm(tfm));
550 }
551 
/* Set CRYPTO_TFM_* flags on an skcipher handle. */
static inline void crypto_skcipher_set_flags(struct crypto_skcipher *tfm,
					       u32 flags)
{
	struct crypto_tfm *base = crypto_skcipher_tfm(tfm);

	crypto_tfm_set_flags(base, flags);
}
557 
/* Clear CRYPTO_TFM_* flags on an skcipher handle. */
static inline void crypto_skcipher_clear_flags(struct crypto_skcipher *tfm,
						 u32 flags)
{
	struct crypto_tfm *base = crypto_skcipher_tfm(tfm);

	crypto_tfm_clear_flags(base, flags);
}
563 
crypto_sync_skcipher_get_flags(struct crypto_sync_skcipher * tfm)564 static inline u32 crypto_sync_skcipher_get_flags(
565 	struct crypto_sync_skcipher *tfm)
566 {
567 	return crypto_skcipher_get_flags(&tfm->base);
568 }
569 
/* Set CRYPTO_TFM_* flags on a sync skcipher handle. */
static inline void crypto_sync_skcipher_set_flags(
	struct crypto_sync_skcipher *tfm, u32 flags)
{
	struct crypto_skcipher *skcipher = &tfm->base;

	crypto_skcipher_set_flags(skcipher, flags);
}
575 
/* Clear CRYPTO_TFM_* flags on a sync skcipher handle. */
static inline void crypto_sync_skcipher_clear_flags(
	struct crypto_sync_skcipher *tfm, u32 flags)
{
	struct crypto_skcipher *skcipher = &tfm->base;

	crypto_skcipher_clear_flags(skcipher, flags);
}
581 
crypto_lskcipher_get_flags(struct crypto_lskcipher * tfm)582 static inline u32 crypto_lskcipher_get_flags(struct crypto_lskcipher *tfm)
583 {
584 	return crypto_tfm_get_flags(crypto_lskcipher_tfm(tfm));
585 }
586 
/* Set CRYPTO_TFM_* flags on an lskcipher handle. */
static inline void crypto_lskcipher_set_flags(struct crypto_lskcipher *tfm,
					       u32 flags)
{
	struct crypto_tfm *base = crypto_lskcipher_tfm(tfm);

	crypto_tfm_set_flags(base, flags);
}
592 
/* Clear CRYPTO_TFM_* flags on an lskcipher handle. */
static inline void crypto_lskcipher_clear_flags(struct crypto_lskcipher *tfm,
						 u32 flags)
{
	struct crypto_tfm *base = crypto_lskcipher_tfm(tfm);

	crypto_tfm_clear_flags(base, flags);
}
598 
599 /**
600  * crypto_skcipher_setkey() - set key for cipher
601  * @tfm: cipher handle
602  * @key: buffer holding the key
603  * @keylen: length of the key in bytes
604  *
605  * The caller provided key is set for the skcipher referenced by the cipher
606  * handle.
607  *
608  * Note, the key length determines the cipher type. Many block ciphers implement
609  * different cipher modes depending on the key size, such as AES-128 vs AES-192
610  * vs. AES-256. When providing a 16 byte key for an AES cipher handle, AES-128
611  * is performed.
612  *
613  * Return: 0 if the setting of the key was successful; < 0 if an error occurred
614  */
615 int crypto_skcipher_setkey(struct crypto_skcipher *tfm,
616 			   const u8 *key, unsigned int keylen);
617 
crypto_sync_skcipher_setkey(struct crypto_sync_skcipher * tfm,const u8 * key,unsigned int keylen)618 static inline int crypto_sync_skcipher_setkey(struct crypto_sync_skcipher *tfm,
619 					 const u8 *key, unsigned int keylen)
620 {
621 	return crypto_skcipher_setkey(&tfm->base, key, keylen);
622 }
623 
624 /**
625  * crypto_lskcipher_setkey() - set key for cipher
626  * @tfm: cipher handle
627  * @key: buffer holding the key
628  * @keylen: length of the key in bytes
629  *
630  * The caller provided key is set for the lskcipher referenced by the cipher
631  * handle.
632  *
633  * Note, the key length determines the cipher type. Many block ciphers implement
634  * different cipher modes depending on the key size, such as AES-128 vs AES-192
635  * vs. AES-256. When providing a 16 byte key for an AES cipher handle, AES-128
636  * is performed.
637  *
638  * Return: 0 if the setting of the key was successful; < 0 if an error occurred
639  */
640 int crypto_lskcipher_setkey(struct crypto_lskcipher *tfm,
641 			    const u8 *key, unsigned int keylen);
642 
crypto_skcipher_min_keysize(struct crypto_skcipher * tfm)643 static inline unsigned int crypto_skcipher_min_keysize(
644 	struct crypto_skcipher *tfm)
645 {
646 	return crypto_skcipher_alg_common(tfm)->min_keysize;
647 }
648 
crypto_skcipher_max_keysize(struct crypto_skcipher * tfm)649 static inline unsigned int crypto_skcipher_max_keysize(
650 	struct crypto_skcipher *tfm)
651 {
652 	return crypto_skcipher_alg_common(tfm)->max_keysize;
653 }
654 
crypto_lskcipher_min_keysize(struct crypto_lskcipher * tfm)655 static inline unsigned int crypto_lskcipher_min_keysize(
656 	struct crypto_lskcipher *tfm)
657 {
658 	return crypto_lskcipher_alg(tfm)->co.min_keysize;
659 }
660 
crypto_lskcipher_max_keysize(struct crypto_lskcipher * tfm)661 static inline unsigned int crypto_lskcipher_max_keysize(
662 	struct crypto_lskcipher *tfm)
663 {
664 	return crypto_lskcipher_alg(tfm)->co.max_keysize;
665 }
666 
667 /**
668  * crypto_skcipher_reqtfm() - obtain cipher handle from request
669  * @req: skcipher_request out of which the cipher handle is to be obtained
670  *
671  * Return the crypto_skcipher handle when furnishing an skcipher_request
672  * data structure.
673  *
674  * Return: crypto_skcipher handle
675  */
crypto_skcipher_reqtfm(struct skcipher_request * req)676 static inline struct crypto_skcipher *crypto_skcipher_reqtfm(
677 	struct skcipher_request *req)
678 {
679 	return __crypto_skcipher_cast(req->base.tfm);
680 }
681 
crypto_sync_skcipher_reqtfm(struct skcipher_request * req)682 static inline struct crypto_sync_skcipher *crypto_sync_skcipher_reqtfm(
683 	struct skcipher_request *req)
684 {
685 	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
686 
687 	return container_of(tfm, struct crypto_sync_skcipher, base);
688 }
689 
690 /**
691  * crypto_skcipher_encrypt() - encrypt plaintext
692  * @req: reference to the skcipher_request handle that holds all information
693  *	 needed to perform the cipher operation
694  *
695  * Encrypt plaintext data using the skcipher_request handle. That data
696  * structure and how it is filled with data is discussed with the
697  * skcipher_request_* functions.
698  *
699  * Return: 0 if the cipher operation was successful; < 0 if an error occurred
700  */
701 int crypto_skcipher_encrypt(struct skcipher_request *req);
702 
703 /**
704  * crypto_skcipher_decrypt() - decrypt ciphertext
705  * @req: reference to the skcipher_request handle that holds all information
706  *	 needed to perform the cipher operation
707  *
708  * Decrypt ciphertext data using the skcipher_request handle. That data
709  * structure and how it is filled with data is discussed with the
710  * skcipher_request_* functions.
711  *
712  * Return: 0 if the cipher operation was successful; < 0 if an error occurred
713  */
714 int crypto_skcipher_decrypt(struct skcipher_request *req);
715 
716 /**
717  * crypto_skcipher_export() - export partial state
718  * @req: reference to the skcipher_request handle that holds all information
719  *	 needed to perform the operation
720  * @out: output buffer of sufficient size that can hold the state
721  *
722  * Export partial state of the transformation. This function dumps the
723  * entire state of the ongoing transformation into a provided block of
724  * data so it can be @import 'ed back later on. This is useful in case
725  * you want to save partial result of the transformation after
726  * processing certain amount of data and reload this partial result
727  * multiple times later on for multiple re-use. No data processing
728  * happens at this point.
729  *
730  * Return: 0 if the cipher operation was successful; < 0 if an error occurred
731  */
732 int crypto_skcipher_export(struct skcipher_request *req, void *out);
733 
734 /**
735  * crypto_skcipher_import() - import partial state
736  * @req: reference to the skcipher_request handle that holds all information
737  *	 needed to perform the operation
738  * @in: buffer holding the state
739  *
740  * Import partial state of the transformation. This function loads the
741  * entire state of the ongoing transformation from a provided block of
742  * data so the transformation can continue from this point onward. No
743  * data processing happens at this point.
744  *
745  * Return: 0 if the cipher operation was successful; < 0 if an error occurred
746  */
747 int crypto_skcipher_import(struct skcipher_request *req, const void *in);
748 
749 /**
750  * crypto_lskcipher_encrypt() - encrypt plaintext
751  * @tfm: lskcipher handle
752  * @src: source buffer
753  * @dst: destination buffer
754  * @len: number of bytes to process
755  * @siv: IV + state for the cipher operation.  The length of the IV must
756  *	 comply with the IV size defined by crypto_lskcipher_ivsize.  The
757  *	 IV is then followed with a buffer with the length as specified by
 *	 crypto_lskcipher_statesize.
 *
 * Encrypt plaintext data using the lskcipher handle.
760  *
761  * Return: >=0 if the cipher operation was successful, if positive
762  *	   then this many bytes have been left unprocessed;
763  *	   < 0 if an error occurred
764  */
765 int crypto_lskcipher_encrypt(struct crypto_lskcipher *tfm, const u8 *src,
766 			     u8 *dst, unsigned len, u8 *siv);
767 
768 /**
769  * crypto_lskcipher_decrypt() - decrypt ciphertext
770  * @tfm: lskcipher handle
771  * @src: source buffer
772  * @dst: destination buffer
773  * @len: number of bytes to process
774  * @siv: IV + state for the cipher operation.  The length of the IV must
775  *	 comply with the IV size defined by crypto_lskcipher_ivsize.  The
776  *	 IV is then followed with a buffer with the length as specified by
777  *	 crypto_lskcipher_statesize.
778  *
779  * Decrypt ciphertext data using the lskcipher handle.
780  *
781  * Return: >=0 if the cipher operation was successful, if positive
782  *	   then this many bytes have been left unprocessed;
783  *	   < 0 if an error occurred
784  */
785 int crypto_lskcipher_decrypt(struct crypto_lskcipher *tfm, const u8 *src,
786 			     u8 *dst, unsigned len, u8 *siv);
787 
788 /**
789  * DOC: Symmetric Key Cipher Request Handle
790  *
791  * The skcipher_request data structure contains all pointers to data
792  * required for the symmetric key cipher operation. This includes the cipher
793  * handle (which can be used by multiple skcipher_request instances), pointer
794  * to plaintext and ciphertext, asynchronous callback function, etc. It acts
795  * as a handle to the skcipher_request_* API calls in a similar way as
796  * skcipher handle to the crypto_skcipher_* API calls.
797  */
798 
/**
 * crypto_skcipher_reqsize() - obtain size of the request data structure
 * @tfm: cipher handle
 *
 * The value returned is the size of the tfm-private context that must be
 * allocated after struct skcipher_request itself; see
 * skcipher_request_alloc() for how the two are combined into a single
 * allocation.
 *
 * Return: number of bytes
 */
static inline unsigned int crypto_skcipher_reqsize(struct crypto_skcipher *tfm)
{
	return tfm->reqsize;
}
809 
810 /**
811  * skcipher_request_set_tfm() - update cipher handle reference in request
812  * @req: request handle to be modified
813  * @tfm: cipher handle that shall be added to the request handle
814  *
815  * Allow the caller to replace the existing skcipher handle in the request
816  * data structure with a different one.
817  */
skcipher_request_set_tfm(struct skcipher_request * req,struct crypto_skcipher * tfm)818 static inline void skcipher_request_set_tfm(struct skcipher_request *req,
819 					    struct crypto_skcipher *tfm)
820 {
821 	req->base.tfm = crypto_skcipher_tfm(tfm);
822 }
823 
/**
 * skcipher_request_set_sync_tfm() - update sync cipher handle in request
 * @req: request handle to be modified
 * @tfm: synchronous cipher handle to be registered with the request
 *
 * Variant of skcipher_request_set_tfm() for synchronous-only transforms;
 * it simply forwards the crypto_skcipher handle embedded in @tfm.
 */
static inline void skcipher_request_set_sync_tfm(struct skcipher_request *req,
					    struct crypto_sync_skcipher *tfm)
{
	skcipher_request_set_tfm(req, &tfm->base);
}
829 
/**
 * skcipher_request_cast() - get skcipher request from async request
 * @req: asynchronous request handle
 *
 * Return: the enclosing skcipher_request. @req must be the @base member
 * of a struct skcipher_request for the result to be valid.
 */
static inline struct skcipher_request *skcipher_request_cast(
	struct crypto_async_request *req)
{
	return container_of(req, struct skcipher_request, base);
}
835 
836 /**
837  * skcipher_request_alloc() - allocate request data structure
838  * @tfm: cipher handle to be registered with the request
839  * @gfp: memory allocation flag that is handed to kmalloc by the API call.
840  *
841  * Allocate the request data structure that must be used with the skcipher
842  * encrypt and decrypt API calls. During the allocation, the provided skcipher
843  * handle is registered in the request data structure.
844  *
845  * Return: allocated request handle in case of success, or NULL if out of memory
846  */
skcipher_request_alloc_noprof(struct crypto_skcipher * tfm,gfp_t gfp)847 static inline struct skcipher_request *skcipher_request_alloc_noprof(
848 	struct crypto_skcipher *tfm, gfp_t gfp)
849 {
850 	struct skcipher_request *req;
851 
852 	req = kmalloc_noprof(sizeof(struct skcipher_request) +
853 			     crypto_skcipher_reqsize(tfm), gfp);
854 
855 	if (likely(req))
856 		skcipher_request_set_tfm(req, tfm);
857 
858 	return req;
859 }
860 #define skcipher_request_alloc(...)	alloc_hooks(skcipher_request_alloc_noprof(__VA_ARGS__))
861 
/**
 * skcipher_request_free() - zeroize and free request data structure
 * @req: request data structure cipher handle to be freed
 *
 * The request memory is cleared before being released so that no
 * sensitive material lingers in freed memory.
 */
static inline void skcipher_request_free(struct skcipher_request *req)
{
	kfree_sensitive(req);
}
870 
/**
 * skcipher_request_zero() - clear the content of a request
 * @req: request to be zeroized
 *
 * Wipe the request structure, including the tfm-private context that
 * follows it, without freeing the memory. The request must still be
 * bound to a tfm, which is consulted for the context size.
 */
static inline void skcipher_request_zero(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);

	memzero_explicit(req, sizeof(*req) + crypto_skcipher_reqsize(tfm));
}
877 
878 /**
879  * skcipher_request_set_callback() - set asynchronous callback function
880  * @req: request handle
881  * @flags: specify zero or an ORing of the flags
882  *	   CRYPTO_TFM_REQ_MAY_BACKLOG the request queue may back log and
883  *	   increase the wait queue beyond the initial maximum size;
884  *	   CRYPTO_TFM_REQ_MAY_SLEEP the request processing may sleep
885  * @compl: callback function pointer to be registered with the request handle
886  * @data: The data pointer refers to memory that is not used by the kernel
887  *	  crypto API, but provided to the callback function for it to use. Here,
888  *	  the caller can provide a reference to memory the callback function can
889  *	  operate on. As the callback function is invoked asynchronously to the
890  *	  related functionality, it may need to access data structures of the
891  *	  related functionality which can be referenced using this pointer. The
892  *	  callback function can access the memory via the "data" field in the
893  *	  crypto_async_request data structure provided to the callback function.
894  *
895  * This function allows setting the callback function that is triggered once the
896  * cipher operation completes.
897  *
898  * The callback function is registered with the skcipher_request handle and
899  * must comply with the following template::
900  *
901  *	void callback_function(struct crypto_async_request *req, int error)
902  */
skcipher_request_set_callback(struct skcipher_request * req,u32 flags,crypto_completion_t compl,void * data)903 static inline void skcipher_request_set_callback(struct skcipher_request *req,
904 						 u32 flags,
905 						 crypto_completion_t compl,
906 						 void *data)
907 {
908 	req->base.complete = compl;
909 	req->base.data = data;
910 	req->base.flags = flags;
911 }
912 
913 /**
914  * skcipher_request_set_crypt() - set data buffers
915  * @req: request handle
916  * @src: source scatter / gather list
917  * @dst: destination scatter / gather list
918  * @cryptlen: number of bytes to process from @src
919  * @iv: IV for the cipher operation which must comply with the IV size defined
920  *      by crypto_skcipher_ivsize
921  *
922  * This function allows setting of the source data and destination data
923  * scatter / gather lists.
924  *
925  * For encryption, the source is treated as the plaintext and the
926  * destination is the ciphertext. For a decryption operation, the use is
927  * reversed - the source is the ciphertext and the destination is the plaintext.
928  */
skcipher_request_set_crypt(struct skcipher_request * req,struct scatterlist * src,struct scatterlist * dst,unsigned int cryptlen,void * iv)929 static inline void skcipher_request_set_crypt(
930 	struct skcipher_request *req,
931 	struct scatterlist *src, struct scatterlist *dst,
932 	unsigned int cryptlen, void *iv)
933 {
934 	req->src = src;
935 	req->dst = dst;
936 	req->cryptlen = cryptlen;
937 	req->iv = iv;
938 }
939 
940 #endif	/* _CRYPTO_SKCIPHER_H */
941 
942