/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * Symmetric key ciphers.
 *
 * Copyright (c) 2007-2015 Herbert Xu <herbert@gondor.apana.org.au>
 */

#ifndef _CRYPTO_SKCIPHER_H
#define _CRYPTO_SKCIPHER_H

#include <linux/atomic.h>
#include <linux/container_of.h>
#include <linux/crypto.h>
#include <linux/slab.h>
#include <linux/string.h>
#include <linux/types.h>

/* Set this bit if the lskcipher operation is a continuation. */
#define CRYPTO_LSKCIPHER_FLAG_CONT	0x00000001
/* Set this bit if the lskcipher operation is final. */
#define CRYPTO_LSKCIPHER_FLAG_FINAL	0x00000002
/* The bit CRYPTO_TFM_REQ_MAY_SLEEP can also be set if needed. */

/* Set this bit if the skcipher operation is a continuation. */
#define CRYPTO_SKCIPHER_REQ_CONT	0x00000001
/* Set this bit if the skcipher operation is not final. */
#define CRYPTO_SKCIPHER_REQ_NOTFINAL	0x00000002

struct scatterlist;

/**
 *	struct skcipher_request - Symmetric key cipher request
 *	@cryptlen: Number of bytes to encrypt or decrypt
 *	@iv: Initialisation Vector
 *	@src: Source SG list
 *	@dst: Destination SG list
 *	@base: Underlying async request
 *	@__ctx: Start of private context data
 */
struct skcipher_request {
	unsigned int cryptlen;

	u8 *iv;

	struct scatterlist *src;
	struct scatterlist *dst;

	struct crypto_async_request base;

	void *__ctx[] CRYPTO_MINALIGN_ATTR;
};

struct crypto_skcipher {
	unsigned int reqsize;

	struct crypto_tfm base;
};

struct crypto_sync_skcipher {
	struct crypto_skcipher base;
};

struct crypto_lskcipher {
	struct crypto_tfm base;
};

/*
 * struct skcipher_alg_common - common properties of skcipher_alg
 * @min_keysize: Minimum key size supported by the transformation. This is the
 *		 smallest key length supported by this transformation algorithm.
 *		 This must be set to one of the pre-defined values as this is
 *		 not hardware specific. Possible values for this field can be
 *		 found via git grep "_MIN_KEY_SIZE" include/crypto/
 * @max_keysize: Maximum key size supported by the transformation. This is the
 *		 largest key length supported by this transformation algorithm.
 *		 This must be set to one of the pre-defined values as this is
 *		 not hardware specific. Possible values for this field can be
 *		 found via git grep "_MAX_KEY_SIZE" include/crypto/
 * @ivsize: IV size applicable for transformation. The consumer must provide an
 *	    IV of exactly that size to perform the encrypt or decrypt operation.
 * @chunksize: Equal to the block size except for stream ciphers such as
 *	       CTR, where the block size is 1 and the chunk size is that of
 *	       the underlying block cipher.
 * @statesize: Size of the internal state for the algorithm.
 * @base: Definition of a generic crypto algorithm.
 */
#define SKCIPHER_ALG_COMMON {		\
	unsigned int min_keysize;	\
	unsigned int max_keysize;	\
	unsigned int ivsize;		\
	unsigned int chunksize;		\
	unsigned int statesize;		\
					\
	struct crypto_alg base;		\
}
struct skcipher_alg_common SKCIPHER_ALG_COMMON;

/**
 * struct skcipher_alg - symmetric key cipher definition
 * @setkey: Set key for the transformation. This function is used to either
 *	    program a supplied key into the hardware or store the key in the
 *	    transformation context for programming it later. Note that this
 *	    function does modify the transformation context. This function can
 *	    be called multiple times during the existence of the transformation
 *	    object, so one must make sure the key is properly reprogrammed into
 *	    the hardware. This function is also responsible for checking the key
 *	    length for validity. In case a software fallback was put in place in
 *	    the @cra_init call, this function might need to use the fallback if
 *	    the algorithm doesn't support all of the key sizes.
 * @encrypt: Encrypt a scatterlist of blocks. This function is used to encrypt
 *	     the supplied scatterlist containing the blocks of data. The crypto
 *	     API consumer is responsible for aligning the entries of the
 *	     scatterlist properly and making sure the chunks are correctly
 *	     sized. In case a software fallback was put in place in the
 *	     @cra_init call, this function might need to use the fallback if
 *	     the algorithm doesn't support all of the key sizes. In case the
 *	     key was stored in transformation context, the key might need to be
 *	     re-programmed into the hardware in this function. This function
 *	     shall not modify the transformation context, as this function may
 *	     be called in parallel with the same transformation object.
 * @decrypt: Decrypt a scatterlist of blocks. This is the reverse counterpart
 *	     to @encrypt and the conditions are exactly the same.
 * @export: Export partial state of the transformation. This function dumps the
 *	    entire state of the ongoing transformation into a provided block of
 *	    data so it can be @import'ed back later on. This is useful in case
 *	    you want to save the partial result of the transformation after
 *	    processing a certain amount of data and reload this partial result
 *	    later for multiple re-uses. No data processing happens at this
 *	    point.
 * @import: Import partial state of the transformation. This function loads the
 *	    entire state of the ongoing transformation from a provided block of
 *	    data so the transformation can continue from this point onward. No
 *	    data processing happens at this point.
 * @init: Initialize the cryptographic transformation object. This function
 *	  is used to initialize the cryptographic transformation object.
 *	  This function is called only once at the instantiation time, right
 *	  after the transformation context was allocated. In case the
 *	  cryptographic hardware has some special requirements which need to
 *	  be handled by software, this function shall check for the precise
 *	  requirement of the transformation and put any software fallbacks
 *	  in place.
 * @exit: Deinitialize the cryptographic transformation object. This is a
 *	  counterpart to @init, used to remove various changes set in
 *	  @init.
 * @walksize: Equal to the chunk size except in cases where the algorithm is
 *	      considerably more efficient if it can operate on multiple chunks
 *	      in parallel. Should be a multiple of chunksize.
 * @co: see struct skcipher_alg_common
 *
 * All fields except @ivsize are mandatory and must be filled.
 */
struct skcipher_alg {
	int (*setkey)(struct crypto_skcipher *tfm, const u8 *key,
	              unsigned int keylen);
	int (*encrypt)(struct skcipher_request *req);
	int (*decrypt)(struct skcipher_request *req);
	int (*export)(struct skcipher_request *req, void *out);
	int (*import)(struct skcipher_request *req, const void *in);
	int (*init)(struct crypto_skcipher *tfm);
	void (*exit)(struct crypto_skcipher *tfm);

	unsigned int walksize;

	union {
		struct SKCIPHER_ALG_COMMON;
		struct skcipher_alg_common co;
	};
};

/**
 * struct lskcipher_alg - linear symmetric key cipher definition
 * @setkey: Set key for the transformation. This function is used to either
 *	    program a supplied key into the hardware or store the key in the
 *	    transformation context for programming it later. Note that this
 *	    function does modify the transformation context. This function can
 *	    be called multiple times during the existence of the transformation
 *	    object, so one must make sure the key is properly reprogrammed into
 *	    the hardware. This function is also responsible for checking the key
 *	    length for validity. In case a software fallback was put in place in
 *	    the @cra_init call, this function might need to use the fallback if
 *	    the algorithm doesn't support all of the key sizes.
 * @encrypt: Encrypt a number of bytes. This function is used to encrypt
 *	     the supplied data.  This function shall not modify
 *	     the transformation context, as this function may be called
 *	     in parallel with the same transformation object.  Data
 *	     may be left over if the length is not a multiple of the block
 *	     size and there is more to come (final == false).  The number of
 *	     left-over bytes should be returned in case of success.
 *	     The siv field shall be as long as ivsize + statesize with
 *	     the IV placed at the front.  The state will be used by the
 *	     algorithm internally.
 * @decrypt: Decrypt a number of bytes. This is a reverse counterpart to
 *	     @encrypt and the conditions are exactly the same.
 * @init: Initialize the cryptographic transformation object. This function
 *	  is used to initialize the cryptographic transformation object.
 *	  This function is called only once at the instantiation time, right
 *	  after the transformation context was allocated.
 * @exit: Deinitialize the cryptographic transformation object. This is a
 *	  counterpart to @init, used to remove various changes set in
 *	  @init.
 * @co: see struct skcipher_alg_common
 */
struct lskcipher_alg {
	int (*setkey)(struct crypto_lskcipher *tfm, const u8 *key,
	              unsigned int keylen);
	int (*encrypt)(struct crypto_lskcipher *tfm, const u8 *src,
		       u8 *dst, unsigned len, u8 *siv, u32 flags);
	int (*decrypt)(struct crypto_lskcipher *tfm, const u8 *src,
		       u8 *dst, unsigned len, u8 *siv, u32 flags);
	int (*init)(struct crypto_lskcipher *tfm);
	void (*exit)(struct crypto_lskcipher *tfm);

	struct skcipher_alg_common co;
};

#define MAX_SYNC_SKCIPHER_REQSIZE      384
/*
 * This performs a type-check against the "_tfm" argument to make sure
 * all users have the correct skcipher tfm for doing on-stack requests.
 */
#define SYNC_SKCIPHER_REQUEST_ON_STACK(name, _tfm) \
	char __##name##_desc[sizeof(struct skcipher_request) + \
			     MAX_SYNC_SKCIPHER_REQSIZE \
			    ] CRYPTO_MINALIGN_ATTR; \
	struct skcipher_request *name = \
		(((struct skcipher_request *)__##name##_desc)->base.tfm = \
			crypto_sync_skcipher_tfm((_tfm)), \
		 (void *)__##name##_desc)
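
/*
 * Example (illustrative sketch only, not taken from a specific in-tree
 * user): a synchronous request placed on the stack.  The function and
 * variable names below are hypothetical.
 *
 *	static int encrypt_buf(struct crypto_sync_skcipher *tfm,
 *			       struct scatterlist *sg, unsigned int len,
 *			       u8 *iv)
 *	{
 *		SYNC_SKCIPHER_REQUEST_ON_STACK(req, tfm);
 *		int err;
 *
 *		skcipher_request_set_sync_tfm(req, tfm);
 *		skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_SLEEP,
 *					      NULL, NULL);
 *		skcipher_request_set_crypt(req, sg, sg, len, iv);
 *		err = crypto_skcipher_encrypt(req);
 *		skcipher_request_zero(req);
 *		return err;
 *	}
 */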

/**
 * DOC: Symmetric Key Cipher API
 *
 * The symmetric key cipher API is used with the ciphers of type
 * CRYPTO_ALG_TYPE_SKCIPHER (listed as type "skcipher" in /proc/crypto).
 *
 * Asynchronous cipher operations imply that the function invocation for a
 * cipher request returns immediately before the completion of the operation.
 * The cipher request may then be processed in a separate context, for
 * example by dedicated hardware or a kernel thread, and thus in parallel
 * with the caller. To allow the kernel crypto API to inform the caller
 * about the completion of a cipher request, the caller must provide a
 * callback function. That function is invoked with the cipher handle when
 * the request completes.
 *
 * To support the asynchronous operation, additional information beyond just
 * the cipher handle must be supplied to the kernel crypto API. That
 * additional information is given by filling in the skcipher_request data
 * structure.
 *
 * For the symmetric key cipher API, the state is maintained with the tfm
 * cipher handle. A single tfm can be used across multiple calls and in
 * parallel. For asynchronous block cipher calls, context data supplied and
 * only used by the caller can be referenced in the request data structure in
 * addition to the IV used for the cipher request. Such state information is
 * important for the caller, because the callback function invoked upon
 * completion of a cipher operation may need to know which of several
 * operations issued in parallel just finished. This state information is
 * unused by the kernel crypto API itself.
 */
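
/*
 * Minimal usage sketch (illustrative only): allocate a tfm and a request,
 * then run one encryption synchronously via a crypto_wait.  The algorithm
 * name, key/IV sizes and the "data"/"datalen" buffer are assumptions, and
 * error unwinding is abbreviated.  Assumes <linux/scatterlist.h> and
 * <linux/err.h> are available to the caller.
 *
 *	struct crypto_skcipher *tfm;
 *	struct skcipher_request *req;
 *	DECLARE_CRYPTO_WAIT(wait);
 *	struct scatterlist sg;
 *	u8 key[16], iv[16];
 *	int err;
 *
 *	tfm = crypto_alloc_skcipher("cbc(aes)", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *
 *	err = crypto_skcipher_setkey(tfm, key, sizeof(key));
 *	if (err)
 *		goto out_free_tfm;
 *
 *	req = skcipher_request_alloc(tfm, GFP_KERNEL);
 *	if (!req) {
 *		err = -ENOMEM;
 *		goto out_free_tfm;
 *	}
 *
 *	sg_init_one(&sg, data, datalen);
 *	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG |
 *				      CRYPTO_TFM_REQ_MAY_SLEEP,
 *				      crypto_req_done, &wait);
 *	skcipher_request_set_crypt(req, &sg, &sg, datalen, iv);
 *	err = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);
 *
 *	skcipher_request_free(req);
 * out_free_tfm:
 *	crypto_free_skcipher(tfm);
 *	return err;
 */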

static inline struct crypto_skcipher *__crypto_skcipher_cast(
	struct crypto_tfm *tfm)
{
	return container_of(tfm, struct crypto_skcipher, base);
}

/**
 * crypto_alloc_skcipher() - allocate symmetric key cipher handle
 * @alg_name: is the cra_name / name or cra_driver_name / driver name of the
 *	      skcipher
 * @type: specifies the type of the cipher
 * @mask: specifies the mask for the cipher
 *
 * Allocate a cipher handle for an skcipher. The returned struct
 * crypto_skcipher is the cipher handle that is required for any subsequent
 * API invocation for that skcipher.
 *
 * Return: allocated cipher handle in case of success; IS_ERR() is true in case
 *	   of an error, PTR_ERR() returns the error code.
 */
struct crypto_skcipher *crypto_alloc_skcipher(const char *alg_name,
					      u32 type, u32 mask);

struct crypto_sync_skcipher *crypto_alloc_sync_skcipher(const char *alg_name,
					      u32 type, u32 mask);

/**
 * crypto_alloc_lskcipher() - allocate linear symmetric key cipher handle
 * @alg_name: is the cra_name / name or cra_driver_name / driver name of the
 *	      lskcipher
 * @type: specifies the type of the cipher
 * @mask: specifies the mask for the cipher
 *
 * Allocate a cipher handle for an lskcipher. The returned struct
 * crypto_lskcipher is the cipher handle that is required for any subsequent
 * API invocation for that lskcipher.
 *
 * Return: allocated cipher handle in case of success; IS_ERR() is true in case
 *	   of an error, PTR_ERR() returns the error code.
 */
struct crypto_lskcipher *crypto_alloc_lskcipher(const char *alg_name,
						u32 type, u32 mask);

static inline struct crypto_tfm *crypto_skcipher_tfm(
	struct crypto_skcipher *tfm)
{
	return &tfm->base;
}

static inline struct crypto_tfm *crypto_lskcipher_tfm(
	struct crypto_lskcipher *tfm)
{
	return &tfm->base;
}

static inline struct crypto_tfm *crypto_sync_skcipher_tfm(
	struct crypto_sync_skcipher *tfm)
{
	return crypto_skcipher_tfm(&tfm->base);
}

/**
 * crypto_free_skcipher() - zeroize and free cipher handle
 * @tfm: cipher handle to be freed
 *
 * If @tfm is a NULL or error pointer, this function does nothing.
 */
static inline void crypto_free_skcipher(struct crypto_skcipher *tfm)
{
	crypto_destroy_tfm(tfm, crypto_skcipher_tfm(tfm));
}

static inline void crypto_free_sync_skcipher(struct crypto_sync_skcipher *tfm)
{
	crypto_free_skcipher(&tfm->base);
}

/**
 * crypto_free_lskcipher() - zeroize and free cipher handle
 * @tfm: cipher handle to be freed
 *
 * If @tfm is a NULL or error pointer, this function does nothing.
 */
static inline void crypto_free_lskcipher(struct crypto_lskcipher *tfm)
{
	crypto_destroy_tfm(tfm, crypto_lskcipher_tfm(tfm));
}

/**
 * crypto_has_skcipher() - Search for the availability of an skcipher.
 * @alg_name: is the cra_name / name or cra_driver_name / driver name of the
 *	      skcipher
 * @type: specifies the type of the skcipher
 * @mask: specifies the mask for the skcipher
 *
 * Return: true when the skcipher is known to the kernel crypto API; false
 *	   otherwise
 */
int crypto_has_skcipher(const char *alg_name, u32 type, u32 mask);

static inline const char *crypto_skcipher_driver_name(
	struct crypto_skcipher *tfm)
{
	return crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm));
}

static inline const char *crypto_lskcipher_driver_name(
	struct crypto_lskcipher *tfm)
{
	return crypto_tfm_alg_driver_name(crypto_lskcipher_tfm(tfm));
}

static inline struct skcipher_alg_common *crypto_skcipher_alg_common(
	struct crypto_skcipher *tfm)
{
	return container_of(crypto_skcipher_tfm(tfm)->__crt_alg,
			    struct skcipher_alg_common, base);
}

static inline struct skcipher_alg *crypto_skcipher_alg(
	struct crypto_skcipher *tfm)
{
	return container_of(crypto_skcipher_tfm(tfm)->__crt_alg,
			    struct skcipher_alg, base);
}

static inline struct lskcipher_alg *crypto_lskcipher_alg(
	struct crypto_lskcipher *tfm)
{
	return container_of(crypto_lskcipher_tfm(tfm)->__crt_alg,
			    struct lskcipher_alg, co.base);
}

/**
 * crypto_skcipher_ivsize() - obtain IV size
 * @tfm: cipher handle
 *
 * The size of the IV for the skcipher referenced by the cipher handle is
 * returned. This IV size may be zero if the cipher does not need an IV.
 *
 * Return: IV size in bytes
 */
static inline unsigned int crypto_skcipher_ivsize(struct crypto_skcipher *tfm)
{
	return crypto_skcipher_alg_common(tfm)->ivsize;
}

static inline unsigned int crypto_sync_skcipher_ivsize(
	struct crypto_sync_skcipher *tfm)
{
	return crypto_skcipher_ivsize(&tfm->base);
}

/**
 * crypto_lskcipher_ivsize() - obtain IV size
 * @tfm: cipher handle
 *
 * The size of the IV for the lskcipher referenced by the cipher handle is
 * returned. This IV size may be zero if the cipher does not need an IV.
 *
 * Return: IV size in bytes
 */
static inline unsigned int crypto_lskcipher_ivsize(
	struct crypto_lskcipher *tfm)
{
	return crypto_lskcipher_alg(tfm)->co.ivsize;
}

/**
 * crypto_skcipher_blocksize() - obtain block size of cipher
 * @tfm: cipher handle
 *
 * The block size for the skcipher referenced with the cipher handle is
 * returned. The caller may use that information to allocate appropriate
 * memory for the data returned by the encryption or decryption operation.
 *
 * Return: block size of cipher
 */
static inline unsigned int crypto_skcipher_blocksize(
	struct crypto_skcipher *tfm)
{
	return crypto_tfm_alg_blocksize(crypto_skcipher_tfm(tfm));
}

/**
 * crypto_lskcipher_blocksize() - obtain block size of cipher
 * @tfm: cipher handle
 *
 * The block size for the lskcipher referenced with the cipher handle is
 * returned. The caller may use that information to allocate appropriate
 * memory for the data returned by the encryption or decryption operation.
 *
 * Return: block size of cipher
 */
static inline unsigned int crypto_lskcipher_blocksize(
	struct crypto_lskcipher *tfm)
{
	return crypto_tfm_alg_blocksize(crypto_lskcipher_tfm(tfm));
}

/**
 * crypto_skcipher_chunksize() - obtain chunk size
 * @tfm: cipher handle
 *
 * The block size is set to one for ciphers such as CTR.  However,
 * you still need to provide incremental updates in multiples of
 * the underlying block size as the IV does not have sub-block
 * granularity.  This is known in this API as the chunk size.
 *
 * Return: chunk size in bytes
 */
static inline unsigned int crypto_skcipher_chunksize(
	struct crypto_skcipher *tfm)
{
	return crypto_skcipher_alg_common(tfm)->chunksize;
}

/**
 * crypto_lskcipher_chunksize() - obtain chunk size
 * @tfm: cipher handle
 *
 * The block size is set to one for ciphers such as CTR.  However,
 * you still need to provide incremental updates in multiples of
 * the underlying block size as the IV does not have sub-block
 * granularity.  This is known in this API as the chunk size.
 *
 * Return: chunk size in bytes
 */
static inline unsigned int crypto_lskcipher_chunksize(
	struct crypto_lskcipher *tfm)
{
	return crypto_lskcipher_alg(tfm)->co.chunksize;
}

/**
 * crypto_skcipher_statesize() - obtain state size
 * @tfm: cipher handle
 *
 * Some algorithms cannot be chained with the IV alone.  They carry
 * internal state which must be replicated if data is to be processed
 * incrementally.  The size of that state can be obtained with this
 * function.
 *
 * Return: state size in bytes
 */
static inline unsigned int crypto_skcipher_statesize(
	struct crypto_skcipher *tfm)
{
	return crypto_skcipher_alg_common(tfm)->statesize;
}

/**
 * crypto_lskcipher_statesize() - obtain state size
 * @tfm: cipher handle
 *
 * Some algorithms cannot be chained with the IV alone.  They carry
 * internal state which must be replicated if data is to be processed
 * incrementally.  The size of that state can be obtained with this
 * function.
 *
 * Return: state size in bytes
 */
static inline unsigned int crypto_lskcipher_statesize(
	struct crypto_lskcipher *tfm)
{
	return crypto_lskcipher_alg(tfm)->co.statesize;
}

static inline unsigned int crypto_sync_skcipher_blocksize(
	struct crypto_sync_skcipher *tfm)
{
	return crypto_skcipher_blocksize(&tfm->base);
}

static inline unsigned int crypto_skcipher_alignmask(
	struct crypto_skcipher *tfm)
{
	return crypto_tfm_alg_alignmask(crypto_skcipher_tfm(tfm));
}

static inline unsigned int crypto_lskcipher_alignmask(
	struct crypto_lskcipher *tfm)
{
	return crypto_tfm_alg_alignmask(crypto_lskcipher_tfm(tfm));
}

static inline u32 crypto_skcipher_get_flags(struct crypto_skcipher *tfm)
{
	return crypto_tfm_get_flags(crypto_skcipher_tfm(tfm));
}

static inline void crypto_skcipher_set_flags(struct crypto_skcipher *tfm,
					     u32 flags)
{
	crypto_tfm_set_flags(crypto_skcipher_tfm(tfm), flags);
}

static inline void crypto_skcipher_clear_flags(struct crypto_skcipher *tfm,
					       u32 flags)
{
	crypto_tfm_clear_flags(crypto_skcipher_tfm(tfm), flags);
}

static inline u32 crypto_sync_skcipher_get_flags(
	struct crypto_sync_skcipher *tfm)
{
	return crypto_skcipher_get_flags(&tfm->base);
}

static inline void crypto_sync_skcipher_set_flags(
	struct crypto_sync_skcipher *tfm, u32 flags)
{
	crypto_skcipher_set_flags(&tfm->base, flags);
}

static inline void crypto_sync_skcipher_clear_flags(
	struct crypto_sync_skcipher *tfm, u32 flags)
{
	crypto_skcipher_clear_flags(&tfm->base, flags);
}

static inline u32 crypto_lskcipher_get_flags(struct crypto_lskcipher *tfm)
{
	return crypto_tfm_get_flags(crypto_lskcipher_tfm(tfm));
}

static inline void crypto_lskcipher_set_flags(struct crypto_lskcipher *tfm,
					      u32 flags)
{
	crypto_tfm_set_flags(crypto_lskcipher_tfm(tfm), flags);
}

static inline void crypto_lskcipher_clear_flags(struct crypto_lskcipher *tfm,
						u32 flags)
{
	crypto_tfm_clear_flags(crypto_lskcipher_tfm(tfm), flags);
}

/**
 * crypto_skcipher_setkey() - set key for cipher
 * @tfm: cipher handle
 * @key: buffer holding the key
 * @keylen: length of the key in bytes
 *
 * The caller provided key is set for the skcipher referenced by the cipher
 * handle.
 *
 * Note, the key length selects the cipher variant. Many block ciphers come
 * in different variants depending on the key size, such as AES-128 vs.
 * AES-192 vs. AES-256. When providing a 16 byte key for an AES cipher
 * handle, AES-128 is performed.
 *
 * Return: 0 if the setting of the key was successful; < 0 if an error occurred
 */
int crypto_skcipher_setkey(struct crypto_skcipher *tfm,
			   const u8 *key, unsigned int keylen);

static inline int crypto_sync_skcipher_setkey(struct crypto_sync_skcipher *tfm,
					 const u8 *key, unsigned int keylen)
{
	return crypto_skcipher_setkey(&tfm->base, key, keylen);
}

/**
 * crypto_lskcipher_setkey() - set key for cipher
 * @tfm: cipher handle
 * @key: buffer holding the key
 * @keylen: length of the key in bytes
 *
 * The caller provided key is set for the lskcipher referenced by the cipher
 * handle.
 *
 * Note, the key length selects the cipher variant. Many block ciphers come
 * in different variants depending on the key size, such as AES-128 vs.
 * AES-192 vs. AES-256. When providing a 16 byte key for an AES cipher
 * handle, AES-128 is performed.
 *
 * Return: 0 if the setting of the key was successful; < 0 if an error occurred
 */
int crypto_lskcipher_setkey(struct crypto_lskcipher *tfm,
			    const u8 *key, unsigned int keylen);

static inline unsigned int crypto_skcipher_min_keysize(
	struct crypto_skcipher *tfm)
{
	return crypto_skcipher_alg_common(tfm)->min_keysize;
}

static inline unsigned int crypto_skcipher_max_keysize(
	struct crypto_skcipher *tfm)
{
	return crypto_skcipher_alg_common(tfm)->max_keysize;
}

static inline unsigned int crypto_lskcipher_min_keysize(
	struct crypto_lskcipher *tfm)
{
	return crypto_lskcipher_alg(tfm)->co.min_keysize;
}

static inline unsigned int crypto_lskcipher_max_keysize(
	struct crypto_lskcipher *tfm)
{
	return crypto_lskcipher_alg(tfm)->co.max_keysize;
}
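
/*
 * Illustrative sketch: a caller may pre-check a user-supplied key length
 * against the advertised range before handing it to crypto_skcipher_setkey()
 * (which performs its own validation as well).  The variable names are
 * hypothetical.
 *
 *	if (keylen < crypto_skcipher_min_keysize(tfm) ||
 *	    keylen > crypto_skcipher_max_keysize(tfm))
 *		return -EINVAL;
 *
 *	return crypto_skcipher_setkey(tfm, key, keylen);
 */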

/**
 * crypto_skcipher_reqtfm() - obtain cipher handle from request
 * @req: skcipher_request out of which the cipher handle is to be obtained
 *
 * Return the crypto_skcipher handle when furnishing an skcipher_request
 * data structure.
 *
 * Return: crypto_skcipher handle
 */
static inline struct crypto_skcipher *crypto_skcipher_reqtfm(
	struct skcipher_request *req)
{
	return __crypto_skcipher_cast(req->base.tfm);
}

static inline struct crypto_sync_skcipher *crypto_sync_skcipher_reqtfm(
	struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);

	return container_of(tfm, struct crypto_sync_skcipher, base);
}

/**
 * crypto_skcipher_encrypt() - encrypt plaintext
 * @req: reference to the skcipher_request handle that holds all information
 *	 needed to perform the cipher operation
 *
 * Encrypt plaintext data using the skcipher_request handle. That data
 * structure and how it is filled with data is discussed with the
 * skcipher_request_* functions.
 *
 * Return: 0 if the cipher operation was successful; < 0 if an error occurred
 */
int crypto_skcipher_encrypt(struct skcipher_request *req);

/**
 * crypto_skcipher_decrypt() - decrypt ciphertext
 * @req: reference to the skcipher_request handle that holds all information
 *	 needed to perform the cipher operation
 *
 * Decrypt ciphertext data using the skcipher_request handle. That data
 * structure and how it is filled with data is discussed with the
 * skcipher_request_* functions.
 *
 * Return: 0 if the cipher operation was successful; < 0 if an error occurred
 */
int crypto_skcipher_decrypt(struct skcipher_request *req);

/**
 * crypto_skcipher_export() - export partial state
 * @req: reference to the skcipher_request handle that holds all information
 *	 needed to perform the operation
 * @out: output buffer of sufficient size that can hold the state
 *
 * Export partial state of the transformation. This function dumps the
 * entire state of the ongoing transformation into a provided block of
 * data so it can be @import'ed back later on. This is useful in case
 * you want to save the partial result of the transformation after
 * processing a certain amount of data and reload this partial result
 * later for multiple re-uses. No data processing happens at this point.
 *
 * Return: 0 if the cipher operation was successful; < 0 if an error occurred
 */
int crypto_skcipher_export(struct skcipher_request *req, void *out);

/**
 * crypto_skcipher_import() - import partial state
 * @req: reference to the skcipher_request handle that holds all information
 *	 needed to perform the operation
 * @in: buffer holding the state
 *
 * Import partial state of the transformation. This function loads the
 * entire state of the ongoing transformation from a provided block of
 * data so the transformation can continue from this point onward. No
 * data processing happens at this point.
 *
 * Return: 0 if the cipher operation was successful; < 0 if an error occurred
 */
int crypto_skcipher_import(struct skcipher_request *req, const void *in);
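
/*
 * Incremental processing sketch (illustrative only; the exact flow required
 * by a given driver may differ).  "state" is a caller-provided buffer of
 * crypto_skcipher_statesize() bytes; passing the CRYPTO_SKCIPHER_REQ_*
 * bits through the flags argument of skcipher_request_set_callback() is
 * an assumption based on the flag definitions at the top of this file.
 *
 *	// first, non-final chunk
 *	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_SLEEP |
 *				      CRYPTO_SKCIPHER_REQ_NOTFINAL,
 *				      crypto_req_done, &wait);
 *	skcipher_request_set_crypt(req, src1, dst1, len1, iv);
 *	err = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);
 *	if (!err)
 *		err = crypto_skcipher_export(req, state);
 *
 *	// final chunk, continuing from the exported state
 *	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_SLEEP |
 *				      CRYPTO_SKCIPHER_REQ_CONT,
 *				      crypto_req_done, &wait);
 *	skcipher_request_set_crypt(req, src2, dst2, len2, iv);
 *	err = crypto_skcipher_import(req, state);
 *	if (!err)
 *		err = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);
 */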

/**
 * crypto_lskcipher_encrypt() - encrypt plaintext
 * @tfm: lskcipher handle
 * @src: source buffer
 * @dst: destination buffer
 * @len: number of bytes to process
 * @siv: IV + state for the cipher operation.  The length of the IV must
 *	 comply with the IV size defined by crypto_lskcipher_ivsize.  The
 *	 IV is then followed with a buffer with the length as specified by
 *	 crypto_lskcipher_statesize.
 *
 * Encrypt plaintext data using the lskcipher handle.
 *
 * Return: >=0 if the cipher operation was successful, if positive
 *	   then this many bytes have been left unprocessed;
 *	   < 0 if an error occurred
 */
int crypto_lskcipher_encrypt(struct crypto_lskcipher *tfm, const u8 *src,
			     u8 *dst, unsigned len, u8 *siv);

/**
 * crypto_lskcipher_decrypt() - decrypt ciphertext
 * @tfm: lskcipher handle
 * @src: source buffer
 * @dst: destination buffer
 * @len: number of bytes to process
 * @siv: IV + state for the cipher operation.  The length of the IV must
 *	 comply with the IV size defined by crypto_lskcipher_ivsize.  The
 *	 IV is then followed with a buffer with the length as specified by
 *	 crypto_lskcipher_statesize.
 *
 * Decrypt ciphertext data using the lskcipher handle.
 *
 * Return: >=0 if the cipher operation was successful, if positive
 *	   then this many bytes have been left unprocessed;
 *	   < 0 if an error occurred
 */
int crypto_lskcipher_decrypt(struct crypto_lskcipher *tfm, const u8 *src,
			     u8 *dst, unsigned len, u8 *siv);
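
/*
 * Illustrative one-shot lskcipher sketch.  The algorithm name and the
 * "key"/"keylen"/"iv"/"src"/"dst"/"len" variables are assumptions; note
 * that the siv buffer must have room for both the IV and the algorithm
 * state.  A positive return value from crypto_lskcipher_encrypt() means
 * that many trailing bytes were left unprocessed.
 *
 *	struct crypto_lskcipher *tfm;
 *	u8 *siv;
 *	int err;
 *
 *	tfm = crypto_alloc_lskcipher("cbc(aes)", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *
 *	siv = kzalloc(crypto_lskcipher_ivsize(tfm) +
 *		      crypto_lskcipher_statesize(tfm), GFP_KERNEL);
 *	if (!siv) {
 *		err = -ENOMEM;
 *		goto out_free_tfm;
 *	}
 *
 *	err = crypto_lskcipher_setkey(tfm, key, keylen);
 *	if (!err) {
 *		memcpy(siv, iv, crypto_lskcipher_ivsize(tfm));
 *		err = crypto_lskcipher_encrypt(tfm, src, dst, len, siv);
 *	}
 *
 *	kfree_sensitive(siv);
 * out_free_tfm:
 *	crypto_free_lskcipher(tfm);
 *	return err;
 */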

/**
 * DOC: Symmetric Key Cipher Request Handle
 *
 * The skcipher_request data structure contains all pointers to data
 * required for the symmetric key cipher operation. This includes the cipher
 * handle (which can be used by multiple skcipher_request instances), pointer
 * to plaintext and ciphertext, asynchronous callback function, etc. It acts
 * as a handle to the skcipher_request_* API calls in a similar way as the
 * skcipher handle to the crypto_skcipher_* API calls.
 */

/**
 * crypto_skcipher_reqsize() - obtain size of the request data structure
 * @tfm: cipher handle
 *
 * Return: number of bytes
 */
static inline unsigned int crypto_skcipher_reqsize(struct crypto_skcipher *tfm)
{
	return tfm->reqsize;
}

/**
 * skcipher_request_set_tfm() - update cipher handle reference in request
 * @req: request handle to be modified
 * @tfm: cipher handle that shall be added to the request handle
 *
 * Allow the caller to replace the existing skcipher handle in the request
 * data structure with a different one.
 */
static inline void skcipher_request_set_tfm(struct skcipher_request *req,
					    struct crypto_skcipher *tfm)
{
	req->base.tfm = crypto_skcipher_tfm(tfm);
}

static inline void skcipher_request_set_sync_tfm(struct skcipher_request *req,
					    struct crypto_sync_skcipher *tfm)
{
	skcipher_request_set_tfm(req, &tfm->base);
}

static inline struct skcipher_request *skcipher_request_cast(
	struct crypto_async_request *req)
{
	return container_of(req, struct skcipher_request, base);
}

/**
 * skcipher_request_alloc() - allocate request data structure
 * @tfm: cipher handle to be registered with the request
 * @gfp: memory allocation flag that is handed to kmalloc by the API call.
 *
 * Allocate the request data structure that must be used with the skcipher
 * encrypt and decrypt API calls. During the allocation, the provided skcipher
 * handle is registered in the request data structure.
 *
 * Return: allocated request handle in case of success, or NULL if out of memory
 */
static inline struct skcipher_request *skcipher_request_alloc_noprof(
	struct crypto_skcipher *tfm, gfp_t gfp)
{
	struct skcipher_request *req;

	req = kmalloc_noprof(sizeof(struct skcipher_request) +
			     crypto_skcipher_reqsize(tfm), gfp);

	if (likely(req))
		skcipher_request_set_tfm(req, tfm);

	return req;
}
#define skcipher_request_alloc(...)	alloc_hooks(skcipher_request_alloc_noprof(__VA_ARGS__))

/**
 * skcipher_request_free() - zeroize and free request data structure
 * @req: request data structure cipher handle to be freed
 */
static inline void skcipher_request_free(struct skcipher_request *req)
{
	kfree_sensitive(req);
}

static inline void skcipher_request_zero(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);

	memzero_explicit(req, sizeof(*req) + crypto_skcipher_reqsize(tfm));
}

/**
 * skcipher_request_set_callback() - set asynchronous callback function
 * @req: request handle
 * @flags: specify zero or an ORing of the flags
 *	   CRYPTO_TFM_REQ_MAY_BACKLOG the request queue may back log and
 *	   increase the wait queue beyond the initial maximum size;
 *	   CRYPTO_TFM_REQ_MAY_SLEEP the request processing may sleep
 * @compl: callback function pointer to be registered with the request handle
 * @data: The data pointer refers to memory that is not used by the kernel
 *	  crypto API, but provided to the callback function for it to use. Here,
 *	  the caller can provide a reference to memory the callback function can
 *	  operate on. As the callback function is invoked asynchronously to the
 *	  related functionality, it may need to access data structures of the
 *	  related functionality which can be referenced using this pointer. The
 *	  callback function can access the memory via the "data" field in the
 *	  crypto_async_request data structure provided to the callback function.
 *
 * This function allows setting the callback function that is triggered once the
 * cipher operation completes.
 *
 * The callback function is registered with the skcipher_request handle and
 * must comply with the following template::
 *
 *	void callback_function(struct crypto_async_request *req, int error)
 */
static inline void skcipher_request_set_callback(struct skcipher_request *req,
						 u32 flags,
						 crypto_completion_t compl,
						 void *data)
{
	req->base.complete = compl;
	req->base.data = data;
	req->base.flags = flags;
}

/**
 * skcipher_request_set_crypt() - set data buffers
 * @req: request handle
 * @src: source scatter / gather list
 * @dst: destination scatter / gather list
 * @cryptlen: number of bytes to process from @src
 * @iv: IV for the cipher operation which must comply with the IV size defined
 *      by crypto_skcipher_ivsize
 *
 * This function allows setting of the source data and destination data
 * scatter / gather lists.
 *
 * For encryption, the source is treated as the plaintext and the
 * destination is the ciphertext. For a decryption operation, the use is
 * reversed - the source is the ciphertext and the destination is the plaintext.
 */
static inline void skcipher_request_set_crypt(
	struct skcipher_request *req,
	struct scatterlist *src, struct scatterlist *dst,
	unsigned int cryptlen, void *iv)
{
	req->src = src;
	req->dst = dst;
	req->cryptlen = cryptlen;
	req->iv = iv;
}
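
/*
 * Example (sketch): distinct source and destination lists built with
 * sg_init_one().  Passing the same list for @src and @dst gives an
 * in-place operation.  "ptext", "ctext", "len" and "ivdata" are
 * hypothetical caller buffers; ivdata must be crypto_skcipher_ivsize()
 * bytes long.
 *
 *	struct scatterlist src, dst;
 *
 *	sg_init_one(&src, ptext, len);
 *	sg_init_one(&dst, ctext, len);
 *	skcipher_request_set_crypt(req, &src, &dst, len, ivdata);
 */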

#endif	/* _CRYPTO_SKCIPHER_H */