// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Crypto API wrapper for the SHA-256 and SHA-224 library functions
 *
 * Copyright (c) Jean-Luc Cooke <jlcooke@certainkey.com>
 * Copyright (c) Andrew McDonald <andrew@mcdonald.org.uk>
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * SHA224 Support Copyright 2007 Intel Corporation <jonathan.lynch@intel.com>
 */
#include <crypto/internal/hash.h>
#include <crypto/internal/sha2.h>
#include <linux/kernel.h>
#include <linux/module.h>

const u8 sha224_zero_message_hash[SHA224_DIGEST_SIZE] = {
	0xd1, 0x4a, 0x02, 0x8c, 0x2a, 0x3a, 0x2b, 0xc9, 0x47,
	0x61, 0x02, 0xbb, 0x28, 0x82, 0x34, 0xc4, 0x15, 0xa2,
	0xb0, 0x1f, 0x82, 0x8e, 0xa6, 0x2a, 0xc5, 0xb3, 0xe4,
	0x2f
};
EXPORT_SYMBOL_GPL(sha224_zero_message_hash);

const u8 sha256_zero_message_hash[SHA256_DIGEST_SIZE] = {
	0xe3, 0xb0, 0xc4, 0x42, 0x98, 0xfc, 0x1c, 0x14,
	0x9a, 0xfb, 0xf4, 0xc8, 0x99, 0x6f, 0xb9, 0x24,
	0x27, 0xae, 0x41, 0xe4, 0x64, 0x9b, 0x93, 0x4c,
	0xa4, 0x95, 0x99, 0x1b, 0x78, 0x52, 0xb8, 0x55
};
EXPORT_SYMBOL_GPL(sha256_zero_message_hash);

static int crypto_sha256_init(struct shash_desc *desc)
{
	sha256_block_init(shash_desc_ctx(desc));
	return 0;
}

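/*
 * Hash only whole blocks and return the number of leftover bytes; with
 * CRYPTO_AHASH_ALG_BLOCK_ONLY the shash core buffers that remainder for the
 * next call.
 */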
static inline int crypto_sha256_update(struct shash_desc *desc, const u8 *data,
				       unsigned int len, bool force_generic)
{
	struct crypto_sha256_state *sctx = shash_desc_ctx(desc);
	int remain = len % SHA256_BLOCK_SIZE;

	sctx->count += len - remain;
	sha256_choose_blocks(sctx->state, data, len / SHA256_BLOCK_SIZE,
			     force_generic, !force_generic);
	return remain;
}

static int crypto_sha256_update_generic(struct shash_desc *desc, const u8 *data,
					unsigned int len)
{
	return crypto_sha256_update(desc, data, len, true);
}

static int crypto_sha256_update_lib(struct shash_desc *desc, const u8 *data,
				    unsigned int len)
{
	sha256_update(shash_desc_ctx(desc), data, len);
	return 0;
}

static int crypto_sha256_update_arch(struct shash_desc *desc, const u8 *data,
				     unsigned int len)
{
	return crypto_sha256_update(desc, data, len, false);
}

static int crypto_sha256_final_lib(struct shash_desc *desc, u8 *out)
{
	sha256_final(shash_desc_ctx(desc), out);
	return 0;
}

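/*
 * Consume all whole blocks, copy the tail into the partial-block buffer that
 * directly follows the state in the descriptor context, and let
 * sha256_finup() pad the message and write out the digest.
 */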
static __always_inline int crypto_sha256_finup(struct shash_desc *desc,
					       const u8 *data,
					       unsigned int len, u8 *out,
					       bool force_generic)
{
	struct crypto_sha256_state *sctx = shash_desc_ctx(desc);
	unsigned int remain = len;
	u8 *buf;

	if (len >= SHA256_BLOCK_SIZE)
		remain = crypto_sha256_update(desc, data, len, force_generic);
	sctx->count += remain;
	buf = memcpy(sctx + 1, data + len - remain, remain);
	sha256_finup(sctx, buf, remain, out,
		     crypto_shash_digestsize(desc->tfm), force_generic,
		     !force_generic);
	return 0;
}

static int crypto_sha256_finup_generic(struct shash_desc *desc, const u8 *data,
				       unsigned int len, u8 *out)
{
	return crypto_sha256_finup(desc, data, len, out, true);
}

static int crypto_sha256_finup_arch(struct shash_desc *desc, const u8 *data,
				    unsigned int len, u8 *out)
{
	return crypto_sha256_finup(desc, data, len, out, false);
}

static int crypto_sha256_digest_generic(struct shash_desc *desc, const u8 *data,
					unsigned int len, u8 *out)
{
	crypto_sha256_init(desc);
	return crypto_sha256_finup_generic(desc, data, len, out);
}

static int crypto_sha256_digest_lib(struct shash_desc *desc, const u8 *data,
				    unsigned int len, u8 *out)
{
	sha256(data, len, out);
	return 0;
}

static int crypto_sha256_digest_arch(struct shash_desc *desc, const u8 *data,
				     unsigned int len, u8 *out)
{
	crypto_sha256_init(desc);
	return crypto_sha256_finup_arch(desc, data, len, out);
}

static int crypto_sha224_init(struct shash_desc *desc)
{
	sha224_block_init(shash_desc_ctx(desc));
	return 0;
}

static int crypto_sha224_final_lib(struct shash_desc *desc, u8 *out)
{
	sha224_final(shash_desc_ctx(desc), out);
	return 0;
}

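/*
 * Export format of the "-lib" algorithms: the library's struct sha256_state
 * with ->count rounded down to a whole number of blocks, followed by a single
 * byte giving the number of buffered partial-block bytes.  Import reverses
 * this by adding the partial length back onto ->count.
 */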
static int crypto_sha256_import_lib(struct shash_desc *desc, const void *in)
{
	struct sha256_state *sctx = shash_desc_ctx(desc);
	const u8 *p = in;

	memcpy(sctx, p, sizeof(*sctx));
	p += sizeof(*sctx);
	sctx->count += *p;
	return 0;
}

static int crypto_sha256_export_lib(struct shash_desc *desc, void *out)
{
	struct sha256_state *sctx0 = shash_desc_ctx(desc);
	struct sha256_state sctx = *sctx0;
	unsigned int partial;
	u8 *p = out;

	partial = sctx.count % SHA256_BLOCK_SIZE;
	sctx.count -= partial;
	memcpy(p, &sctx, sizeof(sctx));
	p += sizeof(sctx);
	*p = partial;
	return 0;
}

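/*
 * Each algorithm comes in three flavours: the portable generic code, a thin
 * wrapper around the SHA-256 library interface ("-lib"), and an
 * arch-optimized variant that is only registered when it actually differs
 * from the generic implementation.
 */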
static struct shash_alg algs[] = {
	{
		.base.cra_name		= "sha256",
		.base.cra_driver_name	= "sha256-generic",
		.base.cra_priority	= 100,
		.base.cra_flags		= CRYPTO_AHASH_ALG_BLOCK_ONLY |
					  CRYPTO_AHASH_ALG_FINUP_MAX,
		.base.cra_blocksize	= SHA256_BLOCK_SIZE,
		.base.cra_module	= THIS_MODULE,
		.digestsize		= SHA256_DIGEST_SIZE,
		.init			= crypto_sha256_init,
		.update			= crypto_sha256_update_generic,
		.finup			= crypto_sha256_finup_generic,
		.digest			= crypto_sha256_digest_generic,
		.descsize		= sizeof(struct crypto_sha256_state),
	},
	{
		.base.cra_name		= "sha224",
		.base.cra_driver_name	= "sha224-generic",
		.base.cra_priority	= 100,
		.base.cra_flags		= CRYPTO_AHASH_ALG_BLOCK_ONLY |
					  CRYPTO_AHASH_ALG_FINUP_MAX,
		.base.cra_blocksize	= SHA224_BLOCK_SIZE,
		.base.cra_module	= THIS_MODULE,
		.digestsize		= SHA224_DIGEST_SIZE,
		.init			= crypto_sha224_init,
		.update			= crypto_sha256_update_generic,
		.finup			= crypto_sha256_finup_generic,
		.descsize		= sizeof(struct crypto_sha256_state),
	},
	{
		.base.cra_name		= "sha256",
		.base.cra_driver_name	= "sha256-lib",
		.base.cra_blocksize	= SHA256_BLOCK_SIZE,
		.base.cra_module	= THIS_MODULE,
		.digestsize		= SHA256_DIGEST_SIZE,
		.init			= crypto_sha256_init,
		.update			= crypto_sha256_update_lib,
		.final			= crypto_sha256_final_lib,
		.digest			= crypto_sha256_digest_lib,
		.descsize		= sizeof(struct sha256_state),
		.statesize		= sizeof(struct crypto_sha256_state) +
					  SHA256_BLOCK_SIZE + 1,
		.import			= crypto_sha256_import_lib,
		.export			= crypto_sha256_export_lib,
	},
	{
		.base.cra_name		= "sha224",
		.base.cra_driver_name	= "sha224-lib",
		.base.cra_blocksize	= SHA224_BLOCK_SIZE,
		.base.cra_module	= THIS_MODULE,
		.digestsize		= SHA224_DIGEST_SIZE,
		.init			= crypto_sha224_init,
		.update			= crypto_sha256_update_lib,
		.final			= crypto_sha224_final_lib,
		.descsize		= sizeof(struct sha256_state),
		.statesize		= sizeof(struct crypto_sha256_state) +
					  SHA256_BLOCK_SIZE + 1,
		.import			= crypto_sha256_import_lib,
		.export			= crypto_sha256_export_lib,
	},
	{
		.base.cra_name		= "sha256",
		.base.cra_driver_name	= "sha256-" __stringify(ARCH),
		.base.cra_priority	= 300,
		.base.cra_flags		= CRYPTO_AHASH_ALG_BLOCK_ONLY |
					  CRYPTO_AHASH_ALG_FINUP_MAX,
		.base.cra_blocksize	= SHA256_BLOCK_SIZE,
		.base.cra_module	= THIS_MODULE,
		.digestsize		= SHA256_DIGEST_SIZE,
		.init			= crypto_sha256_init,
		.update			= crypto_sha256_update_arch,
		.finup			= crypto_sha256_finup_arch,
		.digest			= crypto_sha256_digest_arch,
		.descsize		= sizeof(struct crypto_sha256_state),
	},
	{
		.base.cra_name		= "sha224",
		.base.cra_driver_name	= "sha224-" __stringify(ARCH),
		.base.cra_priority	= 300,
		.base.cra_flags		= CRYPTO_AHASH_ALG_BLOCK_ONLY |
					  CRYPTO_AHASH_ALG_FINUP_MAX,
		.base.cra_blocksize	= SHA224_BLOCK_SIZE,
		.base.cra_module	= THIS_MODULE,
		.digestsize		= SHA224_DIGEST_SIZE,
		.init			= crypto_sha224_init,
		.update			= crypto_sha256_update_arch,
		.finup			= crypto_sha256_finup_arch,
		.descsize		= sizeof(struct crypto_sha256_state),
	},
};

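/* Number of algs[] entries actually registered (and later unregistered). */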
static unsigned int num_algs;

static int __init crypto_sha256_mod_init(void)
{
	/* register the arch flavours only if they differ from generic */
	num_algs = ARRAY_SIZE(algs);
	BUILD_BUG_ON(ARRAY_SIZE(algs) <= 2);
	if (!sha256_is_arch_optimized())
		num_algs -= 2;
	return crypto_register_shashes(algs, num_algs);
}
module_init(crypto_sha256_mod_init);

static void __exit crypto_sha256_mod_exit(void)
{
	crypto_unregister_shashes(algs, num_algs);
}
module_exit(crypto_sha256_mod_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Crypto API wrapper for the SHA-256 and SHA-224 library functions");

MODULE_ALIAS_CRYPTO("sha256");
MODULE_ALIAS_CRYPTO("sha256-generic");
MODULE_ALIAS_CRYPTO("sha256-" __stringify(ARCH));
MODULE_ALIAS_CRYPTO("sha224");
MODULE_ALIAS_CRYPTO("sha224-generic");
MODULE_ALIAS_CRYPTO("sha224-" __stringify(ARCH));