/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * SM4-CCM AEAD Algorithm using ARMv8 Crypto Extensions
 * as specified in RFC 8998
 * https://datatracker.ietf.org/doc/html/rfc8998
 *
 * Copyright (C) 2022 Tianjia Zhang <tianjia.zhang@linux.alibaba.com>
 */

#include <linux/module.h>
#include <linux/crypto.h>
#include <linux/kernel.h>
#include <linux/cpufeature.h>
#include <asm/neon.h>
#include <crypto/scatterwalk.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/skcipher.h>
#include <crypto/sm4.h>
#include "sm4-ce.h"

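/*
 * These routines are implemented in the accompanying SM4 CE assembly
 * (sm4-ce-ccm-core.S): a multi-block CBC-MAC update, the combined
 * CTR-encrypt + CBC-MAC passes for encryption and decryption, and the
 * final tag computation.
 */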
asmlinkage void sm4_ce_cbcmac_update(const u32 *rkey_enc, u8 *mac,
				     const u8 *src, unsigned int nblocks);
asmlinkage void sm4_ce_ccm_enc(const u32 *rkey_enc, u8 *dst, const u8 *src,
			       u8 *iv, unsigned int nbytes, u8 *mac);
asmlinkage void sm4_ce_ccm_dec(const u32 *rkey_enc, u8 *dst, const u8 *src,
			       u8 *iv, unsigned int nbytes, u8 *mac);
asmlinkage void sm4_ce_ccm_final(const u32 *rkey_enc, u8 *iv, u8 *mac);

static int ccm_setkey(struct crypto_aead *tfm, const u8 *key,
		      unsigned int key_len)
{
	struct sm4_ctx *ctx = crypto_aead_ctx(tfm);

	if (key_len != SM4_KEY_SIZE)
		return -EINVAL;

	kernel_neon_begin();
	sm4_ce_expand_key(key, ctx->rkey_enc, ctx->rkey_dec,
			  crypto_sm4_fk, crypto_sm4_ck);
	kernel_neon_end();

	return 0;
}

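/*
 * RFC 3610/NIST SP 800-38C allow tag lengths of 4, 6, 8, 10, 12, 14
 * or 16 bytes.  Reject odd or too-short values here; the upper bound
 * is enforced by .maxauthsize in the algorithm definition below.
 */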
static int ccm_setauthsize(struct crypto_aead *tfm, unsigned int authsize)
{
	if ((authsize & 1) || authsize < 4)
		return -EINVAL;
	return 0;
}

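/*
 * Build the B0 block in 'info' from the caller-supplied nonce: octet 0
 * carries the flags (Adata bit, encoded tag length, and L-1), followed
 * by the nonce, with the message length in the trailing L octets.  As a
 * side effect, req->iv is turned into the initial counter block A0 by
 * zeroing its trailing length field.
 */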
static int ccm_format_input(u8 info[], struct aead_request *req,
			    unsigned int msglen)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	unsigned int l = req->iv[0] + 1;
	unsigned int m;
	__be32 len;

	/* verify that CCM dimension 'L': 2 <= L <= 8 */
	if (l < 2 || l > 8)
		return -EINVAL;
	if (l < 4 && msglen >> (8 * l))
		return -EOVERFLOW;

	memset(&req->iv[SM4_BLOCK_SIZE - l], 0, l);

	memcpy(info, req->iv, SM4_BLOCK_SIZE);

	m = crypto_aead_authsize(aead);

	/* format flags field per RFC 3610/NIST 800-38C */
	*info |= ((m - 2) / 2) << 3;
	if (req->assoclen)
		*info |= (1 << 6);

	/*
	 * format message length field,
	 * Linux uses a u32 type to represent msglen
	 */
	if (l >= 4)
		l = 4;

	len = cpu_to_be32(msglen);
	memcpy(&info[SM4_BLOCK_SIZE - l], (u8 *)&len + 4 - l, l);

	return 0;
}

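/*
 * Fold the associated data into the CBC-MAC.  On entry 'mac' holds the
 * B0 block; it is encrypted first, then the encoded AAD length and the
 * AAD itself are XORed in block by block.
 */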
static void ccm_calculate_auth_mac(struct aead_request *req, u8 mac[])
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct sm4_ctx *ctx = crypto_aead_ctx(aead);
	struct __packed { __be16 l; __be32 h; } aadlen;
	u32 assoclen = req->assoclen;
	struct scatter_walk walk;
	unsigned int len;

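	/*
	 * Encode the AAD length per RFC 3610: lengths below 0xff00 use a
	 * 2-byte big-endian field, larger ones the 0xfffe marker followed
	 * by a 4-byte big-endian length.
	 */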
	if (assoclen < 0xff00) {
		aadlen.l = cpu_to_be16(assoclen);
		len = 2;
	} else {
		aadlen.l = cpu_to_be16(0xfffe);
		put_unaligned_be32(assoclen, &aadlen.h);
		len = 6;
	}

	sm4_ce_crypt_block(ctx->rkey_enc, mac, mac);
	crypto_xor(mac, (const u8 *)&aadlen, len);

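	/*
	 * Walk the source scatterlist: runs of full blocks go through the
	 * assembly CBC-MAC helper, partial data is XORed into 'mac' and
	 * flushed with a block encryption once a full block accumulates.
	 */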
	scatterwalk_start(&walk, req->src);

	do {
		unsigned int n, orig_n;
		const u8 *p;

		orig_n = scatterwalk_next(&walk, assoclen);
		p = walk.addr;
		n = orig_n;

		while (n > 0) {
			unsigned int l, nblocks;

			if (len == SM4_BLOCK_SIZE) {
				if (n < SM4_BLOCK_SIZE) {
					sm4_ce_crypt_block(ctx->rkey_enc,
							   mac, mac);

					len = 0;
				} else {
					nblocks = n / SM4_BLOCK_SIZE;
					sm4_ce_cbcmac_update(ctx->rkey_enc,
							     mac, p, nblocks);

					p += nblocks * SM4_BLOCK_SIZE;
					n %= SM4_BLOCK_SIZE;

					continue;
				}
			}

			l = min(n, SM4_BLOCK_SIZE - len);
			if (l) {
				crypto_xor(mac + len, p, l);
				len += l;
				p += l;
				n -= l;
			}
		}

		scatterwalk_done_src(&walk, orig_n);
		assoclen -= orig_n;
	} while (assoclen);
}

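/*
 * Common walk loop shared by encryption and decryption.  The initial
 * counter block A0 is saved for the final tag computation before the
 * counter is advanced to A1 for the payload.
 */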
static int ccm_crypt(struct aead_request *req, struct skcipher_walk *walk,
		     u32 *rkey_enc, u8 mac[],
		     void (*sm4_ce_ccm_crypt)(const u32 *rkey_enc, u8 *dst,
					const u8 *src, u8 *iv,
					unsigned int nbytes, u8 *mac))
{
	u8 __aligned(8) ctr0[SM4_BLOCK_SIZE];
	int err = 0;

	/* preserve the initial ctr0 for the TAG */
	memcpy(ctr0, walk->iv, SM4_BLOCK_SIZE);
	crypto_inc(walk->iv, SM4_BLOCK_SIZE);

	kernel_neon_begin();

	if (req->assoclen)
		ccm_calculate_auth_mac(req, mac);

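	/*
	 * Process all but the last chunk of the walk in full blocks.
	 * skcipher_walk_done() may sleep (the walk was started with
	 * atomic=false), so drop the NEON context around it and
	 * re-acquire it for the next pass.
	 */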
	while (walk->nbytes && walk->nbytes != walk->total) {
		unsigned int tail = walk->nbytes % SM4_BLOCK_SIZE;

		sm4_ce_ccm_crypt(rkey_enc, walk->dst.virt.addr,
				 walk->src.virt.addr, walk->iv,
				 walk->nbytes - tail, mac);

		kernel_neon_end();

		err = skcipher_walk_done(walk, tail);

		kernel_neon_begin();
	}

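	/*
	 * Handle the final chunk (including a possible partial block),
	 * then let sm4_ce_ccm_final() mask the CBC-MAC with the encrypted
	 * initial counter block to produce the tag.
	 */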
	if (walk->nbytes) {
		sm4_ce_ccm_crypt(rkey_enc, walk->dst.virt.addr,
				 walk->src.virt.addr, walk->iv,
				 walk->nbytes, mac);

		sm4_ce_ccm_final(rkey_enc, ctr0, mac);

		kernel_neon_end();

		err = skcipher_walk_done(walk, 0);
	} else {
		sm4_ce_ccm_final(rkey_enc, ctr0, mac);

		kernel_neon_end();
	}

	return err;
}

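/*
 * AEAD encrypt: B0 is derived from req->iv and req->cryptlen, the
 * payload is encrypted via the walk, and the computed tag is appended
 * after the ciphertext in the destination.
 */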
static int ccm_encrypt(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct sm4_ctx *ctx = crypto_aead_ctx(aead);
	u8 __aligned(8) mac[SM4_BLOCK_SIZE];
	struct skcipher_walk walk;
	int err;

	err = ccm_format_input(mac, req, req->cryptlen);
	if (err)
		return err;

	err = skcipher_walk_aead_encrypt(&walk, req, false);
	if (err)
		return err;

	err = ccm_crypt(req, &walk, ctx->rkey_enc, mac, sm4_ce_ccm_enc);
	if (err)
		return err;

	/* copy authtag to end of dst */
	scatterwalk_map_and_copy(mac, req->dst, req->assoclen + req->cryptlen,
				 crypto_aead_authsize(aead), 1);

	return 0;
}

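/*
 * AEAD decrypt: req->cryptlen includes the trailing tag, so the message
 * length for B0 is cryptlen - authsize.  The recomputed tag is compared
 * against the received one in constant time via crypto_memneq().
 */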
static int ccm_decrypt(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	unsigned int authsize = crypto_aead_authsize(aead);
	struct sm4_ctx *ctx = crypto_aead_ctx(aead);
	u8 __aligned(8) mac[SM4_BLOCK_SIZE];
	u8 authtag[SM4_BLOCK_SIZE];
	struct skcipher_walk walk;
	int err;

	err = ccm_format_input(mac, req, req->cryptlen - authsize);
	if (err)
		return err;

	err = skcipher_walk_aead_decrypt(&walk, req, false);
	if (err)
		return err;

	err = ccm_crypt(req, &walk, ctx->rkey_enc, mac, sm4_ce_ccm_dec);
	if (err)
		return err;

	/* compare calculated auth tag with the stored one */
	scatterwalk_map_and_copy(authtag, req->src,
				 req->assoclen + req->cryptlen - authsize,
				 authsize, 0);

	if (crypto_memneq(authtag, mac, authsize))
		return -EBADMSG;

	return 0;
}

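/*
 * CCM is built on CTR mode, so the cipher behaves as a stream cipher
 * from the API's point of view: cra_blocksize is 1 even though SM4
 * itself has a 16-byte block.
 */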
static struct aead_alg sm4_ccm_alg = {
	.base = {
		.cra_name		= "ccm(sm4)",
		.cra_driver_name	= "ccm-sm4-ce",
		.cra_priority		= 400,
		.cra_blocksize		= 1,
		.cra_ctxsize		= sizeof(struct sm4_ctx),
		.cra_module		= THIS_MODULE,
	},
	.ivsize		= SM4_BLOCK_SIZE,
	.chunksize	= SM4_BLOCK_SIZE,
	.maxauthsize	= SM4_BLOCK_SIZE,
	.setkey		= ccm_setkey,
	.setauthsize	= ccm_setauthsize,
	.encrypt	= ccm_encrypt,
	.decrypt	= ccm_decrypt,
};

static int __init sm4_ce_ccm_init(void)
{
	return crypto_register_aead(&sm4_ccm_alg);
}

static void __exit sm4_ce_ccm_exit(void)
{
	crypto_unregister_aead(&sm4_ccm_alg);
}

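/*
 * Register only on CPUs that advertise the SM4 Crypto Extensions; this
 * also provides the CPU-feature module alias used for autoloading.
 */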
module_cpu_feature_match(SM4, sm4_ce_ccm_init);
module_exit(sm4_ce_ccm_exit);

MODULE_DESCRIPTION("Synchronous SM4 in CCM mode using ARMv8 Crypto Extensions");
MODULE_ALIAS_CRYPTO("ccm(sm4)");
MODULE_AUTHOR("Tianjia Zhang <tianjia.zhang@linux.alibaba.com>");
MODULE_LICENSE("GPL v2");