// SPDX-License-Identifier: GPL-2.0
/*
 * Copyright (C) 2017 Marvell
 *
 * Antoine Tenart <antoine.tenart@free-electrons.com>
 */

#include <crypto/aes.h>
#include <crypto/hmac.h>
#include <crypto/md5.h>
#include <crypto/sha1.h>
#include <crypto/sha2.h>
#include <crypto/sha3.h>
#include <crypto/skcipher.h>
#include <crypto/sm3.h>
#include <crypto/internal/cipher.h>
#include <linux/device.h>
#include <linux/dma-mapping.h>
#include <linux/dmapool.h>

#include "safexcel.h"

struct safexcel_ahash_ctx {
	struct safexcel_context base;

	u32 alg;
	u8  key_sz;
	bool cbcmac;
	bool do_fallback;
	bool fb_init_done;
	bool fb_do_setkey;

	struct crypto_aes_ctx *aes;
	struct crypto_ahash *fback;
	struct crypto_shash *shpre;
	struct shash_desc *shdesc;
};

struct safexcel_ahash_req {
	bool last_req;
	bool finish;
	bool hmac;
	bool needs_inv;
	bool hmac_zlen;
	bool len_is_le;
	bool not_first;
	bool xcbcmac;

	int nents;
	dma_addr_t result_dma;

	u32 digest;

	u8 state_sz;    /* expected state size, only set once */
	u8 block_sz;    /* block size, only set once */
	u8 digest_sz;   /* output digest size, only set once */
	__le32 state[SHA3_512_BLOCK_SIZE /
		     sizeof(__le32)] __aligned(sizeof(__le32));

	u64 len;
	u64 processed;

	u8 cache[HASH_CACHE_SIZE] __aligned(sizeof(u32));
	dma_addr_t cache_dma;
	unsigned int cache_sz;

	u8 cache_next[HASH_CACHE_SIZE] __aligned(sizeof(u32));
};

static inline u64 safexcel_queued_len(struct safexcel_ahash_req *req)
{
	return req->len - req->processed;
}
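/*
 * Illustrative example: with req->len == 150 (total accepted by update())
 * and req->processed == 64 (already run through the engine),
 * safexcel_queued_len() reports 86 bytes still pending in the cache
 * and/or the current request.
 */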

static void safexcel_hash_token(struct safexcel_command_desc *cdesc,
				u32 input_length, u32 result_length,
				bool cbcmac)
{
	struct safexcel_token *token =
		(struct safexcel_token *)cdesc->control_data.token;

	token[0].opcode = EIP197_TOKEN_OPCODE_DIRECTION;
	token[0].packet_length = input_length;
	token[0].instructions = EIP197_TOKEN_INS_TYPE_HASH;

	input_length &= 15;
	if (unlikely(cbcmac && input_length)) {
		token[0].stat = 0;
		token[1].opcode = EIP197_TOKEN_OPCODE_INSERT;
		token[1].packet_length = 16 - input_length;
		token[1].stat = EIP197_TOKEN_STAT_LAST_HASH;
		token[1].instructions = EIP197_TOKEN_INS_TYPE_HASH;
	} else {
		token[0].stat = EIP197_TOKEN_STAT_LAST_HASH;
		eip197_noop_token(&token[1]);
	}

	token[2].opcode = EIP197_TOKEN_OPCODE_INSERT;
	token[2].stat = EIP197_TOKEN_STAT_LAST_HASH |
			EIP197_TOKEN_STAT_LAST_PACKET;
	token[2].packet_length = result_length;
	token[2].instructions = EIP197_TOKEN_INS_TYPE_OUTPUT |
				EIP197_TOKEN_INS_INSERT_HASH_DIGEST;

	eip197_noop_token(&token[3]);
}
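/*
 * The token program built above, roughly:
 *   token[0] DIRECTION: hash input_length bytes of input data
 *   token[1] INSERT:    only for CBC-MAC with a ragged final block, feed
 *                       16 - (input_length & 15) zero bytes into the hash
 *                       so the engine always sees whole AES blocks;
 *                       otherwise a no-op
 *   token[2] INSERT:    append the result_length-byte digest to the output
 *   token[3]            no-op terminator
 * E.g. a 13-byte CBC-MAC input gets 3 zero bytes appended by token[1].
 */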

static void safexcel_context_control(struct safexcel_ahash_ctx *ctx,
				     struct safexcel_ahash_req *req,
				     struct safexcel_command_desc *cdesc)
{
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	u64 count = 0;

	cdesc->control_data.control0 = ctx->alg;
	cdesc->control_data.control1 = 0;

	/*
	 * Copy the input digest if needed, and set up the context
	 * fields. Do this now as we need it to set up the first command
	 * descriptor.
	 */
	if (unlikely(req->digest == CONTEXT_CONTROL_DIGEST_XCM)) {
		if (req->xcbcmac)
			memcpy(ctx->base.ctxr->data, &ctx->base.ipad, ctx->key_sz);
		else
			memcpy(ctx->base.ctxr->data, req->state, req->state_sz);

		if (!req->finish && req->xcbcmac)
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_DIGEST_XCM |
				CONTEXT_CONTROL_TYPE_HASH_OUT  |
				CONTEXT_CONTROL_NO_FINISH_HASH |
				CONTEXT_CONTROL_SIZE(req->state_sz /
						     sizeof(u32));
		else
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_DIGEST_XCM |
				CONTEXT_CONTROL_TYPE_HASH_OUT  |
				CONTEXT_CONTROL_SIZE(req->state_sz /
						     sizeof(u32));
		return;
	} else if (!req->processed) {
		/* First - and possibly only - block of basic hash only */
		if (req->finish)
			cdesc->control_data.control0 |= req->digest |
				CONTEXT_CONTROL_TYPE_HASH_OUT |
				CONTEXT_CONTROL_RESTART_HASH  |
				/* ensure it's not 0! */
				CONTEXT_CONTROL_SIZE(1);
		else
			cdesc->control_data.control0 |= req->digest |
				CONTEXT_CONTROL_TYPE_HASH_OUT  |
				CONTEXT_CONTROL_RESTART_HASH   |
				CONTEXT_CONTROL_NO_FINISH_HASH |
				/* ensure it's not 0! */
				CONTEXT_CONTROL_SIZE(1);
		return;
	}

	/* Hash continuation or HMAC, setup (inner) digest from state */
	memcpy(ctx->base.ctxr->data, req->state, req->state_sz);

	if (req->finish) {
		/* Compute digest count for hash/HMAC finish operations */
		if ((req->digest == CONTEXT_CONTROL_DIGEST_PRECOMPUTED) ||
		    req->hmac_zlen || (req->processed != req->block_sz)) {
			count = req->processed / EIP197_COUNTER_BLOCK_SIZE;

			/* This is a hardware limitation, as the
			 * counter must fit into a u32. This represents
			 * a fairly big amount of input data, so we
			 * shouldn't see this.
			 */
			if (unlikely(count & 0xffffffff00000000ULL)) {
				dev_warn(priv->dev,
					 "Input data is too big\n");
				return;
			}
		}

		if ((req->digest == CONTEXT_CONTROL_DIGEST_PRECOMPUTED) ||
		    /* Special case: zero length HMAC */
		    req->hmac_zlen ||
		    /* PE HW < 4.4 cannot do HMAC continue, fake using hash */
		    (req->processed != req->block_sz)) {
			/* Basic hash continue operation, need digest + cnt */
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_SIZE((req->state_sz >> 2) + 1) |
				CONTEXT_CONTROL_TYPE_HASH_OUT |
				CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
			/* For zero-len HMAC, don't finalize, already padded! */
			if (req->hmac_zlen)
				cdesc->control_data.control0 |=
					CONTEXT_CONTROL_NO_FINISH_HASH;
			cdesc->control_data.control1 |=
				CONTEXT_CONTROL_DIGEST_CNT;
			ctx->base.ctxr->data[req->state_sz >> 2] =
				cpu_to_le32(count);
			req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;

			/* Clear zero-length HMAC flag for next operation! */
			req->hmac_zlen = false;
		} else { /* HMAC */
			/* Need outer digest for HMAC finalization */
			memcpy(ctx->base.ctxr->data + (req->state_sz >> 2),
			       &ctx->base.opad, req->state_sz);

			/* Single pass HMAC - no digest count */
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_SIZE(req->state_sz >> 1) |
				CONTEXT_CONTROL_TYPE_HASH_OUT |
				CONTEXT_CONTROL_DIGEST_HMAC;
		}
	} else { /* Hash continuation, do not finish yet */
		cdesc->control_data.control0 |=
			CONTEXT_CONTROL_SIZE(req->state_sz >> 2) |
			CONTEXT_CONTROL_DIGEST_PRECOMPUTED |
			CONTEXT_CONTROL_TYPE_HASH_OUT |
			CONTEXT_CONTROL_NO_FINISH_HASH;
	}
}
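/*
 * Summary of the three setups above: XCM (CBC-MAC/XCBC/CMAC) loads the key
 * material or running MAC into the context record; the first block of a
 * basic hash restarts from the algorithm's standard IV; anything else
 * reloads the saved inner state, with a finishing HMAC either running as a
 * native single-pass HMAC (inner + outer digest loaded) or, where the
 * hardware cannot continue an HMAC, as a plain precomputed-digest hash
 * with an explicit block counter.
 */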

static int safexcel_ahash_enqueue(struct ahash_request *areq);

static int safexcel_handle_req_result(struct safexcel_crypto_priv *priv,
				      int ring,
				      struct crypto_async_request *async,
				      bool *should_complete, int *ret)
{
	struct safexcel_result_desc *rdesc;
	struct ahash_request *areq = ahash_request_cast(async);
	struct crypto_ahash *ahash = crypto_ahash_reqtfm(areq);
	struct safexcel_ahash_req *sreq = ahash_request_ctx_dma(areq);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(ahash);
	u64 cache_len;

	*ret = 0;

	rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
	if (IS_ERR(rdesc)) {
		dev_err(priv->dev,
			"hash: result: could not retrieve the result descriptor\n");
		*ret = PTR_ERR(rdesc);
	} else {
		*ret = safexcel_rdesc_check_errors(priv, rdesc);
	}

	safexcel_complete(priv, ring);

	if (sreq->nents) {
		dma_unmap_sg(priv->dev, areq->src,
			     sg_nents_for_len(areq->src, areq->nbytes),
			     DMA_TO_DEVICE);
		sreq->nents = 0;
	}

	if (sreq->result_dma) {
		dma_unmap_single(priv->dev, sreq->result_dma, sreq->digest_sz,
				 DMA_FROM_DEVICE);
		sreq->result_dma = 0;
	}

	if (sreq->cache_dma) {
		dma_unmap_single(priv->dev, sreq->cache_dma, sreq->cache_sz,
				 DMA_TO_DEVICE);
		sreq->cache_dma = 0;
		sreq->cache_sz = 0;
	}

	if (sreq->finish) {
		if (sreq->hmac &&
		    (sreq->digest != CONTEXT_CONTROL_DIGEST_HMAC)) {
			/* Faking HMAC using hash - need to do outer hash */
			memcpy(sreq->cache, sreq->state,
			       crypto_ahash_digestsize(ahash));

			memcpy(sreq->state, &ctx->base.opad, sreq->digest_sz);

			sreq->len = sreq->block_sz +
				    crypto_ahash_digestsize(ahash);
			sreq->processed = sreq->block_sz;
			sreq->hmac = 0;

			if (priv->flags & EIP197_TRC_CACHE)
				ctx->base.needs_inv = true;
			areq->nbytes = 0;
			safexcel_ahash_enqueue(areq);

			*should_complete = false; /* Not done yet */
			return 1;
		}

		memcpy(areq->result, sreq->state,
		       crypto_ahash_digestsize(ahash));
	}

	cache_len = safexcel_queued_len(sreq);
	if (cache_len)
		memcpy(sreq->cache, sreq->cache_next, cache_len);

	*should_complete = true;

	return 1;
}
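/*
 * Note on the "faking HMAC" branch above: when the engine could not do a
 * native HMAC finish, the inner digest that just completed is turned into
 * message data in the cache, the precomputed opad state is loaded as the
 * new starting state, and the request is re-enqueued. This handler thus
 * runs twice for such a request; only the second pass, which hashes
 * opad-state || inner-digest, sets *should_complete.
 */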

static int safexcel_ahash_send_req(struct crypto_async_request *async, int ring,
				   int *commands, int *results)
{
	struct ahash_request *areq = ahash_request_cast(async);
	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	struct safexcel_command_desc *cdesc, *first_cdesc = NULL;
	struct safexcel_result_desc *rdesc;
	struct scatterlist *sg;
	struct safexcel_token *dmmy;
	int i, extra = 0, n_cdesc = 0, ret = 0, cache_len, skip = 0;
	u64 queued, len;

	queued = safexcel_queued_len(req);
	if (queued <= HASH_CACHE_SIZE)
		cache_len = queued;
	else
		cache_len = queued - areq->nbytes;

	if (!req->finish && !req->last_req) {
		/* If this is not the last request and the queued data does not
		 * fit into full cache blocks, cache it for the next send call.
		 */
		extra = queued & (HASH_CACHE_SIZE - 1);

		/* If this is not the last request and the queued data
		 * is a multiple of a block, cache the last one for now.
		 */
		if (!extra)
			extra = HASH_CACHE_SIZE;

		sg_pcopy_to_buffer(areq->src, sg_nents(areq->src),
				   req->cache_next, extra,
				   areq->nbytes - extra);

		queued -= extra;

		if (!queued) {
			*commands = 0;
			*results = 0;
			return 0;
		}

		extra = 0;
	}

	if (unlikely(req->xcbcmac && req->processed > AES_BLOCK_SIZE)) {
		if (unlikely(cache_len < AES_BLOCK_SIZE)) {
			/*
			 * Cache contains less than 1 full block, complete.
			 */
			extra = AES_BLOCK_SIZE - cache_len;
			if (queued > cache_len) {
				/* More data follows: borrow bytes */
				u64 tmp = queued - cache_len;

				skip = min_t(u64, tmp, extra);
				sg_pcopy_to_buffer(areq->src,
					sg_nents(areq->src),
					req->cache + cache_len,
					skip, 0);
			}
			extra -= skip;
			memset(req->cache + cache_len + skip, 0, extra);
			if (!ctx->cbcmac && extra) {
				// 10- padding for XCBCMAC & CMAC
				req->cache[cache_len + skip] = 0x80;
				// HW will use K2 instead of K3 - compensate!
				for (i = 0; i < AES_BLOCK_SIZE / 4; i++) {
					u32 *cache = (void *)req->cache;
					u32 *ipad = ctx->base.ipad.word;
					u32 x;

					x = ipad[i] ^ ipad[i + 4];
					cache[i] ^= swab32(x);
				}
			}
			cache_len = AES_BLOCK_SIZE;
			queued = queued + extra;
		}

		/* XCBC continue: XOR previous result into 1st word */
		crypto_xor(req->cache, (const u8 *)req->state, AES_BLOCK_SIZE);
	}

	len = queued;
	/* Add a command descriptor for the cached data, if any */
	if (cache_len) {
		req->cache_dma = dma_map_single(priv->dev, req->cache,
						cache_len, DMA_TO_DEVICE);
		if (dma_mapping_error(priv->dev, req->cache_dma))
			return -EINVAL;

		req->cache_sz = cache_len;
		first_cdesc = safexcel_add_cdesc(priv, ring, 1,
						 (cache_len == len),
						 req->cache_dma, cache_len,
						 len, ctx->base.ctxr_dma,
						 &dmmy);
		if (IS_ERR(first_cdesc)) {
			ret = PTR_ERR(first_cdesc);
			goto unmap_cache;
		}
		n_cdesc++;

		queued -= cache_len;
		if (!queued)
			goto send_command;
	}

	/* Now handle the current ahash request buffer(s) */
	req->nents = dma_map_sg(priv->dev, areq->src,
				sg_nents_for_len(areq->src,
						 areq->nbytes),
				DMA_TO_DEVICE);
	if (!req->nents) {
		ret = -ENOMEM;
		goto cdesc_rollback;
	}

	for_each_sg(areq->src, sg, req->nents, i) {
		int sglen = sg_dma_len(sg);

		if (unlikely(sglen <= skip)) {
			skip -= sglen;
			continue;
		}

		/* Do not overflow the request */
		if ((queued + skip) <= sglen)
			sglen = queued;
		else
			sglen -= skip;

		cdesc = safexcel_add_cdesc(priv, ring, !n_cdesc,
					   !(queued - sglen),
					   sg_dma_address(sg) + skip, sglen,
					   len, ctx->base.ctxr_dma, &dmmy);
		if (IS_ERR(cdesc)) {
			ret = PTR_ERR(cdesc);
			goto unmap_sg;
		}

		if (!n_cdesc)
			first_cdesc = cdesc;
		n_cdesc++;

		queued -= sglen;
		if (!queued)
			break;
		skip = 0;
	}

send_command:
	/* Setup the context options */
	safexcel_context_control(ctx, req, first_cdesc);

	/* Add the token */
	safexcel_hash_token(first_cdesc, len, req->digest_sz, ctx->cbcmac);

	req->result_dma = dma_map_single(priv->dev, req->state, req->digest_sz,
					 DMA_FROM_DEVICE);
	if (dma_mapping_error(priv->dev, req->result_dma)) {
		ret = -EINVAL;
		goto unmap_sg;
	}

	/* Add a result descriptor */
	rdesc = safexcel_add_rdesc(priv, ring, 1, 1, req->result_dma,
				   req->digest_sz);
	if (IS_ERR(rdesc)) {
		ret = PTR_ERR(rdesc);
		goto unmap_result;
	}

	safexcel_rdr_req_set(priv, ring, rdesc, &areq->base);

	req->processed += len - extra;

	*commands = n_cdesc;
	*results = 1;
	return 0;

unmap_result:
	dma_unmap_single(priv->dev, req->result_dma, req->digest_sz,
			 DMA_FROM_DEVICE);
unmap_sg:
	if (req->nents) {
		dma_unmap_sg(priv->dev, areq->src,
			     sg_nents_for_len(areq->src, areq->nbytes),
			     DMA_TO_DEVICE);
		req->nents = 0;
	}
cdesc_rollback:
	for (i = 0; i < n_cdesc; i++)
		safexcel_ring_rollback_wptr(priv, &priv->ring[ring].cdr);
unmap_cache:
	if (req->cache_dma) {
		dma_unmap_single(priv->dev, req->cache_dma, req->cache_sz,
				 DMA_TO_DEVICE);
		req->cache_dma = 0;
		req->cache_sz = 0;
	}

	return ret;
}
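/*
 * Shape of a submitted request, roughly: one command descriptor for the
 * cached partial block (if any), then one per DMA segment of areq->src,
 * all pointing at the same context record, followed by a single result
 * descriptor that lands the (intermediate) digest in req->state. The error
 * path unwinds in reverse order: result mapping, source scatterlist,
 * command-ring write pointer, cache mapping.
 */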

static int safexcel_handle_inv_result(struct safexcel_crypto_priv *priv,
				      int ring,
				      struct crypto_async_request *async,
				      bool *should_complete, int *ret)
{
	struct safexcel_result_desc *rdesc;
	struct ahash_request *areq = ahash_request_cast(async);
	struct crypto_ahash *ahash = crypto_ahash_reqtfm(areq);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(ahash);
	int enq_ret;

	*ret = 0;

	rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
	if (IS_ERR(rdesc)) {
		dev_err(priv->dev,
			"hash: invalidate: could not retrieve the result descriptor\n");
		*ret = PTR_ERR(rdesc);
	} else {
		*ret = safexcel_rdesc_check_errors(priv, rdesc);
	}

	safexcel_complete(priv, ring);

	if (ctx->base.exit_inv) {
		dma_pool_free(priv->context_pool, ctx->base.ctxr,
			      ctx->base.ctxr_dma);

		*should_complete = true;
		return 1;
	}

	ring = safexcel_select_ring(priv);
	ctx->base.ring = ring;

	spin_lock_bh(&priv->ring[ring].queue_lock);
	enq_ret = crypto_enqueue_request(&priv->ring[ring].queue, async);
	spin_unlock_bh(&priv->ring[ring].queue_lock);

	if (enq_ret != -EINPROGRESS)
		*ret = enq_ret;

	queue_work(priv->ring[ring].workqueue,
		   &priv->ring[ring].work_data.work);

	*should_complete = false;

	return 1;
}

static int safexcel_handle_result(struct safexcel_crypto_priv *priv, int ring,
				  struct crypto_async_request *async,
				  bool *should_complete, int *ret)
{
	struct ahash_request *areq = ahash_request_cast(async);
	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
	int err;

	BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && req->needs_inv);

	if (req->needs_inv) {
		req->needs_inv = false;
		err = safexcel_handle_inv_result(priv, ring, async,
						 should_complete, ret);
	} else {
		err = safexcel_handle_req_result(priv, ring, async,
						 should_complete, ret);
	}

	return err;
}

static int safexcel_ahash_send_inv(struct crypto_async_request *async,
				   int ring, int *commands, int *results)
{
	struct ahash_request *areq = ahash_request_cast(async);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	int ret;

	ret = safexcel_invalidate_cache(async, ctx->base.priv,
					ctx->base.ctxr_dma, ring);
	if (unlikely(ret))
		return ret;

	*commands = 1;
	*results = 1;

	return 0;
}

static int safexcel_ahash_send(struct crypto_async_request *async,
			       int ring, int *commands, int *results)
{
	struct ahash_request *areq = ahash_request_cast(async);
	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
	int ret;

	if (req->needs_inv)
		ret = safexcel_ahash_send_inv(async, ring, commands, results);
	else
		ret = safexcel_ahash_send_req(async, ring, commands, results);

	return ret;
}

static int safexcel_ahash_exit_inv(struct crypto_tfm *tfm)
{
	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	EIP197_REQUEST_ON_STACK(req, ahash, EIP197_AHASH_REQ_SIZE);
	struct safexcel_ahash_req *rctx = ahash_request_ctx_dma(req);
	DECLARE_CRYPTO_WAIT(result);
	int ring = ctx->base.ring;
	int err;

	memset(req, 0, EIP197_AHASH_REQ_SIZE);

	/* create invalidation request */
	init_completion(&result.completion);
	ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				   crypto_req_done, &result);

	ahash_request_set_tfm(req, __crypto_ahash_cast(tfm));
	ctx = crypto_tfm_ctx(req->base.tfm);
	ctx->base.exit_inv = true;
	rctx->needs_inv = true;

	spin_lock_bh(&priv->ring[ring].queue_lock);
	crypto_enqueue_request(&priv->ring[ring].queue, &req->base);
	spin_unlock_bh(&priv->ring[ring].queue_lock);

	queue_work(priv->ring[ring].workqueue,
		   &priv->ring[ring].work_data.work);

	err = crypto_wait_req(-EINPROGRESS, &result);

	if (err) {
		dev_warn(priv->dev, "hash: completion error (%d)\n", err);
		return err;
	}

	return 0;
}

/* safexcel_ahash_cache: cache data until at least one request can be sent to
 * the engine, i.e. when there is at least one block's worth of data in the
 * pipe.
 */
static int safexcel_ahash_cache(struct ahash_request *areq)
{
	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
	u64 cache_len;

	/* cache_len: everything accepted by the driver but not sent yet,
	 * i.e. total size handled by update() minus total size handled by
	 * send()
	 */
	cache_len = safexcel_queued_len(req);

	/*
	 * In case there aren't enough bytes to proceed (less than a
	 * block size), cache the data until we have enough.
	 */
	if (cache_len + areq->nbytes <= HASH_CACHE_SIZE) {
		sg_pcopy_to_buffer(areq->src, sg_nents(areq->src),
				   req->cache + cache_len,
				   areq->nbytes, 0);
		return 0;
	}

	/* We couldn't cache all the data */
	return -E2BIG;
}
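/*
 * Illustrative (assuming a HASH_CACHE_SIZE of 64 bytes): 40 bytes already
 * cached plus a 20-byte update fits (60 <= 64) and is simply copied into
 * req->cache; a 30-byte update would overflow (70 > 64), so -E2BIG tells
 * the caller to enqueue the request and let the engine drain the data.
 */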

static int safexcel_ahash_enqueue(struct ahash_request *areq)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	int ret, ring;

	req->needs_inv = false;

	if (ctx->base.ctxr) {
		if (priv->flags & EIP197_TRC_CACHE && !ctx->base.needs_inv &&
		     /* invalidate for *any* non-XCBC continuation */
		   ((req->not_first && !req->xcbcmac) ||
		     /* invalidate if (i)digest changed */
		     memcmp(ctx->base.ctxr->data, req->state, req->state_sz) ||
		     /* invalidate for HMAC finish with odigest changed */
		     (req->finish && req->hmac &&
		      memcmp(ctx->base.ctxr->data + (req->state_sz>>2),
			     &ctx->base.opad, req->state_sz))))
			/*
			 * We're still setting needs_inv here, even though it is
			 * cleared right away, because the needs_inv flag can be
			 * set in other functions and we want to keep the same
			 * logic.
			 */
			ctx->base.needs_inv = true;

		if (ctx->base.needs_inv) {
			ctx->base.needs_inv = false;
			req->needs_inv = true;
		}
	} else {
		ctx->base.ring = safexcel_select_ring(priv);
		ctx->base.ctxr = dma_pool_zalloc(priv->context_pool,
						 EIP197_GFP_FLAGS(areq->base),
						 &ctx->base.ctxr_dma);
		if (!ctx->base.ctxr)
			return -ENOMEM;
	}
	req->not_first = true;

	ring = ctx->base.ring;

	spin_lock_bh(&priv->ring[ring].queue_lock);
	ret = crypto_enqueue_request(&priv->ring[ring].queue, &areq->base);
	spin_unlock_bh(&priv->ring[ring].queue_lock);

	queue_work(priv->ring[ring].workqueue,
		   &priv->ring[ring].work_data.work);

	return ret;
}

static int safexcel_ahash_update(struct ahash_request *areq)
{
	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
	int ret;

	/* If the request is 0 length, do nothing */
	if (!areq->nbytes)
		return 0;

	/* Add request to the cache if it fits */
	ret = safexcel_ahash_cache(areq);

	/* Update total request length */
	req->len += areq->nbytes;

	/* If not all data could fit into the cache, go process the excess.
	 * Also go process immediately for an HMAC IV precompute, which
	 * will never be finished at all, but needs to be processed anyway.
	 */
	if ((ret && !req->finish) || req->last_req)
		return safexcel_ahash_enqueue(areq);

	return 0;
}

static int safexcel_ahash_final(struct ahash_request *areq)
{
	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));

	req->finish = true;

	if (unlikely(!req->len && !areq->nbytes)) {
		/*
		 * If we have an overall 0 length *hash* request:
		 * The HW cannot do 0 length hash, so we provide the correct
		 * result directly here.
		 */
		if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_MD5)
			memcpy(areq->result, md5_zero_message_hash,
			       MD5_DIGEST_SIZE);
		else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA1)
			memcpy(areq->result, sha1_zero_message_hash,
			       SHA1_DIGEST_SIZE);
		else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA224)
			memcpy(areq->result, sha224_zero_message_hash,
			       SHA224_DIGEST_SIZE);
		else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA256)
			memcpy(areq->result, sha256_zero_message_hash,
			       SHA256_DIGEST_SIZE);
		else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA384)
			memcpy(areq->result, sha384_zero_message_hash,
			       SHA384_DIGEST_SIZE);
		else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA512)
			memcpy(areq->result, sha512_zero_message_hash,
			       SHA512_DIGEST_SIZE);
		else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SM3) {
			memcpy(areq->result,
			       EIP197_SM3_ZEROM_HASH, SM3_DIGEST_SIZE);
		}

		return 0;
	} else if (unlikely(req->digest == CONTEXT_CONTROL_DIGEST_XCM &&
			    ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_MD5 &&
			    req->len == sizeof(u32) && !areq->nbytes)) {
		/* Zero length CRC32 */
		memcpy(areq->result, &ctx->base.ipad, sizeof(u32));
		return 0;
	} else if (unlikely(ctx->cbcmac && req->len == AES_BLOCK_SIZE &&
			    !areq->nbytes)) {
		/* Zero length CBC MAC */
		memset(areq->result, 0, AES_BLOCK_SIZE);
		return 0;
	} else if (unlikely(req->xcbcmac && req->len == AES_BLOCK_SIZE &&
			    !areq->nbytes)) {
		/* Zero length (X)CBC/CMAC */
		int i;

		for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++) {
			u32 *result = (void *)areq->result;

			/* K3 */
			result[i] = swab32(ctx->base.ipad.word[i + 4]);
		}
		areq->result[0] ^= 0x80;			// 10- padding
		aes_encrypt(ctx->aes, areq->result, areq->result);
		return 0;
	} else if (unlikely(req->hmac &&
			    (req->len == req->block_sz) &&
			    !areq->nbytes)) {
		/*
		 * If we have an overall 0 length *HMAC* request:
		 * For HMAC, we need to finalize the inner digest
		 * and then perform the outer hash.
		 */

		/* generate pad block in the cache */
		/* start with a hash block of all zeroes */
		memset(req->cache, 0, req->block_sz);
		/* set the first byte to 0x80 to 'append a 1 bit' */
		req->cache[0] = 0x80;
		/* add the length in bits in the last 2 bytes */
		if (req->len_is_le) {
			/* Little endian length word (e.g. MD5) */
			req->cache[req->block_sz-8] = (req->block_sz << 3) &
						      255;
			req->cache[req->block_sz-7] = (req->block_sz >> 5);
		} else {
			/* Big endian length word (e.g. any SHA) */
			req->cache[req->block_sz-2] = (req->block_sz >> 5);
			req->cache[req->block_sz-1] = (req->block_sz << 3) &
						      255;
		}

		req->len += req->block_sz; /* plus 1 hash block */

		/* Set special zero-length HMAC flag */
		req->hmac_zlen = true;

		/* Finalize HMAC */
		req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
	} else if (req->hmac) {
		/* Finalize HMAC */
		req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
	}

	return safexcel_ahash_enqueue(areq);
}
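/*
 * The pad block built above is ordinary Merkle-Damgaard finalization done
 * by hand: 0x80 ("append a 1 bit"), zeroes, then the message length in
 * bits. Worked example for a 64-byte block (SHA-1/SHA-256): the length is
 * 64 << 3 == 512 == 0x0200, so (block_sz >> 5) == 0x02 lands in cache[62]
 * and ((block_sz << 3) & 255) == 0x00 in cache[63]; MD5 stores the same
 * value little endian at offsets block_sz-8 and block_sz-7.
 */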

static int safexcel_ahash_finup(struct ahash_request *areq)
{
	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);

	req->finish = true;

	safexcel_ahash_update(areq);
	return safexcel_ahash_final(areq);
}

static int safexcel_ahash_export(struct ahash_request *areq, void *out)
{
	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
	struct safexcel_ahash_export_state *export = out;

	export->len = req->len;
	export->processed = req->processed;

	export->digest = req->digest;

	memcpy(export->state, req->state, req->state_sz);
	memcpy(export->cache, req->cache, HASH_CACHE_SIZE);

	return 0;
}

static int safexcel_ahash_import(struct ahash_request *areq, const void *in)
{
	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
	const struct safexcel_ahash_export_state *export = in;
	int ret;

	ret = crypto_ahash_init(areq);
	if (ret)
		return ret;

	req->len = export->len;
	req->processed = export->processed;

	req->digest = export->digest;

	memcpy(req->cache, export->cache, HASH_CACHE_SIZE);
	memcpy(req->state, export->state, req->state_sz);

	return 0;
}

static int safexcel_ahash_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_alg_template *tmpl =
		container_of(__crypto_ahash_alg(tfm->__crt_alg),
			     struct safexcel_alg_template, alg.ahash);

	ctx->base.priv = tmpl->priv;
	ctx->base.send = safexcel_ahash_send;
	ctx->base.handle_result = safexcel_handle_result;
	ctx->fb_do_setkey = false;

	crypto_ahash_set_reqsize_dma(__crypto_ahash_cast(tfm),
				     sizeof(struct safexcel_ahash_req));
	return 0;
}

static int safexcel_sha1_init(struct ahash_request *areq)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);

	memset(req, 0, sizeof(*req));

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
	req->state_sz = SHA1_DIGEST_SIZE;
	req->digest_sz = SHA1_DIGEST_SIZE;
	req->block_sz = SHA1_BLOCK_SIZE;

	return 0;
}

static int safexcel_sha1_digest(struct ahash_request *areq)
{
	int ret = safexcel_sha1_init(areq);

	if (ret)
		return ret;

	return safexcel_ahash_finup(areq);
}

static void safexcel_ahash_cra_exit(struct crypto_tfm *tfm)
{
	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	int ret;

	/* context not allocated, skip invalidation */
	if (!ctx->base.ctxr)
		return;

	if (priv->flags & EIP197_TRC_CACHE) {
		ret = safexcel_ahash_exit_inv(tfm);
		if (ret)
			dev_warn(priv->dev, "hash: invalidation error %d\n", ret);
	} else {
		dma_pool_free(priv->context_pool, ctx->base.ctxr,
			      ctx->base.ctxr_dma);
	}
}

struct safexcel_alg_template safexcel_alg_sha1 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA1,
	.alg.ahash = {
		.init = safexcel_sha1_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_sha1_digest,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = SHA1_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "sha1",
				.cra_driver_name = "safexcel-sha1",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_ALLOCATES_MEMORY |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SHA1_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_hmac_sha1_init(struct ahash_request *areq)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);

	memset(req, 0, sizeof(*req));

	/* Start from ipad precompute */
	memcpy(req->state, &ctx->base.ipad, SHA1_DIGEST_SIZE);
	/* Already processed the key^ipad part now! */
	req->len	= SHA1_BLOCK_SIZE;
	req->processed	= SHA1_BLOCK_SIZE;

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
	req->state_sz = SHA1_DIGEST_SIZE;
	req->digest_sz = SHA1_DIGEST_SIZE;
	req->block_sz = SHA1_BLOCK_SIZE;
	req->hmac = true;

	return 0;
}

static int safexcel_hmac_sha1_digest(struct ahash_request *areq)
{
	int ret = safexcel_hmac_sha1_init(areq);

	if (ret)
		return ret;

	return safexcel_ahash_finup(areq);
}

static int safexcel_hmac_init_pad(struct ahash_request *areq,
				  unsigned int blocksize, const u8 *key,
				  unsigned int keylen, u8 *ipad, u8 *opad)
{
	DECLARE_CRYPTO_WAIT(result);
	struct scatterlist sg;
	int ret, i;
	u8 *keydup;

	if (keylen <= blocksize) {
		memcpy(ipad, key, keylen);
	} else {
		keydup = kmemdup(key, keylen, GFP_KERNEL);
		if (!keydup)
			return -ENOMEM;

		ahash_request_set_callback(areq, CRYPTO_TFM_REQ_MAY_BACKLOG,
					   crypto_req_done, &result);
		sg_init_one(&sg, keydup, keylen);
		ahash_request_set_crypt(areq, &sg, ipad, keylen);

		ret = crypto_ahash_digest(areq);
		ret = crypto_wait_req(ret, &result);

		/* Avoid leaking */
		kfree_sensitive(keydup);

		if (ret)
			return ret;

		keylen = crypto_ahash_digestsize(crypto_ahash_reqtfm(areq));
	}

	memset(ipad + keylen, 0, blocksize - keylen);
	memcpy(opad, ipad, blocksize);

	for (i = 0; i < blocksize; i++) {
		ipad[i] ^= HMAC_IPAD_VALUE;
		opad[i] ^= HMAC_OPAD_VALUE;
	}

	return 0;
}
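/*
 * This is the standard RFC 2104 key preprocessing: a key longer than the
 * block size is first replaced by its digest, the (possibly shortened)
 * key K is zero-padded to a full block K', and then
 *
 *   ipad = K' xor 0x36..36,  opad = K' xor 0x5c..5c
 *
 * with HMAC(K, m) = H(opad || H(ipad || m)). Each pad is hashed exactly
 * once by safexcel_hmac_init_iv() below, so only the two intermediate
 * digests ever need to be stored.
 */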

static int safexcel_hmac_init_iv(struct ahash_request *areq,
				 unsigned int blocksize, u8 *pad, void *state)
{
	struct safexcel_ahash_req *req;
	DECLARE_CRYPTO_WAIT(result);
	struct scatterlist sg;
	int ret;

	ahash_request_set_callback(areq, CRYPTO_TFM_REQ_MAY_BACKLOG,
				   crypto_req_done, &result);
	sg_init_one(&sg, pad, blocksize);
	ahash_request_set_crypt(areq, &sg, pad, blocksize);

	ret = crypto_ahash_init(areq);
	if (ret)
		return ret;

	req = ahash_request_ctx_dma(areq);
	req->hmac = true;
	req->last_req = true;

	ret = crypto_ahash_update(areq);
	ret = crypto_wait_req(ret, &result);

	return ret ?: crypto_ahash_export(areq, state);
}
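/*
 * Note: last_req makes safexcel_ahash_update() push this single pad block
 * through the engine even though the request is never finalized; the
 * exported partial state is precisely the precomputed inner or outer
 * digest that the HMAC init functions later preload into req->state.
 */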

static int __safexcel_hmac_setkey(const char *alg, const u8 *key,
				  unsigned int keylen,
				  void *istate, void *ostate)
{
	struct ahash_request *areq;
	struct crypto_ahash *tfm;
	unsigned int blocksize;
	u8 *ipad, *opad;
	int ret;

	tfm = crypto_alloc_ahash(alg, 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	areq = ahash_request_alloc(tfm, GFP_KERNEL);
	if (!areq) {
		ret = -ENOMEM;
		goto free_ahash;
	}

	crypto_ahash_clear_flags(tfm, ~0);
	blocksize = crypto_tfm_alg_blocksize(crypto_ahash_tfm(tfm));

	ipad = kcalloc(2, blocksize, GFP_KERNEL);
	if (!ipad) {
		ret = -ENOMEM;
		goto free_request;
	}

	opad = ipad + blocksize;

	ret = safexcel_hmac_init_pad(areq, blocksize, key, keylen, ipad, opad);
	if (ret)
		goto free_ipad;

	ret = safexcel_hmac_init_iv(areq, blocksize, ipad, istate);
	if (ret)
		goto free_ipad;

	ret = safexcel_hmac_init_iv(areq, blocksize, opad, ostate);

free_ipad:
	kfree(ipad);
free_request:
	ahash_request_free(areq);
free_ahash:
	crypto_free_ahash(tfm);

	return ret;
}
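/*
 * Overall setkey flow: allocate the driver's own basic hash (e.g.
 * "safexcel-sha1"), build the RFC 2104 pads, and hash one block of ipad
 * and one of opad to obtain the inner and outer intermediate states.
 * Those states - not the raw key - are what gets stored and preloaded,
 * so the key itself never needs to be retained.
 */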

int safexcel_hmac_setkey(struct safexcel_context *base, const u8 *key,
			 unsigned int keylen, const char *alg,
			 unsigned int state_sz)
{
	struct safexcel_crypto_priv *priv = base->priv;
	struct safexcel_ahash_export_state istate, ostate;
	int ret;

	ret = __safexcel_hmac_setkey(alg, key, keylen, &istate, &ostate);
	if (ret)
		return ret;

	if (priv->flags & EIP197_TRC_CACHE && base->ctxr &&
	    (memcmp(&base->ipad, istate.state, state_sz) ||
	     memcmp(&base->opad, ostate.state, state_sz)))
		base->needs_inv = true;

	memcpy(&base->ipad, &istate.state, state_sz);
	memcpy(&base->opad, &ostate.state, state_sz);

	return 0;
}
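/*
 * Note: on engines with a transform record cache (EIP197_TRC_CACHE), a
 * rekey under a live context must invalidate the cached record, hence
 * the memcmp() against the previously stored pads before overwriting.
 */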
1187 
safexcel_hmac_alg_setkey(struct crypto_ahash * tfm,const u8 * key,unsigned int keylen,const char * alg,unsigned int state_sz)1188 static int safexcel_hmac_alg_setkey(struct crypto_ahash *tfm, const u8 *key,
1189 				    unsigned int keylen, const char *alg,
1190 				    unsigned int state_sz)
1191 {
1192 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
1193 
1194 	return safexcel_hmac_setkey(&ctx->base, key, keylen, alg, state_sz);
1195 }
1196 
safexcel_hmac_sha1_setkey(struct crypto_ahash * tfm,const u8 * key,unsigned int keylen)1197 static int safexcel_hmac_sha1_setkey(struct crypto_ahash *tfm, const u8 *key,
1198 				     unsigned int keylen)
1199 {
1200 	return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sha1",
1201 					SHA1_DIGEST_SIZE);
1202 }
1203 
1204 struct safexcel_alg_template safexcel_alg_hmac_sha1 = {
1205 	.type = SAFEXCEL_ALG_TYPE_AHASH,
1206 	.algo_mask = SAFEXCEL_ALG_SHA1,
1207 	.alg.ahash = {
1208 		.init = safexcel_hmac_sha1_init,
1209 		.update = safexcel_ahash_update,
1210 		.final = safexcel_ahash_final,
1211 		.finup = safexcel_ahash_finup,
1212 		.digest = safexcel_hmac_sha1_digest,
1213 		.setkey = safexcel_hmac_sha1_setkey,
1214 		.export = safexcel_ahash_export,
1215 		.import = safexcel_ahash_import,
1216 		.halg = {
1217 			.digestsize = SHA1_DIGEST_SIZE,
1218 			.statesize = sizeof(struct safexcel_ahash_export_state),
1219 			.base = {
1220 				.cra_name = "hmac(sha1)",
1221 				.cra_driver_name = "safexcel-hmac-sha1",
1222 				.cra_priority = SAFEXCEL_CRA_PRIORITY,
1223 				.cra_flags = CRYPTO_ALG_ASYNC |
1224 					     CRYPTO_ALG_ALLOCATES_MEMORY |
1225 					     CRYPTO_ALG_KERN_DRIVER_ONLY,
1226 				.cra_blocksize = SHA1_BLOCK_SIZE,
1227 				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1228 				.cra_init = safexcel_ahash_cra_init,
1229 				.cra_exit = safexcel_ahash_cra_exit,
1230 				.cra_module = THIS_MODULE,
1231 			},
1232 		},
1233 	},
1234 };
1235 
safexcel_sha256_init(struct ahash_request * areq)1236 static int safexcel_sha256_init(struct ahash_request *areq)
1237 {
1238 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1239 	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
1240 
1241 	memset(req, 0, sizeof(*req));
1242 
1243 	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA256;
1244 	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1245 	req->state_sz = SHA256_DIGEST_SIZE;
1246 	req->digest_sz = SHA256_DIGEST_SIZE;
1247 	req->block_sz = SHA256_BLOCK_SIZE;
1248 
1249 	return 0;
1250 }
1251 
safexcel_sha256_digest(struct ahash_request * areq)1252 static int safexcel_sha256_digest(struct ahash_request *areq)
1253 {
1254 	int ret = safexcel_sha256_init(areq);
1255 
1256 	if (ret)
1257 		return ret;
1258 
1259 	return safexcel_ahash_finup(areq);
1260 }
1261 
1262 struct safexcel_alg_template safexcel_alg_sha256 = {
1263 	.type = SAFEXCEL_ALG_TYPE_AHASH,
1264 	.algo_mask = SAFEXCEL_ALG_SHA2_256,
1265 	.alg.ahash = {
1266 		.init = safexcel_sha256_init,
1267 		.update = safexcel_ahash_update,
1268 		.final = safexcel_ahash_final,
1269 		.finup = safexcel_ahash_finup,
1270 		.digest = safexcel_sha256_digest,
1271 		.export = safexcel_ahash_export,
1272 		.import = safexcel_ahash_import,
1273 		.halg = {
1274 			.digestsize = SHA256_DIGEST_SIZE,
1275 			.statesize = sizeof(struct safexcel_ahash_export_state),
1276 			.base = {
1277 				.cra_name = "sha256",
1278 				.cra_driver_name = "safexcel-sha256",
1279 				.cra_priority = SAFEXCEL_CRA_PRIORITY,
1280 				.cra_flags = CRYPTO_ALG_ASYNC |
1281 					     CRYPTO_ALG_ALLOCATES_MEMORY |
1282 					     CRYPTO_ALG_KERN_DRIVER_ONLY,
1283 				.cra_blocksize = SHA256_BLOCK_SIZE,
1284 				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1285 				.cra_init = safexcel_ahash_cra_init,
1286 				.cra_exit = safexcel_ahash_cra_exit,
1287 				.cra_module = THIS_MODULE,
1288 			},
1289 		},
1290 	},
1291 };
1292 
safexcel_sha224_init(struct ahash_request * areq)1293 static int safexcel_sha224_init(struct ahash_request *areq)
1294 {
1295 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1296 	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
1297 
1298 	memset(req, 0, sizeof(*req));
1299 
1300 	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA224;
1301 	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1302 	req->state_sz = SHA256_DIGEST_SIZE;
1303 	req->digest_sz = SHA256_DIGEST_SIZE;
1304 	req->block_sz = SHA256_BLOCK_SIZE;
1305 
1306 	return 0;
1307 }
1308 
safexcel_sha224_digest(struct ahash_request * areq)1309 static int safexcel_sha224_digest(struct ahash_request *areq)
1310 {
1311 	int ret = safexcel_sha224_init(areq);
1312 
1313 	if (ret)
1314 		return ret;
1315 
1316 	return safexcel_ahash_finup(areq);
1317 }
1318 
1319 struct safexcel_alg_template safexcel_alg_sha224 = {
1320 	.type = SAFEXCEL_ALG_TYPE_AHASH,
1321 	.algo_mask = SAFEXCEL_ALG_SHA2_256,
1322 	.alg.ahash = {
1323 		.init = safexcel_sha224_init,
1324 		.update = safexcel_ahash_update,
1325 		.final = safexcel_ahash_final,
1326 		.finup = safexcel_ahash_finup,
1327 		.digest = safexcel_sha224_digest,
1328 		.export = safexcel_ahash_export,
1329 		.import = safexcel_ahash_import,
1330 		.halg = {
1331 			.digestsize = SHA224_DIGEST_SIZE,
1332 			.statesize = sizeof(struct safexcel_ahash_export_state),
1333 			.base = {
1334 				.cra_name = "sha224",
1335 				.cra_driver_name = "safexcel-sha224",
1336 				.cra_priority = SAFEXCEL_CRA_PRIORITY,
1337 				.cra_flags = CRYPTO_ALG_ASYNC |
1338 					     CRYPTO_ALG_ALLOCATES_MEMORY |
1339 					     CRYPTO_ALG_KERN_DRIVER_ONLY,
1340 				.cra_blocksize = SHA224_BLOCK_SIZE,
1341 				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1342 				.cra_init = safexcel_ahash_cra_init,
1343 				.cra_exit = safexcel_ahash_cra_exit,
1344 				.cra_module = THIS_MODULE,
1345 			},
1346 		},
1347 	},
1348 };
1349 
safexcel_hmac_sha224_setkey(struct crypto_ahash * tfm,const u8 * key,unsigned int keylen)1350 static int safexcel_hmac_sha224_setkey(struct crypto_ahash *tfm, const u8 *key,
1351 				       unsigned int keylen)
1352 {
1353 	return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sha224",
1354 					SHA256_DIGEST_SIZE);
1355 }
1356 
safexcel_hmac_sha224_init(struct ahash_request * areq)1357 static int safexcel_hmac_sha224_init(struct ahash_request *areq)
1358 {
1359 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1360 	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
1361 
1362 	memset(req, 0, sizeof(*req));
1363 
1364 	/* Start from ipad precompute */
1365 	memcpy(req->state, &ctx->base.ipad, SHA256_DIGEST_SIZE);
1366 	/* Already processed the key^ipad part now! */
1367 	req->len	= SHA256_BLOCK_SIZE;
1368 	req->processed	= SHA256_BLOCK_SIZE;
1369 
1370 	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA224;
1371 	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1372 	req->state_sz = SHA256_DIGEST_SIZE;
1373 	req->digest_sz = SHA256_DIGEST_SIZE;
1374 	req->block_sz = SHA256_BLOCK_SIZE;
1375 	req->hmac = true;
1376 
1377 	return 0;
1378 }
1379 
safexcel_hmac_sha224_digest(struct ahash_request * areq)1380 static int safexcel_hmac_sha224_digest(struct ahash_request *areq)
1381 {
1382 	int ret = safexcel_hmac_sha224_init(areq);
1383 
1384 	if (ret)
1385 		return ret;
1386 
1387 	return safexcel_ahash_finup(areq);
1388 }
1389 
1390 struct safexcel_alg_template safexcel_alg_hmac_sha224 = {
1391 	.type = SAFEXCEL_ALG_TYPE_AHASH,
1392 	.algo_mask = SAFEXCEL_ALG_SHA2_256,
1393 	.alg.ahash = {
1394 		.init = safexcel_hmac_sha224_init,
1395 		.update = safexcel_ahash_update,
1396 		.final = safexcel_ahash_final,
1397 		.finup = safexcel_ahash_finup,
1398 		.digest = safexcel_hmac_sha224_digest,
1399 		.setkey = safexcel_hmac_sha224_setkey,
1400 		.export = safexcel_ahash_export,
1401 		.import = safexcel_ahash_import,
1402 		.halg = {
1403 			.digestsize = SHA224_DIGEST_SIZE,
1404 			.statesize = sizeof(struct safexcel_ahash_export_state),
1405 			.base = {
1406 				.cra_name = "hmac(sha224)",
1407 				.cra_driver_name = "safexcel-hmac-sha224",
1408 				.cra_priority = SAFEXCEL_CRA_PRIORITY,
1409 				.cra_flags = CRYPTO_ALG_ASYNC |
1410 					     CRYPTO_ALG_ALLOCATES_MEMORY |
1411 					     CRYPTO_ALG_KERN_DRIVER_ONLY,
1412 				.cra_blocksize = SHA224_BLOCK_SIZE,
1413 				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1414 				.cra_init = safexcel_ahash_cra_init,
1415 				.cra_exit = safexcel_ahash_cra_exit,
1416 				.cra_module = THIS_MODULE,
1417 			},
1418 		},
1419 	},
1420 };
1421 
safexcel_hmac_sha256_setkey(struct crypto_ahash * tfm,const u8 * key,unsigned int keylen)1422 static int safexcel_hmac_sha256_setkey(struct crypto_ahash *tfm, const u8 *key,
1423 				     unsigned int keylen)
1424 {
1425 	return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sha256",
1426 					SHA256_DIGEST_SIZE);
1427 }
1428 
safexcel_hmac_sha256_init(struct ahash_request * areq)1429 static int safexcel_hmac_sha256_init(struct ahash_request *areq)
1430 {
1431 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1432 	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
1433 
1434 	memset(req, 0, sizeof(*req));
1435 
1436 	/* Start from ipad precompute */
1437 	memcpy(req->state, &ctx->base.ipad, SHA256_DIGEST_SIZE);
1438 	/* Already processed the key^ipad part now! */
1439 	req->len	= SHA256_BLOCK_SIZE;
1440 	req->processed	= SHA256_BLOCK_SIZE;
1441 
1442 	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA256;
1443 	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1444 	req->state_sz = SHA256_DIGEST_SIZE;
1445 	req->digest_sz = SHA256_DIGEST_SIZE;
1446 	req->block_sz = SHA256_BLOCK_SIZE;
1447 	req->hmac = true;
1448 
1449 	return 0;
1450 }
1451 
safexcel_hmac_sha256_digest(struct ahash_request * areq)1452 static int safexcel_hmac_sha256_digest(struct ahash_request *areq)
1453 {
1454 	int ret = safexcel_hmac_sha256_init(areq);
1455 
1456 	if (ret)
1457 		return ret;
1458 
1459 	return safexcel_ahash_finup(areq);
1460 }
1461 
1462 struct safexcel_alg_template safexcel_alg_hmac_sha256 = {
1463 	.type = SAFEXCEL_ALG_TYPE_AHASH,
1464 	.algo_mask = SAFEXCEL_ALG_SHA2_256,
1465 	.alg.ahash = {
1466 		.init = safexcel_hmac_sha256_init,
1467 		.update = safexcel_ahash_update,
1468 		.final = safexcel_ahash_final,
1469 		.finup = safexcel_ahash_finup,
1470 		.digest = safexcel_hmac_sha256_digest,
1471 		.setkey = safexcel_hmac_sha256_setkey,
1472 		.export = safexcel_ahash_export,
1473 		.import = safexcel_ahash_import,
1474 		.halg = {
1475 			.digestsize = SHA256_DIGEST_SIZE,
1476 			.statesize = sizeof(struct safexcel_ahash_export_state),
1477 			.base = {
1478 				.cra_name = "hmac(sha256)",
1479 				.cra_driver_name = "safexcel-hmac-sha256",
1480 				.cra_priority = SAFEXCEL_CRA_PRIORITY,
1481 				.cra_flags = CRYPTO_ALG_ASYNC |
1482 					     CRYPTO_ALG_ALLOCATES_MEMORY |
1483 					     CRYPTO_ALG_KERN_DRIVER_ONLY,
1484 				.cra_blocksize = SHA256_BLOCK_SIZE,
1485 				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1486 				.cra_init = safexcel_ahash_cra_init,
1487 				.cra_exit = safexcel_ahash_cra_exit,
1488 				.cra_module = THIS_MODULE,
1489 			},
1490 		},
1491 	},
1492 };
1493 
safexcel_sha512_init(struct ahash_request * areq)1494 static int safexcel_sha512_init(struct ahash_request *areq)
1495 {
1496 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1497 	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
1498 
1499 	memset(req, 0, sizeof(*req));
1500 
1501 	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA512;
1502 	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1503 	req->state_sz = SHA512_DIGEST_SIZE;
1504 	req->digest_sz = SHA512_DIGEST_SIZE;
1505 	req->block_sz = SHA512_BLOCK_SIZE;
1506 
1507 	return 0;
1508 }
1509 
safexcel_sha512_digest(struct ahash_request * areq)1510 static int safexcel_sha512_digest(struct ahash_request *areq)
1511 {
1512 	int ret = safexcel_sha512_init(areq);
1513 
1514 	if (ret)
1515 		return ret;
1516 
1517 	return safexcel_ahash_finup(areq);
1518 }
1519 
1520 struct safexcel_alg_template safexcel_alg_sha512 = {
1521 	.type = SAFEXCEL_ALG_TYPE_AHASH,
1522 	.algo_mask = SAFEXCEL_ALG_SHA2_512,
1523 	.alg.ahash = {
1524 		.init = safexcel_sha512_init,
1525 		.update = safexcel_ahash_update,
1526 		.final = safexcel_ahash_final,
1527 		.finup = safexcel_ahash_finup,
1528 		.digest = safexcel_sha512_digest,
1529 		.export = safexcel_ahash_export,
1530 		.import = safexcel_ahash_import,
1531 		.halg = {
1532 			.digestsize = SHA512_DIGEST_SIZE,
1533 			.statesize = sizeof(struct safexcel_ahash_export_state),
1534 			.base = {
1535 				.cra_name = "sha512",
1536 				.cra_driver_name = "safexcel-sha512",
1537 				.cra_priority = SAFEXCEL_CRA_PRIORITY,
1538 				.cra_flags = CRYPTO_ALG_ASYNC |
1539 					     CRYPTO_ALG_ALLOCATES_MEMORY |
1540 					     CRYPTO_ALG_KERN_DRIVER_ONLY,
1541 				.cra_blocksize = SHA512_BLOCK_SIZE,
1542 				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1543 				.cra_init = safexcel_ahash_cra_init,
1544 				.cra_exit = safexcel_ahash_cra_exit,
1545 				.cra_module = THIS_MODULE,
1546 			},
1547 		},
1548 	},
1549 };
1550 
safexcel_sha384_init(struct ahash_request * areq)1551 static int safexcel_sha384_init(struct ahash_request *areq)
1552 {
1553 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1554 	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
1555 
1556 	memset(req, 0, sizeof(*req));
1557 
1558 	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA384;
1559 	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1560 	req->state_sz = SHA512_DIGEST_SIZE;
1561 	req->digest_sz = SHA512_DIGEST_SIZE;
1562 	req->block_sz = SHA512_BLOCK_SIZE;
1563 
1564 	return 0;
1565 }
1566 
safexcel_sha384_digest(struct ahash_request * areq)1567 static int safexcel_sha384_digest(struct ahash_request *areq)
1568 {
1569 	int ret = safexcel_sha384_init(areq);
1570 
1571 	if (ret)
1572 		return ret;
1573 
1574 	return safexcel_ahash_finup(areq);
1575 }
1576 
1577 struct safexcel_alg_template safexcel_alg_sha384 = {
1578 	.type = SAFEXCEL_ALG_TYPE_AHASH,
1579 	.algo_mask = SAFEXCEL_ALG_SHA2_512,
1580 	.alg.ahash = {
1581 		.init = safexcel_sha384_init,
1582 		.update = safexcel_ahash_update,
1583 		.final = safexcel_ahash_final,
1584 		.finup = safexcel_ahash_finup,
1585 		.digest = safexcel_sha384_digest,
1586 		.export = safexcel_ahash_export,
1587 		.import = safexcel_ahash_import,
1588 		.halg = {
1589 			.digestsize = SHA384_DIGEST_SIZE,
1590 			.statesize = sizeof(struct safexcel_ahash_export_state),
1591 			.base = {
1592 				.cra_name = "sha384",
1593 				.cra_driver_name = "safexcel-sha384",
1594 				.cra_priority = SAFEXCEL_CRA_PRIORITY,
1595 				.cra_flags = CRYPTO_ALG_ASYNC |
1596 					     CRYPTO_ALG_ALLOCATES_MEMORY |
1597 					     CRYPTO_ALG_KERN_DRIVER_ONLY,
1598 				.cra_blocksize = SHA384_BLOCK_SIZE,
1599 				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1600 				.cra_init = safexcel_ahash_cra_init,
1601 				.cra_exit = safexcel_ahash_cra_exit,
1602 				.cra_module = THIS_MODULE,
1603 			},
1604 		},
1605 	},
1606 };

static int safexcel_hmac_sha512_setkey(struct crypto_ahash *tfm, const u8 *key,
				       unsigned int keylen)
{
	return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sha512",
					SHA512_DIGEST_SIZE);
}

static int safexcel_hmac_sha512_init(struct ahash_request *areq)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);

	memset(req, 0, sizeof(*req));

	/* Start from ipad precompute */
	memcpy(req->state, &ctx->base.ipad, SHA512_DIGEST_SIZE);
	/* Already processed the key^ipad part now! */
	req->len	= SHA512_BLOCK_SIZE;
	req->processed	= SHA512_BLOCK_SIZE;

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA512;
	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
	req->state_sz = SHA512_DIGEST_SIZE;
	req->digest_sz = SHA512_DIGEST_SIZE;
	req->block_sz = SHA512_BLOCK_SIZE;
	req->hmac = true;

	return 0;
}

static int safexcel_hmac_sha512_digest(struct ahash_request *areq)
{
	int ret = safexcel_hmac_sha512_init(areq);

	if (ret)
		return ret;

	return safexcel_ahash_finup(areq);
}

struct safexcel_alg_template safexcel_alg_hmac_sha512 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA2_512,
	.alg.ahash = {
		.init = safexcel_hmac_sha512_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_hmac_sha512_digest,
		.setkey = safexcel_hmac_sha512_setkey,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = SHA512_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "hmac(sha512)",
				.cra_driver_name = "safexcel-hmac-sha512",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_ALLOCATES_MEMORY |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SHA512_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_hmac_sha384_setkey(struct crypto_ahash *tfm, const u8 *key,
				       unsigned int keylen)
{
	return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sha384",
					SHA512_DIGEST_SIZE);
}

static int safexcel_hmac_sha384_init(struct ahash_request *areq)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);

	memset(req, 0, sizeof(*req));

	/* Start from ipad precompute */
	memcpy(req->state, &ctx->base.ipad, SHA512_DIGEST_SIZE);
	/* Already processed the key^ipad part now! */
	req->len	= SHA512_BLOCK_SIZE;
	req->processed	= SHA512_BLOCK_SIZE;

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA384;
	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
	req->state_sz = SHA512_DIGEST_SIZE;
	req->digest_sz = SHA512_DIGEST_SIZE;
	req->block_sz = SHA512_BLOCK_SIZE;
	req->hmac = true;

	return 0;
}

static int safexcel_hmac_sha384_digest(struct ahash_request *areq)
{
	int ret = safexcel_hmac_sha384_init(areq);

	if (ret)
		return ret;

	return safexcel_ahash_finup(areq);
}

struct safexcel_alg_template safexcel_alg_hmac_sha384 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA2_512,
	.alg.ahash = {
		.init = safexcel_hmac_sha384_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_hmac_sha384_digest,
		.setkey = safexcel_hmac_sha384_setkey,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = SHA384_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "hmac(sha384)",
				.cra_driver_name = "safexcel-hmac-sha384",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_ALLOCATES_MEMORY |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SHA384_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_md5_init(struct ahash_request *areq)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);

	memset(req, 0, sizeof(*req));

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_MD5;
	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
	req->state_sz = MD5_DIGEST_SIZE;
	req->digest_sz = MD5_DIGEST_SIZE;
	req->block_sz = MD5_HMAC_BLOCK_SIZE;

	return 0;
}

static int safexcel_md5_digest(struct ahash_request *areq)
{
	int ret = safexcel_md5_init(areq);

	if (ret)
		return ret;

	return safexcel_ahash_finup(areq);
}

struct safexcel_alg_template safexcel_alg_md5 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_MD5,
	.alg.ahash = {
		.init = safexcel_md5_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_md5_digest,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = MD5_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "md5",
				.cra_driver_name = "safexcel-md5",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_ALLOCATES_MEMORY |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = MD5_HMAC_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_hmac_md5_init(struct ahash_request *areq)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);

	memset(req, 0, sizeof(*req));

	/* Start from ipad precompute */
	memcpy(req->state, &ctx->base.ipad, MD5_DIGEST_SIZE);
	/* Already processed the key^ipad part now! */
	req->len	= MD5_HMAC_BLOCK_SIZE;
	req->processed	= MD5_HMAC_BLOCK_SIZE;

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_MD5;
	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
	req->state_sz = MD5_DIGEST_SIZE;
	req->digest_sz = MD5_DIGEST_SIZE;
	req->block_sz = MD5_HMAC_BLOCK_SIZE;
	req->len_is_le = true; /* MD5 is little endian! ... */
	req->hmac = true;

	return 0;
}

static int safexcel_hmac_md5_setkey(struct crypto_ahash *tfm, const u8 *key,
				     unsigned int keylen)
{
	return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-md5",
					MD5_DIGEST_SIZE);
}

static int safexcel_hmac_md5_digest(struct ahash_request *areq)
{
	int ret = safexcel_hmac_md5_init(areq);

	if (ret)
		return ret;

	return safexcel_ahash_finup(areq);
}

struct safexcel_alg_template safexcel_alg_hmac_md5 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_MD5,
	.alg.ahash = {
		.init = safexcel_hmac_md5_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_hmac_md5_digest,
		.setkey = safexcel_hmac_md5_setkey,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = MD5_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "hmac(md5)",
				.cra_driver_name = "safexcel-hmac-md5",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_ALLOCATES_MEMORY |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = MD5_HMAC_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

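/*
 * Note (editorial): CBC-MAC, XCBC-MAC and CMAC all share the init and
 * digest handlers below; they differ only in their setkey handlers, which
 * select the (sub)key material loaded into the context and whether partial
 * final blocks are zero-padded (cbcmac) or subkey-whitened (xcbc/cmac).
 */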
static int safexcel_cbcmac_init(struct ahash_request *areq)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);

	memset(req, 0, sizeof(*req));

	/* Start from loaded keys */
	memcpy(req->state, &ctx->base.ipad, ctx->key_sz);
	/* Set processed to non-zero to enable invalidation detection */
	req->len	= AES_BLOCK_SIZE;
	req->processed	= AES_BLOCK_SIZE;

	req->digest   = CONTEXT_CONTROL_DIGEST_XCM;
	req->state_sz = ctx->key_sz;
	req->digest_sz = AES_BLOCK_SIZE;
	req->block_sz = AES_BLOCK_SIZE;
	req->xcbcmac  = true;

	return 0;
}

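/*
 * Note (editorial) on the key layout: the XCBC context format used by the
 * engine reserves room for two subkeys in front of the actual AES key.
 * Plain CBC-MAC has no subkeys, so the first 2 * AES_BLOCK_SIZE bytes of
 * the ipad are zeroed and the expanded key is stored behind them, which
 * is why the copy below starts at word offset 8 (= 32 bytes).
 */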
static int safexcel_cbcmac_setkey(struct crypto_ahash *tfm, const u8 *key,
				 unsigned int len)
{
	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
	struct crypto_aes_ctx aes;
	int ret, i;

	ret = aes_expandkey(&aes, key, len);
	if (ret)
		return ret;

	memset(&ctx->base.ipad, 0, 2 * AES_BLOCK_SIZE);
	for (i = 0; i < len / sizeof(u32); i++)
		ctx->base.ipad.be[i + 8] = cpu_to_be32(aes.key_enc[i]);

	if (len == AES_KEYSIZE_192) {
		ctx->alg    = CONTEXT_CONTROL_CRYPTO_ALG_XCBC192;
		ctx->key_sz = AES_MAX_KEY_SIZE + 2 * AES_BLOCK_SIZE;
	} else if (len == AES_KEYSIZE_256) {
		ctx->alg    = CONTEXT_CONTROL_CRYPTO_ALG_XCBC256;
		ctx->key_sz = AES_MAX_KEY_SIZE + 2 * AES_BLOCK_SIZE;
	} else {
		ctx->alg    = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;
		ctx->key_sz = AES_MIN_KEY_SIZE + 2 * AES_BLOCK_SIZE;
	}
	ctx->cbcmac  = true;

	memzero_explicit(&aes, sizeof(aes));
	return 0;
}

static int safexcel_cbcmac_digest(struct ahash_request *areq)
{
	return safexcel_cbcmac_init(areq) ?: safexcel_ahash_finup(areq);
}

struct safexcel_alg_template safexcel_alg_cbcmac = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = 0,
	.alg.ahash = {
		.init = safexcel_cbcmac_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_cbcmac_digest,
		.setkey = safexcel_cbcmac_setkey,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = AES_BLOCK_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "cbcmac(aes)",
				.cra_driver_name = "safexcel-cbcmac-aes",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_ALLOCATES_MEMORY |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = AES_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

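/*
 * XCBC-MAC subkey derivation per RFC 3566: K1 = E_K(0x01^16),
 * K2 = E_K(0x02^16), K3 = E_K(0x03^16). K1 replaces the user key for the
 * CBC-MAC computation itself, while K2 resp. K3 is XORed into the final
 * block (complete resp. padded). key_tmp is laid out as K2 | K3 | K1 to
 * match the context layout used above for cbcmac: subkeys first, AES key
 * last.
 */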
static int safexcel_xcbcmac_setkey(struct crypto_ahash *tfm, const u8 *key,
				 unsigned int len)
{
	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
	u32 key_tmp[3 * AES_BLOCK_SIZE / sizeof(u32)];
	int ret, i;

	ret = aes_expandkey(ctx->aes, key, len);
	if (ret)
		return ret;

	/* precompute the XCBC key material */
	aes_encrypt(ctx->aes, (u8 *)key_tmp + 2 * AES_BLOCK_SIZE,
		    "\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1");
	aes_encrypt(ctx->aes, (u8 *)key_tmp,
		    "\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2");
	aes_encrypt(ctx->aes, (u8 *)key_tmp + AES_BLOCK_SIZE,
		    "\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3");
	for (i = 0; i < 3 * AES_BLOCK_SIZE / sizeof(u32); i++)
		ctx->base.ipad.word[i] = swab32(key_tmp[i]);

	ret = aes_expandkey(ctx->aes,
			    (u8 *)key_tmp + 2 * AES_BLOCK_SIZE,
			    AES_MIN_KEY_SIZE);
	if (ret)
		return ret;

	ctx->alg    = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;
	ctx->key_sz = AES_MIN_KEY_SIZE + 2 * AES_BLOCK_SIZE;
	ctx->cbcmac = false;

	return 0;
}

static int safexcel_xcbcmac_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_ahash_cra_init(tfm);
	ctx->aes = kmalloc(sizeof(*ctx->aes), GFP_KERNEL);
	return ctx->aes == NULL ? -ENOMEM : 0;
}

static void safexcel_xcbcmac_cra_exit(struct crypto_tfm *tfm)
{
	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);

	kfree(ctx->aes);
	safexcel_ahash_cra_exit(tfm);
}

struct safexcel_alg_template safexcel_alg_xcbcmac = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = 0,
	.alg.ahash = {
		.init = safexcel_cbcmac_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_cbcmac_digest,
		.setkey = safexcel_xcbcmac_setkey,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = AES_BLOCK_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "xcbc(aes)",
				.cra_driver_name = "safexcel-xcbc-aes",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_ALLOCATES_MEMORY |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = AES_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_xcbcmac_cra_init,
				.cra_exit = safexcel_xcbcmac_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

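/*
 * CMAC subkey derivation per NIST SP 800-38B / RFC 4493: L is the
 * encryption of the all-zero block, K1 = L * x and K2 = L * x^2 in
 * GF(2^128) with reduction polynomial x^128 + x^7 + x^2 + x + 1, hence
 * the 0x87 mask applied below whenever the shifted-out MSB is set.
 */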
static int safexcel_cmac_setkey(struct crypto_ahash *tfm, const u8 *key,
				unsigned int len)
{
	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
	__be64 consts[4];
	u64 _const[2];
	u8 msb_mask, gfmask;
	int ret, i;

	/* precompute the CMAC key material */
	ret = aes_expandkey(ctx->aes, key, len);
	if (ret)
		return ret;

	for (i = 0; i < len / sizeof(u32); i++)
		ctx->base.ipad.word[i + 8] = swab32(ctx->aes->key_enc[i]);

	/* code below borrowed from crypto/cmac.c */
	/* encrypt the zero block */
	memset(consts, 0, AES_BLOCK_SIZE);
	aes_encrypt(ctx->aes, (u8 *)consts, (u8 *)consts);

	gfmask = 0x87;
	_const[0] = be64_to_cpu(consts[1]);
	_const[1] = be64_to_cpu(consts[0]);

	/* gf(2^128) multiply zero-ciphertext with u and u^2 */
	for (i = 0; i < 4; i += 2) {
		msb_mask = ((s64)_const[1] >> 63) & gfmask;
		_const[1] = (_const[1] << 1) | (_const[0] >> 63);
		_const[0] = (_const[0] << 1) ^ msb_mask;

		consts[i + 0] = cpu_to_be64(_const[1]);
		consts[i + 1] = cpu_to_be64(_const[0]);
	}
	/* end of code borrowed from crypto/cmac.c */

	for (i = 0; i < 2 * AES_BLOCK_SIZE / sizeof(u32); i++)
		ctx->base.ipad.be[i] = cpu_to_be32(((u32 *)consts)[i]);

	if (len == AES_KEYSIZE_192) {
		ctx->alg    = CONTEXT_CONTROL_CRYPTO_ALG_XCBC192;
		ctx->key_sz = AES_MAX_KEY_SIZE + 2 * AES_BLOCK_SIZE;
	} else if (len == AES_KEYSIZE_256) {
		ctx->alg    = CONTEXT_CONTROL_CRYPTO_ALG_XCBC256;
		ctx->key_sz = AES_MAX_KEY_SIZE + 2 * AES_BLOCK_SIZE;
	} else {
		ctx->alg    = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;
		ctx->key_sz = AES_MIN_KEY_SIZE + 2 * AES_BLOCK_SIZE;
	}
	ctx->cbcmac = false;

	return 0;
}

struct safexcel_alg_template safexcel_alg_cmac = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = 0,
	.alg.ahash = {
		.init = safexcel_cbcmac_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_cbcmac_digest,
		.setkey = safexcel_cmac_setkey,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = AES_BLOCK_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "cmac(aes)",
				.cra_driver_name = "safexcel-cmac-aes",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_ALLOCATES_MEMORY |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = AES_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_xcbcmac_cra_init,
				.cra_exit = safexcel_xcbcmac_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_sm3_init(struct ahash_request *areq)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);

	memset(req, 0, sizeof(*req));

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SM3;
	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
	req->state_sz = SM3_DIGEST_SIZE;
	req->digest_sz = SM3_DIGEST_SIZE;
	req->block_sz = SM3_BLOCK_SIZE;

	return 0;
}

static int safexcel_sm3_digest(struct ahash_request *areq)
{
	int ret = safexcel_sm3_init(areq);

	if (ret)
		return ret;

	return safexcel_ahash_finup(areq);
}

struct safexcel_alg_template safexcel_alg_sm3 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SM3,
	.alg.ahash = {
		.init = safexcel_sm3_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_sm3_digest,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = SM3_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "sm3",
				.cra_driver_name = "safexcel-sm3",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_ALLOCATES_MEMORY |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SM3_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_hmac_sm3_setkey(struct crypto_ahash *tfm, const u8 *key,
				    unsigned int keylen)
{
	return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sm3",
					SM3_DIGEST_SIZE);
}

static int safexcel_hmac_sm3_init(struct ahash_request *areq)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);

	memset(req, 0, sizeof(*req));

	/* Start from ipad precompute */
	memcpy(req->state, &ctx->base.ipad, SM3_DIGEST_SIZE);
	/* Already processed the key^ipad part now! */
	req->len	= SM3_BLOCK_SIZE;
	req->processed	= SM3_BLOCK_SIZE;

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SM3;
	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
	req->state_sz = SM3_DIGEST_SIZE;
	req->digest_sz = SM3_DIGEST_SIZE;
	req->block_sz = SM3_BLOCK_SIZE;
	req->hmac = true;

	return 0;
}

static int safexcel_hmac_sm3_digest(struct ahash_request *areq)
{
	int ret = safexcel_hmac_sm3_init(areq);

	if (ret)
		return ret;

	return safexcel_ahash_finup(areq);
}

struct safexcel_alg_template safexcel_alg_hmac_sm3 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SM3,
	.alg.ahash = {
		.init = safexcel_hmac_sm3_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_hmac_sm3_digest,
		.setkey = safexcel_hmac_sm3_setkey,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = SM3_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "hmac(sm3)",
				.cra_driver_name = "safexcel-hmac-sm3",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_ALLOCATES_MEMORY |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SM3_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_sha3_224_init(struct ahash_request *areq)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);

	memset(req, 0, sizeof(*req));

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_224;
	req->digest = CONTEXT_CONTROL_DIGEST_INITIAL;
	req->state_sz = SHA3_224_DIGEST_SIZE;
	req->digest_sz = SHA3_224_DIGEST_SIZE;
	req->block_sz = SHA3_224_BLOCK_SIZE;
	ctx->do_fallback = false;
	ctx->fb_init_done = false;
	return 0;
}

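/*
 * The engine can only do complete, one-shot SHA3 operations. Anything
 * involving update, export/import or a zero length input is therefore
 * diverted to a software fallback ahash. This helper (re)initializes the
 * fallback request and, for HMAC, reassembles the key from the ipad/opad
 * halves in which safexcel_hmac_sha3_setkey() stashed it.
 */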
static int safexcel_sha3_fbcheck(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct ahash_request *subreq = ahash_request_ctx_dma(req);
	int ret = 0;

	if (ctx->do_fallback) {
		ahash_request_set_tfm(subreq, ctx->fback);
		ahash_request_set_callback(subreq, req->base.flags,
					   req->base.complete, req->base.data);
		ahash_request_set_crypt(subreq, req->src, req->result,
					req->nbytes);
		if (!ctx->fb_init_done) {
			if (ctx->fb_do_setkey) {
				/* Set the fallback transform's HMAC key */
				u8 key[SHA3_224_BLOCK_SIZE];

				memcpy(key, &ctx->base.ipad,
				       crypto_ahash_blocksize(ctx->fback) / 2);
				memcpy(key +
				       crypto_ahash_blocksize(ctx->fback) / 2,
				       &ctx->base.opad,
				       crypto_ahash_blocksize(ctx->fback) / 2);
				ret = crypto_ahash_setkey(ctx->fback, key,
					crypto_ahash_blocksize(ctx->fback));
				memzero_explicit(key,
					crypto_ahash_blocksize(ctx->fback));
				ctx->fb_do_setkey = false;
			}
			ret = ret ?: crypto_ahash_init(subreq);
			ctx->fb_init_done = true;
		}
	}
	return ret;
}

static int safexcel_sha3_update(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct ahash_request *subreq = ahash_request_ctx_dma(req);

	ctx->do_fallback = true;
	return safexcel_sha3_fbcheck(req) ?: crypto_ahash_update(subreq);
}

static int safexcel_sha3_final(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct ahash_request *subreq = ahash_request_ctx_dma(req);

	ctx->do_fallback = true;
	return safexcel_sha3_fbcheck(req) ?: crypto_ahash_final(subreq);
}

static int safexcel_sha3_finup(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct ahash_request *subreq = ahash_request_ctx_dma(req);

	ctx->do_fallback |= !req->nbytes;
	if (ctx->do_fallback)
		/* An update or export/import happened, or len 0: cannot use the HW */
		return safexcel_sha3_fbcheck(req) ?:
		       crypto_ahash_finup(subreq);
	else
		return safexcel_ahash_finup(req);
}

static int safexcel_sha3_digest_fallback(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct ahash_request *subreq = ahash_request_ctx_dma(req);

	ctx->do_fallback = true;
	ctx->fb_init_done = false;
	return safexcel_sha3_fbcheck(req) ?: crypto_ahash_finup(subreq);
}

static int safexcel_sha3_224_digest(struct ahash_request *req)
{
	if (req->nbytes)
		return safexcel_sha3_224_init(req) ?: safexcel_ahash_finup(req);

	/* HW cannot do zero length hash, use fallback instead */
	return safexcel_sha3_digest_fallback(req);
}

static int safexcel_sha3_export(struct ahash_request *req, void *out)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct ahash_request *subreq = ahash_request_ctx_dma(req);

	ctx->do_fallback = true;
	return safexcel_sha3_fbcheck(req) ?: crypto_ahash_export(subreq, out);
}

static int safexcel_sha3_import(struct ahash_request *req, const void *in)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct ahash_request *subreq = ahash_request_ctx_dma(req);

	ctx->do_fallback = true;
	return safexcel_sha3_fbcheck(req) ?: crypto_ahash_import(subreq, in);
	// return safexcel_ahash_import(req, in);
}

static int safexcel_sha3_cra_init(struct crypto_tfm *tfm)
{
	struct crypto_ahash *ahash = __crypto_ahash_cast(tfm);
	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_ahash_cra_init(tfm);

	/* Allocate fallback implementation */
	ctx->fback = crypto_alloc_ahash(crypto_tfm_alg_name(tfm), 0,
					CRYPTO_ALG_ASYNC |
					CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(ctx->fback))
		return PTR_ERR(ctx->fback);

	/* Update statesize from fallback algorithm! */
	crypto_hash_alg_common(ahash)->statesize =
		crypto_ahash_statesize(ctx->fback);
	crypto_ahash_set_reqsize_dma(
		ahash, max(sizeof(struct safexcel_ahash_req),
			   sizeof(struct ahash_request) +
			   crypto_ahash_reqsize(ctx->fback)));
	return 0;
}

static void safexcel_sha3_cra_exit(struct crypto_tfm *tfm)
{
	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);

	crypto_free_ahash(ctx->fback);
	safexcel_ahash_cra_exit(tfm);
}

struct safexcel_alg_template safexcel_alg_sha3_224 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA3,
	.alg.ahash = {
		.init = safexcel_sha3_224_init,
		.update = safexcel_sha3_update,
		.final = safexcel_sha3_final,
		.finup = safexcel_sha3_finup,
		.digest = safexcel_sha3_224_digest,
		.export = safexcel_sha3_export,
		.import = safexcel_sha3_import,
		.halg = {
			.digestsize = SHA3_224_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "sha3-224",
				.cra_driver_name = "safexcel-sha3-224",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_KERN_DRIVER_ONLY |
					     CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA3_224_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_sha3_cra_init,
				.cra_exit = safexcel_sha3_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_sha3_256_init(struct ahash_request *areq)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);

	memset(req, 0, sizeof(*req));

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_256;
	req->digest = CONTEXT_CONTROL_DIGEST_INITIAL;
	req->state_sz = SHA3_256_DIGEST_SIZE;
	req->digest_sz = SHA3_256_DIGEST_SIZE;
	req->block_sz = SHA3_256_BLOCK_SIZE;
	ctx->do_fallback = false;
	ctx->fb_init_done = false;
	return 0;
}

static int safexcel_sha3_256_digest(struct ahash_request *req)
{
	if (req->nbytes)
		return safexcel_sha3_256_init(req) ?: safexcel_ahash_finup(req);

	/* HW cannot do zero length hash, use fallback instead */
	return safexcel_sha3_digest_fallback(req);
}

struct safexcel_alg_template safexcel_alg_sha3_256 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA3,
	.alg.ahash = {
		.init = safexcel_sha3_256_init,
		.update = safexcel_sha3_update,
		.final = safexcel_sha3_final,
		.finup = safexcel_sha3_finup,
		.digest = safexcel_sha3_256_digest,
		.export = safexcel_sha3_export,
		.import = safexcel_sha3_import,
		.halg = {
			.digestsize = SHA3_256_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "sha3-256",
				.cra_driver_name = "safexcel-sha3-256",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_KERN_DRIVER_ONLY |
					     CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA3_256_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_sha3_cra_init,
				.cra_exit = safexcel_sha3_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_sha3_384_init(struct ahash_request *areq)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);

	memset(req, 0, sizeof(*req));

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_384;
	req->digest = CONTEXT_CONTROL_DIGEST_INITIAL;
	req->state_sz = SHA3_384_DIGEST_SIZE;
	req->digest_sz = SHA3_384_DIGEST_SIZE;
	req->block_sz = SHA3_384_BLOCK_SIZE;
	ctx->do_fallback = false;
	ctx->fb_init_done = false;
	return 0;
}

static int safexcel_sha3_384_digest(struct ahash_request *req)
{
	if (req->nbytes)
		return safexcel_sha3_384_init(req) ?: safexcel_ahash_finup(req);

	/* HW cannot do zero length hash, use fallback instead */
	return safexcel_sha3_digest_fallback(req);
}

struct safexcel_alg_template safexcel_alg_sha3_384 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA3,
	.alg.ahash = {
		.init = safexcel_sha3_384_init,
		.update = safexcel_sha3_update,
		.final = safexcel_sha3_final,
		.finup = safexcel_sha3_finup,
		.digest = safexcel_sha3_384_digest,
		.export = safexcel_sha3_export,
		.import = safexcel_sha3_import,
		.halg = {
			.digestsize = SHA3_384_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "sha3-384",
				.cra_driver_name = "safexcel-sha3-384",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_KERN_DRIVER_ONLY |
					     CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA3_384_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_sha3_cra_init,
				.cra_exit = safexcel_sha3_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_sha3_512_init(struct ahash_request *areq)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);

	memset(req, 0, sizeof(*req));

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_512;
	req->digest = CONTEXT_CONTROL_DIGEST_INITIAL;
	req->state_sz = SHA3_512_DIGEST_SIZE;
	req->digest_sz = SHA3_512_DIGEST_SIZE;
	req->block_sz = SHA3_512_BLOCK_SIZE;
	ctx->do_fallback = false;
	ctx->fb_init_done = false;
	return 0;
}

static int safexcel_sha3_512_digest(struct ahash_request *req)
{
	if (req->nbytes)
		return safexcel_sha3_512_init(req) ?: safexcel_ahash_finup(req);

	/* HW cannot do zero length hash, use fallback instead */
	return safexcel_sha3_digest_fallback(req);
}

struct safexcel_alg_template safexcel_alg_sha3_512 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA3,
	.alg.ahash = {
		.init = safexcel_sha3_512_init,
		.update = safexcel_sha3_update,
		.final = safexcel_sha3_final,
		.finup = safexcel_sha3_finup,
		.digest = safexcel_sha3_512_digest,
		.export = safexcel_sha3_export,
		.import = safexcel_sha3_import,
		.halg = {
			.digestsize = SHA3_512_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "sha3-512",
				.cra_driver_name = "safexcel-sha3-512",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_KERN_DRIVER_ONLY |
					     CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA3_512_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_sha3_cra_init,
				.cra_exit = safexcel_sha3_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

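/*
 * In addition to the ahash fallback, HMAC-SHA3 allocates a synchronous
 * shash of the underlying digest so that keys longer than the block size
 * can be compressed inline at setkey time.
 */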
static int safexcel_hmac_sha3_cra_init(struct crypto_tfm *tfm, const char *alg)
{
	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
	int ret;

	ret = safexcel_sha3_cra_init(tfm);
	if (ret)
		return ret;

	/* Allocate precalc basic digest implementation */
	ctx->shpre = crypto_alloc_shash(alg, 0, CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(ctx->shpre))
		return PTR_ERR(ctx->shpre);

	ctx->shdesc = kmalloc(sizeof(*ctx->shdesc) +
			      crypto_shash_descsize(ctx->shpre), GFP_KERNEL);
	if (!ctx->shdesc) {
		crypto_free_shash(ctx->shpre);
		return -ENOMEM;
	}
	ctx->shdesc->tfm = ctx->shpre;
	return 0;
}

static void safexcel_hmac_sha3_cra_exit(struct crypto_tfm *tfm)
{
	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);

	crypto_free_ahash(ctx->fback);
	crypto_free_shash(ctx->shpre);
	kfree(ctx->shdesc);
	safexcel_ahash_cra_exit(tfm);
}

static int safexcel_hmac_sha3_setkey(struct crypto_ahash *tfm, const u8 *key,
				     unsigned int keylen)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
	int ret = 0;

	if (keylen > crypto_ahash_blocksize(tfm)) {
		/*
		 * If the key is larger than the blocksize, then hash it
		 * first using our precalc basic digest implementation
		 */
		ret = crypto_shash_digest(ctx->shdesc, key, keylen,
					  ctx->base.ipad.byte);
		keylen = crypto_shash_digestsize(ctx->shpre);

		/*
		 * If the digest is larger than half the blocksize, we need to
		 * move the rest to opad due to the way our HMAC infra works.
		 */
		if (keylen > crypto_ahash_blocksize(tfm) / 2)
			/* Buffers overlap, need to use memmove instead of memcpy! */
			memmove(&ctx->base.opad,
				ctx->base.ipad.byte +
					crypto_ahash_blocksize(tfm) / 2,
				keylen - crypto_ahash_blocksize(tfm) / 2);
	} else {
		/*
		 * Copy the key to our ipad & opad buffers
		 * Note that ipad and opad each contain one half of the key,
		 * to match the existing HMAC driver infrastructure.
		 */
		if (keylen <= crypto_ahash_blocksize(tfm) / 2) {
			memcpy(&ctx->base.ipad, key, keylen);
		} else {
			memcpy(&ctx->base.ipad, key,
			       crypto_ahash_blocksize(tfm) / 2);
			memcpy(&ctx->base.opad,
			       key + crypto_ahash_blocksize(tfm) / 2,
			       keylen - crypto_ahash_blocksize(tfm) / 2);
		}
	}

	/* Pad key with zeroes */
	if (keylen <= crypto_ahash_blocksize(tfm) / 2) {
		memset(ctx->base.ipad.byte + keylen, 0,
		       crypto_ahash_blocksize(tfm) / 2 - keylen);
		memset(&ctx->base.opad, 0, crypto_ahash_blocksize(tfm) / 2);
	} else {
		memset(ctx->base.opad.byte + keylen -
		       crypto_ahash_blocksize(tfm) / 2, 0,
		       crypto_ahash_blocksize(tfm) - keylen);
	}

	/* If doing fallback, still need to set the new key! */
	ctx->fb_do_setkey = true;
	return ret;
}

static int safexcel_hmac_sha3_224_init(struct ahash_request *areq)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);

	memset(req, 0, sizeof(*req));

	/* Copy (half of) the key */
	memcpy(req->state, &ctx->base.ipad, SHA3_224_BLOCK_SIZE / 2);
	/* Start of HMAC should have len == processed == blocksize */
	req->len	= SHA3_224_BLOCK_SIZE;
	req->processed	= SHA3_224_BLOCK_SIZE;
	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_224;
	req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
	req->state_sz = SHA3_224_BLOCK_SIZE / 2;
	req->digest_sz = SHA3_224_DIGEST_SIZE;
	req->block_sz = SHA3_224_BLOCK_SIZE;
	req->hmac = true;
	ctx->do_fallback = false;
	ctx->fb_init_done = false;
	return 0;
}

static int safexcel_hmac_sha3_224_digest(struct ahash_request *req)
{
	if (req->nbytes)
		return safexcel_hmac_sha3_224_init(req) ?:
		       safexcel_ahash_finup(req);

	/* HW cannot do zero length HMAC, use fallback instead */
	return safexcel_sha3_digest_fallback(req);
}

static int safexcel_hmac_sha3_224_cra_init(struct crypto_tfm *tfm)
{
	return safexcel_hmac_sha3_cra_init(tfm, "sha3-224");
}

struct safexcel_alg_template safexcel_alg_hmac_sha3_224 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA3,
	.alg.ahash = {
		.init = safexcel_hmac_sha3_224_init,
		.update = safexcel_sha3_update,
		.final = safexcel_sha3_final,
		.finup = safexcel_sha3_finup,
		.digest = safexcel_hmac_sha3_224_digest,
		.setkey = safexcel_hmac_sha3_setkey,
		.export = safexcel_sha3_export,
		.import = safexcel_sha3_import,
		.halg = {
			.digestsize = SHA3_224_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "hmac(sha3-224)",
				.cra_driver_name = "safexcel-hmac-sha3-224",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_KERN_DRIVER_ONLY |
					     CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA3_224_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_hmac_sha3_224_cra_init,
				.cra_exit = safexcel_hmac_sha3_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_hmac_sha3_256_init(struct ahash_request *areq)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);

	memset(req, 0, sizeof(*req));

	/* Copy (half of) the key */
	memcpy(req->state, &ctx->base.ipad, SHA3_256_BLOCK_SIZE / 2);
	/* Start of HMAC should have len == processed == blocksize */
	req->len	= SHA3_256_BLOCK_SIZE;
	req->processed	= SHA3_256_BLOCK_SIZE;
	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_256;
	req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
	req->state_sz = SHA3_256_BLOCK_SIZE / 2;
	req->digest_sz = SHA3_256_DIGEST_SIZE;
	req->block_sz = SHA3_256_BLOCK_SIZE;
	req->hmac = true;
	ctx->do_fallback = false;
	ctx->fb_init_done = false;
	return 0;
}

static int safexcel_hmac_sha3_256_digest(struct ahash_request *req)
{
	if (req->nbytes)
		return safexcel_hmac_sha3_256_init(req) ?:
		       safexcel_ahash_finup(req);

	/* HW cannot do zero length HMAC, use fallback instead */
	return safexcel_sha3_digest_fallback(req);
}

static int safexcel_hmac_sha3_256_cra_init(struct crypto_tfm *tfm)
{
	return safexcel_hmac_sha3_cra_init(tfm, "sha3-256");
}

struct safexcel_alg_template safexcel_alg_hmac_sha3_256 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA3,
	.alg.ahash = {
		.init = safexcel_hmac_sha3_256_init,
		.update = safexcel_sha3_update,
		.final = safexcel_sha3_final,
		.finup = safexcel_sha3_finup,
		.digest = safexcel_hmac_sha3_256_digest,
		.setkey = safexcel_hmac_sha3_setkey,
		.export = safexcel_sha3_export,
		.import = safexcel_sha3_import,
		.halg = {
			.digestsize = SHA3_256_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "hmac(sha3-256)",
				.cra_driver_name = "safexcel-hmac-sha3-256",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_KERN_DRIVER_ONLY |
					     CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA3_256_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_hmac_sha3_256_cra_init,
				.cra_exit = safexcel_hmac_sha3_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_hmac_sha3_384_init(struct ahash_request *areq)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);

	memset(req, 0, sizeof(*req));

	/* Copy (half of) the key */
	memcpy(req->state, &ctx->base.ipad, SHA3_384_BLOCK_SIZE / 2);
	/* Start of HMAC should have len == processed == blocksize */
	req->len	= SHA3_384_BLOCK_SIZE;
	req->processed	= SHA3_384_BLOCK_SIZE;
	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_384;
	req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
	req->state_sz = SHA3_384_BLOCK_SIZE / 2;
	req->digest_sz = SHA3_384_DIGEST_SIZE;
	req->block_sz = SHA3_384_BLOCK_SIZE;
	req->hmac = true;
	ctx->do_fallback = false;
	ctx->fb_init_done = false;
	return 0;
}

static int safexcel_hmac_sha3_384_digest(struct ahash_request *req)
{
	if (req->nbytes)
		return safexcel_hmac_sha3_384_init(req) ?:
		       safexcel_ahash_finup(req);

	/* HW cannot do zero length HMAC, use fallback instead */
	return safexcel_sha3_digest_fallback(req);
}

static int safexcel_hmac_sha3_384_cra_init(struct crypto_tfm *tfm)
{
	return safexcel_hmac_sha3_cra_init(tfm, "sha3-384");
}

struct safexcel_alg_template safexcel_alg_hmac_sha3_384 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA3,
	.alg.ahash = {
		.init = safexcel_hmac_sha3_384_init,
		.update = safexcel_sha3_update,
		.final = safexcel_sha3_final,
		.finup = safexcel_sha3_finup,
		.digest = safexcel_hmac_sha3_384_digest,
		.setkey = safexcel_hmac_sha3_setkey,
		.export = safexcel_sha3_export,
		.import = safexcel_sha3_import,
		.halg = {
			.digestsize = SHA3_384_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "hmac(sha3-384)",
				.cra_driver_name = "safexcel-hmac-sha3-384",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_KERN_DRIVER_ONLY |
					     CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA3_384_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_hmac_sha3_384_cra_init,
				.cra_exit = safexcel_hmac_sha3_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_hmac_sha3_512_init(struct ahash_request *areq)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);

	memset(req, 0, sizeof(*req));

	/* Copy (half of) the key */
	memcpy(req->state, &ctx->base.ipad, SHA3_512_BLOCK_SIZE / 2);
	/* Start of HMAC should have len == processed == blocksize */
	req->len	= SHA3_512_BLOCK_SIZE;
	req->processed	= SHA3_512_BLOCK_SIZE;
	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_512;
	req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
	req->state_sz = SHA3_512_BLOCK_SIZE / 2;
	req->digest_sz = SHA3_512_DIGEST_SIZE;
	req->block_sz = SHA3_512_BLOCK_SIZE;
	req->hmac = true;
	ctx->do_fallback = false;
	ctx->fb_init_done = false;
	return 0;
}

static int safexcel_hmac_sha3_512_digest(struct ahash_request *req)
{
	if (req->nbytes)
		return safexcel_hmac_sha3_512_init(req) ?:
		       safexcel_ahash_finup(req);

	/* HW cannot do zero length HMAC, use fallback instead */
	return safexcel_sha3_digest_fallback(req);
}

static int safexcel_hmac_sha3_512_cra_init(struct crypto_tfm *tfm)
{
	return safexcel_hmac_sha3_cra_init(tfm, "sha3-512");
}

struct safexcel_alg_template safexcel_alg_hmac_sha3_512 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA3,
	.alg.ahash = {
		.init = safexcel_hmac_sha3_512_init,
		.update = safexcel_sha3_update,
		.final = safexcel_sha3_final,
		.finup = safexcel_sha3_finup,
		.digest = safexcel_hmac_sha3_512_digest,
		.setkey = safexcel_hmac_sha3_setkey,
		.export = safexcel_sha3_export,
		.import = safexcel_sha3_import,
		.halg = {
			.digestsize = SHA3_512_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "hmac(sha3-512)",
				.cra_driver_name = "safexcel-hmac-sha3-512",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_KERN_DRIVER_ONLY |
					     CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA3_512_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_hmac_sha3_512_cra_init,
				.cra_exit = safexcel_hmac_sha3_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};