// SPDX-License-Identifier: GPL-2.0-only
/*
 * Crypto acceleration support for Rockchip RK3288
 *
 * Copyright (c) 2015, Fuzhou Rockchip Electronics Co., Ltd
 *
 * Author: Zain Wang <zain.wang@rock-chips.com>
 *
 * Some ideas are from marvell/cesa.c and s5p-sss.c driver.
 */

#include <linux/unaligned.h>
#include <crypto/internal/hash.h>
#include <linux/device.h>
#include <linux/err.h>
#include <linux/iopoll.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/string.h>
#include "rk3288_crypto.h"

/*
 * The hardware cannot hash a zero-length message, so return the
 * precomputed hash of the empty message instead.
 */

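/*
 * Scatterlist entries that are not 32-bit aligned or whose length is not a
 * multiple of 4 bytes cannot be handled by the hardware path, so such
 * requests are routed to the software fallback.
 */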
static bool rk_ahash_need_fallback(struct ahash_request *req)
{
	struct scatterlist *sg;

	sg = req->src;
	while (sg) {
		if (!IS_ALIGNED(sg->offset, sizeof(u32)))
			return true;
		if (sg->length % 4)
			return true;
		sg = sg_next(sg);
	}
	return false;
}

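/*
 * Perform the whole digest on the software fallback transform, keeping a
 * count of how often the hardware could not be used.
 */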
static int rk_ahash_digest_fb(struct ahash_request *areq)
{
	struct rk_ahash_rctx *rctx = ahash_request_ctx(areq);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
	struct rk_ahash_ctx *tfmctx = crypto_ahash_ctx(tfm);
	struct ahash_alg *alg = crypto_ahash_alg(tfm);
	struct rk_crypto_tmp *algt = container_of(alg, struct rk_crypto_tmp, alg.hash.base);

	algt->stat_fb++;

	ahash_request_set_tfm(&rctx->fallback_req, tfmctx->fallback_tfm);
	ahash_request_set_callback(&rctx->fallback_req,
				   areq->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP,
				   areq->base.complete, areq->base.data);
	ahash_request_set_crypt(&rctx->fallback_req, areq->src, areq->result,
				areq->nbytes);

	return crypto_ahash_digest(&rctx->fallback_req);
}

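/*
 * Copy the well-known hash of the empty message into the result buffer,
 * selected by the digest size of the transform.
 */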
static int zero_message_process(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	int rk_digest_size = crypto_ahash_digestsize(tfm);

	switch (rk_digest_size) {
	case SHA1_DIGEST_SIZE:
		memcpy(req->result, sha1_zero_message_hash, rk_digest_size);
		break;
	case SHA256_DIGEST_SIZE:
		memcpy(req->result, sha256_zero_message_hash, rk_digest_size);
		break;
	case MD5_DIGEST_SIZE:
		memcpy(req->result, md5_zero_message_hash, rk_digest_size);
		break;
	default:
		return -EINVAL;
	}

	return 0;
}

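/*
 * Reset the hash block and program it for a new request: flush the hash
 * FIFO, clear the digest output registers, enable the receive-DMA
 * interrupts, and set the hash mode and total message length.
 */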
static void rk_ahash_reg_init(struct ahash_request *req,
			      struct rk_crypto_info *dev)
{
	struct rk_ahash_rctx *rctx = ahash_request_ctx(req);
	int reg_status;

	reg_status = CRYPTO_READ(dev, RK_CRYPTO_CTRL) |
		     RK_CRYPTO_HASH_FLUSH | _SBF(0xffff, 16);
	CRYPTO_WRITE(dev, RK_CRYPTO_CTRL, reg_status);

	reg_status = CRYPTO_READ(dev, RK_CRYPTO_CTRL);
	reg_status &= (~RK_CRYPTO_HASH_FLUSH);
	reg_status |= _SBF(0xffff, 16);
	CRYPTO_WRITE(dev, RK_CRYPTO_CTRL, reg_status);

	memset_io(dev->reg + RK_CRYPTO_HASH_DOUT_0, 0, 32);

	CRYPTO_WRITE(dev, RK_CRYPTO_INTENA, RK_CRYPTO_HRDMA_ERR_ENA |
					    RK_CRYPTO_HRDMA_DONE_ENA);

	CRYPTO_WRITE(dev, RK_CRYPTO_INTSTS, RK_CRYPTO_HRDMA_ERR_INT |
					    RK_CRYPTO_HRDMA_DONE_INT);

	CRYPTO_WRITE(dev, RK_CRYPTO_HASH_CTRL, rctx->mode |
					       RK_CRYPTO_HASH_SWAP_DO);

	CRYPTO_WRITE(dev, RK_CRYPTO_CONF, RK_CRYPTO_BYTESWAP_HRFIFO |
					  RK_CRYPTO_BYTESWAP_BRFIFO |
					  RK_CRYPTO_BYTESWAP_BTFIFO);

	CRYPTO_WRITE(dev, RK_CRYPTO_HASH_MSG_LEN, req->nbytes);
}

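/*
 * init/update/final/finup/import/export are always delegated to the
 * software fallback transform; only digest() takes the hardware path.
 */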
static int rk_ahash_init(struct ahash_request *req)
{
	struct rk_ahash_rctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct rk_ahash_ctx *ctx = crypto_ahash_ctx(tfm);

	ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
	ahash_request_set_callback(&rctx->fallback_req,
				   req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP,
				   req->base.complete, req->base.data);

	return crypto_ahash_init(&rctx->fallback_req);
}

static int rk_ahash_update(struct ahash_request *req)
{
	struct rk_ahash_rctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct rk_ahash_ctx *ctx = crypto_ahash_ctx(tfm);

	ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
	ahash_request_set_callback(&rctx->fallback_req,
				   req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP,
				   req->base.complete, req->base.data);
	ahash_request_set_crypt(&rctx->fallback_req, req->src, NULL, req->nbytes);

	return crypto_ahash_update(&rctx->fallback_req);
}

static int rk_ahash_final(struct ahash_request *req)
{
	struct rk_ahash_rctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct rk_ahash_ctx *ctx = crypto_ahash_ctx(tfm);

	ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
	ahash_request_set_callback(&rctx->fallback_req,
				   req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP,
				   req->base.complete, req->base.data);
	ahash_request_set_crypt(&rctx->fallback_req, NULL, req->result, 0);

	return crypto_ahash_final(&rctx->fallback_req);
}

static int rk_ahash_finup(struct ahash_request *req)
{
	struct rk_ahash_rctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct rk_ahash_ctx *ctx = crypto_ahash_ctx(tfm);

	ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
	ahash_request_set_callback(&rctx->fallback_req,
				   req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP,
				   req->base.complete, req->base.data);
	ahash_request_set_crypt(&rctx->fallback_req, req->src, req->result,
				req->nbytes);

	return crypto_ahash_finup(&rctx->fallback_req);
}

static int rk_ahash_import(struct ahash_request *req, const void *in)
{
	struct rk_ahash_rctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct rk_ahash_ctx *ctx = crypto_ahash_ctx(tfm);

	ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
	ahash_request_set_callback(&rctx->fallback_req,
				   req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP,
				   req->base.complete, req->base.data);

	return crypto_ahash_import(&rctx->fallback_req, in);
}

static int rk_ahash_export(struct ahash_request *req, void *out)
{
	struct rk_ahash_rctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct rk_ahash_ctx *ctx = crypto_ahash_ctx(tfm);

	ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
	ahash_request_set_callback(&rctx->fallback_req,
				   req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP,
				   req->base.complete, req->base.data);

	return crypto_ahash_export(&rctx->fallback_req, out);
}

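/*
 * Entry point for one-shot hashing: fall back to software for unsuitable
 * scatterlists, short-circuit zero-length messages, and otherwise queue the
 * request on the crypto engine of the selected crypto instance.
 */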
static int rk_ahash_digest(struct ahash_request *req)
{
	struct rk_ahash_rctx *rctx = ahash_request_ctx(req);
	struct rk_crypto_info *dev;
	struct crypto_engine *engine;

	if (rk_ahash_need_fallback(req))
		return rk_ahash_digest_fb(req);

	if (!req->nbytes)
		return zero_message_process(req);

	dev = get_rk_crypto();

	rctx->dev = dev;
	engine = dev->engine;

	return crypto_transfer_hash_request_to_engine(engine, req);
}

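/*
 * Program one scatterlist entry into the receive-DMA registers (address and
 * length in 32-bit words) and start the hash engine.
 */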
static void crypto_ahash_dma_start(struct rk_crypto_info *dev, struct scatterlist *sg)
{
	CRYPTO_WRITE(dev, RK_CRYPTO_HRDMAS, sg_dma_address(sg));
	CRYPTO_WRITE(dev, RK_CRYPTO_HRDMAL, sg_dma_len(sg) / 4);
	CRYPTO_WRITE(dev, RK_CRYPTO_CTRL, RK_CRYPTO_HASH_START |
					  (RK_CRYPTO_HASH_START << 16));
}

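/* Map the source scatterlist for DMA and remember the mapped entry count. */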
static int rk_hash_prepare(struct crypto_engine *engine, void *breq)
{
	struct ahash_request *areq = container_of(breq, struct ahash_request, base);
	struct rk_ahash_rctx *rctx = ahash_request_ctx(areq);
	struct rk_crypto_info *rkc = rctx->dev;
	int ret;

	ret = dma_map_sg(rkc->dev, areq->src, sg_nents(areq->src), DMA_TO_DEVICE);
	if (ret <= 0)
		return -EINVAL;

	rctx->nrsg = ret;

	return 0;
}

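/* Undo the DMA mapping set up by rk_hash_prepare(). */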
static void rk_hash_unprepare(struct crypto_engine *engine, void *breq)
{
	struct ahash_request *areq = container_of(breq, struct ahash_request, base);
	struct rk_ahash_rctx *rctx = ahash_request_ctx(areq);
	struct rk_crypto_info *rkc = rctx->dev;

	dma_unmap_sg(rkc->dev, areq->src, rctx->nrsg, DMA_TO_DEVICE);
}

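/*
 * crypto_engine do_one_request() handler: map the source, select the hash
 * mode from the digest size, program the registers, push every scatterlist
 * entry through DMA, then poll for completion and read back the digest.
 */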
static int rk_hash_run(struct crypto_engine *engine, void *breq)
{
	struct ahash_request *areq = container_of(breq, struct ahash_request, base);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
	struct rk_ahash_rctx *rctx = ahash_request_ctx(areq);
	struct ahash_alg *alg = crypto_ahash_alg(tfm);
	struct rk_crypto_tmp *algt = container_of(alg, struct rk_crypto_tmp, alg.hash.base);
	struct scatterlist *sg = areq->src;
	struct rk_crypto_info *rkc = rctx->dev;
	int err;
	int i;
	u32 v;

	err = pm_runtime_resume_and_get(rkc->dev);
	if (err)
		return err;

	err = rk_hash_prepare(engine, breq);
	if (err)
		goto theend;

	rctx->mode = 0;

	algt->stat_req++;
	rkc->nreq++;

	switch (crypto_ahash_digestsize(tfm)) {
	case SHA1_DIGEST_SIZE:
		rctx->mode = RK_CRYPTO_HASH_SHA1;
		break;
	case SHA256_DIGEST_SIZE:
		rctx->mode = RK_CRYPTO_HASH_SHA256;
		break;
	case MD5_DIGEST_SIZE:
		rctx->mode = RK_CRYPTO_HASH_MD5;
		break;
	default:
		err = -EINVAL;
		goto theend;
	}

	rk_ahash_reg_init(areq, rkc);

	while (sg) {
		reinit_completion(&rkc->complete);
		rkc->status = 0;
		crypto_ahash_dma_start(rkc, sg);
		wait_for_completion_interruptible_timeout(&rkc->complete,
							  msecs_to_jiffies(2000));
		if (!rkc->status) {
			dev_err(rkc->dev, "DMA timeout\n");
			err = -EFAULT;
			goto theend;
		}
		sg = sg_next(sg);
	}

	/*
	 * The engine needs some time to finish processing the data received
	 * in the last DMA transfer. How long depends on the length of that
	 * last chunk, so a fixed delay cannot be used here. Polling every
	 * 10us keeps the CPU from hammering the status register while still
	 * reacting quickly once the hash is done.
	 */
	readl_poll_timeout(rkc->reg + RK_CRYPTO_HASH_STS, v, v == 0, 10, 1000);

	for (i = 0; i < crypto_ahash_digestsize(tfm) / 4; i++) {
		v = readl(rkc->reg + RK_CRYPTO_HASH_DOUT_0 + i * 4);
		put_unaligned_le32(v, areq->result + i * 4);
	}

theend:
	pm_runtime_put_autosuspend(rkc->dev);

	rk_hash_unprepare(engine, breq);

	local_bh_disable();
	crypto_finalize_hash_request(engine, breq, err);
	local_bh_enable();

	return 0;
}

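/*
 * Allocate the software fallback transform and size the request context so
 * that it can also hold the fallback request.
 */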
static int rk_hash_init_tfm(struct crypto_ahash *tfm)
{
	struct rk_ahash_ctx *tctx = crypto_ahash_ctx(tfm);
	const char *alg_name = crypto_ahash_alg_name(tfm);
	struct ahash_alg *alg = crypto_ahash_alg(tfm);
	struct rk_crypto_tmp *algt = container_of(alg, struct rk_crypto_tmp, alg.hash.base);

	/* for fallback */
	tctx->fallback_tfm = crypto_alloc_ahash(alg_name, 0,
						CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(tctx->fallback_tfm)) {
		dev_err(algt->dev->dev, "Could not load fallback driver.\n");
		return PTR_ERR(tctx->fallback_tfm);
	}

	crypto_ahash_set_reqsize(tfm,
				 sizeof(struct rk_ahash_rctx) +
				 crypto_ahash_reqsize(tctx->fallback_tfm));

	return 0;
}

static void rk_hash_exit_tfm(struct crypto_ahash *tfm)
{
	struct rk_ahash_ctx *tctx = crypto_ahash_ctx(tfm);

	crypto_free_ahash(tctx->fallback_tfm);
}

struct rk_crypto_tmp rk_ahash_sha1 = {
	.type = CRYPTO_ALG_TYPE_AHASH,
	.alg.hash.base = {
		.init = rk_ahash_init,
		.update = rk_ahash_update,
		.final = rk_ahash_final,
		.finup = rk_ahash_finup,
		.export = rk_ahash_export,
		.import = rk_ahash_import,
		.digest = rk_ahash_digest,
		.init_tfm = rk_hash_init_tfm,
		.exit_tfm = rk_hash_exit_tfm,
		.halg = {
			.digestsize = SHA1_DIGEST_SIZE,
			.statesize = sizeof(struct sha1_state),
			.base = {
				.cra_name = "sha1",
				.cra_driver_name = "rk-sha1",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA1_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct rk_ahash_ctx),
				.cra_module = THIS_MODULE,
			}
		}
	},
	.alg.hash.op = {
		.do_one_request = rk_hash_run,
	},
};

struct rk_crypto_tmp rk_ahash_sha256 = {
	.type = CRYPTO_ALG_TYPE_AHASH,
	.alg.hash.base = {
		.init = rk_ahash_init,
		.update = rk_ahash_update,
		.final = rk_ahash_final,
		.finup = rk_ahash_finup,
		.export = rk_ahash_export,
		.import = rk_ahash_import,
		.digest = rk_ahash_digest,
		.init_tfm = rk_hash_init_tfm,
		.exit_tfm = rk_hash_exit_tfm,
		.halg = {
			.digestsize = SHA256_DIGEST_SIZE,
			.statesize = sizeof(struct sha256_state),
			.base = {
				.cra_name = "sha256",
				.cra_driver_name = "rk-sha256",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA256_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct rk_ahash_ctx),
				.cra_module = THIS_MODULE,
			}
		}
	},
	.alg.hash.op = {
		.do_one_request = rk_hash_run,
	},
};

struct rk_crypto_tmp rk_ahash_md5 = {
	.type = CRYPTO_ALG_TYPE_AHASH,
	.alg.hash.base = {
		.init = rk_ahash_init,
		.update = rk_ahash_update,
		.final = rk_ahash_final,
		.finup = rk_ahash_finup,
		.export = rk_ahash_export,
		.import = rk_ahash_import,
		.digest = rk_ahash_digest,
		.init_tfm = rk_hash_init_tfm,
		.exit_tfm = rk_hash_exit_tfm,
		.halg = {
			.digestsize = MD5_DIGEST_SIZE,
			.statesize = sizeof(struct md5_state),
			.base = {
				.cra_name = "md5",
				.cra_driver_name = "rk-md5",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_NEED_FALLBACK,
				/* MD5 uses 64-byte blocks, same size as SHA1_BLOCK_SIZE */
				.cra_blocksize = SHA1_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct rk_ahash_ctx),
				.cra_module = THIS_MODULE,
			}
		}
	},
	.alg.hash.op = {
		.do_one_request = rk_hash_run,
	},
};