Lines Matching +full:p +full:- +full:384
1 // SPDX-License-Identifier: GPL-2.0
14 #include <linux/dma-mapping.h>
33 #define HPRE_INVLD_REQ_ID (-1)
66 /* low address: e--->n */
70 /* low address: d--->n */
74 /* low address: dq->dp->q->p->qinv */
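The three layout comments above (lines 66, 70 and 74) describe how each RSA key is packed into a single DMA buffer. A sketch of the byte offsets, inferred from the set_n/set_e/set_d and setkey_crt paths listed further down (sizes taken as key_sz, with hlf_ksz = key_sz / 2):

	pubkey,     2 * key_sz bytes:   [0 .. key_sz) e, right-aligned  |  [key_sz .. 2 * key_sz) n
	prikey,     2 * key_sz bytes:   [0 .. key_sz) d, right-aligned  |  [key_sz .. 2 * key_sz) n
	crt_prikey, 5 * hlf_ksz bytes:  dq | dp | q | p | qinv, each right-aligned in its own hlf_ksz slot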
84 * ya = g^xa mod p; [RFC2631 sec 2.1.1]
87 * ZZ = yb^xa mod p; [RFC2631 sec 2.1.1]
88 * low address: d--->n, please refer to Hisilicon HPRE UM
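A toy instance of the two cases above, with deliberately tiny numbers (real groups are far larger): take p = 23, g = 5, xa = 6, xb = 15. Then ya = 5^6 mod 23 = 8, yb = 5^15 mod 23 = 19, and both sides derive the same secret ZZ = yb^xa mod 23 = 19^6 mod 23 = 2 = 8^15 mod 23 = ya^xb mod 23.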
98 /* low address: p->a->k->b */
99 unsigned char *p; member
102 /* low address: x->y */
108 /* low address: p->a->k */
109 unsigned char *p; member
155 return ((crypto_dma_align() - 1) | (HPRE_ALIGN_SZ - 1)) + 1; in hpre_align_sz()
160 return (hpre_align_sz() - 1) & ~(crypto_tfm_ctx_alignment() - 1); in hpre_align_pd()
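For the two helpers above: when both quantities are powers of two, ((a - 1) | (b - 1)) + 1 equals max(a, b), so hpre_align_sz() returns the larger of the DMA alignment and HPRE_ALIGN_SZ. hpre_align_pd() then reports how much extra padding must be reserved beyond the crypto API's own context alignment so the request context can still be realigned to that size. As an illustration with assumed values crypto_dma_align() = 128, HPRE_ALIGN_SZ = 64 and crypto_tfm_ctx_alignment() = 8: (127 | 63) + 1 = 128, and (128 - 1) & ~(8 - 1) = 120 bytes of padding.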
168 spin_lock_irqsave(&ctx->req_lock, flags); in hpre_alloc_req_id()
169 id = idr_alloc(&ctx->req_idr, NULL, 0, ctx->qp->sq_depth, GFP_ATOMIC); in hpre_alloc_req_id()
170 spin_unlock_irqrestore(&ctx->req_lock, flags); in hpre_alloc_req_id()
179 spin_lock_irqsave(&ctx->req_lock, flags); in hpre_free_req_id()
180 idr_remove(&ctx->req_idr, req_id); in hpre_free_req_id()
181 spin_unlock_irqrestore(&ctx->req_lock, flags); in hpre_free_req_id()
190 ctx = hpre_req->ctx; in hpre_add_req_to_ctx()
193 return -EINVAL; in hpre_add_req_to_ctx()
195 ctx->req_list[id] = hpre_req; in hpre_add_req_to_ctx()
196 hpre_req->req_id = id; in hpre_add_req_to_ctx()
198 dfx = ctx->hpre->debug.dfx; in hpre_add_req_to_ctx()
200 ktime_get_ts64(&hpre_req->req_time); in hpre_add_req_to_ctx()
207 struct hpre_ctx *ctx = hpre_req->ctx; in hpre_rm_req_from_ctx()
208 int id = hpre_req->req_id; in hpre_rm_req_from_ctx()
210 if (hpre_req->req_id >= 0) { in hpre_rm_req_from_ctx()
211 hpre_req->req_id = HPRE_INVLD_REQ_ID; in hpre_rm_req_from_ctx()
212 ctx->req_list[id] = NULL; in hpre_rm_req_from_ctx()
225 return ERR_PTR(-ENODEV); in hpre_get_qp_and_start()
231 pci_err(qp->qm->pdev, "Can not start qp!\n"); in hpre_get_qp_and_start()
232 return ERR_PTR(-EINVAL); in hpre_get_qp_and_start()
242 struct device *dev = hpre_req->ctx->dev; in hpre_get_data_dma_addr()
246 hpre_req->src = NULL; in hpre_get_data_dma_addr()
249 hpre_req->dst = NULL; in hpre_get_data_dma_addr()
255 return -ENOMEM; in hpre_get_data_dma_addr()
265 struct hpre_ctx *ctx = hpre_req->ctx; in hpre_prepare_dma_buf()
266 struct device *dev = ctx->dev; in hpre_prepare_dma_buf()
270 shift = ctx->key_sz - len; in hpre_prepare_dma_buf()
272 return -EINVAL; in hpre_prepare_dma_buf()
274 ptr = dma_alloc_coherent(dev, ctx->key_sz, tmp, GFP_ATOMIC); in hpre_prepare_dma_buf()
276 return -ENOMEM; in hpre_prepare_dma_buf()
280 hpre_req->src = ptr; in hpre_prepare_dma_buf()
282 hpre_req->dst = ptr; in hpre_prepare_dma_buf()
292 struct hpre_sqe *msg = &hpre_req->req; in hpre_hw_data_init()
293 struct hpre_ctx *ctx = hpre_req->ctx; in hpre_hw_data_init()
298 if ((sg_is_last(data) && len == ctx->key_sz) && in hpre_hw_data_init()
308 msg->in = cpu_to_le64(tmp); in hpre_hw_data_init()
310 msg->out = cpu_to_le64(tmp); in hpre_hw_data_init()
320 struct device *dev = ctx->dev; in hpre_hw_data_clr_all()
321 struct hpre_sqe *sqe = &req->req; in hpre_hw_data_clr_all()
324 tmp = le64_to_cpu(sqe->in); in hpre_hw_data_clr_all()
329 if (req->src) in hpre_hw_data_clr_all()
330 dma_free_coherent(dev, ctx->key_sz, req->src, tmp); in hpre_hw_data_clr_all()
332 dma_unmap_single(dev, tmp, ctx->key_sz, DMA_TO_DEVICE); in hpre_hw_data_clr_all()
335 tmp = le64_to_cpu(sqe->out); in hpre_hw_data_clr_all()
339 if (req->dst) { in hpre_hw_data_clr_all()
341 scatterwalk_map_and_copy(req->dst, dst, 0, in hpre_hw_data_clr_all()
342 ctx->key_sz, 1); in hpre_hw_data_clr_all()
343 dma_free_coherent(dev, ctx->key_sz, req->dst, tmp); in hpre_hw_data_clr_all()
345 dma_unmap_single(dev, tmp, ctx->key_sz, DMA_FROM_DEVICE); in hpre_hw_data_clr_all()
361 id = (int)le16_to_cpu(sqe->tag); in hpre_alg_res_post_hf()
362 req = ctx->req_list[id]; in hpre_alg_res_post_hf()
366 err = (le32_to_cpu(sqe->dw0) >> HPRE_SQE_ALG_BITS) & in hpre_alg_res_post_hf()
369 done = (le32_to_cpu(sqe->dw0) >> HPRE_SQE_DONE_SHIFT) & in hpre_alg_res_post_hf()
375 alg = le32_to_cpu(sqe->dw0) & HREE_ALG_TYPE_MASK; in hpre_alg_res_post_hf()
376 dev_err_ratelimited(ctx->dev, "alg[0x%x] error: done[0x%x], etype[0x%x]\n", in hpre_alg_res_post_hf()
379 return -EINVAL; in hpre_alg_res_post_hf()
387 return -EINVAL; in hpre_ctx_set()
389 spin_lock_init(&ctx->req_lock); in hpre_ctx_set()
390 ctx->qp = qp; in hpre_ctx_set()
391 ctx->dev = &qp->qm->pdev->dev; in hpre_ctx_set()
393 hpre = container_of(ctx->qp->qm, struct hpre, qm); in hpre_ctx_set()
394 ctx->hpre = hpre; in hpre_ctx_set()
395 ctx->req_list = kcalloc(qlen, sizeof(void *), GFP_KERNEL); in hpre_ctx_set()
396 if (!ctx->req_list) in hpre_ctx_set()
397 return -ENOMEM; in hpre_ctx_set()
398 ctx->key_sz = 0; in hpre_ctx_set()
399 ctx->crt_g2_mode = false; in hpre_ctx_set()
400 idr_init(&ctx->req_idr); in hpre_ctx_set()
408 idr_destroy(&ctx->req_idr); in hpre_ctx_clear()
409 kfree(ctx->req_list); in hpre_ctx_clear()
410 hisi_qm_free_qps(&ctx->qp, 1); in hpre_ctx_clear()
413 ctx->crt_g2_mode = false; in hpre_ctx_clear()
414 ctx->key_sz = 0; in hpre_ctx_clear()
424 time_use_us = (reply_time.tv_sec - req->req_time.tv_sec) * in hpre_is_bd_timeout()
426 (reply_time.tv_nsec - req->req_time.tv_nsec) / in hpre_is_bd_timeout()
437 struct hpre_dfx *dfx = ctx->hpre->debug.dfx; in hpre_dh_cb()
444 areq = req->areq.dh; in hpre_dh_cb()
445 areq->dst_len = ctx->key_sz; in hpre_dh_cb()
451 hpre_hw_data_clr_all(ctx, req, areq->dst, areq->src); in hpre_dh_cb()
458 struct hpre_dfx *dfx = ctx->hpre->debug.dfx; in hpre_rsa_cb()
470 areq = req->areq.rsa; in hpre_rsa_cb()
471 areq->dst_len = ctx->key_sz; in hpre_rsa_cb()
472 hpre_hw_data_clr_all(ctx, req, areq->dst, areq->src); in hpre_rsa_cb()
479 struct hpre_ctx *ctx = qp->qp_ctx; in hpre_alg_cb()
480 struct hpre_dfx *dfx = ctx->hpre->debug.dfx; in hpre_alg_cb()
482 struct hpre_asym_request *req = ctx->req_list[le16_to_cpu(sqe->tag)]; in hpre_alg_cb()
489 req->cb(ctx, resp); in hpre_alg_cb()
507 qp->qp_ctx = ctx; in hpre_ctx_init()
508 qp->req_cb = hpre_alg_cb; in hpre_ctx_init()
510 ret = hpre_ctx_set(ctx, qp, qp->sq_depth); in hpre_ctx_init()
527 if (akreq->dst_len < ctx->key_sz) { in hpre_msg_request_set()
528 akreq->dst_len = ctx->key_sz; in hpre_msg_request_set()
529 return -EOVERFLOW; in hpre_msg_request_set()
534 h_req->cb = hpre_rsa_cb; in hpre_msg_request_set()
535 h_req->areq.rsa = akreq; in hpre_msg_request_set()
536 msg = &h_req->req; in hpre_msg_request_set()
541 if (kreq->dst_len < ctx->key_sz) { in hpre_msg_request_set()
542 kreq->dst_len = ctx->key_sz; in hpre_msg_request_set()
543 return -EOVERFLOW; in hpre_msg_request_set()
548 h_req->cb = hpre_dh_cb; in hpre_msg_request_set()
549 h_req->areq.dh = kreq; in hpre_msg_request_set()
550 msg = &h_req->req; in hpre_msg_request_set()
552 msg->key = cpu_to_le64(ctx->dh.dma_xa_p); in hpre_msg_request_set()
555 msg->in = cpu_to_le64(DMA_MAPPING_ERROR); in hpre_msg_request_set()
556 msg->out = cpu_to_le64(DMA_MAPPING_ERROR); in hpre_msg_request_set()
557 msg->dw0 |= cpu_to_le32(0x1 << HPRE_SQE_DONE_SHIFT); in hpre_msg_request_set()
558 msg->task_len1 = (ctx->key_sz >> HPRE_BITS_2_BYTES_SHIFT) - 1; in hpre_msg_request_set()
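Assuming HPRE_BITS_2_BYTES_SHIFT is 3 (its definition is not among the matched lines), task_len1 encodes the operand length in 8-byte words minus one: a 2048-bit RSA key gives key_sz = 256 bytes and task_len1 = 256 / 8 - 1 = 31 (0x1f), consistent with the 0x3/0x5/0x8 values quoted for the 32/48/72-byte ECC operand widths at lines 1238-1242 below.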
559 h_req->ctx = ctx; in hpre_msg_request_set()
563 return -EBUSY; in hpre_msg_request_set()
565 msg->tag = cpu_to_le16((u16)req_id); in hpre_msg_request_set()
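The tag written here is the request id allocated via hpre_add_req_to_ctx(); when the hardware completes, hpre_alg_res_post_hf() and hpre_alg_cb() read it back with le16_to_cpu(sqe->tag) (lines 361-362 and 482) to look the request up in ctx->req_list.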
572 struct hpre_dfx *dfx = ctx->hpre->debug.dfx; in hpre_send()
578 ret = hisi_qp_send(ctx->qp, msg); in hpre_send()
579 if (ret != -EBUSY) in hpre_send()
587 if (ret != -EBUSY) in hpre_send()
599 struct hpre_sqe *msg = &hpre_req->req; in hpre_dh_compute_value()
606 if (req->src) { in hpre_dh_compute_value()
607 ret = hpre_hw_data_init(hpre_req, req->src, req->src_len, 1, 1); in hpre_dh_compute_value()
611 msg->in = cpu_to_le64(ctx->dh.dma_g); in hpre_dh_compute_value()
614 ret = hpre_hw_data_init(hpre_req, req->dst, req->dst_len, 0, 1); in hpre_dh_compute_value()
618 if (ctx->crt_g2_mode && !req->src) in hpre_dh_compute_value()
619 msg->dw0 = cpu_to_le32(le32_to_cpu(msg->dw0) | HPRE_ALG_DH_G2); in hpre_dh_compute_value()
621 msg->dw0 = cpu_to_le32(le32_to_cpu(msg->dw0) | HPRE_ALG_DH); in hpre_dh_compute_value()
626 return -EINPROGRESS; in hpre_dh_compute_value()
630 hpre_hw_data_clr_all(ctx, hpre_req, req->dst, req->src); in hpre_dh_compute_value()
652 return -EINVAL; in hpre_is_dh_params_length_valid()
658 struct device *dev = ctx->dev; in hpre_dh_set_params()
661 if (params->p_size > HPRE_DH_MAX_P_SZ) in hpre_dh_set_params()
662 return -EINVAL; in hpre_dh_set_params()
664 if (hpre_is_dh_params_length_valid(params->p_size << in hpre_dh_set_params()
666 return -EINVAL; in hpre_dh_set_params()
668 sz = ctx->key_sz = params->p_size; in hpre_dh_set_params()
669 ctx->dh.xa_p = dma_alloc_coherent(dev, sz << 1, in hpre_dh_set_params()
670 &ctx->dh.dma_xa_p, GFP_KERNEL); in hpre_dh_set_params()
671 if (!ctx->dh.xa_p) in hpre_dh_set_params()
672 return -ENOMEM; in hpre_dh_set_params()
674 memcpy(ctx->dh.xa_p + sz, params->p, sz); in hpre_dh_set_params()
677 if (params->g_size == 1 && *(char *)params->g == HPRE_DH_G_FLAG) { in hpre_dh_set_params()
678 ctx->crt_g2_mode = true; in hpre_dh_set_params()
682 ctx->dh.g = dma_alloc_coherent(dev, sz, &ctx->dh.dma_g, GFP_KERNEL); in hpre_dh_set_params()
683 if (!ctx->dh.g) { in hpre_dh_set_params()
684 dma_free_coherent(dev, sz << 1, ctx->dh.xa_p, in hpre_dh_set_params()
685 ctx->dh.dma_xa_p); in hpre_dh_set_params()
686 ctx->dh.xa_p = NULL; in hpre_dh_set_params()
687 return -ENOMEM; in hpre_dh_set_params()
690 memcpy(ctx->dh.g + (sz - params->g_size), params->g, params->g_size); in hpre_dh_set_params()
697 struct device *dev = ctx->dev; in hpre_dh_clear_ctx()
698 unsigned int sz = ctx->key_sz; in hpre_dh_clear_ctx()
701 hisi_qm_stop_qp(ctx->qp); in hpre_dh_clear_ctx()
703 if (ctx->dh.g) { in hpre_dh_clear_ctx()
704 dma_free_coherent(dev, sz, ctx->dh.g, ctx->dh.dma_g); in hpre_dh_clear_ctx()
705 ctx->dh.g = NULL; in hpre_dh_clear_ctx()
708 if (ctx->dh.xa_p) { in hpre_dh_clear_ctx()
709 memzero_explicit(ctx->dh.xa_p, sz); in hpre_dh_clear_ctx()
710 dma_free_coherent(dev, sz << 1, ctx->dh.xa_p, in hpre_dh_clear_ctx()
711 ctx->dh.dma_xa_p); in hpre_dh_clear_ctx()
712 ctx->dh.xa_p = NULL; in hpre_dh_clear_ctx()
726 return -EINVAL; in hpre_dh_set_secret()
735 memcpy(ctx->dh.xa_p + (ctx->key_sz - params.key_size), params.key, in hpre_dh_set_secret()
749 return ctx->key_sz; in hpre_dh_max_size()
772 (*len)--; in hpre_rsa_drop_leading_zeros()
802 struct hpre_sqe *msg = &hpre_req->req; in hpre_rsa_enc()
806 if (ctx->key_sz == HPRE_RSA_512BITS_KSZ || in hpre_rsa_enc()
807 ctx->key_sz == HPRE_RSA_1536BITS_KSZ) { in hpre_rsa_enc()
808 akcipher_request_set_tfm(req, ctx->rsa.soft_tfm); in hpre_rsa_enc()
814 if (unlikely(!ctx->rsa.pubkey)) in hpre_rsa_enc()
815 return -EINVAL; in hpre_rsa_enc()
821 msg->dw0 |= cpu_to_le32(HPRE_ALG_NC_NCRT); in hpre_rsa_enc()
822 msg->key = cpu_to_le64(ctx->rsa.dma_pubkey); in hpre_rsa_enc()
824 ret = hpre_hw_data_init(hpre_req, req->src, req->src_len, 1, 0); in hpre_rsa_enc()
828 ret = hpre_hw_data_init(hpre_req, req->dst, req->dst_len, 0, 0); in hpre_rsa_enc()
835 return -EINPROGRESS; in hpre_rsa_enc()
839 hpre_hw_data_clr_all(ctx, hpre_req, req->dst, req->src); in hpre_rsa_enc()
850 struct hpre_sqe *msg = &hpre_req->req; in hpre_rsa_dec()
854 if (ctx->key_sz == HPRE_RSA_512BITS_KSZ || in hpre_rsa_dec()
855 ctx->key_sz == HPRE_RSA_1536BITS_KSZ) { in hpre_rsa_dec()
856 akcipher_request_set_tfm(req, ctx->rsa.soft_tfm); in hpre_rsa_dec()
862 if (unlikely(!ctx->rsa.prikey)) in hpre_rsa_dec()
863 return -EINVAL; in hpre_rsa_dec()
869 if (ctx->crt_g2_mode) { in hpre_rsa_dec()
870 msg->key = cpu_to_le64(ctx->rsa.dma_crt_prikey); in hpre_rsa_dec()
871 msg->dw0 = cpu_to_le32(le32_to_cpu(msg->dw0) | in hpre_rsa_dec()
874 msg->key = cpu_to_le64(ctx->rsa.dma_prikey); in hpre_rsa_dec()
875 msg->dw0 = cpu_to_le32(le32_to_cpu(msg->dw0) | in hpre_rsa_dec()
879 ret = hpre_hw_data_init(hpre_req, req->src, req->src_len, 1, 0); in hpre_rsa_dec()
883 ret = hpre_hw_data_init(hpre_req, req->dst, req->dst_len, 0, 0); in hpre_rsa_dec()
890 return -EINPROGRESS; in hpre_rsa_dec()
894 hpre_hw_data_clr_all(ctx, hpre_req, req->dst, req->src); in hpre_rsa_dec()
906 ctx->key_sz = vlen; in hpre_rsa_set_n()
909 if (!hpre_rsa_key_size_is_support(ctx->key_sz)) in hpre_rsa_set_n()
912 ctx->rsa.pubkey = dma_alloc_coherent(ctx->dev, vlen << 1, in hpre_rsa_set_n()
913 &ctx->rsa.dma_pubkey, in hpre_rsa_set_n()
915 if (!ctx->rsa.pubkey) in hpre_rsa_set_n()
916 return -ENOMEM; in hpre_rsa_set_n()
919 ctx->rsa.prikey = dma_alloc_coherent(ctx->dev, vlen << 1, in hpre_rsa_set_n()
920 &ctx->rsa.dma_prikey, in hpre_rsa_set_n()
922 if (!ctx->rsa.prikey) { in hpre_rsa_set_n()
923 dma_free_coherent(ctx->dev, vlen << 1, in hpre_rsa_set_n()
924 ctx->rsa.pubkey, in hpre_rsa_set_n()
925 ctx->rsa.dma_pubkey); in hpre_rsa_set_n()
926 ctx->rsa.pubkey = NULL; in hpre_rsa_set_n()
927 return -ENOMEM; in hpre_rsa_set_n()
929 memcpy(ctx->rsa.prikey + vlen, ptr, vlen); in hpre_rsa_set_n()
931 memcpy(ctx->rsa.pubkey + vlen, ptr, vlen); in hpre_rsa_set_n()
944 if (!ctx->key_sz || !vlen || vlen > ctx->key_sz) in hpre_rsa_set_e()
945 return -EINVAL; in hpre_rsa_set_e()
947 memcpy(ctx->rsa.pubkey + ctx->key_sz - vlen, ptr, vlen); in hpre_rsa_set_e()
959 if (!ctx->key_sz || !vlen || vlen > ctx->key_sz) in hpre_rsa_set_d()
960 return -EINVAL; in hpre_rsa_set_d()
962 memcpy(ctx->rsa.prikey + ctx->key_sz - vlen, ptr, vlen); in hpre_rsa_set_d()
975 return -EINVAL; in hpre_crt_para_get()
977 memcpy(para + para_sz - len, ptr, len); in hpre_crt_para_get()
984 unsigned int hlf_ksz = ctx->key_sz >> 1; in hpre_rsa_setkey_crt()
985 struct device *dev = ctx->dev; in hpre_rsa_setkey_crt()
989 ctx->rsa.crt_prikey = dma_alloc_coherent(dev, hlf_ksz * HPRE_CRT_PRMS, in hpre_rsa_setkey_crt()
990 &ctx->rsa.dma_crt_prikey, in hpre_rsa_setkey_crt()
992 if (!ctx->rsa.crt_prikey) in hpre_rsa_setkey_crt()
993 return -ENOMEM; in hpre_rsa_setkey_crt()
995 ret = hpre_crt_para_get(ctx->rsa.crt_prikey, hlf_ksz, in hpre_rsa_setkey_crt()
996 rsa_key->dq, rsa_key->dq_sz); in hpre_rsa_setkey_crt()
1001 ret = hpre_crt_para_get(ctx->rsa.crt_prikey + offset, hlf_ksz, in hpre_rsa_setkey_crt()
1002 rsa_key->dp, rsa_key->dp_sz); in hpre_rsa_setkey_crt()
1007 ret = hpre_crt_para_get(ctx->rsa.crt_prikey + offset, hlf_ksz, in hpre_rsa_setkey_crt()
1008 rsa_key->q, rsa_key->q_sz); in hpre_rsa_setkey_crt()
1013 ret = hpre_crt_para_get(ctx->rsa.crt_prikey + offset, hlf_ksz, in hpre_rsa_setkey_crt()
1014 rsa_key->p, rsa_key->p_sz); in hpre_rsa_setkey_crt()
1019 ret = hpre_crt_para_get(ctx->rsa.crt_prikey + offset, hlf_ksz, in hpre_rsa_setkey_crt()
1020 rsa_key->qinv, rsa_key->qinv_sz); in hpre_rsa_setkey_crt()
1024 ctx->crt_g2_mode = true; in hpre_rsa_setkey_crt()
1030 memzero_explicit(ctx->rsa.crt_prikey, offset); in hpre_rsa_setkey_crt()
1031 dma_free_coherent(dev, hlf_ksz * HPRE_CRT_PRMS, ctx->rsa.crt_prikey, in hpre_rsa_setkey_crt()
1032 ctx->rsa.dma_crt_prikey); in hpre_rsa_setkey_crt()
1033 ctx->rsa.crt_prikey = NULL; in hpre_rsa_setkey_crt()
1034 ctx->crt_g2_mode = false; in hpre_rsa_setkey_crt()
1042 unsigned int half_key_sz = ctx->key_sz >> 1; in hpre_rsa_clear_ctx()
1043 struct device *dev = ctx->dev; in hpre_rsa_clear_ctx()
1046 hisi_qm_stop_qp(ctx->qp); in hpre_rsa_clear_ctx()
1048 if (ctx->rsa.pubkey) { in hpre_rsa_clear_ctx()
1049 dma_free_coherent(dev, ctx->key_sz << 1, in hpre_rsa_clear_ctx()
1050 ctx->rsa.pubkey, ctx->rsa.dma_pubkey); in hpre_rsa_clear_ctx()
1051 ctx->rsa.pubkey = NULL; in hpre_rsa_clear_ctx()
1054 if (ctx->rsa.crt_prikey) { in hpre_rsa_clear_ctx()
1055 memzero_explicit(ctx->rsa.crt_prikey, in hpre_rsa_clear_ctx()
1058 ctx->rsa.crt_prikey, ctx->rsa.dma_crt_prikey); in hpre_rsa_clear_ctx()
1059 ctx->rsa.crt_prikey = NULL; in hpre_rsa_clear_ctx()
1062 if (ctx->rsa.prikey) { in hpre_rsa_clear_ctx()
1063 memzero_explicit(ctx->rsa.prikey, ctx->key_sz); in hpre_rsa_clear_ctx()
1064 dma_free_coherent(dev, ctx->key_sz << 1, ctx->rsa.prikey, in hpre_rsa_clear_ctx()
1065 ctx->rsa.dma_prikey); in hpre_rsa_clear_ctx()
1066 ctx->rsa.prikey = NULL; in hpre_rsa_clear_ctx()
1074 * CRT: return true, N-CRT: return false.
1078 u16 len = key->p_sz + key->q_sz + key->dp_sz + key->dq_sz + in hpre_is_crt_key()
1079 key->qinv_sz; in hpre_is_crt_key()
1083 /* an N-CRT key does not carry the 5 CRT parameters */ in hpre_is_crt_key()
1122 if ((private && !ctx->rsa.prikey) || !ctx->rsa.pubkey) { in hpre_rsa_setkey()
1123 ret = -EINVAL; in hpre_rsa_setkey()
1140 ret = crypto_akcipher_set_pub_key(ctx->rsa.soft_tfm, key, keylen); in hpre_rsa_setpubkey()
1153 ret = crypto_akcipher_set_priv_key(ctx->rsa.soft_tfm, key, keylen); in hpre_rsa_setprivkey()
1165 if (ctx->key_sz == HPRE_RSA_512BITS_KSZ || in hpre_rsa_max_size()
1166 ctx->key_sz == HPRE_RSA_1536BITS_KSZ) in hpre_rsa_max_size()
1167 return crypto_akcipher_maxsize(ctx->rsa.soft_tfm); in hpre_rsa_max_size()
1169 return ctx->key_sz; in hpre_rsa_max_size()
1177 ctx->rsa.soft_tfm = crypto_alloc_akcipher("rsa-generic", 0, 0); in hpre_rsa_init_tfm()
1178 if (IS_ERR(ctx->rsa.soft_tfm)) { in hpre_rsa_init_tfm()
1180 return PTR_ERR(ctx->rsa.soft_tfm); in hpre_rsa_init_tfm()
1188 crypto_free_akcipher(ctx->rsa.soft_tfm); in hpre_rsa_init_tfm()
1198 crypto_free_akcipher(ctx->rsa.soft_tfm); in hpre_rsa_exit_tfm()
1206 j = len - i - 1; in hpre_key_to_big_end()
1214 struct device *dev = ctx->dev; in hpre_ecc_clear_ctx()
1215 unsigned int sz = ctx->key_sz; in hpre_ecc_clear_ctx()
1219 hisi_qm_stop_qp(ctx->qp); in hpre_ecc_clear_ctx()
1221 if (is_ecdh && ctx->ecdh.p) { in hpre_ecc_clear_ctx()
1222 /* ecdh: p->a->k->b */ in hpre_ecc_clear_ctx()
1223 memzero_explicit(ctx->ecdh.p + shift, sz); in hpre_ecc_clear_ctx()
1224 dma_free_coherent(dev, sz << 3, ctx->ecdh.p, ctx->ecdh.dma_p); in hpre_ecc_clear_ctx()
1225 ctx->ecdh.p = NULL; in hpre_ecc_clear_ctx()
1226 } else if (!is_ecdh && ctx->curve25519.p) { in hpre_ecc_clear_ctx()
1227 /* curve25519: p->a->k */ in hpre_ecc_clear_ctx()
1228 memzero_explicit(ctx->curve25519.p + shift, sz); in hpre_ecc_clear_ctx()
1229 dma_free_coherent(dev, sz << 2, ctx->curve25519.p, in hpre_ecc_clear_ctx()
1230 ctx->curve25519.dma_p); in hpre_ecc_clear_ctx()
1231 ctx->curve25519.p = NULL; in hpre_ecc_clear_ctx()
1238 * Bit widths of 192/224/256/384/521 are supported by HPRE,
1240 * bits<=256, bits=256; 256<bits<=384, bits=384; 384<bits<=576, bits=576;
1242 * high-order zeros in software, so TASK_LENGTH1 is 0x3/0x5/0x8;
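A minimal sketch of the rounding rule described above; the function name is hypothetical and the byte widths follow from the 0x3/0x5/0x8 TASK_LENGTH1 values the comment quotes:

	/* Round a curve width in bits up to the operand width the engine computes on. */
	static unsigned int example_hw_operand_bytes(unsigned int curve_bits)
	{
		if (curve_bits <= 256)
			return 32;	/* 256-bit operands: TASK_LENGTH1 = 32 / 8 - 1 = 0x3 */
		if (curve_bits <= 384)
			return 48;	/* 384-bit operands: TASK_LENGTH1 = 48 / 8 - 1 = 0x5 */
		if (curve_bits <= 576)
			return 72;	/* 576-bit operands: TASK_LENGTH1 = 72 / 8 - 1 = 0x8 */
		return 0;		/* width not handled by the engine */
	}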
1261 unsigned int sz = cur_sz - (ndigits - 1) * sizeof(u64); in fill_curve_param()
1264 while (i < ndigits - 1) { in fill_curve_param()
1269 memcpy(addr + sizeof(u64) * i, &param[ndigits - 1], sz); in fill_curve_param()
1276 unsigned int shifta = ctx->key_sz << 1; in hpre_ecdh_fill_curve()
1277 unsigned int shiftb = ctx->key_sz << 2; in hpre_ecdh_fill_curve()
1278 void *p = ctx->ecdh.p + ctx->key_sz - cur_sz; in hpre_ecdh_fill_curve() local
1279 void *a = ctx->ecdh.p + shifta - cur_sz; in hpre_ecdh_fill_curve()
1280 void *b = ctx->ecdh.p + shiftb - cur_sz; in hpre_ecdh_fill_curve()
1281 void *x = ctx->ecdh.g + ctx->key_sz - cur_sz; in hpre_ecdh_fill_curve()
1282 void *y = ctx->ecdh.g + shifta - cur_sz; in hpre_ecdh_fill_curve()
1283 const struct ecc_curve *curve = ecc_get_curve(ctx->curve_id); in hpre_ecdh_fill_curve()
1287 return -EINVAL; in hpre_ecdh_fill_curve()
1289 n = kzalloc(ctx->key_sz, GFP_KERNEL); in hpre_ecdh_fill_curve()
1291 return -ENOMEM; in hpre_ecdh_fill_curve()
1293 fill_curve_param(p, curve->p, cur_sz, curve->g.ndigits); in hpre_ecdh_fill_curve()
1294 fill_curve_param(a, curve->a, cur_sz, curve->g.ndigits); in hpre_ecdh_fill_curve()
1295 fill_curve_param(b, curve->b, cur_sz, curve->g.ndigits); in hpre_ecdh_fill_curve()
1296 fill_curve_param(x, curve->g.x, cur_sz, curve->g.ndigits); in hpre_ecdh_fill_curve()
1297 fill_curve_param(y, curve->g.y, cur_sz, curve->g.ndigits); in hpre_ecdh_fill_curve()
1298 fill_curve_param(n, curve->n, cur_sz, curve->g.ndigits); in hpre_ecdh_fill_curve()
1300 if (params->key_size == cur_sz && memcmp(params->key, n, cur_sz) >= 0) { in hpre_ecdh_fill_curve()
1302 return -EINVAL; in hpre_ecdh_fill_curve()
1327 struct device *dev = ctx->dev; in hpre_ecdh_set_param()
1331 ctx->key_sz = hpre_ecdh_supported_curve(ctx->curve_id); in hpre_ecdh_set_param()
1332 if (!ctx->key_sz) in hpre_ecdh_set_param()
1333 return -EINVAL; in hpre_ecdh_set_param()
1335 curve_sz = hpre_ecdh_get_curvesz(ctx->curve_id); in hpre_ecdh_set_param()
1336 if (!curve_sz || params->key_size > curve_sz) in hpre_ecdh_set_param()
1337 return -EINVAL; in hpre_ecdh_set_param()
1339 sz = ctx->key_sz; in hpre_ecdh_set_param()
1341 if (!ctx->ecdh.p) { in hpre_ecdh_set_param()
1342 ctx->ecdh.p = dma_alloc_coherent(dev, sz << 3, &ctx->ecdh.dma_p, in hpre_ecdh_set_param()
1344 if (!ctx->ecdh.p) in hpre_ecdh_set_param()
1345 return -ENOMEM; in hpre_ecdh_set_param()
1349 ctx->ecdh.g = ctx->ecdh.p + shift; in hpre_ecdh_set_param()
1350 ctx->ecdh.dma_g = ctx->ecdh.dma_p + shift; in hpre_ecdh_set_param()
1355 dma_free_coherent(dev, sz << 3, ctx->ecdh.p, ctx->ecdh.dma_p); in hpre_ecdh_set_param()
1356 ctx->ecdh.p = NULL; in hpre_ecdh_set_param()
1376 struct device *dev = ctx->dev; in ecdh_gen_privkey()
1385 ret = crypto_rng_get_bytes(crypto_default_rng, (u8 *)params->key, in ecdh_gen_privkey()
1386 params->key_size); in ecdh_gen_privkey()
1399 struct device *dev = ctx->dev; in hpre_ecdh_set_secret()
1406 return -EINVAL; in hpre_ecdh_set_secret()
1412 curve_sz = hpre_ecdh_get_curvesz(ctx->curve_id); in hpre_ecdh_set_secret()
1415 return -EINVAL; in hpre_ecdh_set_secret()
1418 params.key_size = curve_sz - 1; in hpre_ecdh_set_secret()
1426 return -EINVAL; in hpre_ecdh_set_secret()
1437 sz = ctx->key_sz; in hpre_ecdh_set_secret()
1438 sz_shift = (sz << 1) + sz - params.key_size; in hpre_ecdh_set_secret()
1439 memcpy(ctx->ecdh.p + sz_shift, params.key, params.key_size); in hpre_ecdh_set_secret()
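Per the p->a->k->b layout noted at line 98, sz_shift places the private key in the third key_sz-wide slot, right-aligned: 2 * sz skips past p and a, and the extra sz - key_size left-pads short keys with zeros. For example, NIST P-192 on a 32-byte operand (sz = 32, key_size = 24) gives sz_shift = 64 + 8 = 72, so k occupies bytes 72..95 of the buffer.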
1449 struct device *dev = ctx->dev; in hpre_ecdh_hw_data_clr_all()
1450 struct hpre_sqe *sqe = &req->req; in hpre_ecdh_hw_data_clr_all()
1453 dma = le64_to_cpu(sqe->in); in hpre_ecdh_hw_data_clr_all()
1457 if (src && req->src) in hpre_ecdh_hw_data_clr_all()
1458 dma_free_coherent(dev, ctx->key_sz << 2, req->src, dma); in hpre_ecdh_hw_data_clr_all()
1460 dma = le64_to_cpu(sqe->out); in hpre_ecdh_hw_data_clr_all()
1464 if (req->dst) in hpre_ecdh_hw_data_clr_all()
1465 dma_free_coherent(dev, ctx->key_sz << 1, req->dst, dma); in hpre_ecdh_hw_data_clr_all()
1467 dma_unmap_single(dev, dma, ctx->key_sz << 1, DMA_FROM_DEVICE); in hpre_ecdh_hw_data_clr_all()
1472 unsigned int curve_sz = hpre_ecdh_get_curvesz(ctx->curve_id); in hpre_ecdh_cb()
1473 struct hpre_dfx *dfx = ctx->hpre->debug.dfx; in hpre_ecdh_cb()
1477 char *p; in hpre_ecdh_cb() local
1481 areq = req->areq.ecdh; in hpre_ecdh_cb()
1482 areq->dst_len = ctx->key_sz << 1; in hpre_ecdh_cb()
1488 p = sg_virt(areq->dst); in hpre_ecdh_cb()
1489 memmove(p, p + ctx->key_sz - curve_sz, curve_sz); in hpre_ecdh_cb()
1490 memmove(p + curve_sz, p + areq->dst_len - curve_sz, curve_sz); in hpre_ecdh_cb()
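The engine returns the result point with x and y each right-aligned in a key_sz-wide slot (x in [0, key_sz), y in [key_sz, 2 * key_sz)); the two memmove() calls strip the leading zero padding so the caller sees x || y packed as curve_sz + curve_sz bytes. For P-192 (curve_sz = 24, key_sz = 32), x moves from bytes 8..31 down to 0..23 and y from bytes 40..63 down to 24..47.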
1492 hpre_ecdh_hw_data_clr_all(ctx, req, areq->dst, areq->src); in hpre_ecdh_cb()
1506 if (req->dst_len < ctx->key_sz << 1) { in hpre_ecdh_msg_request_set()
1507 req->dst_len = ctx->key_sz << 1; in hpre_ecdh_msg_request_set()
1508 return -EINVAL; in hpre_ecdh_msg_request_set()
1513 h_req->cb = hpre_ecdh_cb; in hpre_ecdh_msg_request_set()
1514 h_req->areq.ecdh = req; in hpre_ecdh_msg_request_set()
1515 msg = &h_req->req; in hpre_ecdh_msg_request_set()
1517 msg->in = cpu_to_le64(DMA_MAPPING_ERROR); in hpre_ecdh_msg_request_set()
1518 msg->out = cpu_to_le64(DMA_MAPPING_ERROR); in hpre_ecdh_msg_request_set()
1519 msg->key = cpu_to_le64(ctx->ecdh.dma_p); in hpre_ecdh_msg_request_set()
1521 msg->dw0 |= cpu_to_le32(0x1U << HPRE_SQE_DONE_SHIFT); in hpre_ecdh_msg_request_set()
1522 msg->task_len1 = (ctx->key_sz >> HPRE_BITS_2_BYTES_SHIFT) - 1; in hpre_ecdh_msg_request_set()
1523 h_req->ctx = ctx; in hpre_ecdh_msg_request_set()
1527 return -EBUSY; in hpre_ecdh_msg_request_set()
1529 msg->tag = cpu_to_le16((u16)req_id); in hpre_ecdh_msg_request_set()
1536 struct hpre_sqe *msg = &hpre_req->req; in hpre_ecdh_src_data_init()
1537 struct hpre_ctx *ctx = hpre_req->ctx; in hpre_ecdh_src_data_init()
1538 struct device *dev = ctx->dev; in hpre_ecdh_src_data_init()
1545 shift = ctx->key_sz - (len >> 1); in hpre_ecdh_src_data_init()
1547 return -EINVAL; in hpre_ecdh_src_data_init()
1549 ptr = dma_alloc_coherent(dev, ctx->key_sz << 2, &dma, GFP_KERNEL); in hpre_ecdh_src_data_init()
1551 return -ENOMEM; in hpre_ecdh_src_data_init()
1553 tmpshift = ctx->key_sz << 1; in hpre_ecdh_src_data_init()
1556 memcpy(ptr + ctx->key_sz + shift, ptr + tmpshift + (len >> 1), len >> 1); in hpre_ecdh_src_data_init()
1558 hpre_req->src = ptr; in hpre_ecdh_src_data_init()
1559 msg->in = cpu_to_le64(dma); in hpre_ecdh_src_data_init()
1566 struct hpre_sqe *msg = &hpre_req->req; in hpre_ecdh_dst_data_init()
1567 struct hpre_ctx *ctx = hpre_req->ctx; in hpre_ecdh_dst_data_init()
1568 struct device *dev = ctx->dev; in hpre_ecdh_dst_data_init()
1571 if (unlikely(!data || !sg_is_last(data) || len != ctx->key_sz << 1)) { in hpre_ecdh_dst_data_init()
1573 return -EINVAL; in hpre_ecdh_dst_data_init()
1576 hpre_req->dst = NULL; in hpre_ecdh_dst_data_init()
1580 return -ENOMEM; in hpre_ecdh_dst_data_init()
1583 msg->out = cpu_to_le64(dma); in hpre_ecdh_dst_data_init()
1591 struct device *dev = ctx->dev; in hpre_ecdh_compute_value()
1594 struct hpre_sqe *msg = &hpre_req->req; in hpre_ecdh_compute_value()
1603 if (req->src) { in hpre_ecdh_compute_value()
1604 ret = hpre_ecdh_src_data_init(hpre_req, req->src, req->src_len); in hpre_ecdh_compute_value()
1610 msg->in = cpu_to_le64(ctx->ecdh.dma_g); in hpre_ecdh_compute_value()
1613 ret = hpre_ecdh_dst_data_init(hpre_req, req->dst, req->dst_len); in hpre_ecdh_compute_value()
1619 msg->dw0 = cpu_to_le32(le32_to_cpu(msg->dw0) | HPRE_ALG_ECC_MUL); in hpre_ecdh_compute_value()
1622 return -EINPROGRESS; in hpre_ecdh_compute_value()
1626 hpre_ecdh_hw_data_clr_all(ctx, hpre_req, req->dst, req->src); in hpre_ecdh_compute_value()
1635 return ctx->key_sz << 1; in hpre_ecdh_max_size()
1642 ctx->curve_id = ECC_CURVE_NIST_P192; in hpre_ecdh_nist_p192_init_tfm()
1653 ctx->curve_id = ECC_CURVE_NIST_P256; in hpre_ecdh_nist_p256_init_tfm()
1664 ctx->curve_id = ECC_CURVE_NIST_P384; in hpre_ecdh_nist_p384_init_tfm()
1682 unsigned int sz = ctx->key_sz; in hpre_curve25519_fill_curve()
1685 void *p; in hpre_curve25519_fill_curve() local
1688 * The key from 'buf' is in little-endian order; we should preprocess it as in hpre_curve25519_fill_curve()
1691 * the same as the software curve-25519 that exists in crypto. in hpre_curve25519_fill_curve()
1697 p = ctx->curve25519.p + sz - len; in hpre_curve25519_fill_curve()
1702 fill_curve_param(p, curve->p, len, curve->g.ndigits); in hpre_curve25519_fill_curve()
1703 fill_curve_param(p + sz, curve->a, len, curve->g.ndigits); in hpre_curve25519_fill_curve()
1704 memcpy(p + shift, secret, len); in hpre_curve25519_fill_curve()
1705 fill_curve_param(p + shift + sz, curve->g.x, len, curve->g.ndigits); in hpre_curve25519_fill_curve()
1712 struct device *dev = ctx->dev; in hpre_curve25519_set_param()
1713 unsigned int sz = ctx->key_sz; in hpre_curve25519_set_param()
1716 /* p->a->k->gx */ in hpre_curve25519_set_param()
1717 if (!ctx->curve25519.p) { in hpre_curve25519_set_param()
1718 ctx->curve25519.p = dma_alloc_coherent(dev, sz << 2, in hpre_curve25519_set_param()
1719 &ctx->curve25519.dma_p, in hpre_curve25519_set_param()
1721 if (!ctx->curve25519.p) in hpre_curve25519_set_param()
1722 return -ENOMEM; in hpre_curve25519_set_param()
1725 ctx->curve25519.g = ctx->curve25519.p + shift + sz; in hpre_curve25519_set_param()
1726 ctx->curve25519.dma_g = ctx->curve25519.dma_p + shift + sz; in hpre_curve25519_set_param()
1737 struct device *dev = ctx->dev; in hpre_curve25519_set_secret()
1738 int ret = -EINVAL; in hpre_curve25519_set_secret()
1749 ctx->key_sz = CURVE25519_KEY_SIZE; in hpre_curve25519_set_secret()
1765 struct device *dev = ctx->dev; in hpre_curve25519_hw_data_clr_all()
1766 struct hpre_sqe *sqe = &req->req; in hpre_curve25519_hw_data_clr_all()
1769 dma = le64_to_cpu(sqe->in); in hpre_curve25519_hw_data_clr_all()
1773 if (src && req->src) in hpre_curve25519_hw_data_clr_all()
1774 dma_free_coherent(dev, ctx->key_sz, req->src, dma); in hpre_curve25519_hw_data_clr_all()
1776 dma = le64_to_cpu(sqe->out); in hpre_curve25519_hw_data_clr_all()
1780 if (req->dst) in hpre_curve25519_hw_data_clr_all()
1781 dma_free_coherent(dev, ctx->key_sz, req->dst, dma); in hpre_curve25519_hw_data_clr_all()
1783 dma_unmap_single(dev, dma, ctx->key_sz, DMA_FROM_DEVICE); in hpre_curve25519_hw_data_clr_all()
1788 struct hpre_dfx *dfx = ctx->hpre->debug.dfx; in hpre_curve25519_cb()
1795 areq = req->areq.curve25519; in hpre_curve25519_cb()
1796 areq->dst_len = ctx->key_sz; in hpre_curve25519_cb()
1802 hpre_key_to_big_end(sg_virt(areq->dst), CURVE25519_KEY_SIZE); in hpre_curve25519_cb()
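hpre_key_to_big_end() byte-reverses the buffer in place (line 1206 above shows the index mirroring); here it presumably converts the 32-byte result from the big-endian form the hardware operates on back to the little-endian form RFC 7748 and the kernel X25519 API use, matching the endianness comments around lines 1688-1691 and 1884-1885 below.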
1804 hpre_curve25519_hw_data_clr_all(ctx, req, areq->dst, areq->src); in hpre_curve25519_cb()
1818 if (unlikely(req->dst_len < ctx->key_sz)) { in hpre_curve25519_msg_request_set()
1819 req->dst_len = ctx->key_sz; in hpre_curve25519_msg_request_set()
1820 return -EINVAL; in hpre_curve25519_msg_request_set()
1825 h_req->cb = hpre_curve25519_cb; in hpre_curve25519_msg_request_set()
1826 h_req->areq.curve25519 = req; in hpre_curve25519_msg_request_set()
1827 msg = &h_req->req; in hpre_curve25519_msg_request_set()
1829 msg->in = cpu_to_le64(DMA_MAPPING_ERROR); in hpre_curve25519_msg_request_set()
1830 msg->out = cpu_to_le64(DMA_MAPPING_ERROR); in hpre_curve25519_msg_request_set()
1831 msg->key = cpu_to_le64(ctx->curve25519.dma_p); in hpre_curve25519_msg_request_set()
1833 msg->dw0 |= cpu_to_le32(0x1U << HPRE_SQE_DONE_SHIFT); in hpre_curve25519_msg_request_set()
1834 msg->task_len1 = (ctx->key_sz >> HPRE_BITS_2_BYTES_SHIFT) - 1; in hpre_curve25519_msg_request_set()
1835 h_req->ctx = ctx; in hpre_curve25519_msg_request_set()
1839 return -EBUSY; in hpre_curve25519_msg_request_set()
1841 msg->tag = cpu_to_le16((u16)req_id); in hpre_curve25519_msg_request_set()
1849 for (i = 0; i < CURVE25519_KEY_SIZE - 1; i++) in hpre_curve25519_src_modulo_p()
1852 /* The residue mod p is ptr's last byte minus 0xed (the last byte of p) */ in hpre_curve25519_src_modulo_p()
1853 ptr[i] -= 0xed; in hpre_curve25519_src_modulo_p()
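Why this is a full reduction: p = 2^255 - 19, so the only 255-bit values not below p are p .. p + 18. In big-endian form they all share p's leading bytes (0x7f followed by 0xff ... 0xff) and differ only in the last byte, which runs from 0xed to 0xff. Hence src mod p = src - p is obtained by zeroing every byte except the last and subtracting 0xed from it, which is exactly what the loop above and this final subtraction do.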
1859 struct hpre_sqe *msg = &hpre_req->req; in hpre_curve25519_src_init()
1860 struct hpre_ctx *ctx = hpre_req->ctx; in hpre_curve25519_src_init()
1861 struct device *dev = ctx->dev; in hpre_curve25519_src_init()
1862 u8 p[CURVE25519_KEY_SIZE] = { 0 }; in hpre_curve25519_src_init() local
1869 return -EINVAL; in hpre_curve25519_src_init()
1872 ptr = dma_alloc_coherent(dev, ctx->key_sz, &dma, GFP_KERNEL); in hpre_curve25519_src_init()
1874 return -ENOMEM; in hpre_curve25519_src_init()
1884 * Src_data (gx) is in little-endian order; the MSB of the final byte must in hpre_curve25519_src_init()
1885 * be masked as described in RFC 7748, then the data is converted to big-endian in hpre_curve25519_src_init()
1893 fill_curve_param(p, curve->p, CURVE25519_KEY_SIZE, curve->g.ndigits); in hpre_curve25519_src_init()
1896 * When src_data lies in the range (2^255 - 19) ~ (2^255 - 1), it is not below p, in hpre_curve25519_src_init()
1897 * so we reduce it modulo p and use the residue instead. in hpre_curve25519_src_init()
1899 if (memcmp(ptr, p, ctx->key_sz) == 0) { in hpre_curve25519_src_init()
1900 dev_err(dev, "gx is p!\n"); in hpre_curve25519_src_init()
1902 } else if (memcmp(ptr, p, ctx->key_sz) > 0) { in hpre_curve25519_src_init()
1906 hpre_req->src = ptr; in hpre_curve25519_src_init()
1907 msg->in = cpu_to_le64(dma); in hpre_curve25519_src_init()
1911 dma_free_coherent(dev, ctx->key_sz, ptr, dma); in hpre_curve25519_src_init()
1912 return -EINVAL; in hpre_curve25519_src_init()
1918 struct hpre_sqe *msg = &hpre_req->req; in hpre_curve25519_dst_init()
1919 struct hpre_ctx *ctx = hpre_req->ctx; in hpre_curve25519_dst_init()
1920 struct device *dev = ctx->dev; in hpre_curve25519_dst_init()
1923 if (!data || !sg_is_last(data) || len != ctx->key_sz) { in hpre_curve25519_dst_init()
1925 return -EINVAL; in hpre_curve25519_dst_init()
1928 hpre_req->dst = NULL; in hpre_curve25519_dst_init()
1932 return -ENOMEM; in hpre_curve25519_dst_init()
1935 msg->out = cpu_to_le64(dma); in hpre_curve25519_dst_init()
1943 struct device *dev = ctx->dev; in hpre_curve25519_compute_value()
1946 struct hpre_sqe *msg = &hpre_req->req; in hpre_curve25519_compute_value()
1955 if (req->src) { in hpre_curve25519_compute_value()
1956 ret = hpre_curve25519_src_init(hpre_req, req->src, req->src_len); in hpre_curve25519_compute_value()
1963 msg->in = cpu_to_le64(ctx->curve25519.dma_g); in hpre_curve25519_compute_value()
1966 ret = hpre_curve25519_dst_init(hpre_req, req->dst, req->dst_len); in hpre_curve25519_compute_value()
1972 msg->dw0 = cpu_to_le32(le32_to_cpu(msg->dw0) | HPRE_ALG_CURVE25519_MUL); in hpre_curve25519_compute_value()
1975 return -EINPROGRESS; in hpre_curve25519_compute_value()
1979 hpre_curve25519_hw_data_clr_all(ctx, hpre_req, req->dst, req->src); in hpre_curve25519_compute_value()
1987 return ctx->key_sz; in hpre_curve25519_max_size()
2020 .cra_driver_name = "hpre-rsa",
2036 .cra_driver_name = "hpre-dh",
2052 .cra_name = "ecdh-nist-p192",
2053 .cra_driver_name = "hpre-ecdh-nist-p192",
2066 .cra_name = "ecdh-nist-p256",
2067 .cra_driver_name = "hpre-ecdh-nist-p256",
2080 .cra_name = "ecdh-nist-p384",
2081 .cra_driver_name = "hpre-ecdh-nist-p384",
2098 .cra_driver_name = "hpre-curve25519",
2113 dev_err(&qm->pdev->dev, "failed to register rsa (%d)!\n", ret); in hpre_register_rsa()
2135 dev_err(&qm->pdev->dev, "failed to register dh (%d)!\n", ret); in hpre_register_dh()
2158 dev_err(&qm->pdev->dev, "failed to register %s (%d)!\n", in hpre_register_ecdh()
2167 for (--i; i >= 0; --i) in hpre_register_ecdh()
2180 for (i = ARRAY_SIZE(ecdh_curves) - 1; i >= 0; --i) in hpre_unregister_ecdh()
2193 dev_err(&qm->pdev->dev, "failed to register x25519 (%d)!\n", ret); in hpre_register_x25519()
2251 if (--hpre_available_devs) in hpre_algs_unregister()