Lines matching "p" and "384" in the HiSilicon HPRE asymmetric crypto driver (drivers/crypto/hisilicon/hpre/hpre_crypto.c)

1 // SPDX-License-Identifier: GPL-2.0
13 #include <linux/dma-mapping.h>
32 #define HPRE_INVLD_REQ_ID (-1)
67 /* low address: e--->n */
71 /* low address: d--->n */
75 /* low address: dq->dp->q->p->qinv */
85 * ya = g^xa mod p; [RFC2631 sec 2.1.1]
88 * ZZ = yb^xa mod p; [RFC2631 sec 2.1.1]
89 * low address: d--->n, please refer to Hisilicon HPRE UM
99 /* low address: p->a->k->b */
100 unsigned char *p; member
103 /* low address: x->y */
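
The struct comments a few lines up restate the Diffie-Hellman relations from RFC 2631: the public value is ya = g^xa mod p and the shared secret is ZZ = yb^xa mod p. A toy, self-contained illustration of that agreement using machine-word integers; the parameters here are tiny stand-ins, while the driver operates on multi-precision values laid out for the HPRE hardware.

#include <stdio.h>
#include <stdint.h>

/* Square-and-multiply modular exponentiation on toy-sized numbers. */
static uint64_t powmod(uint64_t base, uint64_t exp, uint64_t mod)
{
	uint64_t result = 1;

	base %= mod;
	while (exp) {
		if (exp & 1)
			result = (result * base) % mod;
		base = (base * base) % mod;
		exp >>= 1;
	}
	return result;
}

int main(void)
{
	/* Tiny demonstration parameters only; real p is hundreds of bytes. */
	uint64_t p = 23, g = 5;
	uint64_t xa = 6, xb = 15;		/* private keys */
	uint64_t ya = powmod(g, xa, p);		/* ya = g^xa mod p */
	uint64_t yb = powmod(g, xb, p);		/* yb = g^xb mod p */
	uint64_t zz_a = powmod(yb, xa, p);	/* ZZ = yb^xa mod p */
	uint64_t zz_b = powmod(ya, xb, p);	/* ZZ = ya^xb mod p */

	/* Both sides derive the same shared secret. */
	printf("ZZ(a)=%llu ZZ(b)=%llu\n",
	       (unsigned long long)zz_a, (unsigned long long)zz_b);
	return 0;
}
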
146 return ((crypto_dma_align() - 1) | (HPRE_ALIGN_SZ - 1)) + 1; in hpre_align_sz()
151 return (hpre_align_sz() - 1) & ~(crypto_tfm_ctx_alignment() - 1); in hpre_align_pd()
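
hpre_align_sz() merges the DMA alignment requested by the crypto core with the driver's own HPRE_ALIGN_SZ: OR-ing the two power-of-two masks and adding one yields the larger of the two alignments, which hpre_align_pd() then turns into a padding amount relative to the tfm context alignment. A standalone sketch of the mask arithmetic; the concrete values below merely stand in for crypto_dma_align() and HPRE_ALIGN_SZ, which this listing does not show.

#include <stdio.h>

/* Placeholder values; the real ones come from crypto_dma_align() and the
 * driver's HPRE_ALIGN_SZ, neither of which is visible in this listing. */
#define FAKE_DMA_ALIGN	128u
#define FAKE_HPRE_ALIGN	64u

/* OR-ing two power-of-two masks and adding 1 gives the larger alignment. */
static unsigned int combined_align(unsigned int a, unsigned int b)
{
	return ((a - 1) | (b - 1)) + 1;
}

int main(void)
{
	printf("combined alignment: %u\n",
	       combined_align(FAKE_DMA_ALIGN, FAKE_HPRE_ALIGN));	/* 128 */
	return 0;
}
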
159 spin_lock_irqsave(&ctx->req_lock, flags); in hpre_alloc_req_id()
160 id = idr_alloc(&ctx->req_idr, NULL, 0, ctx->qp->sq_depth, GFP_ATOMIC); in hpre_alloc_req_id()
161 spin_unlock_irqrestore(&ctx->req_lock, flags); in hpre_alloc_req_id()
170 spin_lock_irqsave(&ctx->req_lock, flags); in hpre_free_req_id()
171 idr_remove(&ctx->req_idr, req_id); in hpre_free_req_id()
172 spin_unlock_irqrestore(&ctx->req_lock, flags); in hpre_free_req_id()
181 ctx = hpre_req->ctx; in hpre_add_req_to_ctx()
184 return -EINVAL; in hpre_add_req_to_ctx()
186 ctx->req_list[id] = hpre_req; in hpre_add_req_to_ctx()
187 hpre_req->req_id = id; in hpre_add_req_to_ctx()
189 dfx = ctx->hpre->debug.dfx; in hpre_add_req_to_ctx()
191 ktime_get_ts64(&hpre_req->req_time); in hpre_add_req_to_ctx()
198 struct hpre_ctx *ctx = hpre_req->ctx; in hpre_rm_req_from_ctx()
199 int id = hpre_req->req_id; in hpre_rm_req_from_ctx()
201 if (hpre_req->req_id >= 0) { in hpre_rm_req_from_ctx()
202 hpre_req->req_id = HPRE_INVLD_REQ_ID; in hpre_rm_req_from_ctx()
203 ctx->req_list[id] = NULL; in hpre_rm_req_from_ctx()
216 return ERR_PTR(-ENODEV); in hpre_get_qp_and_start()
222 pci_err(qp->qm->pdev, "Can not start qp!\n"); in hpre_get_qp_and_start()
223 return ERR_PTR(-EINVAL); in hpre_get_qp_and_start()
233 struct device *dev = hpre_req->ctx->dev; in hpre_get_data_dma_addr()
237 hpre_req->src = NULL; in hpre_get_data_dma_addr()
240 hpre_req->dst = NULL; in hpre_get_data_dma_addr()
246 return -ENOMEM; in hpre_get_data_dma_addr()
256 struct hpre_ctx *ctx = hpre_req->ctx; in hpre_prepare_dma_buf()
257 struct device *dev = ctx->dev; in hpre_prepare_dma_buf()
261 shift = ctx->key_sz - len; in hpre_prepare_dma_buf()
263 return -EINVAL; in hpre_prepare_dma_buf()
265 ptr = dma_alloc_coherent(dev, ctx->key_sz, tmp, GFP_ATOMIC); in hpre_prepare_dma_buf()
267 return -ENOMEM; in hpre_prepare_dma_buf()
271 hpre_req->src = ptr; in hpre_prepare_dma_buf()
273 hpre_req->dst = ptr; in hpre_prepare_dma_buf()
283 struct hpre_sqe *msg = &hpre_req->req; in hpre_hw_data_init()
284 struct hpre_ctx *ctx = hpre_req->ctx; in hpre_hw_data_init()
289 if ((sg_is_last(data) && len == ctx->key_sz) && in hpre_hw_data_init()
299 msg->in = cpu_to_le64(tmp); in hpre_hw_data_init()
301 msg->out = cpu_to_le64(tmp); in hpre_hw_data_init()
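
The fragments above show the two input paths of hpre_hw_data_init(): a single scatterlist entry of exactly key_sz bytes is DMA-mapped directly, while anything shorter goes through hpre_prepare_dma_buf(), which copies the big-endian operand into a zeroed coherent buffer so it ends up right-aligned at key_sz bytes. A minimal host-side sketch of that left-padding step, with plain buffers instead of the DMA API.

#include <stdio.h>
#include <string.h>

/* Right-align a big-endian operand of 'len' bytes inside a zeroed
 * 'key_sz'-byte buffer, as the coherent-buffer path does. */
static int pad_operand(unsigned char *dst, unsigned int key_sz,
		       const unsigned char *src, unsigned int len)
{
	unsigned int shift;

	if (len > key_sz)
		return -1;		/* operand longer than the key */

	shift = key_sz - len;
	memset(dst, 0, key_sz);
	memcpy(dst + shift, src, len);
	return 0;
}

int main(void)
{
	unsigned char out[8];
	const unsigned char in[3] = { 0x01, 0x02, 0x03 };
	int i;

	pad_operand(out, sizeof(out), in, sizeof(in));
	for (i = 0; i < 8; i++)
		printf("%02x ", out[i]);	/* 00 00 00 00 00 01 02 03 */
	printf("\n");
	return 0;
}
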
311 struct device *dev = ctx->dev; in hpre_hw_data_clr_all()
312 struct hpre_sqe *sqe = &req->req; in hpre_hw_data_clr_all()
315 tmp = le64_to_cpu(sqe->in); in hpre_hw_data_clr_all()
320 if (req->src) in hpre_hw_data_clr_all()
321 dma_free_coherent(dev, ctx->key_sz, req->src, tmp); in hpre_hw_data_clr_all()
323 dma_unmap_single(dev, tmp, ctx->key_sz, DMA_TO_DEVICE); in hpre_hw_data_clr_all()
326 tmp = le64_to_cpu(sqe->out); in hpre_hw_data_clr_all()
330 if (req->dst) { in hpre_hw_data_clr_all()
332 scatterwalk_map_and_copy(req->dst, dst, 0, in hpre_hw_data_clr_all()
333 ctx->key_sz, 1); in hpre_hw_data_clr_all()
334 dma_free_coherent(dev, ctx->key_sz, req->dst, tmp); in hpre_hw_data_clr_all()
336 dma_unmap_single(dev, tmp, ctx->key_sz, DMA_FROM_DEVICE); in hpre_hw_data_clr_all()
352 id = (int)le16_to_cpu(sqe->tag); in hpre_alg_res_post_hf()
353 req = ctx->req_list[id]; in hpre_alg_res_post_hf()
357 err = (le32_to_cpu(sqe->dw0) >> HPRE_SQE_ALG_BITS) & in hpre_alg_res_post_hf()
360 done = (le32_to_cpu(sqe->dw0) >> HPRE_SQE_DONE_SHIFT) & in hpre_alg_res_post_hf()
366 alg = le32_to_cpu(sqe->dw0) & HREE_ALG_TYPE_MASK; in hpre_alg_res_post_hf()
367 dev_err_ratelimited(ctx->dev, "alg[0x%x] error: done[0x%x], etype[0x%x]\n", in hpre_alg_res_post_hf()
370 return -EINVAL; in hpre_alg_res_post_hf()
378 return -EINVAL; in hpre_ctx_set()
380 spin_lock_init(&ctx->req_lock); in hpre_ctx_set()
381 ctx->qp = qp; in hpre_ctx_set()
382 ctx->dev = &qp->qm->pdev->dev; in hpre_ctx_set()
384 hpre = container_of(ctx->qp->qm, struct hpre, qm); in hpre_ctx_set()
385 ctx->hpre = hpre; in hpre_ctx_set()
386 ctx->req_list = kcalloc(qlen, sizeof(void *), GFP_KERNEL); in hpre_ctx_set()
387 if (!ctx->req_list) in hpre_ctx_set()
388 return -ENOMEM; in hpre_ctx_set()
389 ctx->key_sz = 0; in hpre_ctx_set()
390 ctx->crt_g2_mode = false; in hpre_ctx_set()
391 idr_init(&ctx->req_idr); in hpre_ctx_set()
399 idr_destroy(&ctx->req_idr); in hpre_ctx_clear()
400 kfree(ctx->req_list); in hpre_ctx_clear()
401 hisi_qm_free_qps(&ctx->qp, 1); in hpre_ctx_clear()
404 ctx->crt_g2_mode = false; in hpre_ctx_clear()
405 ctx->key_sz = 0; in hpre_ctx_clear()
415 time_use_us = (reply_time.tv_sec - req->req_time.tv_sec) * in hpre_is_bd_timeout()
417 (reply_time.tv_nsec - req->req_time.tv_nsec) / in hpre_is_bd_timeout()
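
hpre_is_bd_timeout() measures how long a request was in flight by subtracting the enqueue timestamp from the reply timestamp; the scale constants are elided from this listing, but the arithmetic is the usual seconds-difference times one million plus nanoseconds-difference divided by one thousand. A plain userspace sketch of the same calculation; the constant names here are illustrative, not the driver's.

#include <stdio.h>
#include <time.h>

#define SEC_TO_US	1000000LL
#define US_TO_NS	1000LL

/* Elapsed time in microseconds between two struct timespec values. */
static long long elapsed_us(const struct timespec *start,
			    const struct timespec *end)
{
	return (end->tv_sec - start->tv_sec) * SEC_TO_US +
	       (end->tv_nsec - start->tv_nsec) / US_TO_NS;
}

int main(void)
{
	struct timespec a = { .tv_sec = 10, .tv_nsec = 250000 };
	struct timespec b = { .tv_sec = 12, .tv_nsec = 750000 };

	printf("%lld us\n", elapsed_us(&a, &b));	/* 2000500 us */
	return 0;
}
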
428 struct hpre_dfx *dfx = ctx->hpre->debug.dfx; in hpre_dh_cb()
435 areq = req->areq.dh; in hpre_dh_cb()
436 areq->dst_len = ctx->key_sz; in hpre_dh_cb()
442 hpre_hw_data_clr_all(ctx, req, areq->dst, areq->src); in hpre_dh_cb()
449 struct hpre_dfx *dfx = ctx->hpre->debug.dfx; in hpre_rsa_cb()
461 areq = req->areq.rsa; in hpre_rsa_cb()
462 areq->dst_len = ctx->key_sz; in hpre_rsa_cb()
463 hpre_hw_data_clr_all(ctx, req, areq->dst, areq->src); in hpre_rsa_cb()
470 struct hpre_ctx *ctx = qp->qp_ctx; in hpre_alg_cb()
471 struct hpre_dfx *dfx = ctx->hpre->debug.dfx; in hpre_alg_cb()
473 struct hpre_asym_request *req = ctx->req_list[le16_to_cpu(sqe->tag)]; in hpre_alg_cb()
480 req->cb(ctx, resp); in hpre_alg_cb()
498 qp->qp_ctx = ctx; in hpre_ctx_init()
499 qp->req_cb = hpre_alg_cb; in hpre_ctx_init()
501 ret = hpre_ctx_set(ctx, qp, qp->sq_depth); in hpre_ctx_init()
518 if (akreq->dst_len < ctx->key_sz) { in hpre_msg_request_set()
519 akreq->dst_len = ctx->key_sz; in hpre_msg_request_set()
520 return -EOVERFLOW; in hpre_msg_request_set()
525 h_req->cb = hpre_rsa_cb; in hpre_msg_request_set()
526 h_req->areq.rsa = akreq; in hpre_msg_request_set()
527 msg = &h_req->req; in hpre_msg_request_set()
532 if (kreq->dst_len < ctx->key_sz) { in hpre_msg_request_set()
533 kreq->dst_len = ctx->key_sz; in hpre_msg_request_set()
534 return -EOVERFLOW; in hpre_msg_request_set()
539 h_req->cb = hpre_dh_cb; in hpre_msg_request_set()
540 h_req->areq.dh = kreq; in hpre_msg_request_set()
541 msg = &h_req->req; in hpre_msg_request_set()
543 msg->key = cpu_to_le64(ctx->dh.dma_xa_p); in hpre_msg_request_set()
546 msg->in = cpu_to_le64(DMA_MAPPING_ERROR); in hpre_msg_request_set()
547 msg->out = cpu_to_le64(DMA_MAPPING_ERROR); in hpre_msg_request_set()
548 msg->dw0 |= cpu_to_le32(0x1 << HPRE_SQE_DONE_SHIFT); in hpre_msg_request_set()
549 msg->task_len1 = (ctx->key_sz >> HPRE_BITS_2_BYTES_SHIFT) - 1; in hpre_msg_request_set()
550 h_req->ctx = ctx; in hpre_msg_request_set()
554 return -EBUSY; in hpre_msg_request_set()
556 msg->tag = cpu_to_le16((u16)req_id); in hpre_msg_request_set()
563 struct hpre_dfx *dfx = ctx->hpre->debug.dfx; in hpre_send()
569 spin_lock_bh(&ctx->req_lock); in hpre_send()
570 ret = hisi_qp_send(ctx->qp, msg); in hpre_send()
571 spin_unlock_bh(&ctx->req_lock); in hpre_send()
572 if (ret != -EBUSY) in hpre_send()
580 if (ret != -EBUSY) in hpre_send()
592 struct hpre_sqe *msg = &hpre_req->req; in hpre_dh_compute_value()
599 if (req->src) { in hpre_dh_compute_value()
600 ret = hpre_hw_data_init(hpre_req, req->src, req->src_len, 1, 1); in hpre_dh_compute_value()
604 msg->in = cpu_to_le64(ctx->dh.dma_g); in hpre_dh_compute_value()
607 ret = hpre_hw_data_init(hpre_req, req->dst, req->dst_len, 0, 1); in hpre_dh_compute_value()
611 if (ctx->crt_g2_mode && !req->src) in hpre_dh_compute_value()
612 msg->dw0 = cpu_to_le32(le32_to_cpu(msg->dw0) | HPRE_ALG_DH_G2); in hpre_dh_compute_value()
614 msg->dw0 = cpu_to_le32(le32_to_cpu(msg->dw0) | HPRE_ALG_DH); in hpre_dh_compute_value()
619 return -EINPROGRESS; in hpre_dh_compute_value()
623 hpre_hw_data_clr_all(ctx, hpre_req, req->dst, req->src); in hpre_dh_compute_value()
645 return -EINVAL; in hpre_is_dh_params_length_valid()
651 struct device *dev = ctx->dev; in hpre_dh_set_params()
654 if (params->p_size > HPRE_DH_MAX_P_SZ) in hpre_dh_set_params()
655 return -EINVAL; in hpre_dh_set_params()
657 if (hpre_is_dh_params_length_valid(params->p_size << in hpre_dh_set_params()
659 return -EINVAL; in hpre_dh_set_params()
661 sz = ctx->key_sz = params->p_size; in hpre_dh_set_params()
662 ctx->dh.xa_p = dma_alloc_coherent(dev, sz << 1, in hpre_dh_set_params()
663 &ctx->dh.dma_xa_p, GFP_KERNEL); in hpre_dh_set_params()
664 if (!ctx->dh.xa_p) in hpre_dh_set_params()
665 return -ENOMEM; in hpre_dh_set_params()
667 memcpy(ctx->dh.xa_p + sz, params->p, sz); in hpre_dh_set_params()
670 if (params->g_size == 1 && *(char *)params->g == HPRE_DH_G_FLAG) { in hpre_dh_set_params()
671 ctx->crt_g2_mode = true; in hpre_dh_set_params()
675 ctx->dh.g = dma_alloc_coherent(dev, sz, &ctx->dh.dma_g, GFP_KERNEL); in hpre_dh_set_params()
676 if (!ctx->dh.g) { in hpre_dh_set_params()
677 dma_free_coherent(dev, sz << 1, ctx->dh.xa_p, in hpre_dh_set_params()
678 ctx->dh.dma_xa_p); in hpre_dh_set_params()
679 ctx->dh.xa_p = NULL; in hpre_dh_set_params()
680 return -ENOMEM; in hpre_dh_set_params()
683 memcpy(ctx->dh.g + (sz - params->g_size), params->g, params->g_size); in hpre_dh_set_params()
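
hpre_dh_set_params() keeps xa and p in a single coherent allocation of twice key_sz bytes: the memcpy at offset sz places the prime in the upper half, and hpre_dh_set_secret() later copies the private value into the lower half, right-aligned when it is shorter than key_sz; the generator, unless it is the special value 2 (g2 mode), gets its own key_sz buffer, also right-aligned. A host-only sketch of that packing, using calloc in place of dma_alloc_coherent.

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

/* Pack a DH context buffer the way the fragments above suggest:
 * [0, key_sz)         private value xa, right-aligned
 * [key_sz, 2*key_sz)  prime p
 * Plain calloc stands in for dma_alloc_coherent here. */
static unsigned char *pack_xa_p(const unsigned char *xa, unsigned int xa_len,
				const unsigned char *p, unsigned int key_sz)
{
	unsigned char *buf;

	if (xa_len > key_sz)
		return NULL;

	buf = calloc(1, (size_t)key_sz * 2);
	if (!buf)
		return NULL;

	memcpy(buf + key_sz - xa_len, xa, xa_len);	/* low half: xa */
	memcpy(buf + key_sz, p, key_sz);		/* high half: p */
	return buf;
}

int main(void)
{
	const unsigned char p[4]  = { 0xff, 0xff, 0xff, 0xfb };
	const unsigned char xa[2] = { 0x12, 0x34 };
	unsigned char *buf = pack_xa_p(xa, sizeof(xa), p, sizeof(p));
	int i;

	if (!buf)
		return 1;
	for (i = 0; i < 8; i++)
		printf("%02x ", buf[i]);	/* 00 00 12 34 ff ff ff fb */
	printf("\n");
	free(buf);
	return 0;
}
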
690 struct device *dev = ctx->dev; in hpre_dh_clear_ctx()
691 unsigned int sz = ctx->key_sz; in hpre_dh_clear_ctx()
694 hisi_qm_stop_qp(ctx->qp); in hpre_dh_clear_ctx()
696 if (ctx->dh.g) { in hpre_dh_clear_ctx()
697 dma_free_coherent(dev, sz, ctx->dh.g, ctx->dh.dma_g); in hpre_dh_clear_ctx()
698 ctx->dh.g = NULL; in hpre_dh_clear_ctx()
701 if (ctx->dh.xa_p) { in hpre_dh_clear_ctx()
702 memzero_explicit(ctx->dh.xa_p, sz); in hpre_dh_clear_ctx()
703 dma_free_coherent(dev, sz << 1, ctx->dh.xa_p, in hpre_dh_clear_ctx()
704 ctx->dh.dma_xa_p); in hpre_dh_clear_ctx()
705 ctx->dh.xa_p = NULL; in hpre_dh_clear_ctx()
719 return -EINVAL; in hpre_dh_set_secret()
728 memcpy(ctx->dh.xa_p + (ctx->key_sz - params.key_size), params.key, in hpre_dh_set_secret()
742 return ctx->key_sz; in hpre_dh_max_size()
765 (*len)--; in hpre_rsa_drop_leading_zeros()
795 struct hpre_sqe *msg = &hpre_req->req; in hpre_rsa_enc()
799 if (ctx->key_sz == HPRE_RSA_512BITS_KSZ || in hpre_rsa_enc()
800 ctx->key_sz == HPRE_RSA_1536BITS_KSZ) { in hpre_rsa_enc()
801 akcipher_request_set_tfm(req, ctx->rsa.soft_tfm); in hpre_rsa_enc()
807 if (unlikely(!ctx->rsa.pubkey)) in hpre_rsa_enc()
808 return -EINVAL; in hpre_rsa_enc()
814 msg->dw0 |= cpu_to_le32(HPRE_ALG_NC_NCRT); in hpre_rsa_enc()
815 msg->key = cpu_to_le64(ctx->rsa.dma_pubkey); in hpre_rsa_enc()
817 ret = hpre_hw_data_init(hpre_req, req->src, req->src_len, 1, 0); in hpre_rsa_enc()
821 ret = hpre_hw_data_init(hpre_req, req->dst, req->dst_len, 0, 0); in hpre_rsa_enc()
828 return -EINPROGRESS; in hpre_rsa_enc()
832 hpre_hw_data_clr_all(ctx, hpre_req, req->dst, req->src); in hpre_rsa_enc()
843 struct hpre_sqe *msg = &hpre_req->req; in hpre_rsa_dec()
847 if (ctx->key_sz == HPRE_RSA_512BITS_KSZ || in hpre_rsa_dec()
848 ctx->key_sz == HPRE_RSA_1536BITS_KSZ) { in hpre_rsa_dec()
849 akcipher_request_set_tfm(req, ctx->rsa.soft_tfm); in hpre_rsa_dec()
855 if (unlikely(!ctx->rsa.prikey)) in hpre_rsa_dec()
856 return -EINVAL; in hpre_rsa_dec()
862 if (ctx->crt_g2_mode) { in hpre_rsa_dec()
863 msg->key = cpu_to_le64(ctx->rsa.dma_crt_prikey); in hpre_rsa_dec()
864 msg->dw0 = cpu_to_le32(le32_to_cpu(msg->dw0) | in hpre_rsa_dec()
867 msg->key = cpu_to_le64(ctx->rsa.dma_prikey); in hpre_rsa_dec()
868 msg->dw0 = cpu_to_le32(le32_to_cpu(msg->dw0) | in hpre_rsa_dec()
872 ret = hpre_hw_data_init(hpre_req, req->src, req->src_len, 1, 0); in hpre_rsa_dec()
876 ret = hpre_hw_data_init(hpre_req, req->dst, req->dst_len, 0, 0); in hpre_rsa_dec()
883 return -EINPROGRESS; in hpre_rsa_dec()
887 hpre_hw_data_clr_all(ctx, hpre_req, req->dst, req->src); in hpre_rsa_dec()
899 ctx->key_sz = vlen; in hpre_rsa_set_n()
902 if (!hpre_rsa_key_size_is_support(ctx->key_sz)) in hpre_rsa_set_n()
905 ctx->rsa.pubkey = dma_alloc_coherent(ctx->dev, vlen << 1, in hpre_rsa_set_n()
906 &ctx->rsa.dma_pubkey, in hpre_rsa_set_n()
908 if (!ctx->rsa.pubkey) in hpre_rsa_set_n()
909 return -ENOMEM; in hpre_rsa_set_n()
912 ctx->rsa.prikey = dma_alloc_coherent(ctx->dev, vlen << 1, in hpre_rsa_set_n()
913 &ctx->rsa.dma_prikey, in hpre_rsa_set_n()
915 if (!ctx->rsa.prikey) { in hpre_rsa_set_n()
916 dma_free_coherent(ctx->dev, vlen << 1, in hpre_rsa_set_n()
917 ctx->rsa.pubkey, in hpre_rsa_set_n()
918 ctx->rsa.dma_pubkey); in hpre_rsa_set_n()
919 ctx->rsa.pubkey = NULL; in hpre_rsa_set_n()
920 return -ENOMEM; in hpre_rsa_set_n()
922 memcpy(ctx->rsa.prikey + vlen, ptr, vlen); in hpre_rsa_set_n()
924 memcpy(ctx->rsa.pubkey + vlen, ptr, vlen); in hpre_rsa_set_n()
937 if (!ctx->key_sz || !vlen || vlen > ctx->key_sz) in hpre_rsa_set_e()
938 return -EINVAL; in hpre_rsa_set_e()
940 memcpy(ctx->rsa.pubkey + ctx->key_sz - vlen, ptr, vlen); in hpre_rsa_set_e()
952 if (!ctx->key_sz || !vlen || vlen > ctx->key_sz) in hpre_rsa_set_d()
953 return -EINVAL; in hpre_rsa_set_d()
955 memcpy(ctx->rsa.prikey + ctx->key_sz - vlen, ptr, vlen); in hpre_rsa_set_d()
968 return -EINVAL; in hpre_crt_para_get()
970 memcpy(para + para_sz - len, ptr, len); in hpre_crt_para_get()
977 unsigned int hlf_ksz = ctx->key_sz >> 1; in hpre_rsa_setkey_crt()
978 struct device *dev = ctx->dev; in hpre_rsa_setkey_crt()
982 ctx->rsa.crt_prikey = dma_alloc_coherent(dev, hlf_ksz * HPRE_CRT_PRMS, in hpre_rsa_setkey_crt()
983 &ctx->rsa.dma_crt_prikey, in hpre_rsa_setkey_crt()
985 if (!ctx->rsa.crt_prikey) in hpre_rsa_setkey_crt()
986 return -ENOMEM; in hpre_rsa_setkey_crt()
988 ret = hpre_crt_para_get(ctx->rsa.crt_prikey, hlf_ksz, in hpre_rsa_setkey_crt()
989 rsa_key->dq, rsa_key->dq_sz); in hpre_rsa_setkey_crt()
994 ret = hpre_crt_para_get(ctx->rsa.crt_prikey + offset, hlf_ksz, in hpre_rsa_setkey_crt()
995 rsa_key->dp, rsa_key->dp_sz); in hpre_rsa_setkey_crt()
1000 ret = hpre_crt_para_get(ctx->rsa.crt_prikey + offset, hlf_ksz, in hpre_rsa_setkey_crt()
1001 rsa_key->q, rsa_key->q_sz); in hpre_rsa_setkey_crt()
1006 ret = hpre_crt_para_get(ctx->rsa.crt_prikey + offset, hlf_ksz, in hpre_rsa_setkey_crt()
1007 rsa_key->p, rsa_key->p_sz); in hpre_rsa_setkey_crt()
1012 ret = hpre_crt_para_get(ctx->rsa.crt_prikey + offset, hlf_ksz, in hpre_rsa_setkey_crt()
1013 rsa_key->qinv, rsa_key->qinv_sz); in hpre_rsa_setkey_crt()
1017 ctx->crt_g2_mode = true; in hpre_rsa_setkey_crt()
1023 memzero_explicit(ctx->rsa.crt_prikey, offset); in hpre_rsa_setkey_crt()
1024 dma_free_coherent(dev, hlf_ksz * HPRE_CRT_PRMS, ctx->rsa.crt_prikey, in hpre_rsa_setkey_crt()
1025 ctx->rsa.dma_crt_prikey); in hpre_rsa_setkey_crt()
1026 ctx->rsa.crt_prikey = NULL; in hpre_rsa_setkey_crt()
1027 ctx->crt_g2_mode = false; in hpre_rsa_setkey_crt()
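
hpre_rsa_setkey_crt() lays the five CRT parameters out in one buffer of five half-key-size slots, in the order given by the "dq->dp->q->p->qinv" comment near the top of the listing, with hpre_crt_para_get() right-aligning each value inside its slot after leading zeros are dropped. A self-contained sketch of that layout (buffer handling only, no DMA or key parsing).

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

#define CRT_PRMS 5	/* dq, dp, q, p, qinv */

/* Right-align one CRT parameter into a half-key-size slot. */
static int put_crt_param(unsigned char *slot, unsigned int slot_sz,
			 const unsigned char *val, unsigned int len)
{
	if (!len || len > slot_sz)
		return -1;
	memcpy(slot + slot_sz - len, val, len);
	return 0;
}

int main(void)
{
	unsigned int half = 4;			/* toy half-key size */
	unsigned char *key = calloc(1, (size_t)half * CRT_PRMS);
	const unsigned char dq[] = { 0x0d, 0x01 }, dp[] = { 0x0d, 0x02 };
	const unsigned char q[] = { 0x03 }, p[] = { 0x05 }, qinv[] = { 0x07 };
	unsigned int i;

	if (!key)
		return 1;
	put_crt_param(key + 0 * half, half, dq, sizeof(dq));
	put_crt_param(key + 1 * half, half, dp, sizeof(dp));
	put_crt_param(key + 2 * half, half, q, sizeof(q));
	put_crt_param(key + 3 * half, half, p, sizeof(p));
	put_crt_param(key + 4 * half, half, qinv, sizeof(qinv));

	/* Prints five right-aligned 4-byte slots in dq->dp->q->p->qinv order. */
	for (i = 0; i < half * CRT_PRMS; i++)
		printf("%02x%s", key[i], (i % half == half - 1) ? "  " : " ");
	printf("\n");
	free(key);
	return 0;
}
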
1035 unsigned int half_key_sz = ctx->key_sz >> 1; in hpre_rsa_clear_ctx()
1036 struct device *dev = ctx->dev; in hpre_rsa_clear_ctx()
1039 hisi_qm_stop_qp(ctx->qp); in hpre_rsa_clear_ctx()
1041 if (ctx->rsa.pubkey) { in hpre_rsa_clear_ctx()
1042 dma_free_coherent(dev, ctx->key_sz << 1, in hpre_rsa_clear_ctx()
1043 ctx->rsa.pubkey, ctx->rsa.dma_pubkey); in hpre_rsa_clear_ctx()
1044 ctx->rsa.pubkey = NULL; in hpre_rsa_clear_ctx()
1047 if (ctx->rsa.crt_prikey) { in hpre_rsa_clear_ctx()
1048 memzero_explicit(ctx->rsa.crt_prikey, in hpre_rsa_clear_ctx()
1051 ctx->rsa.crt_prikey, ctx->rsa.dma_crt_prikey); in hpre_rsa_clear_ctx()
1052 ctx->rsa.crt_prikey = NULL; in hpre_rsa_clear_ctx()
1055 if (ctx->rsa.prikey) { in hpre_rsa_clear_ctx()
1056 memzero_explicit(ctx->rsa.prikey, ctx->key_sz); in hpre_rsa_clear_ctx()
1057 dma_free_coherent(dev, ctx->key_sz << 1, ctx->rsa.prikey, in hpre_rsa_clear_ctx()
1058 ctx->rsa.dma_prikey); in hpre_rsa_clear_ctx()
1059 ctx->rsa.prikey = NULL; in hpre_rsa_clear_ctx()
1067 * CRT: return true, N-CRT: return false .
1071 u16 len = key->p_sz + key->q_sz + key->dp_sz + key->dq_sz + in hpre_is_crt_key()
1072 key->qinv_sz; in hpre_is_crt_key()
1076 /* N-CRT less than 5 parameters */ in hpre_is_crt_key()
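
The comparison itself is elided here; the comment suggests that a key whose combined CRT-component length stays below a small threshold is treated as a non-CRT (N-CRT) key. A sketch of such a predicate; the struct, the helper name, and the threshold value are illustrative assumptions, not taken from the driver.

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* Threshold chosen as an assumption: a key carrying no usable CRT
 * components contributes only a few (or zero) bytes across these fields. */
#define NCRT_PARA_LEN	5

struct toy_rsa_key {
	uint16_t p_sz, q_sz, dp_sz, dq_sz, qinv_sz;
};

static bool toy_is_crt_key(const struct toy_rsa_key *key)
{
	uint16_t len = key->p_sz + key->q_sz + key->dp_sz + key->dq_sz +
		       key->qinv_sz;

	return len > NCRT_PARA_LEN;
}

int main(void)
{
	struct toy_rsa_key full = { 64, 64, 64, 64, 64 };	/* CRT form */
	struct toy_rsa_key bare = { 0, 0, 0, 0, 0 };		/* N-CRT */

	printf("full: %d, bare: %d\n",
	       toy_is_crt_key(&full), toy_is_crt_key(&bare));
	return 0;
}
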
1115 if ((private && !ctx->rsa.prikey) || !ctx->rsa.pubkey) { in hpre_rsa_setkey()
1116 ret = -EINVAL; in hpre_rsa_setkey()
1133 ret = crypto_akcipher_set_pub_key(ctx->rsa.soft_tfm, key, keylen); in hpre_rsa_setpubkey()
1146 ret = crypto_akcipher_set_priv_key(ctx->rsa.soft_tfm, key, keylen); in hpre_rsa_setprivkey()
1158 if (ctx->key_sz == HPRE_RSA_512BITS_KSZ || in hpre_rsa_max_size()
1159 ctx->key_sz == HPRE_RSA_1536BITS_KSZ) in hpre_rsa_max_size()
1160 return crypto_akcipher_maxsize(ctx->rsa.soft_tfm); in hpre_rsa_max_size()
1162 return ctx->key_sz; in hpre_rsa_max_size()
1170 ctx->rsa.soft_tfm = crypto_alloc_akcipher("rsa-generic", 0, 0); in hpre_rsa_init_tfm()
1171 if (IS_ERR(ctx->rsa.soft_tfm)) { in hpre_rsa_init_tfm()
1173 return PTR_ERR(ctx->rsa.soft_tfm); in hpre_rsa_init_tfm()
1181 crypto_free_akcipher(ctx->rsa.soft_tfm); in hpre_rsa_init_tfm()
1191 crypto_free_akcipher(ctx->rsa.soft_tfm); in hpre_rsa_exit_tfm()
1199 j = len - i - 1; in hpre_key_to_big_end()
1206 struct device *dev = ctx->dev; in hpre_ecc_clear_ctx()
1207 unsigned int sz = ctx->key_sz; in hpre_ecc_clear_ctx()
1211 hisi_qm_stop_qp(ctx->qp); in hpre_ecc_clear_ctx()
1213 if (ctx->ecdh.p) { in hpre_ecc_clear_ctx()
1214 /* ecdh: p->a->k->b */ in hpre_ecc_clear_ctx()
1215 memzero_explicit(ctx->ecdh.p + shift, sz); in hpre_ecc_clear_ctx()
1216 dma_free_coherent(dev, sz << 3, ctx->ecdh.p, ctx->ecdh.dma_p); in hpre_ecc_clear_ctx()
1217 ctx->ecdh.p = NULL; in hpre_ecc_clear_ctx()
1224 * The bits of 192/224/256/384/521 are supported by HPRE,
1226 * bits<=256, bits=256; 256<bits<=384, bits=384; 384<bits<=576, bits=576;
1228 * high-order zeros by soft, so TASK_LENGTH1 is 0x3/0x5/0x8;
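
The comment describes how curve sizes are bucketed into the three operand widths the hardware accepts (256, 384, or 576 bits), with shorter curves zero-extended in software, and TASK_LENGTH1 encoding the width in 64-bit words minus one (0x3/0x5/0x8), which matches the "(ctx->key_sz >> HPRE_BITS_2_BYTES_SHIFT) - 1" expressions elsewhere in the listing. A sketch of that mapping in bytes; the helper name is illustrative.

#include <stdio.h>

/* Bucket a curve size (in bits) into the operand width HPRE works on,
 * returned in bytes; 0 means unsupported.  Helper name is illustrative. */
static unsigned int ecc_hw_key_bytes(unsigned int curve_bits)
{
	if (curve_bits <= 256)
		return 256 / 8;
	if (curve_bits <= 384)
		return 384 / 8;
	if (curve_bits <= 576)
		return 576 / 8;
	return 0;
}

int main(void)
{
	unsigned int bits[] = { 192, 224, 256, 384, 521 };
	unsigned int i;

	for (i = 0; i < sizeof(bits) / sizeof(bits[0]); i++) {
		unsigned int key_sz = ecc_hw_key_bytes(bits[i]);

		/* TASK_LENGTH1 = operand width in 64-bit words, minus one */
		printf("%u bits -> %u bytes, task_len1 = 0x%x\n",
		       bits[i], key_sz, (key_sz >> 3) - 1);
	}
	return 0;
}
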
1247 unsigned int sz = cur_sz - (ndigits - 1) * sizeof(u64); in fill_curve_param()
1250 while (i < ndigits - 1) { in fill_curve_param()
1255 memcpy(addr + sizeof(u64) * i, &param[ndigits - 1], sz); in fill_curve_param()
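
fill_curve_param() flattens a curve parameter stored as 64-bit digits (least-significant digit first, as the kernel's ECC code keeps them) into a cur_sz-byte buffer, copying the final digit only partially when cur_sz is not a multiple of eight, then byte-reverses the result with hpre_key_to_big_end() so the hardware sees a big-endian integer. A standalone sketch of the same conversion; it assumes a little-endian host, as the memcpy-then-reverse approach does.

#include <stdio.h>
#include <stdint.h>
#include <string.h>

/* Byte-reverse a buffer in place (little-endian bytes -> big-endian). */
static void to_big_end(uint8_t *data, unsigned int len)
{
	unsigned int i;

	for (i = 0; i < len / 2; i++) {
		uint8_t t = data[i];

		data[i] = data[len - i - 1];
		data[len - i - 1] = t;
	}
}

/* Flatten an array of 64-bit digits (least-significant digit first) into
 * a cur_sz-byte big-endian integer.  Little-endian host assumed. */
static void flatten_param(uint8_t *out, const uint64_t *digits,
			  unsigned int cur_sz, unsigned int ndigits)
{
	unsigned int tail = cur_sz - (ndigits - 1) * sizeof(uint64_t);
	unsigned int i;

	for (i = 0; i + 1 < ndigits; i++)
		memcpy(out + sizeof(uint64_t) * i, &digits[i], sizeof(uint64_t));
	memcpy(out + sizeof(uint64_t) * i, &digits[ndigits - 1], tail);
	to_big_end(out, cur_sz);
}

int main(void)
{
	/* Value 0x0102 * 2^64 + 3, split into two 64-bit digits. */
	uint64_t digits[2] = { 0x0000000000000003ULL, 0x0102ULL };
	uint8_t out[10];	/* parameter shorter than 2 full digits */
	unsigned int i;

	flatten_param(out, digits, sizeof(out), 2);
	for (i = 0; i < sizeof(out); i++)
		printf("%02x ", out[i]);	/* 01 02 00 ... 00 03 */
	printf("\n");
	return 0;
}
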
1262 unsigned int shifta = ctx->key_sz << 1; in hpre_ecdh_fill_curve()
1263 unsigned int shiftb = ctx->key_sz << 2; in hpre_ecdh_fill_curve()
1264 void *p = ctx->ecdh.p + ctx->key_sz - cur_sz; in hpre_ecdh_fill_curve() local
1265 void *a = ctx->ecdh.p + shifta - cur_sz; in hpre_ecdh_fill_curve()
1266 void *b = ctx->ecdh.p + shiftb - cur_sz; in hpre_ecdh_fill_curve()
1267 void *x = ctx->ecdh.g + ctx->key_sz - cur_sz; in hpre_ecdh_fill_curve()
1268 void *y = ctx->ecdh.g + shifta - cur_sz; in hpre_ecdh_fill_curve()
1269 const struct ecc_curve *curve = ecc_get_curve(ctx->curve_id); in hpre_ecdh_fill_curve()
1273 return -EINVAL; in hpre_ecdh_fill_curve()
1275 n = kzalloc(ctx->key_sz, GFP_KERNEL); in hpre_ecdh_fill_curve()
1277 return -ENOMEM; in hpre_ecdh_fill_curve()
1279 fill_curve_param(p, curve->p, cur_sz, curve->g.ndigits); in hpre_ecdh_fill_curve()
1280 fill_curve_param(a, curve->a, cur_sz, curve->g.ndigits); in hpre_ecdh_fill_curve()
1281 fill_curve_param(b, curve->b, cur_sz, curve->g.ndigits); in hpre_ecdh_fill_curve()
1282 fill_curve_param(x, curve->g.x, cur_sz, curve->g.ndigits); in hpre_ecdh_fill_curve()
1283 fill_curve_param(y, curve->g.y, cur_sz, curve->g.ndigits); in hpre_ecdh_fill_curve()
1284 fill_curve_param(n, curve->n, cur_sz, curve->g.ndigits); in hpre_ecdh_fill_curve()
1286 if (params->key_size == cur_sz && memcmp(params->key, n, cur_sz) >= 0) { in hpre_ecdh_fill_curve()
1288 return -EINVAL; in hpre_ecdh_fill_curve()
1313 struct device *dev = ctx->dev; in hpre_ecdh_set_param()
1317 ctx->key_sz = hpre_ecdh_supported_curve(ctx->curve_id); in hpre_ecdh_set_param()
1318 if (!ctx->key_sz) in hpre_ecdh_set_param()
1319 return -EINVAL; in hpre_ecdh_set_param()
1321 curve_sz = hpre_ecdh_get_curvesz(ctx->curve_id); in hpre_ecdh_set_param()
1322 if (!curve_sz || params->key_size > curve_sz) in hpre_ecdh_set_param()
1323 return -EINVAL; in hpre_ecdh_set_param()
1325 sz = ctx->key_sz; in hpre_ecdh_set_param()
1327 if (!ctx->ecdh.p) { in hpre_ecdh_set_param()
1328 ctx->ecdh.p = dma_alloc_coherent(dev, sz << 3, &ctx->ecdh.dma_p, in hpre_ecdh_set_param()
1330 if (!ctx->ecdh.p) in hpre_ecdh_set_param()
1331 return -ENOMEM; in hpre_ecdh_set_param()
1335 ctx->ecdh.g = ctx->ecdh.p + shift; in hpre_ecdh_set_param()
1336 ctx->ecdh.dma_g = ctx->ecdh.dma_p + shift; in hpre_ecdh_set_param()
1341 dma_free_coherent(dev, sz << 3, ctx->ecdh.p, ctx->ecdh.dma_p); in hpre_ecdh_set_param()
1342 ctx->ecdh.p = NULL; in hpre_ecdh_set_param()
1362 struct device *dev = ctx->dev; in ecdh_gen_privkey()
1371 ret = crypto_rng_get_bytes(crypto_default_rng, (u8 *)params->key, in ecdh_gen_privkey()
1372 params->key_size); in ecdh_gen_privkey()
1385 struct device *dev = ctx->dev; in hpre_ecdh_set_secret()
1392 return -EINVAL; in hpre_ecdh_set_secret()
1398 curve_sz = hpre_ecdh_get_curvesz(ctx->curve_id); in hpre_ecdh_set_secret()
1401 return -EINVAL; in hpre_ecdh_set_secret()
1404 params.key_size = curve_sz - 1; in hpre_ecdh_set_secret()
1412 return -EINVAL; in hpre_ecdh_set_secret()
1423 sz = ctx->key_sz; in hpre_ecdh_set_secret()
1424 sz_shift = (sz << 1) + sz - params.key_size; in hpre_ecdh_set_secret()
1425 memcpy(ctx->ecdh.p + sz_shift, params.key, params.key_size); in hpre_ecdh_set_secret()
1435 struct device *dev = ctx->dev; in hpre_ecdh_hw_data_clr_all()
1436 struct hpre_sqe *sqe = &req->req; in hpre_ecdh_hw_data_clr_all()
1439 dma = le64_to_cpu(sqe->in); in hpre_ecdh_hw_data_clr_all()
1443 if (src && req->src) in hpre_ecdh_hw_data_clr_all()
1444 dma_free_coherent(dev, ctx->key_sz << 2, req->src, dma); in hpre_ecdh_hw_data_clr_all()
1446 dma = le64_to_cpu(sqe->out); in hpre_ecdh_hw_data_clr_all()
1450 if (req->dst) in hpre_ecdh_hw_data_clr_all()
1451 dma_free_coherent(dev, ctx->key_sz << 1, req->dst, dma); in hpre_ecdh_hw_data_clr_all()
1453 dma_unmap_single(dev, dma, ctx->key_sz << 1, DMA_FROM_DEVICE); in hpre_ecdh_hw_data_clr_all()
1458 unsigned int curve_sz = hpre_ecdh_get_curvesz(ctx->curve_id); in hpre_ecdh_cb()
1459 struct hpre_dfx *dfx = ctx->hpre->debug.dfx; in hpre_ecdh_cb()
1463 char *p; in hpre_ecdh_cb() local
1467 areq = req->areq.ecdh; in hpre_ecdh_cb()
1468 areq->dst_len = ctx->key_sz << 1; in hpre_ecdh_cb()
1475 hpre_ecdh_hw_data_clr_all(ctx, req, areq->dst, areq->src); in hpre_ecdh_cb()
1477 p = sg_virt(areq->dst); in hpre_ecdh_cb()
1478 memmove(p, p + ctx->key_sz - curve_sz, curve_sz); in hpre_ecdh_cb()
1479 memmove(p + curve_sz, p + areq->dst_len - curve_sz, curve_sz); in hpre_ecdh_cb()
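
In hpre_ecdh_cb() the device has written the result point as two key_sz-byte fields with the actual curve_sz-byte coordinates right-aligned in each; the two memmove calls compact them so the caller sees x followed immediately by y in 2 * curve_sz bytes. A small sketch of that compaction with toy sizes.

#include <stdio.h>
#include <string.h>

/* Compact a point returned as two zero-padded key_sz-byte fields into
 * x || y occupying 2 * curve_sz bytes, as the callback fragments do. */
static void compact_point(unsigned char *buf, unsigned int key_sz,
			  unsigned int curve_sz)
{
	memmove(buf, buf + key_sz - curve_sz, curve_sz);
	memmove(buf + curve_sz, buf + 2 * key_sz - curve_sz, curve_sz);
}

int main(void)
{
	/* Toy sizes: 4-byte hardware field, 3-byte curve coordinate. */
	unsigned char buf[8] = { 0x00, 0x0a, 0x0b, 0x0c,	/* padded x */
				 0x00, 0x1a, 0x1b, 0x1c };	/* padded y */
	unsigned int i;

	compact_point(buf, 4, 3);
	for (i = 0; i < 6; i++)
		printf("%02x ", buf[i]);	/* 0a 0b 0c 1a 1b 1c */
	printf("\n");
	return 0;
}
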
1494 if (req->dst_len < ctx->key_sz << 1) { in hpre_ecdh_msg_request_set()
1495 req->dst_len = ctx->key_sz << 1; in hpre_ecdh_msg_request_set()
1496 return -EINVAL; in hpre_ecdh_msg_request_set()
1501 h_req->cb = hpre_ecdh_cb; in hpre_ecdh_msg_request_set()
1502 h_req->areq.ecdh = req; in hpre_ecdh_msg_request_set()
1503 msg = &h_req->req; in hpre_ecdh_msg_request_set()
1505 msg->in = cpu_to_le64(DMA_MAPPING_ERROR); in hpre_ecdh_msg_request_set()
1506 msg->out = cpu_to_le64(DMA_MAPPING_ERROR); in hpre_ecdh_msg_request_set()
1507 msg->key = cpu_to_le64(ctx->ecdh.dma_p); in hpre_ecdh_msg_request_set()
1509 msg->dw0 |= cpu_to_le32(0x1U << HPRE_SQE_DONE_SHIFT); in hpre_ecdh_msg_request_set()
1510 msg->task_len1 = (ctx->key_sz >> HPRE_BITS_2_BYTES_SHIFT) - 1; in hpre_ecdh_msg_request_set()
1511 h_req->ctx = ctx; in hpre_ecdh_msg_request_set()
1515 return -EBUSY; in hpre_ecdh_msg_request_set()
1517 msg->tag = cpu_to_le16((u16)req_id); in hpre_ecdh_msg_request_set()
1524 struct hpre_sqe *msg = &hpre_req->req; in hpre_ecdh_src_data_init()
1525 struct hpre_ctx *ctx = hpre_req->ctx; in hpre_ecdh_src_data_init()
1526 struct device *dev = ctx->dev; in hpre_ecdh_src_data_init()
1533 shift = ctx->key_sz - (len >> 1); in hpre_ecdh_src_data_init()
1535 return -EINVAL; in hpre_ecdh_src_data_init()
1537 ptr = dma_alloc_coherent(dev, ctx->key_sz << 2, &dma, GFP_KERNEL); in hpre_ecdh_src_data_init()
1539 return -ENOMEM; in hpre_ecdh_src_data_init()
1541 tmpshift = ctx->key_sz << 1; in hpre_ecdh_src_data_init()
1544 memcpy(ptr + ctx->key_sz + shift, ptr + tmpshift + (len >> 1), len >> 1); in hpre_ecdh_src_data_init()
1546 hpre_req->src = ptr; in hpre_ecdh_src_data_init()
1547 msg->in = cpu_to_le64(dma); in hpre_ecdh_src_data_init()
1554 struct hpre_sqe *msg = &hpre_req->req; in hpre_ecdh_dst_data_init()
1555 struct hpre_ctx *ctx = hpre_req->ctx; in hpre_ecdh_dst_data_init()
1556 struct device *dev = ctx->dev; in hpre_ecdh_dst_data_init()
1559 if (unlikely(!data || !sg_is_last(data) || len != ctx->key_sz << 1)) { in hpre_ecdh_dst_data_init()
1561 return -EINVAL; in hpre_ecdh_dst_data_init()
1564 hpre_req->dst = NULL; in hpre_ecdh_dst_data_init()
1568 return -ENOMEM; in hpre_ecdh_dst_data_init()
1571 msg->out = cpu_to_le64(dma); in hpre_ecdh_dst_data_init()
1579 struct device *dev = ctx->dev; in hpre_ecdh_compute_value()
1582 struct hpre_sqe *msg = &hpre_req->req; in hpre_ecdh_compute_value()
1591 if (req->src) { in hpre_ecdh_compute_value()
1592 ret = hpre_ecdh_src_data_init(hpre_req, req->src, req->src_len); in hpre_ecdh_compute_value()
1598 msg->in = cpu_to_le64(ctx->ecdh.dma_g); in hpre_ecdh_compute_value()
1601 ret = hpre_ecdh_dst_data_init(hpre_req, req->dst, req->dst_len); in hpre_ecdh_compute_value()
1607 msg->dw0 = cpu_to_le32(le32_to_cpu(msg->dw0) | HPRE_ALG_ECC_MUL); in hpre_ecdh_compute_value()
1608 msg->resv1 = ctx->enable_hpcore << HPRE_ENABLE_HPCORE_SHIFT; in hpre_ecdh_compute_value()
1612 return -EINPROGRESS; in hpre_ecdh_compute_value()
1616 hpre_ecdh_hw_data_clr_all(ctx, hpre_req, req->dst, req->src); in hpre_ecdh_compute_value()
1625 return ctx->key_sz << 1; in hpre_ecdh_max_size()
1632 ctx->curve_id = ECC_CURVE_NIST_P192; in hpre_ecdh_nist_p192_init_tfm()
1643 ctx->curve_id = ECC_CURVE_NIST_P256; in hpre_ecdh_nist_p256_init_tfm()
1644 ctx->enable_hpcore = 1; in hpre_ecdh_nist_p256_init_tfm()
1655 ctx->curve_id = ECC_CURVE_NIST_P384; in hpre_ecdh_nist_p384_init_tfm()
1681 .cra_driver_name = "hpre-rsa",
1697 .cra_driver_name = "hpre-dh",
1713 .cra_name = "ecdh-nist-p192",
1714 .cra_driver_name = "hpre-ecdh-nist-p192",
1727 .cra_name = "ecdh-nist-p256",
1728 .cra_driver_name = "hpre-ecdh-nist-p256",
1741 .cra_name = "ecdh-nist-p384",
1742 .cra_driver_name = "hpre-ecdh-nist-p384",
1758 dev_err(&qm->pdev->dev, "failed to register rsa (%d)!\n", ret); in hpre_register_rsa()
1780 dev_err(&qm->pdev->dev, "failed to register dh (%d)!\n", ret); in hpre_register_dh()
1803 dev_err(&qm->pdev->dev, "failed to register %s (%d)!\n", in hpre_register_ecdh()
1812 for (--i; i >= 0; --i) in hpre_register_ecdh()
1825 for (i = ARRAY_SIZE(ecdh_curves) - 1; i >= 0; --i) in hpre_unregister_ecdh()
1868 if (--hpre_available_devs) in hpre_algs_unregister()