Lines matching "sha-512" in drivers/crypto/ccp/ccp-ops.c
1 // SPDX-License-Identifier: GPL-2.0-only
5 * Copyright (C) 2013-2019 Advanced Micro Devices, Inc.
11 #include <linux/dma-mapping.h>
19 #include "ccp-dev.h"
21 /* SHA initial context values */
56 #define CCP_NEW_JOBID(ccp) ((ccp->vdata->version == CCP_VERSION(3, 0)) ? \
61 return atomic_inc_return(&ccp->current_id) & CCP_JOBID_MASK; in ccp_gen_jobid()
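A minimal standalone sketch of the job-ID scheme above, using C11 atomics in place of the kernel's atomic_t; the mask width is an assumption standing in for the driver's CCP_JOBID_MASK:

	#include <stdatomic.h>

	#define JOBID_MASK 0x3f	/* assumed width; the driver uses CCP_JOBID_MASK */

	static atomic_uint current_id;

	/* Monotonic counter wrapped into the job-ID field, as in
	 * ccp_gen_jobid(); +1 because atomic_inc_return() yields the
	 * incremented value while atomic_fetch_add() yields the old one.
	 */
	static unsigned int gen_jobid(void)
	{
		return (atomic_fetch_add(&current_id, 1) + 1) & JOBID_MASK;
	}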
66 if (wa->dma_count) in ccp_sg_free()
67 dma_unmap_sg(wa->dma_dev, wa->dma_sg_head, wa->nents, wa->dma_dir); in ccp_sg_free()
69 wa->dma_count = 0; in ccp_sg_free()
78 wa->sg = sg; in ccp_init_sg_workarea()
82 wa->nents = sg_nents_for_len(sg, len); in ccp_init_sg_workarea()
83 if (wa->nents < 0) in ccp_init_sg_workarea()
84 return wa->nents; in ccp_init_sg_workarea()
86 wa->bytes_left = len; in ccp_init_sg_workarea()
87 wa->sg_used = 0; in ccp_init_sg_workarea()
95 wa->dma_sg = sg; in ccp_init_sg_workarea()
96 wa->dma_sg_head = sg; in ccp_init_sg_workarea()
97 wa->dma_dev = dev; in ccp_init_sg_workarea()
98 wa->dma_dir = dma_dir; in ccp_init_sg_workarea()
99 wa->dma_count = dma_map_sg(dev, sg, wa->nents, dma_dir); in ccp_init_sg_workarea()
100 if (!wa->dma_count) in ccp_init_sg_workarea()
101 return -ENOMEM; in ccp_init_sg_workarea()
108 unsigned int nbytes = min_t(u64, len, wa->bytes_left); in ccp_update_sg_workarea()
111 if (!wa->sg) in ccp_update_sg_workarea()
114 wa->sg_used += nbytes; in ccp_update_sg_workarea()
115 wa->bytes_left -= nbytes; in ccp_update_sg_workarea()
116 if (wa->sg_used == sg_dma_len(wa->dma_sg)) { in ccp_update_sg_workarea()
118 wa->dma_sg = sg_next(wa->dma_sg); in ccp_update_sg_workarea()
121 * that have been merged, the non-DMA mapped scatterlist in ccp_update_sg_workarea()
123 * This ensures that the current non-DMA mapped entry in ccp_update_sg_workarea()
127 sg_combined_len += wa->sg->length; in ccp_update_sg_workarea()
128 wa->sg = sg_next(wa->sg); in ccp_update_sg_workarea()
129 } while (wa->sg_used > sg_combined_len); in ccp_update_sg_workarea()
131 wa->sg_used = 0; in ccp_update_sg_workarea()
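The merged-entry bookkeeping above can be pictured with a plain linked list; a userspace sketch with hypothetical types (not the kernel scatterlist API) mirroring the do/while loop:

	struct seg { unsigned int len; struct seg *next; };

	/* Advance the fine-grained list until it covers the bytes consumed
	 * from one (possibly merged) DMA-mapped entry, so the current
	 * non-mapped entry again corresponds to the current position.
	 */
	static struct seg *advance_segs(struct seg *s, unsigned int used)
	{
		unsigned int combined = 0;

		do {
			combined += s->len;
			s = s->next;
		} while (used > combined);

		return s;
	}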
137 if (wa->length <= CCP_DMAPOOL_MAX_SIZE) { in ccp_dm_free()
138 if (wa->address) in ccp_dm_free()
139 dma_pool_free(wa->dma_pool, wa->address, in ccp_dm_free()
140 wa->dma.address); in ccp_dm_free()
142 if (wa->dma.address) in ccp_dm_free()
143 dma_unmap_single(wa->dev, wa->dma.address, wa->length, in ccp_dm_free()
144 wa->dma.dir); in ccp_dm_free()
145 kfree(wa->address); in ccp_dm_free()
148 wa->address = NULL; in ccp_dm_free()
149 wa->dma.address = 0; in ccp_dm_free()
162 wa->dev = cmd_q->ccp->dev; in ccp_init_dm_workarea()
163 wa->length = len; in ccp_init_dm_workarea()
166 wa->dma_pool = cmd_q->dma_pool; in ccp_init_dm_workarea()
168 wa->address = dma_pool_zalloc(wa->dma_pool, GFP_KERNEL, in ccp_init_dm_workarea()
169 &wa->dma.address); in ccp_init_dm_workarea()
170 if (!wa->address) in ccp_init_dm_workarea()
171 return -ENOMEM; in ccp_init_dm_workarea()
173 wa->dma.length = CCP_DMAPOOL_MAX_SIZE; in ccp_init_dm_workarea()
176 wa->address = kzalloc(len, GFP_KERNEL); in ccp_init_dm_workarea()
177 if (!wa->address) in ccp_init_dm_workarea()
178 return -ENOMEM; in ccp_init_dm_workarea()
180 wa->dma.address = dma_map_single(wa->dev, wa->address, len, in ccp_init_dm_workarea()
182 if (dma_mapping_error(wa->dev, wa->dma.address)) { in ccp_init_dm_workarea()
183 kfree(wa->address); in ccp_init_dm_workarea()
184 wa->address = NULL; in ccp_init_dm_workarea()
185 return -ENOMEM; in ccp_init_dm_workarea()
188 wa->dma.length = len; in ccp_init_dm_workarea()
190 wa->dma.dir = dir; in ccp_init_dm_workarea()
199 WARN_ON(!wa->address); in ccp_set_dm_area()
201 if (len > (wa->length - wa_offset)) in ccp_set_dm_area()
202 return -EINVAL; in ccp_set_dm_area()
204 scatterwalk_map_and_copy(wa->address + wa_offset, sg, sg_offset, len, in ccp_set_dm_area()
213 WARN_ON(!wa->address); in ccp_get_dm_area()
215 scatterwalk_map_and_copy(wa->address + wa_offset, sg, sg_offset, len, in ccp_get_dm_area()
232 p = wa->address + wa_offset; in ccp_reverse_set_dm_area()
233 q = p + len - 1; in ccp_reverse_set_dm_area()
239 q--; in ccp_reverse_set_dm_area()
252 p = wa->address + wa_offset; in ccp_reverse_get_dm_area()
253 q = p + len - 1; in ccp_reverse_get_dm_area()
259 q--; in ccp_reverse_get_dm_area()
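Both reverse helpers above use the same head/tail pointer walk; a minimal in-place sketch:

	/* Swap bytes from both ends inward, as ccp_reverse_*_dm_area() do to
	 * convert between the caller's big-endian numbers and the CCP's
	 * little-endian operand layout.
	 */
	static void reverse_bytes(unsigned char *p, unsigned int len)
	{
		unsigned char *q = p + len - 1;

		while (p < q) {
			unsigned char t = *p;

			*p++ = *q;
			*q-- = t;
		}
	}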
267 ccp_dm_free(&data->dm_wa); in ccp_free_data()
268 ccp_sg_free(&data->sg_wa); in ccp_free_data()
280 ret = ccp_init_sg_workarea(&data->sg_wa, cmd_q->ccp->dev, sg, sg_len, in ccp_init_data()
285 ret = ccp_init_dm_workarea(&data->dm_wa, cmd_q, dm_len, dir); in ccp_init_data()
299 struct ccp_sg_workarea *sg_wa = &data->sg_wa; in ccp_queue_buf()
300 struct ccp_dm_workarea *dm_wa = &data->dm_wa; in ccp_queue_buf()
305 memset(dm_wa->address, 0, dm_wa->length); in ccp_queue_buf()
307 if (!sg_wa->sg) in ccp_queue_buf()
311 * nbytes will always be <= UINT_MAX because dm_wa->length is in ccp_queue_buf()
314 nbytes = min_t(u64, sg_wa->bytes_left, dm_wa->length); in ccp_queue_buf()
315 scatterwalk_map_and_copy(dm_wa->address, sg_wa->sg, sg_wa->sg_used, in ccp_queue_buf()
320 while (sg_wa->bytes_left && (buf_count < dm_wa->length)) { in ccp_queue_buf()
321 nbytes = min(sg_dma_len(sg_wa->dma_sg) - sg_wa->sg_used, in ccp_queue_buf()
322 dm_wa->length - buf_count); in ccp_queue_buf()
323 nbytes = min_t(u64, sg_wa->bytes_left, nbytes); in ccp_queue_buf()
353 sg_src_len = sg_dma_len(src->sg_wa.dma_sg) - src->sg_wa.sg_used; in ccp_prepare_data()
354 sg_src_len = min_t(u64, src->sg_wa.bytes_left, sg_src_len); in ccp_prepare_data()
357 sg_dst_len = sg_dma_len(dst->sg_wa.dma_sg) - dst->sg_wa.sg_used; in ccp_prepare_data()
358 sg_dst_len = min_t(u64, src->sg_wa.bytes_left, sg_dst_len); in ccp_prepare_data()
371 op->soc = 0; in ccp_prepare_data()
379 op->soc = 1; in ccp_prepare_data()
380 op->src.u.dma.address = src->dm_wa.dma.address; in ccp_prepare_data()
381 op->src.u.dma.offset = 0; in ccp_prepare_data()
382 op->src.u.dma.length = (blocksize_op) ? block_size : cp_len; in ccp_prepare_data()
387 op->src.u.dma.address = sg_dma_address(src->sg_wa.dma_sg); in ccp_prepare_data()
388 op->src.u.dma.offset = src->sg_wa.sg_used; in ccp_prepare_data()
389 op->src.u.dma.length = op_len & ~(block_size - 1); in ccp_prepare_data()
391 ccp_update_sg_workarea(&src->sg_wa, op->src.u.dma.length); in ccp_prepare_data()
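Trimming op_len to whole blocks above relies on block_size being a power of two; a sketch of the idiom:

	/* Round a length down to a whole number of blocks; block_size must
	 * be a power of two (e.g. 100 bytes with 16-byte blocks -> 96).
	 */
	static unsigned int round_down_blocks(unsigned int len,
					      unsigned int block_size)
	{
		return len & ~(block_size - 1);
	}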
400 op->soc = 1; in ccp_prepare_data()
401 op->dst.u.dma.address = dst->dm_wa.dma.address; in ccp_prepare_data()
402 op->dst.u.dma.offset = 0; in ccp_prepare_data()
403 op->dst.u.dma.length = op->src.u.dma.length; in ccp_prepare_data()
408 op->dst.u.dma.address = sg_dma_address(dst->sg_wa.dma_sg); in ccp_prepare_data()
409 op->dst.u.dma.offset = dst->sg_wa.sg_used; in ccp_prepare_data()
410 op->dst.u.dma.length = op->src.u.dma.length; in ccp_prepare_data()
418 op->init = 0; in ccp_process_data()
421 if (op->dst.u.dma.address == dst->dm_wa.dma.address) in ccp_process_data()
424 ccp_update_sg_workarea(&dst->sg_wa, in ccp_process_data()
425 op->dst.u.dma.length); in ccp_process_data()
446 op.dst.u.dma.address = wa->dma.address; in ccp_copy_to_from_sb()
447 op.dst.u.dma.length = wa->length; in ccp_copy_to_from_sb()
450 op.src.u.dma.address = wa->dma.address; in ccp_copy_to_from_sb()
451 op.src.u.dma.length = wa->length; in ccp_copy_to_from_sb()
458 return cmd_q->ccp->vdata->perform->passthru(&op); in ccp_copy_to_from_sb()
478 struct ccp_aes_engine *aes = &cmd->u.aes; in ccp_run_aes_cmac_cmd()
485 if (!((aes->key_len == AES_KEYSIZE_128) || in ccp_run_aes_cmac_cmd()
486 (aes->key_len == AES_KEYSIZE_192) || in ccp_run_aes_cmac_cmd()
487 (aes->key_len == AES_KEYSIZE_256))) in ccp_run_aes_cmac_cmd()
488 return -EINVAL; in ccp_run_aes_cmac_cmd()
490 if (aes->src_len & (AES_BLOCK_SIZE - 1)) in ccp_run_aes_cmac_cmd()
491 return -EINVAL; in ccp_run_aes_cmac_cmd()
493 if (aes->iv_len != AES_BLOCK_SIZE) in ccp_run_aes_cmac_cmd()
494 return -EINVAL; in ccp_run_aes_cmac_cmd()
496 if (!aes->key || !aes->iv || !aes->src) in ccp_run_aes_cmac_cmd()
497 return -EINVAL; in ccp_run_aes_cmac_cmd()
499 if (aes->cmac_final) { in ccp_run_aes_cmac_cmd()
500 if (aes->cmac_key_len != AES_BLOCK_SIZE) in ccp_run_aes_cmac_cmd()
501 return -EINVAL; in ccp_run_aes_cmac_cmd()
503 if (!aes->cmac_key) in ccp_run_aes_cmac_cmd()
504 return -EINVAL; in ccp_run_aes_cmac_cmd()
510 ret = -EIO; in ccp_run_aes_cmac_cmd()
513 op.jobid = CCP_NEW_JOBID(cmd_q->ccp); in ccp_run_aes_cmac_cmd()
514 op.sb_key = cmd_q->sb_key; in ccp_run_aes_cmac_cmd()
515 op.sb_ctx = cmd_q->sb_ctx; in ccp_run_aes_cmac_cmd()
517 op.u.aes.type = aes->type; in ccp_run_aes_cmac_cmd()
518 op.u.aes.mode = aes->mode; in ccp_run_aes_cmac_cmd()
519 op.u.aes.action = aes->action; in ccp_run_aes_cmac_cmd()
521 /* All supported key sizes fit in a single (32-byte) SB entry in ccp_run_aes_cmac_cmd()
522 * and must be in little endian format. Use the 256-bit byte in ccp_run_aes_cmac_cmd()
532 dm_offset = CCP_SB_BYTES - aes->key_len; in ccp_run_aes_cmac_cmd()
533 ret = ccp_set_dm_area(&key, dm_offset, aes->key, 0, aes->key_len); in ccp_run_aes_cmac_cmd()
539 cmd->engine_error = cmd_q->cmd_error; in ccp_run_aes_cmac_cmd()
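The right-aligned key placement computed above (dm_offset = CCP_SB_BYTES - key_len) can be shown concretely; a sketch assuming the 32-byte slot size:

	#include <string.h>

	#define SB_BYTES 32	/* CCP_SB_BYTES */

	/* Right-align a big-endian key in a 32-byte store block; after the
	 * engine's 256-bit byte swap it ends up little endian, as the
	 * comment above describes. Leading bytes stay zero for short keys.
	 */
	static void load_key_slot(unsigned char slot[SB_BYTES],
				  const unsigned char *key, unsigned int key_len)
	{
		memset(slot, 0, SB_BYTES);
		memcpy(slot + (SB_BYTES - key_len), key, key_len);
	}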
543 /* The AES context fits in a single (32-byte) SB entry and in ccp_run_aes_cmac_cmd()
544 * must be in little endian format. Use the 256-bit byte swap in ccp_run_aes_cmac_cmd()
553 dm_offset = CCP_SB_BYTES - AES_BLOCK_SIZE; in ccp_run_aes_cmac_cmd()
554 ret = ccp_set_dm_area(&ctx, dm_offset, aes->iv, 0, aes->iv_len); in ccp_run_aes_cmac_cmd()
560 cmd->engine_error = cmd_q->cmd_error; in ccp_run_aes_cmac_cmd()
565 ret = ccp_init_data(&src, cmd_q, aes->src, aes->src_len, in ccp_run_aes_cmac_cmd()
572 if (aes->cmac_final && !src.sg_wa.bytes_left) { in ccp_run_aes_cmac_cmd()
580 cmd->engine_error = cmd_q->cmd_error; in ccp_run_aes_cmac_cmd()
584 ret = ccp_set_dm_area(&ctx, 0, aes->cmac_key, 0, in ccp_run_aes_cmac_cmd()
585 aes->cmac_key_len); in ccp_run_aes_cmac_cmd()
591 cmd->engine_error = cmd_q->cmd_error; in ccp_run_aes_cmac_cmd()
596 ret = cmd_q->ccp->vdata->perform->aes(&op); in ccp_run_aes_cmac_cmd()
598 cmd->engine_error = cmd_q->cmd_error; in ccp_run_aes_cmac_cmd()
605 /* Retrieve the AES context - convert from LE to BE using in ccp_run_aes_cmac_cmd()
606 * 32-byte (256-bit) byteswapping in ccp_run_aes_cmac_cmd()
611 cmd->engine_error = cmd_q->cmd_error; in ccp_run_aes_cmac_cmd()
616 dm_offset = CCP_SB_BYTES - AES_BLOCK_SIZE; in ccp_run_aes_cmac_cmd()
617 ccp_get_dm_area(&ctx, dm_offset, aes->iv, 0, aes->iv_len); in ccp_run_aes_cmac_cmd()
634 struct ccp_aes_engine *aes = &cmd->u.aes; in ccp_run_aes_gcm_cmd()
652 if (!aes->iv) in ccp_run_aes_gcm_cmd()
653 return -EINVAL; in ccp_run_aes_gcm_cmd()
655 if (!((aes->key_len == AES_KEYSIZE_128) || in ccp_run_aes_gcm_cmd()
656 (aes->key_len == AES_KEYSIZE_192) || in ccp_run_aes_gcm_cmd()
657 (aes->key_len == AES_KEYSIZE_256))) in ccp_run_aes_gcm_cmd()
658 return -EINVAL; in ccp_run_aes_gcm_cmd()
660 if (!aes->key) /* Gotta have a key SGL */ in ccp_run_aes_gcm_cmd()
661 return -EINVAL; in ccp_run_aes_gcm_cmd()
664 authsize = aes->authsize ? aes->authsize : AES_BLOCK_SIZE; in ccp_run_aes_gcm_cmd()
675 return -EINVAL; in ccp_run_aes_gcm_cmd()
684 p_aad = aes->src; in ccp_run_aes_gcm_cmd()
685 p_inp = scatterwalk_ffwd(sg_inp, aes->src, aes->aad_len); in ccp_run_aes_gcm_cmd()
686 p_outp = scatterwalk_ffwd(sg_outp, aes->dst, aes->aad_len); in ccp_run_aes_gcm_cmd()
687 if (aes->action == CCP_AES_ACTION_ENCRYPT) { in ccp_run_aes_gcm_cmd()
688 ilen = aes->src_len; in ccp_run_aes_gcm_cmd()
692 ilen = aes->src_len - authsize; in ccp_run_aes_gcm_cmd()
696 jobid = CCP_NEW_JOBID(cmd_q->ccp); in ccp_run_aes_gcm_cmd()
701 op.sb_key = cmd_q->sb_key; /* Pre-allocated */ in ccp_run_aes_gcm_cmd()
702 op.sb_ctx = cmd_q->sb_ctx; /* Pre-allocated */ in ccp_run_aes_gcm_cmd()
704 op.u.aes.type = aes->type; in ccp_run_aes_gcm_cmd()
713 dm_offset = CCP_SB_BYTES - aes->key_len; in ccp_run_aes_gcm_cmd()
714 ret = ccp_set_dm_area(&key, dm_offset, aes->key, 0, aes->key_len); in ccp_run_aes_gcm_cmd()
720 cmd->engine_error = cmd_q->cmd_error; in ccp_run_aes_gcm_cmd()
734 dm_offset = CCP_AES_CTX_SB_COUNT * CCP_SB_BYTES - aes->iv_len; in ccp_run_aes_gcm_cmd()
735 ret = ccp_set_dm_area(&ctx, dm_offset, aes->iv, 0, aes->iv_len); in ccp_run_aes_gcm_cmd()
742 cmd->engine_error = cmd_q->cmd_error; in ccp_run_aes_gcm_cmd()
747 if (aes->aad_len > 0) { in ccp_run_aes_gcm_cmd()
749 ret = ccp_init_data(&aad, cmd_q, p_aad, aes->aad_len, in ccp_run_aes_gcm_cmd()
761 ret = cmd_q->ccp->vdata->perform->aes(&op); in ccp_run_aes_gcm_cmd()
763 cmd->engine_error = cmd_q->cmd_error; in ccp_run_aes_gcm_cmd()
773 op.u.aes.action = aes->action; in ccp_run_aes_gcm_cmd()
805 op.u.aes.size = (nbytes * 8) - 1; in ccp_run_aes_gcm_cmd()
809 ret = cmd_q->ccp->vdata->perform->aes(&op); in ccp_run_aes_gcm_cmd()
811 cmd->engine_error = cmd_q->cmd_error; in ccp_run_aes_gcm_cmd()
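The size field set above follows the hardware's length-minus-one convention in bits; a sketch with a worked value:

	/* CCP AES size field: bit count of the data minus one,
	 * e.g. a full 16-byte block encodes as 127.
	 */
	static unsigned int aes_size_field(unsigned int nbytes)
	{
		return nbytes * 8 - 1;
	}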
824 cmd->engine_error = cmd_q->cmd_error; in ccp_run_aes_gcm_cmd()
828 ret = ccp_set_dm_area(&ctx, dm_offset, aes->iv, 0, aes->iv_len); in ccp_run_aes_gcm_cmd()
835 cmd->engine_error = cmd_q->cmd_error; in ccp_run_aes_gcm_cmd()
847 final[0] = cpu_to_be64(aes->aad_len * 8); in ccp_run_aes_gcm_cmd()
853 op.sb_key = cmd_q->sb_key; /* Pre-allocated */ in ccp_run_aes_gcm_cmd()
854 op.sb_ctx = cmd_q->sb_ctx; /* Pre-allocated */ in ccp_run_aes_gcm_cmd()
856 op.u.aes.type = aes->type; in ccp_run_aes_gcm_cmd()
867 ret = cmd_q->ccp->vdata->perform->aes(&op); in ccp_run_aes_gcm_cmd()
871 if (aes->action == CCP_AES_ACTION_ENCRYPT) { in ccp_run_aes_gcm_cmd()
887 authsize) ? -EBADMSG : 0; in ccp_run_aes_gcm_cmd()
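On decrypt the computed tag is compared against the one supplied with the ciphertext, and a mismatch surfaces as -EBADMSG; the kernel uses a constant-time compare (crypto_memneq()) for this. A minimal sketch of that idea:

	/* Accumulate differences instead of returning at the first mismatch,
	 * so comparison time does not leak how many leading bytes matched.
	 */
	static int tag_differs(const unsigned char *a, const unsigned char *b,
			       unsigned int len)
	{
		unsigned char acc = 0;

		while (len--)
			acc |= *a++ ^ *b++;

		return acc != 0;
	}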
903 if (aes->aad_len) in ccp_run_aes_gcm_cmd()
918 struct ccp_aes_engine *aes = &cmd->u.aes; in ccp_run_aes_cmd()
926 if (!((aes->key_len == AES_KEYSIZE_128) || in ccp_run_aes_cmd()
927 (aes->key_len == AES_KEYSIZE_192) || in ccp_run_aes_cmd()
928 (aes->key_len == AES_KEYSIZE_256))) in ccp_run_aes_cmd()
929 return -EINVAL; in ccp_run_aes_cmd()
931 if (((aes->mode == CCP_AES_MODE_ECB) || in ccp_run_aes_cmd()
932 (aes->mode == CCP_AES_MODE_CBC)) && in ccp_run_aes_cmd()
933 (aes->src_len & (AES_BLOCK_SIZE - 1))) in ccp_run_aes_cmd()
934 return -EINVAL; in ccp_run_aes_cmd()
936 if (!aes->key || !aes->src || !aes->dst) in ccp_run_aes_cmd()
937 return -EINVAL; in ccp_run_aes_cmd()
939 if (aes->mode != CCP_AES_MODE_ECB) { in ccp_run_aes_cmd()
940 if (aes->iv_len != AES_BLOCK_SIZE) in ccp_run_aes_cmd()
941 return -EINVAL; in ccp_run_aes_cmd()
943 if (!aes->iv) in ccp_run_aes_cmd()
944 return -EINVAL; in ccp_run_aes_cmd()
950 ret = -EIO; in ccp_run_aes_cmd()
953 op.jobid = CCP_NEW_JOBID(cmd_q->ccp); in ccp_run_aes_cmd()
954 op.sb_key = cmd_q->sb_key; in ccp_run_aes_cmd()
955 op.sb_ctx = cmd_q->sb_ctx; in ccp_run_aes_cmd()
956 op.init = (aes->mode == CCP_AES_MODE_ECB) ? 0 : 1; in ccp_run_aes_cmd()
957 op.u.aes.type = aes->type; in ccp_run_aes_cmd()
958 op.u.aes.mode = aes->mode; in ccp_run_aes_cmd()
959 op.u.aes.action = aes->action; in ccp_run_aes_cmd()
961 /* All supported key sizes fit in a single (32-byte) SB entry in ccp_run_aes_cmd()
962 * and must be in little endian format. Use the 256-bit byte in ccp_run_aes_cmd()
972 dm_offset = CCP_SB_BYTES - aes->key_len; in ccp_run_aes_cmd()
973 ret = ccp_set_dm_area(&key, dm_offset, aes->key, 0, aes->key_len); in ccp_run_aes_cmd()
979 cmd->engine_error = cmd_q->cmd_error; in ccp_run_aes_cmd()
983 /* The AES context fits in a single (32-byte) SB entry and in ccp_run_aes_cmd()
984 * must be in little endian format. Use the 256-bit byte swap in ccp_run_aes_cmd()
993 if (aes->mode != CCP_AES_MODE_ECB) { in ccp_run_aes_cmd()
994 /* Load the AES context - convert to LE */ in ccp_run_aes_cmd()
995 dm_offset = CCP_SB_BYTES - AES_BLOCK_SIZE; in ccp_run_aes_cmd()
996 ret = ccp_set_dm_area(&ctx, dm_offset, aes->iv, 0, aes->iv_len); in ccp_run_aes_cmd()
1002 cmd->engine_error = cmd_q->cmd_error; in ccp_run_aes_cmd()
1006 switch (aes->mode) { in ccp_run_aes_cmd()
1009 op.u.aes.size = AES_BLOCK_SIZE * BITS_PER_BYTE - 1; in ccp_run_aes_cmd()
1015 /* Prepare the input and output data workareas. For in-place in ccp_run_aes_cmd()
1019 if (sg_virt(aes->src) == sg_virt(aes->dst)) in ccp_run_aes_cmd()
1022 ret = ccp_init_data(&src, cmd_q, aes->src, aes->src_len, in ccp_run_aes_cmd()
1031 ret = ccp_init_data(&dst, cmd_q, aes->dst, aes->src_len, in ccp_run_aes_cmd()
1047 if (aes->mode == CCP_AES_MODE_ECB) in ccp_run_aes_cmd()
1051 ret = cmd_q->ccp->vdata->perform->aes(&op); in ccp_run_aes_cmd()
1053 cmd->engine_error = cmd_q->cmd_error; in ccp_run_aes_cmd()
1060 if (aes->mode != CCP_AES_MODE_ECB) { in ccp_run_aes_cmd()
1061 /* Retrieve the AES context - convert from LE to BE using in ccp_run_aes_cmd()
1062 * 32-byte (256-bit) byteswapping in ccp_run_aes_cmd()
1067 cmd->engine_error = cmd_q->cmd_error; in ccp_run_aes_cmd()
1072 dm_offset = CCP_SB_BYTES - AES_BLOCK_SIZE; in ccp_run_aes_cmd()
1073 ccp_get_dm_area(&ctx, dm_offset, aes->iv, 0, aes->iv_len); in ccp_run_aes_cmd()
1095 struct ccp_xts_aes_engine *xts = &cmd->u.xts; in ccp_run_xts_aes_cmd()
1105 switch (xts->unit_size) { in ccp_run_xts_aes_cmd()
1110 unit_size = 512; in ccp_run_xts_aes_cmd()
1123 return -EINVAL; in ccp_run_xts_aes_cmd()
1126 if (xts->key_len == AES_KEYSIZE_128) in ccp_run_xts_aes_cmd()
1128 else if (xts->key_len == AES_KEYSIZE_256) in ccp_run_xts_aes_cmd()
1131 return -EINVAL; in ccp_run_xts_aes_cmd()
1133 if (!xts->final && (xts->src_len & (AES_BLOCK_SIZE - 1))) in ccp_run_xts_aes_cmd()
1134 return -EINVAL; in ccp_run_xts_aes_cmd()
1136 if (xts->iv_len != AES_BLOCK_SIZE) in ccp_run_xts_aes_cmd()
1137 return -EINVAL; in ccp_run_xts_aes_cmd()
1139 if (!xts->key || !xts->iv || !xts->src || !xts->dst) in ccp_run_xts_aes_cmd()
1140 return -EINVAL; in ccp_run_xts_aes_cmd()
1145 ret = -EIO; in ccp_run_xts_aes_cmd()
1148 op.jobid = CCP_NEW_JOBID(cmd_q->ccp); in ccp_run_xts_aes_cmd()
1149 op.sb_key = cmd_q->sb_key; in ccp_run_xts_aes_cmd()
1150 op.sb_ctx = cmd_q->sb_ctx; in ccp_run_xts_aes_cmd()
1153 op.u.xts.action = xts->action; in ccp_run_xts_aes_cmd()
1154 op.u.xts.unit_size = xts->unit_size; in ccp_run_xts_aes_cmd()
1156 /* A version 3 device only supports 128-bit keys, which fit into a in ccp_run_xts_aes_cmd()
1157 * single SB entry. A version 5 device uses a 512-bit vector, so two in ccp_run_xts_aes_cmd()
1160 if (cmd_q->ccp->vdata->version == CCP_VERSION(3, 0)) in ccp_run_xts_aes_cmd()
1170 if (cmd_q->ccp->vdata->version == CCP_VERSION(3, 0)) { in ccp_run_xts_aes_cmd()
1172 * Use the 256-bit byte swap passthru option to convert from in ccp_run_xts_aes_cmd()
1175 dm_offset = CCP_SB_BYTES - AES_KEYSIZE_128; in ccp_run_xts_aes_cmd()
1176 ret = ccp_set_dm_area(&key, dm_offset, xts->key, 0, xts->key_len); in ccp_run_xts_aes_cmd()
1179 ret = ccp_set_dm_area(&key, 0, xts->key, xts->key_len, xts->key_len); in ccp_run_xts_aes_cmd()
1183 /* Version 5 CCPs use a 512-bit space for the key: each portion in ccp_run_xts_aes_cmd()
1184 * occupies 256 bits, or one entire slot, and is zero-padded. in ccp_run_xts_aes_cmd()
1189 pad = dm_offset - xts->key_len; in ccp_run_xts_aes_cmd()
1190 ret = ccp_set_dm_area(&key, pad, xts->key, 0, xts->key_len); in ccp_run_xts_aes_cmd()
1193 ret = ccp_set_dm_area(&key, dm_offset + pad, xts->key, in ccp_run_xts_aes_cmd()
1194 xts->key_len, xts->key_len); in ccp_run_xts_aes_cmd()
1201 cmd->engine_error = cmd_q->cmd_error; in ccp_run_xts_aes_cmd()
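A sketch of the two-slot version 5 key vector described above, assuming 32-byte slots; key_len in the driver is the length of one XTS key half, and each half is zero-padded into its own 256-bit slot as in the two ccp_set_dm_area() calls:

	#include <string.h>

	#define SLOT 32	/* one 256-bit SB slot */

	static void load_xts_key_v5(unsigned char vec[2 * SLOT],
				    const unsigned char *key,
				    unsigned int half_len)
	{
		unsigned int pad = SLOT - half_len;

		memset(vec, 0, 2 * SLOT);
		memcpy(vec + pad, key, half_len);		/* first half */
		memcpy(vec + SLOT + pad, key + half_len,	/* second half */
		       half_len);
	}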
1205 /* The AES context fits in a single (32-byte) SB entry and in ccp_run_xts_aes_cmd()
1215 ret = ccp_set_dm_area(&ctx, 0, xts->iv, 0, xts->iv_len); in ccp_run_xts_aes_cmd()
1221 cmd->engine_error = cmd_q->cmd_error; in ccp_run_xts_aes_cmd()
1225 /* Prepare the input and output data workareas. For in-place in ccp_run_xts_aes_cmd()
1229 if (sg_virt(xts->src) == sg_virt(xts->dst)) in ccp_run_xts_aes_cmd()
1232 ret = ccp_init_data(&src, cmd_q, xts->src, xts->src_len, in ccp_run_xts_aes_cmd()
1241 ret = ccp_init_data(&dst, cmd_q, xts->dst, xts->src_len, in ccp_run_xts_aes_cmd()
1253 ret = cmd_q->ccp->vdata->perform->xts_aes(&op); in ccp_run_xts_aes_cmd()
1255 cmd->engine_error = cmd_q->cmd_error; in ccp_run_xts_aes_cmd()
1262 /* Retrieve the AES context - convert from LE to BE using in ccp_run_xts_aes_cmd()
1263 * 32-byte (256-bit) byteswapping in ccp_run_xts_aes_cmd()
1268 cmd->engine_error = cmd_q->cmd_error; in ccp_run_xts_aes_cmd()
1273 dm_offset = CCP_SB_BYTES - AES_BLOCK_SIZE; in ccp_run_xts_aes_cmd()
1274 ccp_get_dm_area(&ctx, dm_offset, xts->iv, 0, xts->iv_len); in ccp_run_xts_aes_cmd()
1295 struct ccp_des3_engine *des3 = &cmd->u.des3; in ccp_run_des3_cmd()
1306 if (cmd_q->ccp->vdata->version < CCP_VERSION(5, 0)) in ccp_run_des3_cmd()
1307 return -EINVAL; in ccp_run_des3_cmd()
1309 if (!cmd_q->ccp->vdata->perform->des3) in ccp_run_des3_cmd()
1310 return -EINVAL; in ccp_run_des3_cmd()
1312 if (des3->key_len != DES3_EDE_KEY_SIZE) in ccp_run_des3_cmd()
1313 return -EINVAL; in ccp_run_des3_cmd()
1315 if (((des3->mode == CCP_DES3_MODE_ECB) || in ccp_run_des3_cmd()
1316 (des3->mode == CCP_DES3_MODE_CBC)) && in ccp_run_des3_cmd()
1317 (des3->src_len & (DES3_EDE_BLOCK_SIZE - 1))) in ccp_run_des3_cmd()
1318 return -EINVAL; in ccp_run_des3_cmd()
1320 if (!des3->key || !des3->src || !des3->dst) in ccp_run_des3_cmd()
1321 return -EINVAL; in ccp_run_des3_cmd()
1323 if (des3->mode != CCP_DES3_MODE_ECB) { in ccp_run_des3_cmd()
1324 if (des3->iv_len != DES3_EDE_BLOCK_SIZE) in ccp_run_des3_cmd()
1325 return -EINVAL; in ccp_run_des3_cmd()
1327 if (!des3->iv) in ccp_run_des3_cmd()
1328 return -EINVAL; in ccp_run_des3_cmd()
1336 op.jobid = CCP_NEW_JOBID(cmd_q->ccp); in ccp_run_des3_cmd()
1337 op.sb_key = cmd_q->sb_key; in ccp_run_des3_cmd()
1339 op.init = (des3->mode == CCP_DES3_MODE_ECB) ? 0 : 1; in ccp_run_des3_cmd()
1340 op.u.des3.type = des3->type; in ccp_run_des3_cmd()
1341 op.u.des3.mode = des3->mode; in ccp_run_des3_cmd()
1342 op.u.des3.action = des3->action; in ccp_run_des3_cmd()
1345 * All supported key sizes fit in a single (32-byte) KSB entry and in ccp_run_des3_cmd()
1346 * (like AES) must be in little endian format. Use the 256-bit byte in ccp_run_des3_cmd()
1360 dm_offset = CCP_SB_BYTES - des3->key_len; /* Basic offset */ in ccp_run_des3_cmd()
1362 len_singlekey = des3->key_len / 3; in ccp_run_des3_cmd()
1364 des3->key, 0, len_singlekey); in ccp_run_des3_cmd()
1368 des3->key, len_singlekey, len_singlekey); in ccp_run_des3_cmd()
1372 des3->key, 2 * len_singlekey, len_singlekey); in ccp_run_des3_cmd()
1380 cmd->engine_error = cmd_q->cmd_error; in ccp_run_des3_cmd()
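A sketch of the K1/K2/K3 split above; the destination offsets sit on lines elided from this match, so the swapped placement shown here (K1 written at the highest offset, as the driver's spec note describes) is an assumption:

	#include <string.h>

	#define SB_BYTES 32

	static void load_des3_key(unsigned char slot[SB_BYTES],
				  const unsigned char *key, unsigned int key_len)
	{
		unsigned int single = key_len / 3;	/* one DES key */
		unsigned int off = SB_BYTES - key_len;	/* basic offset */

		memset(slot, 0, SB_BYTES);
		/* assumed swapped order: K1 lands at the highest offset */
		memcpy(slot + off + 2 * single, key, single);
		memcpy(slot + off + single, key + single, single);
		memcpy(slot + off, key + 2 * single, single);
	}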
1385 * The DES3 context fits in a single (32-byte) KSB entry and in ccp_run_des3_cmd()
1386 * must be in little endian format. Use the 256-bit byte swap in ccp_run_des3_cmd()
1389 if (des3->mode != CCP_DES3_MODE_ECB) { in ccp_run_des3_cmd()
1390 op.sb_ctx = cmd_q->sb_ctx; in ccp_run_des3_cmd()
1399 dm_offset = CCP_SB_BYTES - des3->iv_len; in ccp_run_des3_cmd()
1400 ret = ccp_set_dm_area(&ctx, dm_offset, des3->iv, 0, in ccp_run_des3_cmd()
1401 des3->iv_len); in ccp_run_des3_cmd()
1408 cmd->engine_error = cmd_q->cmd_error; in ccp_run_des3_cmd()
1414 * Prepare the input and output data workareas. For in-place in ccp_run_des3_cmd()
1418 if (sg_virt(des3->src) == sg_virt(des3->dst)) in ccp_run_des3_cmd()
1421 ret = ccp_init_data(&src, cmd_q, des3->src, des3->src_len, in ccp_run_des3_cmd()
1430 ret = ccp_init_data(&dst, cmd_q, des3->dst, des3->src_len, in ccp_run_des3_cmd()
1449 ret = cmd_q->ccp->vdata->perform->des3(&op); in ccp_run_des3_cmd()
1451 cmd->engine_error = cmd_q->cmd_error; in ccp_run_des3_cmd()
1458 if (des3->mode != CCP_DES3_MODE_ECB) { in ccp_run_des3_cmd()
1463 cmd->engine_error = cmd_q->cmd_error; in ccp_run_des3_cmd()
1468 ccp_get_dm_area(&ctx, dm_offset, des3->iv, 0, in ccp_run_des3_cmd()
1479 if (des3->mode != CCP_DES3_MODE_ECB) in ccp_run_des3_cmd()
1491 struct ccp_sha_engine *sha = &cmd->u.sha; in ccp_run_sha_cmd() local
1503 switch (sha->type) { in ccp_run_sha_cmd()
1505 if (sha->ctx_len < SHA1_DIGEST_SIZE) in ccp_run_sha_cmd()
1506 return -EINVAL; in ccp_run_sha_cmd()
1510 if (sha->ctx_len < SHA224_DIGEST_SIZE) in ccp_run_sha_cmd()
1511 return -EINVAL; in ccp_run_sha_cmd()
1515 if (sha->ctx_len < SHA256_DIGEST_SIZE) in ccp_run_sha_cmd()
1516 return -EINVAL; in ccp_run_sha_cmd()
1520 if (cmd_q->ccp->vdata->version < CCP_VERSION(4, 0) in ccp_run_sha_cmd()
1521 || sha->ctx_len < SHA384_DIGEST_SIZE) in ccp_run_sha_cmd()
1522 return -EINVAL; in ccp_run_sha_cmd()
1526 if (cmd_q->ccp->vdata->version < CCP_VERSION(4, 0) in ccp_run_sha_cmd()
1527 || sha->ctx_len < SHA512_DIGEST_SIZE) in ccp_run_sha_cmd()
1528 return -EINVAL; in ccp_run_sha_cmd()
1532 return -EINVAL; in ccp_run_sha_cmd()
1535 if (!sha->ctx) in ccp_run_sha_cmd()
1536 return -EINVAL; in ccp_run_sha_cmd()
1538 if (!sha->final && (sha->src_len & (block_size - 1))) in ccp_run_sha_cmd()
1539 return -EINVAL; in ccp_run_sha_cmd()
1541 /* The version 3 device can't handle zero-length input */ in ccp_run_sha_cmd()
1542 if (cmd_q->ccp->vdata->version == CCP_VERSION(3, 0)) { in ccp_run_sha_cmd()
1544 if (!sha->src_len) { in ccp_run_sha_cmd()
1549 if (!sha->final) in ccp_run_sha_cmd()
1552 /* CCP can't do a zero-length sha operation, so the in ccp_run_sha_cmd()
1555 if (sha->msg_bits) in ccp_run_sha_cmd()
1556 return -EINVAL; in ccp_run_sha_cmd()
1558 /* The CCP cannot perform zero-length sha operations in ccp_run_sha_cmd()
1560 * final operation. However, a sha operation for a in ccp_run_sha_cmd()
1564 switch (sha->type) { in ccp_run_sha_cmd()
1578 return -EINVAL; in ccp_run_sha_cmd()
1581 scatterwalk_map_and_copy((void *)sha_zero, sha->ctx, 0, in ccp_run_sha_cmd()
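For a zero-length final hash the engine is skipped entirely and a precomputed empty-message digest is copied straight into the caller's context. For reference, a well-known constant of the kind the sha_zero tables hold:

	/* SHA-256 of the empty message */
	static const unsigned char sha256_zero[32] = {
		0xe3, 0xb0, 0xc4, 0x42, 0x98, 0xfc, 0x1c, 0x14,
		0x9a, 0xfb, 0xf4, 0xc8, 0x99, 0x6f, 0xb9, 0x24,
		0x27, 0xae, 0x41, 0xe4, 0x64, 0x9b, 0x93, 0x4c,
		0xa4, 0x95, 0x99, 0x1b, 0x78, 0x52, 0xb8, 0x55,
	};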
1589 switch (sha->type) { in ccp_run_sha_cmd()
1595 if (cmd_q->ccp->vdata->version != CCP_VERSION(3, 0)) in ccp_run_sha_cmd()
1596 ooffset = ioffset = CCP_SB_BYTES - SHA1_DIGEST_SIZE; in ccp_run_sha_cmd()
1606 if (cmd_q->ccp->vdata->version != CCP_VERSION(3, 0)) in ccp_run_sha_cmd()
1607 ooffset = CCP_SB_BYTES - SHA224_DIGEST_SIZE; in ccp_run_sha_cmd()
1624 ooffset = 2 * CCP_SB_BYTES - SHA384_DIGEST_SIZE; in ccp_run_sha_cmd()
1634 ret = -EINVAL; in ccp_run_sha_cmd()
1638 /* For zero-length plaintext the src pointer is ignored; in ccp_run_sha_cmd()
1641 if (sha->src_len && !sha->src) in ccp_run_sha_cmd()
1642 return -EINVAL; in ccp_run_sha_cmd()
1646 op.jobid = CCP_NEW_JOBID(cmd_q->ccp); in ccp_run_sha_cmd()
1647 op.sb_ctx = cmd_q->sb_ctx; /* Pre-allocated */ in ccp_run_sha_cmd()
1648 op.u.sha.type = sha->type; in ccp_run_sha_cmd()
1649 op.u.sha.msg_bits = sha->msg_bits; in ccp_run_sha_cmd()
1651 /* For SHA1/224/256 the context fits in a single (32-byte) SB entry; in ccp_run_sha_cmd()
1652 * SHA384/512 require 2 adjacent SB slots, with the right half in the in ccp_run_sha_cmd()
1654 * be in little endian format: use the 256-bit byte swap option. in ccp_run_sha_cmd()
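The ioffset/ooffset values seen earlier fall out of this slot layout; a sketch of the arithmetic, assuming 32-byte slots (CCP_SB_BYTES equal to LSB_ITEM_SIZE):

	#define SB_BYTES 32	/* assumed slot size */

	/* Right-alignment offset of a digest within its slot group,
	 * e.g. SHA-224: 1*32 - 28 = 4; SHA-384: 2*32 - 48 = 16.
	 */
	static unsigned int digest_offset(unsigned int nslots,
					  unsigned int digest_size)
	{
		return nslots * SB_BYTES - digest_size;
	}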
1660 if (sha->first) { in ccp_run_sha_cmd()
1661 switch (sha->type) { in ccp_run_sha_cmd()
1675 ret = -EINVAL; in ccp_run_sha_cmd()
1680 ret = ccp_set_dm_area(&ctx, 0, sha->ctx, 0, in ccp_run_sha_cmd()
1689 cmd->engine_error = cmd_q->cmd_error; in ccp_run_sha_cmd()
1693 if (sha->src) { in ccp_run_sha_cmd()
1694 /* Send data to the CCP SHA engine; block_size is set above */ in ccp_run_sha_cmd()
1695 ret = ccp_init_data(&src, cmd_q, sha->src, sha->src_len, in ccp_run_sha_cmd()
1702 if (sha->final && !src.sg_wa.bytes_left) in ccp_run_sha_cmd()
1705 ret = cmd_q->ccp->vdata->perform->sha(&op); in ccp_run_sha_cmd()
1707 cmd->engine_error = cmd_q->cmd_error; in ccp_run_sha_cmd()
1715 ret = cmd_q->ccp->vdata->perform->sha(&op); in ccp_run_sha_cmd()
1717 cmd->engine_error = cmd_q->cmd_error; in ccp_run_sha_cmd()
1722 /* Retrieve the SHA context - convert from LE to BE using in ccp_run_sha_cmd()
1723 * 32-byte (256-bit) byteswapping in ccp_run_sha_cmd()
1728 cmd->engine_error = cmd_q->cmd_error; in ccp_run_sha_cmd()
1732 if (sha->final) { in ccp_run_sha_cmd()
1734 switch (sha->type) { in ccp_run_sha_cmd()
1739 sha->ctx, 0, in ccp_run_sha_cmd()
1745 sha->ctx, LSB_ITEM_SIZE - ooffset, in ccp_run_sha_cmd()
1748 sha->ctx, 0, in ccp_run_sha_cmd()
1749 LSB_ITEM_SIZE - ooffset); in ccp_run_sha_cmd()
1752 ret = -EINVAL; in ccp_run_sha_cmd()
1757 ccp_get_dm_area(&ctx, 0, sha->ctx, 0, in ccp_run_sha_cmd()
1761 if (sha->final && sha->opad) { in ccp_run_sha_cmd()
1762 /* HMAC operation, recursively perform final SHA */ in ccp_run_sha_cmd()
1767 if (sha->opad_len != block_size) { in ccp_run_sha_cmd()
1768 ret = -EINVAL; in ccp_run_sha_cmd()
1774 ret = -ENOMEM; in ccp_run_sha_cmd()
1779 scatterwalk_map_and_copy(hmac_buf, sha->opad, 0, block_size, 0); in ccp_run_sha_cmd()
1780 switch (sha->type) { in ccp_run_sha_cmd()
1794 (LSB_ITEM_SIZE - ooffset), in ccp_run_sha_cmd()
1800 ret = -EINVAL; in ccp_run_sha_cmd()
1806 hmac_cmd.u.sha.type = sha->type; in ccp_run_sha_cmd()
1807 hmac_cmd.u.sha.ctx = sha->ctx; in ccp_run_sha_cmd()
1808 hmac_cmd.u.sha.ctx_len = sha->ctx_len; in ccp_run_sha_cmd()
1809 hmac_cmd.u.sha.src = &sg; in ccp_run_sha_cmd()
1810 hmac_cmd.u.sha.src_len = block_size + digest_size; in ccp_run_sha_cmd()
1811 hmac_cmd.u.sha.opad = NULL; in ccp_run_sha_cmd()
1812 hmac_cmd.u.sha.opad_len = 0; in ccp_run_sha_cmd()
1813 hmac_cmd.u.sha.first = 1; in ccp_run_sha_cmd()
1814 hmac_cmd.u.sha.final = 1; in ccp_run_sha_cmd()
1815 hmac_cmd.u.sha.msg_bits = (block_size + digest_size) << 3; in ccp_run_sha_cmd()
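The recursive finish above hashes one block of opad followed by the inner digest as a fresh first+final SHA, which is exactly the msg_bits arithmetic shown. A sketch of assembling that outer message (hypothetical helper):

	#include <string.h>

	/* Build opad || inner-digest and return its length in bits,
	 * e.g. SHA-256: (64 + 32) << 3 == 768.
	 */
	static unsigned int build_hmac_outer(unsigned char *buf,
					     const unsigned char *opad,
					     unsigned int block_size,
					     const unsigned char *digest,
					     unsigned int digest_size)
	{
		memcpy(buf, opad, block_size);
		memcpy(buf + block_size, digest, digest_size);
		return (block_size + digest_size) << 3;
	}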
1819 cmd->engine_error = hmac_cmd.engine_error; in ccp_run_sha_cmd()
1825 if (sha->src) in ccp_run_sha_cmd()
1837 struct ccp_rsa_engine *rsa = &cmd->u.rsa; in ccp_run_rsa_cmd()
1844 if (rsa->key_size > cmd_q->ccp->vdata->rsamax) in ccp_run_rsa_cmd()
1845 return -EINVAL; in ccp_run_rsa_cmd()
1847 if (!rsa->exp || !rsa->mod || !rsa->src || !rsa->dst) in ccp_run_rsa_cmd()
1848 return -EINVAL; in ccp_run_rsa_cmd()
1852 op.jobid = CCP_NEW_JOBID(cmd_q->ccp); in ccp_run_rsa_cmd()
1858 * must be a multiple of 256 bits). Compute o_len, i_len in bytes. in ccp_run_rsa_cmd()
1862 o_len = 32 * ((rsa->key_size + 255) / 256); in ccp_run_rsa_cmd()
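The sizing above rounds the key size up to whole 256-bit chunks and converts to bytes; e.g. a 2048-bit key gives 32 * 8 = 256 bytes, while 2049 bits would round up to 288. A sketch:

	/* Byte length of an RSA operand padded to whole 256-bit chunks,
	 * matching the o_len computation above.
	 */
	static unsigned int rsa_operand_bytes(unsigned int key_bits)
	{
		return 32 * ((key_bits + 255) / 256);
	}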
1866 if (cmd_q->ccp->vdata->version < CCP_VERSION(5, 0)) { in ccp_run_rsa_cmd()
1871 op.sb_key = cmd_q->ccp->vdata->perform->sballoc(cmd_q, in ccp_run_rsa_cmd()
1874 return -EIO; in ccp_run_rsa_cmd()
1880 op.sb_key = cmd_q->sb_key; in ccp_run_rsa_cmd()
1890 ret = ccp_reverse_set_dm_area(&exp, 0, rsa->exp, 0, rsa->exp_len); in ccp_run_rsa_cmd()
1894 if (cmd_q->ccp->vdata->version < CCP_VERSION(5, 0)) { in ccp_run_rsa_cmd()
1896 * as many 32-byte blocks as were allocated above. It's in ccp_run_rsa_cmd()
1902 cmd->engine_error = cmd_q->cmd_error; in ccp_run_rsa_cmd()
1919 ret = ccp_reverse_set_dm_area(&src, 0, rsa->mod, 0, rsa->mod_len); in ccp_run_rsa_cmd()
1922 ret = ccp_reverse_set_dm_area(&src, o_len, rsa->src, 0, rsa->src_len); in ccp_run_rsa_cmd()
1939 op.u.rsa.mod_size = rsa->key_size; in ccp_run_rsa_cmd()
1942 ret = cmd_q->ccp->vdata->perform->rsa(&op); in ccp_run_rsa_cmd()
1944 cmd->engine_error = cmd_q->cmd_error; in ccp_run_rsa_cmd()
1948 ccp_reverse_get_dm_area(&dst, 0, rsa->dst, 0, rsa->mod_len); in ccp_run_rsa_cmd()
1961 cmd_q->ccp->vdata->perform->sbfree(cmd_q, op.sb_key, sb_count); in ccp_run_rsa_cmd()
1969 struct ccp_passthru_engine *pt = &cmd->u.passthru; in ccp_run_passthru_cmd()
1977 if (!pt->final && (pt->src_len & (CCP_PASSTHRU_BLOCKSIZE - 1))) in ccp_run_passthru_cmd()
1978 return -EINVAL; in ccp_run_passthru_cmd()
1980 if (!pt->src || !pt->dst) in ccp_run_passthru_cmd()
1981 return -EINVAL; in ccp_run_passthru_cmd()
1983 if (pt->bit_mod != CCP_PASSTHRU_BITWISE_NOOP) { in ccp_run_passthru_cmd()
1984 if (pt->mask_len != CCP_PASSTHRU_MASKSIZE) in ccp_run_passthru_cmd()
1985 return -EINVAL; in ccp_run_passthru_cmd()
1986 if (!pt->mask) in ccp_run_passthru_cmd()
1987 return -EINVAL; in ccp_run_passthru_cmd()
1994 op.jobid = CCP_NEW_JOBID(cmd_q->ccp); in ccp_run_passthru_cmd()
1996 if (pt->bit_mod != CCP_PASSTHRU_BITWISE_NOOP) { in ccp_run_passthru_cmd()
1998 op.sb_key = cmd_q->sb_key; in ccp_run_passthru_cmd()
2007 ret = ccp_set_dm_area(&mask, 0, pt->mask, 0, pt->mask_len); in ccp_run_passthru_cmd()
2013 cmd->engine_error = cmd_q->cmd_error; in ccp_run_passthru_cmd()
2018 /* Prepare the input and output data workareas. For in-place in ccp_run_passthru_cmd()
2022 if (sg_virt(pt->src) == sg_virt(pt->dst)) in ccp_run_passthru_cmd()
2025 ret = ccp_init_data(&src, cmd_q, pt->src, pt->src_len, in ccp_run_passthru_cmd()
2034 ret = ccp_init_data(&dst, cmd_q, pt->dst, pt->src_len, in ccp_run_passthru_cmd()
2051 ret = -EINVAL; in ccp_run_passthru_cmd()
2070 ret = cmd_q->ccp->vdata->perform->passthru(&op); in ccp_run_passthru_cmd()
2072 cmd->engine_error = cmd_q->cmd_error; in ccp_run_passthru_cmd()
2092 if (pt->bit_mod != CCP_PASSTHRU_BITWISE_NOOP) in ccp_run_passthru_cmd()
2102 struct ccp_passthru_nomap_engine *pt = &cmd->u.passthru_nomap; in ccp_run_passthru_nomap_cmd()
2107 if (!pt->final && (pt->src_len & (CCP_PASSTHRU_BLOCKSIZE - 1))) in ccp_run_passthru_nomap_cmd()
2108 return -EINVAL; in ccp_run_passthru_nomap_cmd()
2110 if (!pt->src_dma || !pt->dst_dma) in ccp_run_passthru_nomap_cmd()
2111 return -EINVAL; in ccp_run_passthru_nomap_cmd()
2113 if (pt->bit_mod != CCP_PASSTHRU_BITWISE_NOOP) { in ccp_run_passthru_nomap_cmd()
2114 if (pt->mask_len != CCP_PASSTHRU_MASKSIZE) in ccp_run_passthru_nomap_cmd()
2115 return -EINVAL; in ccp_run_passthru_nomap_cmd()
2116 if (!pt->mask) in ccp_run_passthru_nomap_cmd()
2117 return -EINVAL; in ccp_run_passthru_nomap_cmd()
2124 op.jobid = CCP_NEW_JOBID(cmd_q->ccp); in ccp_run_passthru_nomap_cmd()
2126 if (pt->bit_mod != CCP_PASSTHRU_BITWISE_NOOP) { in ccp_run_passthru_nomap_cmd()
2128 op.sb_key = cmd_q->sb_key; in ccp_run_passthru_nomap_cmd()
2130 mask.length = pt->mask_len; in ccp_run_passthru_nomap_cmd()
2131 mask.dma.address = pt->mask; in ccp_run_passthru_nomap_cmd()
2132 mask.dma.length = pt->mask_len; in ccp_run_passthru_nomap_cmd()
2137 cmd->engine_error = cmd_q->cmd_error; in ccp_run_passthru_nomap_cmd()
2147 op.src.u.dma.address = pt->src_dma; in ccp_run_passthru_nomap_cmd()
2149 op.src.u.dma.length = pt->src_len; in ccp_run_passthru_nomap_cmd()
2152 op.dst.u.dma.address = pt->dst_dma; in ccp_run_passthru_nomap_cmd()
2154 op.dst.u.dma.length = pt->src_len; in ccp_run_passthru_nomap_cmd()
2156 ret = cmd_q->ccp->vdata->perform->passthru(&op); in ccp_run_passthru_nomap_cmd()
2158 cmd->engine_error = cmd_q->cmd_error; in ccp_run_passthru_nomap_cmd()
2165 struct ccp_ecc_engine *ecc = &cmd->u.ecc; in ccp_run_ecc_mm_cmd()
2171 if (!ecc->u.mm.operand_1 || in ccp_run_ecc_mm_cmd()
2172 (ecc->u.mm.operand_1_len > CCP_ECC_MODULUS_BYTES)) in ccp_run_ecc_mm_cmd()
2173 return -EINVAL; in ccp_run_ecc_mm_cmd()
2175 if (ecc->function != CCP_ECC_FUNCTION_MINV_384BIT) in ccp_run_ecc_mm_cmd()
2176 if (!ecc->u.mm.operand_2 || in ccp_run_ecc_mm_cmd()
2177 (ecc->u.mm.operand_2_len > CCP_ECC_MODULUS_BYTES)) in ccp_run_ecc_mm_cmd()
2178 return -EINVAL; in ccp_run_ecc_mm_cmd()
2180 if (!ecc->u.mm.result || in ccp_run_ecc_mm_cmd()
2181 (ecc->u.mm.result_len < CCP_ECC_MODULUS_BYTES)) in ccp_run_ecc_mm_cmd()
2182 return -EINVAL; in ccp_run_ecc_mm_cmd()
2186 op.jobid = CCP_NEW_JOBID(cmd_q->ccp); in ccp_run_ecc_mm_cmd()
2204 ret = ccp_reverse_set_dm_area(&src, 0, ecc->mod, 0, ecc->mod_len); in ccp_run_ecc_mm_cmd()
2210 ret = ccp_reverse_set_dm_area(&src, 0, ecc->u.mm.operand_1, 0, in ccp_run_ecc_mm_cmd()
2211 ecc->u.mm.operand_1_len); in ccp_run_ecc_mm_cmd()
2216 if (ecc->function != CCP_ECC_FUNCTION_MINV_384BIT) { in ccp_run_ecc_mm_cmd()
2218 ret = ccp_reverse_set_dm_area(&src, 0, ecc->u.mm.operand_2, 0, in ccp_run_ecc_mm_cmd()
2219 ecc->u.mm.operand_2_len); in ccp_run_ecc_mm_cmd()
2242 op.u.ecc.function = cmd->u.ecc.function; in ccp_run_ecc_mm_cmd()
2244 ret = cmd_q->ccp->vdata->perform->ecc(&op); in ccp_run_ecc_mm_cmd()
2246 cmd->engine_error = cmd_q->cmd_error; in ccp_run_ecc_mm_cmd()
2250 ecc->ecc_result = le16_to_cpup( in ccp_run_ecc_mm_cmd()
2252 if (!(ecc->ecc_result & CCP_ECC_RESULT_SUCCESS)) { in ccp_run_ecc_mm_cmd()
2253 ret = -EIO; in ccp_run_ecc_mm_cmd()
2258 ccp_reverse_get_dm_area(&dst, 0, ecc->u.mm.result, 0, in ccp_run_ecc_mm_cmd()
2272 struct ccp_ecc_engine *ecc = &cmd->u.ecc; in ccp_run_ecc_pm_cmd()
2278 if (!ecc->u.pm.point_1.x || in ccp_run_ecc_pm_cmd()
2279 (ecc->u.pm.point_1.x_len > CCP_ECC_MODULUS_BYTES) || in ccp_run_ecc_pm_cmd()
2280 !ecc->u.pm.point_1.y || in ccp_run_ecc_pm_cmd()
2281 (ecc->u.pm.point_1.y_len > CCP_ECC_MODULUS_BYTES)) in ccp_run_ecc_pm_cmd()
2282 return -EINVAL; in ccp_run_ecc_pm_cmd()
2284 if (ecc->function == CCP_ECC_FUNCTION_PADD_384BIT) { in ccp_run_ecc_pm_cmd()
2285 if (!ecc->u.pm.point_2.x || in ccp_run_ecc_pm_cmd()
2286 (ecc->u.pm.point_2.x_len > CCP_ECC_MODULUS_BYTES) || in ccp_run_ecc_pm_cmd()
2287 !ecc->u.pm.point_2.y || in ccp_run_ecc_pm_cmd()
2288 (ecc->u.pm.point_2.y_len > CCP_ECC_MODULUS_BYTES)) in ccp_run_ecc_pm_cmd()
2289 return -EINVAL; in ccp_run_ecc_pm_cmd()
2291 if (!ecc->u.pm.domain_a || in ccp_run_ecc_pm_cmd()
2292 (ecc->u.pm.domain_a_len > CCP_ECC_MODULUS_BYTES)) in ccp_run_ecc_pm_cmd()
2293 return -EINVAL; in ccp_run_ecc_pm_cmd()
2295 if (ecc->function == CCP_ECC_FUNCTION_PMUL_384BIT) in ccp_run_ecc_pm_cmd()
2296 if (!ecc->u.pm.scalar || in ccp_run_ecc_pm_cmd()
2297 (ecc->u.pm.scalar_len > CCP_ECC_MODULUS_BYTES)) in ccp_run_ecc_pm_cmd()
2298 return -EINVAL; in ccp_run_ecc_pm_cmd()
2301 if (!ecc->u.pm.result.x || in ccp_run_ecc_pm_cmd()
2302 (ecc->u.pm.result.x_len < CCP_ECC_MODULUS_BYTES) || in ccp_run_ecc_pm_cmd()
2303 !ecc->u.pm.result.y || in ccp_run_ecc_pm_cmd()
2304 (ecc->u.pm.result.y_len < CCP_ECC_MODULUS_BYTES)) in ccp_run_ecc_pm_cmd()
2305 return -EINVAL; in ccp_run_ecc_pm_cmd()
2309 op.jobid = CCP_NEW_JOBID(cmd_q->ccp); in ccp_run_ecc_pm_cmd()
2327 ret = ccp_reverse_set_dm_area(&src, 0, ecc->mod, 0, ecc->mod_len); in ccp_run_ecc_pm_cmd()
2333 ret = ccp_reverse_set_dm_area(&src, 0, ecc->u.pm.point_1.x, 0, in ccp_run_ecc_pm_cmd()
2334 ecc->u.pm.point_1.x_len); in ccp_run_ecc_pm_cmd()
2338 ret = ccp_reverse_set_dm_area(&src, 0, ecc->u.pm.point_1.y, 0, in ccp_run_ecc_pm_cmd()
2339 ecc->u.pm.point_1.y_len); in ccp_run_ecc_pm_cmd()
2348 if (ecc->function == CCP_ECC_FUNCTION_PADD_384BIT) { in ccp_run_ecc_pm_cmd()
2350 ret = ccp_reverse_set_dm_area(&src, 0, ecc->u.pm.point_2.x, 0, in ccp_run_ecc_pm_cmd()
2351 ecc->u.pm.point_2.x_len); in ccp_run_ecc_pm_cmd()
2355 ret = ccp_reverse_set_dm_area(&src, 0, ecc->u.pm.point_2.y, 0, in ccp_run_ecc_pm_cmd()
2356 ecc->u.pm.point_2.y_len); in ccp_run_ecc_pm_cmd()
2366 ret = ccp_reverse_set_dm_area(&src, 0, ecc->u.pm.domain_a, 0, in ccp_run_ecc_pm_cmd()
2367 ecc->u.pm.domain_a_len); in ccp_run_ecc_pm_cmd()
2372 if (ecc->function == CCP_ECC_FUNCTION_PMUL_384BIT) { in ccp_run_ecc_pm_cmd()
2375 ecc->u.pm.scalar, 0, in ccp_run_ecc_pm_cmd()
2376 ecc->u.pm.scalar_len); in ccp_run_ecc_pm_cmd()
2400 op.u.ecc.function = cmd->u.ecc.function; in ccp_run_ecc_pm_cmd()
2402 ret = cmd_q->ccp->vdata->perform->ecc(&op); in ccp_run_ecc_pm_cmd()
2404 cmd->engine_error = cmd_q->cmd_error; in ccp_run_ecc_pm_cmd()
2408 ecc->ecc_result = le16_to_cpup( in ccp_run_ecc_pm_cmd()
2410 if (!(ecc->ecc_result & CCP_ECC_RESULT_SUCCESS)) { in ccp_run_ecc_pm_cmd()
2411 ret = -EIO; in ccp_run_ecc_pm_cmd()
2421 ccp_reverse_get_dm_area(&dst, 0, ecc->u.pm.result.x, 0, in ccp_run_ecc_pm_cmd()
2424 ccp_reverse_get_dm_area(&dst, 0, ecc->u.pm.result.y, 0, in ccp_run_ecc_pm_cmd()
2442 struct ccp_ecc_engine *ecc = &cmd->u.ecc; in ccp_run_ecc_cmd()
2444 ecc->ecc_result = 0; in ccp_run_ecc_cmd()
2446 if (!ecc->mod || in ccp_run_ecc_cmd()
2447 (ecc->mod_len > CCP_ECC_MODULUS_BYTES)) in ccp_run_ecc_cmd()
2448 return -EINVAL; in ccp_run_ecc_cmd()
2450 switch (ecc->function) { in ccp_run_ecc_cmd()
2462 return -EINVAL; in ccp_run_ecc_cmd()
2470 cmd->engine_error = 0; in ccp_run_cmd()
2471 cmd_q->cmd_error = 0; in ccp_run_cmd()
2472 cmd_q->int_rcvd = 0; in ccp_run_cmd()
2473 cmd_q->free_slots = cmd_q->ccp->vdata->perform->get_free_slots(cmd_q); in ccp_run_cmd()
2475 switch (cmd->engine) { in ccp_run_cmd()
2477 switch (cmd->u.aes.mode) { in ccp_run_cmd()
2502 if (cmd->flags & CCP_CMD_PASSTHRU_NO_DMA_MAP) in ccp_run_cmd()
2511 ret = -EINVAL; in ccp_run_cmd()