1 // SPDX-License-Identifier: GPL-2.0-only
3 * Driver for ARTPEC-6 crypto block using the kernel asynchronous crypto api.
5 * Copyright (C) 2014-2017 Axis Communications AB
13 #include <linux/dma-mapping.h>
14 #include <linux/fault-inject.h>
35 /* Max length of a line in all cache levels for Artpec SoCs. */
186 #define MODULE_NAME "Artpec-6 CA"
198 /* The PDMA is a DMA-engine tightly coupled with a ciphering engine.
203  * A 4-byte metadata word is inserted at the beginning of each dma packet.
208  * Multiple packets are used for providing context data, key data and
     * the actual input data.
     * (fragmented ASCII diagram of the dma packet and descriptor layout
     *  elided)
244 /* Enough maps for all out/in buffers, and all three descr. arrays */
262 struct list_head queue; /* waiting for pdma fifo space */
269 void *pad_buffer; /* cache-aligned block padding buffer */
389 awalk->sg = sg; in artpec6_crypto_walk_init()
390 awalk->offset = 0; in artpec6_crypto_walk_init()
396 while (nbytes && awalk->sg) { in artpec6_crypto_walk_advance()
399 WARN_ON(awalk->offset > awalk->sg->length); in artpec6_crypto_walk_advance()
401 piece = min(nbytes, (size_t)awalk->sg->length - awalk->offset); in artpec6_crypto_walk_advance()
402 nbytes -= piece; in artpec6_crypto_walk_advance()
403 awalk->offset += piece; in artpec6_crypto_walk_advance()
404 if (awalk->offset == awalk->sg->length) { in artpec6_crypto_walk_advance()
405 awalk->sg = sg_next(awalk->sg); in artpec6_crypto_walk_advance()
406 awalk->offset = 0; in artpec6_crypto_walk_advance()
417 WARN_ON(awalk->sg->length == awalk->offset); in artpec6_crypto_walk_chunklen()
419 return awalk->sg->length - awalk->offset; in artpec6_crypto_walk_chunklen()
425 return sg_phys(awalk->sg) + awalk->offset; in artpec6_crypto_walk_chunk_phys()
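/* Illustrative sketch (not from the driver): a user-space analogue of the
 * scatterlist walk above. The 'seg'/'walk' types are hypothetical stand-ins
 * for struct scatterlist and struct artpec6_crypto_walk; 'next' plays the
 * role of sg_next(). The advance logic mirrors the code above.
 */
#include <stddef.h>

struct seg { const unsigned char *buf; size_t length; struct seg *next; };
struct walk { struct seg *sg; size_t offset; };

static size_t walk_advance(struct walk *w, size_t nbytes)
{
	while (nbytes && w->sg) {
		/* consume at most the remainder of the current segment */
		size_t piece = w->sg->length - w->offset;

		if (piece > nbytes)
			piece = nbytes;
		nbytes -= piece;
		w->offset += piece;
		if (w->offset == w->sg->length) {
			w->sg = w->sg->next;	/* like sg_next() */
			w->offset = 0;
		}
	}
	return nbytes;	/* non-zero if the list ran out early */
}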
431 struct artpec6_crypto_dma_descriptors *dma = common->dma; in artpec6_crypto_copy_bounce_buffers()
435 list_for_each_entry_safe(b, next, &dma->bounce_buffers, list) { in artpec6_crypto_copy_bounce_buffers()
437 b, b->length, b->offset, b->buf); in artpec6_crypto_copy_bounce_buffers()
438 sg_pcopy_from_buffer(b->sg, in artpec6_crypto_copy_bounce_buffers()
440 b->buf, in artpec6_crypto_copy_bounce_buffers()
441 b->length, in artpec6_crypto_copy_bounce_buffers()
442 b->offset); in artpec6_crypto_copy_bounce_buffers()
444 list_del(&b->list); in artpec6_crypto_copy_bounce_buffers()
452 int fifo_count = ac->pending_count; in artpec6_crypto_busy()
460 int ret = -EBUSY; in artpec6_crypto_submit()
462 spin_lock_bh(&ac->queue_lock); in artpec6_crypto_submit()
465 list_add_tail(&req->list, &ac->pending); in artpec6_crypto_submit()
467 ret = -EINPROGRESS; in artpec6_crypto_submit()
468 } else if (req->req->flags & CRYPTO_TFM_REQ_MAY_BACKLOG) { in artpec6_crypto_submit()
469 list_add_tail(&req->list, &ac->queue); in artpec6_crypto_submit()
474 spin_unlock_bh(&ac->queue_lock); in artpec6_crypto_submit()
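/* Illustrative sketch (not from the driver): the three-way submit policy
 * above, in isolation. In the crypto API both the backlogged and the
 * rejected case report -EBUSY to the caller; they differ in whether the
 * request was queued (CRYPTO_TFM_REQ_MAY_BACKLOG) or must be resubmitted.
 */
enum submit_outcome { SUBMIT_STARTED, SUBMIT_BACKLOGGED, SUBMIT_REJECTED };

static enum submit_outcome submit_policy(int fifo_has_room, int may_backlog)
{
	if (fifo_has_room)
		return SUBMIT_STARTED;		/* -EINPROGRESS */
	if (may_backlog)
		return SUBMIT_BACKLOGGED;	/* queued, still -EBUSY */
	return SUBMIT_REJECTED;			/* -EBUSY, retry later */
}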
482 enum artpec6_crypto_variant variant = ac->variant; in artpec6_crypto_start_dma()
483 void __iomem *base = ac->base; in artpec6_crypto_start_dma()
484 struct artpec6_crypto_dma_descriptors *dma = common->dma; in artpec6_crypto_start_dma()
490 ind = FIELD_PREP(PDMA_IN_DESCRQ_PUSH_LEN, dma->in_cnt - 1) | in artpec6_crypto_start_dma()
491 FIELD_PREP(PDMA_IN_DESCRQ_PUSH_ADDR, dma->in_dma_addr >> 6); in artpec6_crypto_start_dma()
493 statd = FIELD_PREP(PDMA_IN_STATQ_PUSH_LEN, dma->in_cnt - 1) | in artpec6_crypto_start_dma()
494 FIELD_PREP(PDMA_IN_STATQ_PUSH_ADDR, dma->stat_dma_addr >> 6); in artpec6_crypto_start_dma()
496 outd = FIELD_PREP(PDMA_OUT_DESCRQ_PUSH_LEN, dma->out_cnt - 1) | in artpec6_crypto_start_dma()
497 FIELD_PREP(PDMA_OUT_DESCRQ_PUSH_ADDR, dma->out_dma_addr >> 6); in artpec6_crypto_start_dma()
512 ac->pending_count++; in artpec6_crypto_start_dma()
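/* Illustrative sketch (not from the driver): how FIELD_PREP() packs the
 * queue push registers above. Shifting the DMA address right by 6 only
 * works because the descriptor queues are 64-byte aligned (the low 6 bits
 * are zero). The mask values and field positions below are invented for
 * the demo, not the real PDMA register layout.
 */
#include <stdint.h>
#include <assert.h>

#define DEMO_LEN_MASK  0x0000003fu		/* bits 5:0  - invented */
#define DEMO_ADDR_MASK 0xffffffc0u		/* bits 31:6 - invented */

/* simplified FIELD_PREP: shift the value up to the mask's lowest set bit */
#define DEMO_FIELD_PREP(mask, val) \
	(((uint32_t)(val) << __builtin_ctz(mask)) & (mask))

static uint32_t pack_push_reg(uint32_t dma_addr, unsigned int cnt)
{
	assert((dma_addr & 0x3f) == 0);	/* queue must be 64-byte aligned */
	return DEMO_FIELD_PREP(DEMO_LEN_MASK, cnt - 1) |
	       DEMO_FIELD_PREP(DEMO_ADDR_MASK, dma_addr >> 6);
}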
518 struct artpec6_crypto_dma_descriptors *dma = common->dma; in artpec6_crypto_init_dma_operation()
520 dma->out_cnt = 0; in artpec6_crypto_init_dma_operation()
521 dma->in_cnt = 0; in artpec6_crypto_init_dma_operation()
522 dma->map_count = 0; in artpec6_crypto_init_dma_operation()
523 INIT_LIST_HEAD(&dma->bounce_buffers); in artpec6_crypto_init_dma_operation()
535 /** artpec6_crypto_setup_out_descr_phys - Setup an out channel with a
     *                                        physical address
542 * @return 0 on success or -ENOSPC if there are no more descriptors available
548 struct artpec6_crypto_dma_descriptors *dma = common->dma; in artpec6_crypto_setup_out_descr_phys()
551 if (dma->out_cnt >= PDMA_DESCR_COUNT || in artpec6_crypto_setup_out_descr_phys()
554 return -ENOSPC; in artpec6_crypto_setup_out_descr_phys()
557 d = &dma->out[dma->out_cnt++]; in artpec6_crypto_setup_out_descr_phys()
560 d->ctrl.short_descr = 0; in artpec6_crypto_setup_out_descr_phys()
561 d->ctrl.eop = eop; in artpec6_crypto_setup_out_descr_phys()
562 d->data.len = len; in artpec6_crypto_setup_out_descr_phys()
563 d->data.buf = addr; in artpec6_crypto_setup_out_descr_phys()
567 /** artpec6_crypto_setup_out_descr_short - Setup a short out descriptor
574 * -ENOSPC if no more descriptors are available
575 * -EINVAL if the data length exceeds 7 bytes
581 struct artpec6_crypto_dma_descriptors *dma = common->dma; in artpec6_crypto_setup_out_descr_short()
584 if (dma->out_cnt >= PDMA_DESCR_COUNT || in artpec6_crypto_setup_out_descr_short()
587 return -ENOSPC; in artpec6_crypto_setup_out_descr_short()
589 return -EINVAL; in artpec6_crypto_setup_out_descr_short()
591 d = &dma->out[dma->out_cnt++]; in artpec6_crypto_setup_out_descr_short()
594 d->ctrl.short_descr = 1; in artpec6_crypto_setup_out_descr_short()
595 d->ctrl.short_len = len; in artpec6_crypto_setup_out_descr_short()
596 d->ctrl.eop = eop; in artpec6_crypto_setup_out_descr_short()
597 memcpy(d->shrt.data, dst, len); in artpec6_crypto_setup_out_descr_short()
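/* Illustrative sketch (not the real hardware layout): a short descriptor
 * carries up to 7 bytes inline instead of pointing at a buffer, which
 * saves a DMA mapping for tiny metadata words. Field names are modeled on
 * the code above; widths and positions are assumptions for the demo only.
 */
struct demo_descr {
	struct {
		unsigned short_descr : 1;	/* 1 = inline payload */
		unsigned short_len   : 3;	/* 0..7 inline bytes */
		unsigned eop         : 1;	/* end of packet */
		unsigned intr        : 1;	/* raise irq (in channel) */
	} ctrl;
	union {
		struct {			/* long form: points at data */
			unsigned int len;
			unsigned long long buf;
		} data;
		struct {			/* short form: holds data */
			unsigned char data[7];
		} shrt;
	};
};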
607 struct artpec6_crypto_dma_descriptors *dma = common->dma; in artpec6_crypto_dma_map_page()
614 if (dma->map_count >= ARRAY_SIZE(dma->maps)) in artpec6_crypto_dma_map_page()
615 return -ENOMEM; in artpec6_crypto_dma_map_page()
619 return -ENOMEM; in artpec6_crypto_dma_map_page()
621 map = &dma->maps[dma->map_count++]; in artpec6_crypto_dma_map_page()
622 map->size = size; in artpec6_crypto_dma_map_page()
623 map->dma_addr = dma_addr; in artpec6_crypto_dma_map_page()
624 map->dir = dir; in artpec6_crypto_dma_map_page()
647 struct artpec6_crypto_dma_descriptors *dma = common->dma; in artpec6_crypto_dma_map_descs()
650 ret = artpec6_crypto_dma_map_single(common, dma->in, in artpec6_crypto_dma_map_descs()
651 sizeof(dma->in[0]) * dma->in_cnt, in artpec6_crypto_dma_map_descs()
652 DMA_TO_DEVICE, &dma->in_dma_addr); in artpec6_crypto_dma_map_descs()
656 ret = artpec6_crypto_dma_map_single(common, dma->out, in artpec6_crypto_dma_map_descs()
657 sizeof(dma->out[0]) * dma->out_cnt, in artpec6_crypto_dma_map_descs()
658 DMA_TO_DEVICE, &dma->out_dma_addr); in artpec6_crypto_dma_map_descs()
663 dma->stat[dma->in_cnt - 1] = 0; in artpec6_crypto_dma_map_descs()
670 dma->stat, in artpec6_crypto_dma_map_descs()
671 sizeof(dma->stat[0]) * dma->in_cnt, in artpec6_crypto_dma_map_descs()
673 &dma->stat_dma_addr); in artpec6_crypto_dma_map_descs()
679 struct artpec6_crypto_dma_descriptors *dma = common->dma; in artpec6_crypto_dma_unmap_all()
683 for (i = 0; i < dma->map_count; i++) { in artpec6_crypto_dma_unmap_all()
684 struct artpec6_crypto_dma_map *map = &dma->maps[i]; in artpec6_crypto_dma_unmap_all()
686 dma_unmap_page(dev, map->dma_addr, map->size, map->dir); in artpec6_crypto_dma_unmap_all()
689 dma->map_count = 0; in artpec6_crypto_dma_unmap_all()
692 /** artpec6_crypto_setup_out_descr - Setup an out descriptor
727 /** artpec6_crypto_setup_in_descr_phys - Setup an in channel with a
     *                                       physical address
740 struct artpec6_crypto_dma_descriptors *dma = common->dma; in artpec6_crypto_setup_in_descr_phys()
743 if (dma->in_cnt >= PDMA_DESCR_COUNT || in artpec6_crypto_setup_in_descr_phys()
746 return -ENOSPC; in artpec6_crypto_setup_in_descr_phys()
748 d = &dma->in[dma->in_cnt++]; in artpec6_crypto_setup_in_descr_phys()
751 d->ctrl.intr = intr; in artpec6_crypto_setup_in_descr_phys()
752 d->data.len = len; in artpec6_crypto_setup_in_descr_phys()
753 d->data.buf = addr; in artpec6_crypto_setup_in_descr_phys()
757 /** artpec6_crypto_setup_in_descr - Setup an in channel descriptor
764 * Short descriptors are not used for the in channel
793 bbuf->buf = PTR_ALIGN(base, ARTPEC_CACHE_LINE_MAX); in artpec6_crypto_alloc_bounce()
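/* Illustrative sketch (not from the driver): the over-allocate-then-align
 * idiom used for the bounce, pad and zero buffers. Allocating twice the
 * cache line size guarantees a fully aligned line fits inside the block.
 * demo_ptr_align() mirrors the kernel's PTR_ALIGN().
 */
#include <stdlib.h>
#include <stdint.h>

#define DEMO_CACHE_LINE 64

static void *demo_ptr_align(void *p, size_t a)
{
	return (void *)(((uintptr_t)p + a - 1) & ~(uintptr_t)(a - 1));
}

static void *alloc_line_aligned(void **base_out)
{
	void *base = calloc(2, DEMO_CACHE_LINE); /* 2x for alignment slack */

	*base_out = base;	/* keep the raw pointer for free() */
	return base ? demo_ptr_align(base, DEMO_CACHE_LINE) : NULL;
}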
803 bbuf = artpec6_crypto_alloc_bounce(common->gfp_flags); in setup_bounce_buffer_in()
805 return -ENOMEM; in setup_bounce_buffer_in()
807 bbuf->length = size; in setup_bounce_buffer_in()
808 bbuf->sg = walk->sg; in setup_bounce_buffer_in()
809 bbuf->offset = walk->offset; in setup_bounce_buffer_in()
811 ret = artpec6_crypto_setup_in_descr(common, bbuf->buf, size, false); in setup_bounce_buffer_in()
817 pr_debug("BOUNCE %zu offset %zu\n", size, walk->offset); in setup_bounce_buffer_in()
818 list_add_tail(&bbuf->list, &common->dma->bounce_buffers); in setup_bounce_buffer_in()
831 while (walk->sg && count) { in artpec6_crypto_setup_sg_descrs_in()
836 	 * size we need bounce buffers. The DMA-API requires that the in artpec6_crypto_setup_sg_descrs_in()
	 * entire cache line is owned by the data buffer, and this holds also
838 	 * for the case when coherent DMA is used. in artpec6_crypto_setup_sg_descrs_in()
842 ALIGN(addr, ARTPEC_CACHE_LINE_MAX) - in artpec6_crypto_setup_sg_descrs_in()
845 pr_debug("CHUNK-b %pad:%zu\n", &addr, chunk); in artpec6_crypto_setup_sg_descrs_in()
848 pr_debug("CHUNK-b %pad:%zu\n", &addr, chunk); in artpec6_crypto_setup_sg_descrs_in()
853 chunk = chunk & ~(ARTPEC_CACHE_LINE_MAX-1); in artpec6_crypto_setup_sg_descrs_in()
858 sg_page(walk->sg), in artpec6_crypto_setup_sg_descrs_in()
859 walk->sg->offset + in artpec6_crypto_setup_sg_descrs_in()
860 walk->offset, in artpec6_crypto_setup_sg_descrs_in()
875 count = count - chunk; in artpec6_crypto_setup_sg_descrs_in()
882 return count ? -EINVAL : 0; in artpec6_crypto_setup_sg_descrs_in()
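/* Illustrative sketch (not from the driver): the head/middle/tail split
 * computed above for the in channel. Data that does not start or end on a
 * cache-line boundary is bounced; only whole lines go straight to DMA.
 * For example addr=0x1007, count=200 splits into a 57-byte bounced head,
 * 128 direct bytes and a 15-byte bounced tail.
 */
#include <stdio.h>
#include <stddef.h>
#include <stdint.h>

#define LINE 64

static void split_for_dma(uintptr_t addr, size_t count)
{
	size_t head = ((addr + LINE - 1) & ~(uintptr_t)(LINE - 1)) - addr;

	if (head > count)
		head = count;	/* everything fits before the next line */
	size_t mid  = (count - head) & ~(size_t)(LINE - 1);
	size_t tail = count - head - mid;

	printf("bounce %zu, direct %zu, bounce %zu\n", head, mid, tail);
}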
894 while (walk->sg && count) { in artpec6_crypto_setup_sg_descrs_out()
898 pr_debug("OUT-CHUNK %pad:%zu\n", &addr, chunk); in artpec6_crypto_setup_sg_descrs_out()
903 chunk = min_t(size_t, chunk, (4-(addr&3))); in artpec6_crypto_setup_sg_descrs_out()
905 sg_pcopy_to_buffer(walk->sg, 1, buf, chunk, in artpec6_crypto_setup_sg_descrs_out()
906 walk->offset); in artpec6_crypto_setup_sg_descrs_out()
915 sg_page(walk->sg), in artpec6_crypto_setup_sg_descrs_out()
916 walk->sg->offset + in artpec6_crypto_setup_sg_descrs_out()
917 walk->offset, in artpec6_crypto_setup_sg_descrs_out()
932 count = count - chunk; in artpec6_crypto_setup_sg_descrs_out()
939 return count ? -EINVAL : 0; in artpec6_crypto_setup_sg_descrs_out()
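/* Illustrative sketch (not from the driver): the out channel only needs
 * 4-byte alignment, so an unaligned head of at most 3 bytes is copied into
 * a short descriptor. This mirrors min(chunk, 4 - (addr & 3)) above; only
 * the unaligned-head case is shown.
 */
#include <stddef.h>
#include <stdint.h>

static size_t out_head_bytes(uintptr_t addr, size_t chunk)
{
	size_t head = 4 - (addr & 3);	/* bytes until the next word */

	return (addr & 3) ? (head < chunk ? head : chunk) : 0;
}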
943 /** artpec6_crypto_terminate_out_descrs - Set the EOP on the last out descriptor
945  * If the out descriptor list is non-empty, then the eop flag on the
     * last used out descriptor will be set.
949  *	   -EINVAL if the out descriptor list is empty or has overflown
954 struct artpec6_crypto_dma_descriptors *dma = common->dma; in artpec6_crypto_terminate_out_descrs()
957 if (!dma->out_cnt || dma->out_cnt > PDMA_DESCR_COUNT) { in artpec6_crypto_terminate_out_descrs()
959 			MODULE_NAME, dma->out_cnt ? "full" : "empty"); in artpec6_crypto_terminate_out_descrs()
960 return -EINVAL; in artpec6_crypto_terminate_out_descrs()
964 d = &dma->out[dma->out_cnt-1]; in artpec6_crypto_terminate_out_descrs()
965 d->ctrl.eop = 1; in artpec6_crypto_terminate_out_descrs()
970 /** artpec6_crypto_terminate_in_descrs - Set the interrupt flag on the last
973 * See artpec6_crypto_terminate_out_descrs() for return values
978 struct artpec6_crypto_dma_descriptors *dma = common->dma; in artpec6_crypto_terminate_in_descrs()
981 if (!dma->in_cnt || dma->in_cnt > PDMA_DESCR_COUNT) { in artpec6_crypto_terminate_in_descrs()
983 			MODULE_NAME, dma->in_cnt ? "full" : "empty"); in artpec6_crypto_terminate_in_descrs()
984 return -EINVAL; in artpec6_crypto_terminate_in_descrs()
987 d = &dma->in[dma->in_cnt-1]; in artpec6_crypto_terminate_in_descrs()
988 d->ctrl.intr = 1; in artpec6_crypto_terminate_in_descrs()
992 /** create_hash_pad - Create a Secure Hash conformant pad
1022 target -= 1; in create_hash_pad()
1023 diff = dgstlen & (mod - 1); in create_hash_pad()
1024 pad_bytes = diff > target ? target + mod - diff : target - diff; in create_hash_pad()
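/* Illustrative sketch (not from the driver): the pad-length arithmetic
 * above for a 64-byte block hash (SHA-1/SHA-256). 'target' is the byte
 * offset the message must reach before the 8-byte length field, minus one
 * byte for the mandatory 0x80 marker: 64 - 8 - 1 = 55.
 */
#include <stddef.h>
#include <assert.h>

static size_t sha_zero_pad_bytes(size_t dgstlen)
{
	const size_t mod = 64, target = 64 - 8 - 1;	/* = 55 */
	size_t diff = dgstlen & (mod - 1);

	return diff > target ? target + mod - diff : target - diff;
}

/* e.g. dgstlen = 3: 3 + 1 (0x80) + 52 (zeros) + 8 (length) = 64 bytes */
static void sha_pad_check(void)
{
	assert(sha_zero_pad_bytes(3) == 52);
	assert((3 + 1 + sha_zero_pad_bytes(3) + 8) % 64 == 0);
	assert(sha_zero_pad_bytes(55) == 0);	/* fits exactly */
	assert(sha_zero_pad_bytes(56) == 63);	/* wraps to next block */
}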
1047 flags = (parent->flags & CRYPTO_TFM_REQ_MAY_SLEEP) ? in artpec6_crypto_common_init()
1050 common->gfp_flags = flags; in artpec6_crypto_common_init()
1051 common->dma = kmem_cache_alloc(ac->dma_cache, flags); in artpec6_crypto_common_init()
1052 if (!common->dma) in artpec6_crypto_common_init()
1053 return -ENOMEM; in artpec6_crypto_common_init()
1055 common->req = parent; in artpec6_crypto_common_init()
1056 common->complete = complete; in artpec6_crypto_common_init()
1066 list_for_each_entry_safe(b, next, &dma->bounce_buffers, list) { in artpec6_crypto_bounce_destroy()
1077 artpec6_crypto_bounce_destroy(common->dma); in artpec6_crypto_common_destroy()
1078 kmem_cache_free(ac->dma_cache, common->dma); in artpec6_crypto_common_destroy()
1079 common->dma = NULL; in artpec6_crypto_common_destroy()
1096 switch (ctx->crypto_type) { in artpec6_crypto_encrypt()
1100 req_ctx->decrypt = 0; in artpec6_crypto_encrypt()
1106 switch (ctx->crypto_type) { in artpec6_crypto_encrypt()
1115 ret = artpec6_crypto_common_init(&req_ctx->common, in artpec6_crypto_encrypt()
1116 &req->base, in artpec6_crypto_encrypt()
1118 req->dst, req->cryptlen); in artpec6_crypto_encrypt()
1124 artpec6_crypto_common_destroy(&req_ctx->common); in artpec6_crypto_encrypt()
1128 return artpec6_crypto_submit(&req_ctx->common); in artpec6_crypto_encrypt()
1141 switch (ctx->crypto_type) { in artpec6_crypto_decrypt()
1145 req_ctx->decrypt = 1; in artpec6_crypto_decrypt()
1152 switch (ctx->crypto_type) { in artpec6_crypto_decrypt()
1161 ret = artpec6_crypto_common_init(&req_ctx->common, &req->base, in artpec6_crypto_decrypt()
1163 req->dst, req->cryptlen); in artpec6_crypto_decrypt()
1169 artpec6_crypto_common_destroy(&req_ctx->common); in artpec6_crypto_decrypt()
1173 return artpec6_crypto_submit(&req_ctx->common); in artpec6_crypto_decrypt()
1183 (req->iv + iv_len - 4)); in artpec6_crypto_ctr_crypt()
1184 unsigned int nblks = ALIGN(req->cryptlen, AES_BLOCK_SIZE) / in artpec6_crypto_ctr_crypt()
1188	 * The hardware uses only the last 32 bits as the counter while the in artpec6_crypto_ctr_crypt()
1189	 * kernel tests (aes_ctr_enc_tv_template[4] for example) expect that in artpec6_crypto_ctr_crypt()
	 * the whole IV is a counter, so fall back to the software
	 * implementation if the counter is going to wrap around.
1199 ret = crypto_sync_skcipher_setkey(ctx->fallback, ctx->aes_key, in artpec6_crypto_ctr_crypt()
1200 ctx->key_length); in artpec6_crypto_ctr_crypt()
1205 SYNC_SKCIPHER_REQUEST_ON_STACK(subreq, ctx->fallback); in artpec6_crypto_ctr_crypt()
1207 skcipher_request_set_sync_tfm(subreq, ctx->fallback); in artpec6_crypto_ctr_crypt()
1208 skcipher_request_set_callback(subreq, req->base.flags, in artpec6_crypto_ctr_crypt()
1210 skcipher_request_set_crypt(subreq, req->src, req->dst, in artpec6_crypto_ctr_crypt()
1211 req->cryptlen, req->iv); in artpec6_crypto_ctr_crypt()
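/* Illustrative sketch (not from the driver): the overflow test implied by
 * the comment above. Only the low 32 bits of the IV are incremented by
 * the hardware, so if processing nblks blocks would wrap that counter the
 * request is handed to the software fallback instead.
 */
#include <stdint.h>

static int ctr_needs_fallback(uint32_t counter, uint32_t nblks)
{
	/* unsigned wraparound check: a + b < a  <=>  overflow */
	return counter + nblks < counter;
}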
1251 struct artpec6_cryptotfm_context *ctx = crypto_tfm_ctx(&tfm->base); in artpec6_crypto_aead_set_key()
1254 return -EINVAL; in artpec6_crypto_aead_set_key()
1256 ctx->key_length = len; in artpec6_crypto_aead_set_key()
1258 memcpy(ctx->aes_key, key, len); in artpec6_crypto_aead_set_key()
1267 req_ctx->decrypt = false; in artpec6_crypto_aead_encrypt()
1268 ret = artpec6_crypto_common_init(&req_ctx->common, &req->base, in artpec6_crypto_aead_encrypt()
1276 artpec6_crypto_common_destroy(&req_ctx->common); in artpec6_crypto_aead_encrypt()
1280 return artpec6_crypto_submit(&req_ctx->common); in artpec6_crypto_aead_encrypt()
1288 req_ctx->decrypt = true; in artpec6_crypto_aead_decrypt()
1289 if (req->cryptlen < AES_BLOCK_SIZE) in artpec6_crypto_aead_decrypt()
1290 return -EINVAL; in artpec6_crypto_aead_decrypt()
1292 ret = artpec6_crypto_common_init(&req_ctx->common, in artpec6_crypto_aead_decrypt()
1293 &req->base, in artpec6_crypto_aead_decrypt()
1301 artpec6_crypto_common_destroy(&req_ctx->common); in artpec6_crypto_aead_decrypt()
1305 return artpec6_crypto_submit(&req_ctx->common); in artpec6_crypto_aead_decrypt()
1310 struct artpec6_hashalg_context *ctx = crypto_tfm_ctx(areq->base.tfm); in artpec6_crypto_prepare_hash()
1316 struct artpec6_crypto_req_common *common = &req_ctx->common; in artpec6_crypto_prepare_hash()
1318 enum artpec6_crypto_variant variant = ac->variant; in artpec6_crypto_prepare_hash()
1327 if (req_ctx->hash_flags & HASH_FLAG_HMAC) { in artpec6_crypto_prepare_hash()
1329 req_ctx->key_md = FIELD_PREP(A6_CRY_MD_OPER, in artpec6_crypto_prepare_hash()
1332 req_ctx->key_md = FIELD_PREP(A7_CRY_MD_OPER, in artpec6_crypto_prepare_hash()
1337 memcpy(req_ctx->key_buffer, ctx->hmac_key, in artpec6_crypto_prepare_hash()
1338 ctx->hmac_key_length); in artpec6_crypto_prepare_hash()
1339 memset(req_ctx->key_buffer + ctx->hmac_key_length, 0, in artpec6_crypto_prepare_hash()
1340 blocksize - ctx->hmac_key_length); in artpec6_crypto_prepare_hash()
1343 (void *)&req_ctx->key_md, in artpec6_crypto_prepare_hash()
1344 sizeof(req_ctx->key_md), false, false); in artpec6_crypto_prepare_hash()
1349 req_ctx->key_buffer, blocksize, in artpec6_crypto_prepare_hash()
1355 if (!(req_ctx->hash_flags & HASH_FLAG_INIT_CTX)) { in artpec6_crypto_prepare_hash()
1364 req_ctx->hash_md &= ~A6_CRY_MD_HASH_SEL_CTX; in artpec6_crypto_prepare_hash()
1365 req_ctx->hash_md |= FIELD_PREP(A6_CRY_MD_HASH_SEL_CTX, sel_ctx); in artpec6_crypto_prepare_hash()
1368 if (req_ctx->hash_flags & HASH_FLAG_FINALIZE) in artpec6_crypto_prepare_hash()
1369 req_ctx->hash_md |= A6_CRY_MD_HASH_HMAC_FIN; in artpec6_crypto_prepare_hash()
1371 req_ctx->hash_md &= ~A7_CRY_MD_HASH_SEL_CTX; in artpec6_crypto_prepare_hash()
1372 req_ctx->hash_md |= FIELD_PREP(A7_CRY_MD_HASH_SEL_CTX, sel_ctx); in artpec6_crypto_prepare_hash()
1375 if (req_ctx->hash_flags & HASH_FLAG_FINALIZE) in artpec6_crypto_prepare_hash()
1376 req_ctx->hash_md |= A7_CRY_MD_HASH_HMAC_FIN; in artpec6_crypto_prepare_hash()
1381 (void *)&req_ctx->hash_md, in artpec6_crypto_prepare_hash()
1382 sizeof(req_ctx->hash_md), false, false); in artpec6_crypto_prepare_hash()
1386 error = artpec6_crypto_setup_in_descr(common, ac->pad_buffer, 4, false); in artpec6_crypto_prepare_hash()
1392 req_ctx->digeststate, in artpec6_crypto_prepare_hash()
1399 if (req_ctx->hash_flags & HASH_FLAG_UPDATE) { in artpec6_crypto_prepare_hash()
1401 size_t total_bytes = areq->nbytes + req_ctx->partial_bytes; in artpec6_crypto_prepare_hash()
1406 if (req_ctx->partial_bytes && ready_bytes) { in artpec6_crypto_prepare_hash()
1411 memcpy(req_ctx->partial_buffer_out, in artpec6_crypto_prepare_hash()
1412 req_ctx->partial_buffer, in artpec6_crypto_prepare_hash()
1413 req_ctx->partial_bytes); in artpec6_crypto_prepare_hash()
1416 req_ctx->partial_buffer_out, in artpec6_crypto_prepare_hash()
1417 req_ctx->partial_bytes, in artpec6_crypto_prepare_hash()
1423 done_bytes += req_ctx->partial_bytes; in artpec6_crypto_prepare_hash()
1424 req_ctx->partial_bytes = 0; in artpec6_crypto_prepare_hash()
1427 artpec6_crypto_walk_init(&walk, areq->src); in artpec6_crypto_prepare_hash()
1430 ready_bytes - in artpec6_crypto_prepare_hash()
1436 size_t sg_skip = ready_bytes - done_bytes; in artpec6_crypto_prepare_hash()
1437 size_t sg_rem = areq->nbytes - sg_skip; in artpec6_crypto_prepare_hash()
1439 sg_pcopy_to_buffer(areq->src, sg_nents(areq->src), in artpec6_crypto_prepare_hash()
1440 req_ctx->partial_buffer + in artpec6_crypto_prepare_hash()
1441 req_ctx->partial_bytes, in artpec6_crypto_prepare_hash()
1444 req_ctx->partial_bytes += sg_rem; in artpec6_crypto_prepare_hash()
1447 req_ctx->digcnt += ready_bytes; in artpec6_crypto_prepare_hash()
1448 req_ctx->hash_flags &= ~(HASH_FLAG_UPDATE); in artpec6_crypto_prepare_hash()
1452 if (req_ctx->hash_flags & HASH_FLAG_FINALIZE) { in artpec6_crypto_prepare_hash()
1458 oper = FIELD_GET(A6_CRY_MD_OPER, req_ctx->hash_md); in artpec6_crypto_prepare_hash()
1460 oper = FIELD_GET(A7_CRY_MD_OPER, req_ctx->hash_md); in artpec6_crypto_prepare_hash()
1463 if (req_ctx->partial_bytes) { in artpec6_crypto_prepare_hash()
1464 memcpy(req_ctx->partial_buffer_out, in artpec6_crypto_prepare_hash()
1465 req_ctx->partial_buffer, in artpec6_crypto_prepare_hash()
1466 req_ctx->partial_bytes); in artpec6_crypto_prepare_hash()
1468 req_ctx->partial_buffer_out, in artpec6_crypto_prepare_hash()
1469 req_ctx->partial_bytes, in artpec6_crypto_prepare_hash()
1474 req_ctx->digcnt += req_ctx->partial_bytes; in artpec6_crypto_prepare_hash()
1475 req_ctx->partial_bytes = 0; in artpec6_crypto_prepare_hash()
1478 if (req_ctx->hash_flags & HASH_FLAG_HMAC) in artpec6_crypto_prepare_hash()
1479 digest_bits = 8 * (req_ctx->digcnt + blocksize); in artpec6_crypto_prepare_hash()
1481 digest_bits = 8 * req_ctx->digcnt; in artpec6_crypto_prepare_hash()
1484 hash_pad_len = create_hash_pad(oper, req_ctx->pad_buffer, in artpec6_crypto_prepare_hash()
1485 req_ctx->digcnt, digest_bits); in artpec6_crypto_prepare_hash()
1487 req_ctx->pad_buffer, in artpec6_crypto_prepare_hash()
1490 req_ctx->digcnt = 0; in artpec6_crypto_prepare_hash()
1495 /* Descriptor for the final result */ in artpec6_crypto_prepare_hash()
1496 error = artpec6_crypto_setup_in_descr(common, areq->result, in artpec6_crypto_prepare_hash()
1502 } else { /* This is not the final operation for this request */ in artpec6_crypto_prepare_hash()
1508 req_ctx->digeststate, in artpec6_crypto_prepare_hash()
1515 req_ctx->hash_flags &= ~(HASH_FLAG_INIT_CTX | HASH_FLAG_UPDATE | in artpec6_crypto_prepare_hash()
1540 ctx->crypto_type = ARTPEC6_CRYPTO_CIPHER_AES_ECB; in artpec6_crypto_aes_ecb_init()
1549 ctx->fallback = in artpec6_crypto_aes_ctr_init()
1550 crypto_alloc_sync_skcipher(crypto_tfm_alg_name(&tfm->base), in artpec6_crypto_aes_ctr_init()
1552 if (IS_ERR(ctx->fallback)) in artpec6_crypto_aes_ctr_init()
1553 return PTR_ERR(ctx->fallback); in artpec6_crypto_aes_ctr_init()
1557 ctx->crypto_type = ARTPEC6_CRYPTO_CIPHER_AES_CTR; in artpec6_crypto_aes_ctr_init()
1568 ctx->crypto_type = ARTPEC6_CRYPTO_CIPHER_AES_CBC; in artpec6_crypto_aes_cbc_init()
1579 ctx->crypto_type = ARTPEC6_CRYPTO_CIPHER_AES_XTS; in artpec6_crypto_aes_xts_init()
1595 crypto_free_sync_skcipher(ctx->fallback); in artpec6_crypto_aes_ctr_exit()
1612 return -EINVAL; in artpec6_crypto_cipher_set_key()
1615 memcpy(ctx->aes_key, key, keylen); in artpec6_crypto_cipher_set_key()
1616 ctx->key_length = keylen; in artpec6_crypto_cipher_set_key()
1638 return -EINVAL; in artpec6_crypto_xts_set_key()
1641 memcpy(ctx->aes_key, key, keylen); in artpec6_crypto_xts_set_key()
1642 ctx->key_length = keylen; in artpec6_crypto_xts_set_key()
1646 /** artpec6_crypto_prepare_crypto - Prepare an async block cipher crypto request
1653 * This function sets up the PDMA descriptors for a block cipher request.
1655 * The required padding is added for AES-CTR using a statically defined
     * buffer.
1660 * OUT: [KEY_MD][KEY][EOP]<CIPHER_MD>[IV]<data_0>...[data_n][AES-CTR_pad]<eop>
1673 enum artpec6_crypto_variant variant = ac->variant; in artpec6_crypto_prepare_crypto()
1677 u32 cipher_len = 0; /* Same as regk_crypto_key_128 for NULL crypto */ in artpec6_crypto_prepare_crypto()
1681 common = &req_ctx->common; in artpec6_crypto_prepare_crypto()
1686 ctx->key_md = FIELD_PREP(A6_CRY_MD_OPER, a6_regk_crypto_dlkey); in artpec6_crypto_prepare_crypto()
1688 ctx->key_md = FIELD_PREP(A7_CRY_MD_OPER, a7_regk_crypto_dlkey); in artpec6_crypto_prepare_crypto()
1690 ret = artpec6_crypto_setup_out_descr(common, (void *)&ctx->key_md, in artpec6_crypto_prepare_crypto()
1691 sizeof(ctx->key_md), false, false); in artpec6_crypto_prepare_crypto()
1695 ret = artpec6_crypto_setup_out_descr(common, ctx->aes_key, in artpec6_crypto_prepare_crypto()
1696 ctx->key_length, true, false); in artpec6_crypto_prepare_crypto()
1700 req_ctx->cipher_md = 0; in artpec6_crypto_prepare_crypto()
1702 if (ctx->crypto_type == ARTPEC6_CRYPTO_CIPHER_AES_XTS) in artpec6_crypto_prepare_crypto()
1703 cipher_klen = ctx->key_length/2; in artpec6_crypto_prepare_crypto()
1705 cipher_klen = ctx->key_length; in artpec6_crypto_prepare_crypto()
1720 MODULE_NAME, ctx->key_length); in artpec6_crypto_prepare_crypto()
1721 return -EINVAL; in artpec6_crypto_prepare_crypto()
1724 switch (ctx->crypto_type) { in artpec6_crypto_prepare_crypto()
1727 cipher_decr = req_ctx->decrypt; in artpec6_crypto_prepare_crypto()
1732 cipher_decr = req_ctx->decrypt; in artpec6_crypto_prepare_crypto()
1742 cipher_decr = req_ctx->decrypt; in artpec6_crypto_prepare_crypto()
1745 req_ctx->cipher_md |= A6_CRY_MD_CIPHER_DSEQ; in artpec6_crypto_prepare_crypto()
1747 req_ctx->cipher_md |= A7_CRY_MD_CIPHER_DSEQ; in artpec6_crypto_prepare_crypto()
1752 MODULE_NAME, ctx->crypto_type); in artpec6_crypto_prepare_crypto()
1753 return -EINVAL; in artpec6_crypto_prepare_crypto()
1757 req_ctx->cipher_md |= FIELD_PREP(A6_CRY_MD_OPER, oper); in artpec6_crypto_prepare_crypto()
1758 req_ctx->cipher_md |= FIELD_PREP(A6_CRY_MD_CIPHER_LEN, in artpec6_crypto_prepare_crypto()
1761 req_ctx->cipher_md |= A6_CRY_MD_CIPHER_DECR; in artpec6_crypto_prepare_crypto()
1763 req_ctx->cipher_md |= FIELD_PREP(A7_CRY_MD_OPER, oper); in artpec6_crypto_prepare_crypto()
1764 req_ctx->cipher_md |= FIELD_PREP(A7_CRY_MD_CIPHER_LEN, in artpec6_crypto_prepare_crypto()
1767 req_ctx->cipher_md |= A7_CRY_MD_CIPHER_DECR; in artpec6_crypto_prepare_crypto()
1771 &req_ctx->cipher_md, in artpec6_crypto_prepare_crypto()
1772 sizeof(req_ctx->cipher_md), in artpec6_crypto_prepare_crypto()
1777 ret = artpec6_crypto_setup_in_descr(common, ac->pad_buffer, 4, false); in artpec6_crypto_prepare_crypto()
1782 ret = artpec6_crypto_setup_out_descr(common, areq->iv, iv_len, in artpec6_crypto_prepare_crypto()
1788 artpec6_crypto_walk_init(&walk, areq->src); in artpec6_crypto_prepare_crypto()
1789 ret = artpec6_crypto_setup_sg_descrs_out(common, &walk, areq->cryptlen); in artpec6_crypto_prepare_crypto()
1794 artpec6_crypto_walk_init(&walk, areq->dst); in artpec6_crypto_prepare_crypto()
1795 ret = artpec6_crypto_setup_sg_descrs_in(common, &walk, areq->cryptlen); in artpec6_crypto_prepare_crypto()
1799 /* CTR-mode padding required by the HW. */ in artpec6_crypto_prepare_crypto()
1800 if (ctx->crypto_type == ARTPEC6_CRYPTO_CIPHER_AES_CTR || in artpec6_crypto_prepare_crypto()
1801 ctx->crypto_type == ARTPEC6_CRYPTO_CIPHER_AES_XTS) { in artpec6_crypto_prepare_crypto()
1802 size_t pad = ALIGN(areq->cryptlen, AES_BLOCK_SIZE) - in artpec6_crypto_prepare_crypto()
1803 areq->cryptlen; in artpec6_crypto_prepare_crypto()
1807 ac->pad_buffer, in artpec6_crypto_prepare_crypto()
1813 ac->pad_buffer, pad, in artpec6_crypto_prepare_crypto()
1836 struct artpec6_cryptotfm_context *ctx = crypto_tfm_ctx(areq->base.tfm); in artpec6_crypto_prepare_aead()
1839 struct artpec6_crypto_req_common *common = &req_ctx->common; in artpec6_crypto_prepare_aead()
1841 enum artpec6_crypto_variant variant = ac->variant; in artpec6_crypto_prepare_aead()
1848 ctx->key_md = FIELD_PREP(A6_CRY_MD_OPER, in artpec6_crypto_prepare_aead()
1851 ctx->key_md = FIELD_PREP(A7_CRY_MD_OPER, in artpec6_crypto_prepare_aead()
1854 ret = artpec6_crypto_setup_out_descr(common, (void *)&ctx->key_md, in artpec6_crypto_prepare_aead()
1855 sizeof(ctx->key_md), false, false); in artpec6_crypto_prepare_aead()
1859 ret = artpec6_crypto_setup_out_descr(common, ctx->aes_key, in artpec6_crypto_prepare_aead()
1860 ctx->key_length, true, false); in artpec6_crypto_prepare_aead()
1864 req_ctx->cipher_md = 0; in artpec6_crypto_prepare_aead()
1866 switch (ctx->key_length) { in artpec6_crypto_prepare_aead()
1877 return -EINVAL; in artpec6_crypto_prepare_aead()
1881 req_ctx->cipher_md |= FIELD_PREP(A6_CRY_MD_OPER, in artpec6_crypto_prepare_aead()
1883 req_ctx->cipher_md |= FIELD_PREP(A6_CRY_MD_CIPHER_LEN, in artpec6_crypto_prepare_aead()
1885 if (req_ctx->decrypt) in artpec6_crypto_prepare_aead()
1886 req_ctx->cipher_md |= A6_CRY_MD_CIPHER_DECR; in artpec6_crypto_prepare_aead()
1888 req_ctx->cipher_md |= FIELD_PREP(A7_CRY_MD_OPER, in artpec6_crypto_prepare_aead()
1890 req_ctx->cipher_md |= FIELD_PREP(A7_CRY_MD_CIPHER_LEN, in artpec6_crypto_prepare_aead()
1892 if (req_ctx->decrypt) in artpec6_crypto_prepare_aead()
1893 req_ctx->cipher_md |= A7_CRY_MD_CIPHER_DECR; in artpec6_crypto_prepare_aead()
1897 (void *) &req_ctx->cipher_md, in artpec6_crypto_prepare_aead()
1898 sizeof(req_ctx->cipher_md), false, in artpec6_crypto_prepare_aead()
1903 ret = artpec6_crypto_setup_in_descr(common, ac->pad_buffer, 4, false); in artpec6_crypto_prepare_aead()
1907	/* For decryption, cryptlen includes the authentication tag. */ in artpec6_crypto_prepare_aead()
1908 input_length = areq->cryptlen; in artpec6_crypto_prepare_aead()
1909 if (req_ctx->decrypt) in artpec6_crypto_prepare_aead()
1910 input_length -= crypto_aead_authsize(cipher); in artpec6_crypto_prepare_aead()
1913 req_ctx->hw_ctx.aad_length_bits = in artpec6_crypto_prepare_aead()
1914 __cpu_to_be64(8*areq->assoclen); in artpec6_crypto_prepare_aead()
1916 req_ctx->hw_ctx.text_length_bits = in artpec6_crypto_prepare_aead()
1919 memcpy(req_ctx->hw_ctx.J0, areq->iv, crypto_aead_ivsize(cipher)); in artpec6_crypto_prepare_aead()
1921 memcpy(req_ctx->hw_ctx.J0 + GCM_AES_IV_SIZE, "\x00\x00\x00\x01", 4); in artpec6_crypto_prepare_aead()
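/* Illustrative sketch (not from the driver): J0 construction for AES-GCM
 * with the standard 96-bit IV, per NIST SP 800-38D: J0 = IV || 0x00000001.
 * The hardware context above is filled the same way.
 */
#include <string.h>
#include <stdint.h>

#define DEMO_GCM_IV_SIZE 12	/* 96-bit IV */

static void gcm_build_j0(uint8_t j0[16], const uint8_t iv[DEMO_GCM_IV_SIZE])
{
	memcpy(j0, iv, DEMO_GCM_IV_SIZE);
	memcpy(j0 + DEMO_GCM_IV_SIZE, "\x00\x00\x00\x01", 4);
}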
1923 ret = artpec6_crypto_setup_out_descr(common, &req_ctx->hw_ctx, in artpec6_crypto_prepare_aead()
1931 artpec6_crypto_walk_init(&walk, areq->src); in artpec6_crypto_prepare_aead()
1934 count = areq->assoclen; in artpec6_crypto_prepare_aead()
1939 if (!IS_ALIGNED(areq->assoclen, 16)) { in artpec6_crypto_prepare_aead()
1940 size_t assoc_pad = 16 - (areq->assoclen % 16); in artpec6_crypto_prepare_aead()
1943 ac->zero_buffer, in artpec6_crypto_prepare_aead()
1957 size_t crypto_pad = 16 - (input_length % 16); in artpec6_crypto_prepare_aead()
1960 ac->zero_buffer, in artpec6_crypto_prepare_aead()
1972 size_t output_len = areq->cryptlen; in artpec6_crypto_prepare_aead()
1974 if (req_ctx->decrypt) in artpec6_crypto_prepare_aead()
1975 output_len -= crypto_aead_authsize(cipher); in artpec6_crypto_prepare_aead()
1977 artpec6_crypto_walk_init(&walk, areq->dst); in artpec6_crypto_prepare_aead()
1980 count = artpec6_crypto_walk_advance(&walk, areq->assoclen); in artpec6_crypto_prepare_aead()
1982 return -EINVAL; in artpec6_crypto_prepare_aead()
1991 size_t crypto_pad = 16 - (output_len % 16); in artpec6_crypto_prepare_aead()
1994 ac->pad_buffer, in artpec6_crypto_prepare_aead()
	/* For encryption, the computed authentication tag is appended to
2001	 * the output ciphertext. For decryption it is put in a context in artpec6_crypto_prepare_aead()
2002	 * buffer for later compare against the input tag. in artpec6_crypto_prepare_aead()
2005 if (req_ctx->decrypt) { in artpec6_crypto_prepare_aead()
2007 req_ctx->decryption_tag, AES_BLOCK_SIZE, false); in artpec6_crypto_prepare_aead()
2012 /* For encryption the requested tag size may be smaller in artpec6_crypto_prepare_aead()
2013 * than the hardware's generated tag. in artpec6_crypto_prepare_aead()
2023 count = AES_BLOCK_SIZE - authsize; in artpec6_crypto_prepare_aead()
2025 ac->pad_buffer, in artpec6_crypto_prepare_aead()
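/* Illustrative sketch (not from the driver): the 16-byte alignment pads
 * and the tag-truncation count used above. AAD and text are padded to
 * whole AES blocks before entering the engine, and for encryption only
 * 'authsize' bytes of the 16-byte hardware tag reach the caller; the rest
 * is steered into a pad buffer.
 */
#include <stddef.h>

static size_t gcm_block_pad(size_t len)
{
	return (len % 16) ? 16 - (len % 16) : 0;
}

static size_t gcm_tag_discard(size_t authsize)
{
	return 16 - authsize;	/* trailing tag bytes sent to pad buffer */
}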
2050 while (!list_empty(&ac->queue) && !artpec6_crypto_busy()) { in artpec6_crypto_process_queue()
2051 req = list_first_entry(&ac->queue, in artpec6_crypto_process_queue()
2054 list_move_tail(&req->list, &ac->pending); in artpec6_crypto_process_queue()
2057 list_add_tail(&req->complete_in_progress, completions); in artpec6_crypto_process_queue()
2061	 * In some cases, the hardware can raise an in_eop_flush interrupt in artpec6_crypto_process_queue()
	 * before the final status word has reached memory, so while jobs
	 * are pending we re-arm a timer that re-schedules the tasklet:
2067 if (ac->pending_count) in artpec6_crypto_process_queue()
2068 mod_timer(&ac->timer, jiffies + msecs_to_jiffies(100)); in artpec6_crypto_process_queue()
2070 timer_delete(&ac->timer); in artpec6_crypto_process_queue()
2079 tasklet_schedule(&ac->task); in artpec6_crypto_timeout()
2093 if (list_empty(&ac->pending)) { in artpec6_crypto_task()
2098 spin_lock(&ac->queue_lock); in artpec6_crypto_task()
2100 list_for_each_entry_safe(req, n, &ac->pending, list) { in artpec6_crypto_task()
2101 struct artpec6_crypto_dma_descriptors *dma = req->dma; in artpec6_crypto_task()
2105 stataddr = dma->stat_dma_addr + 4 * (req->dma->in_cnt - 1); in artpec6_crypto_task()
2111 stat = req->dma->stat[req->dma->in_cnt-1]; in artpec6_crypto_task()
2113		/* A non-zero final status descriptor indicates in artpec6_crypto_task()
		 * that this job has finished.
		 */
2128 list_move_tail(&req->list, &complete_done); in artpec6_crypto_task()
2130 ac->pending_count--; in artpec6_crypto_task()
2135 spin_unlock(&ac->queue_lock); in artpec6_crypto_task()
2145 req->complete(req->req); in artpec6_crypto_task()
2150 crypto_request_complete(req->req, -EINPROGRESS); in artpec6_crypto_task()
2165 scatterwalk_map_and_copy(cipher_req->iv, cipher_req->src, in artpec6_crypto_complete_cbc_decrypt()
2166 cipher_req->cryptlen - AES_BLOCK_SIZE, in artpec6_crypto_complete_cbc_decrypt()
2177 scatterwalk_map_and_copy(cipher_req->iv, cipher_req->dst, in artpec6_crypto_complete_cbc_encrypt()
2178 cipher_req->cryptlen - AES_BLOCK_SIZE, in artpec6_crypto_complete_cbc_encrypt()
2193 if (req_ctx->decrypt) { in artpec6_crypto_complete_aead()
2197 sg_pcopy_to_buffer(areq->src, in artpec6_crypto_complete_aead()
2198 sg_nents(areq->src), in artpec6_crypto_complete_aead()
2201 areq->assoclen + areq->cryptlen - in artpec6_crypto_complete_aead()
2204 if (crypto_memneq(req_ctx->decryption_tag, in artpec6_crypto_complete_aead()
2211 req_ctx->decryption_tag, in artpec6_crypto_complete_aead()
2214 result = -EBADMSG; in artpec6_crypto_complete_aead()
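/* Illustrative sketch (not from the driver): why crypto_memneq() is used
 * above instead of memcmp(). A constant-time comparison leaks no timing
 * information about how many leading tag bytes matched.
 */
#include <stddef.h>

static int demo_memneq(const unsigned char *a, const unsigned char *b,
		       size_t n)
{
	unsigned char diff = 0;

	while (n--)
		diff |= *a++ ^ *b++;	/* never exits early */
	return diff != 0;
}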
2227 /*------------------- Hash functions -----------------------------------------*/
2232 struct artpec6_hashalg_context *tfm_ctx = crypto_tfm_ctx(&tfm->base); in artpec6_crypto_hash_set_key()
2239 return -EINVAL; in artpec6_crypto_hash_set_key()
2242 memset(tfm_ctx->hmac_key, 0, sizeof(tfm_ctx->hmac_key)); in artpec6_crypto_hash_set_key()
2247 tfm_ctx->hmac_key_length = blocksize; in artpec6_crypto_hash_set_key()
2249 ret = crypto_shash_tfm_digest(tfm_ctx->child_hash, key, keylen, in artpec6_crypto_hash_set_key()
2250 tfm_ctx->hmac_key); in artpec6_crypto_hash_set_key()
2254 memcpy(tfm_ctx->hmac_key, key, keylen); in artpec6_crypto_hash_set_key()
2255 tfm_ctx->hmac_key_length = keylen; in artpec6_crypto_hash_set_key()
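/* Illustrative sketch (not from the driver): HMAC key normalization as
 * implemented above. Keys longer than the hash block size are first
 * digested - here via a hypothetical sha256() helper, declared but not
 * defined - while shorter keys are used as-is; either way the key buffer
 * is zero-padded to the block size before being sent to the engine.
 */
#include <string.h>
#include <stddef.h>

#define DEMO_BLOCKSIZE	64	/* SHA-256 block size */
#define DEMO_DIGESTSIZE	32

/* hypothetical one-shot hash helper */
void sha256(const void *data, size_t len, unsigned char out[DEMO_DIGESTSIZE]);

static size_t hmac_prepare_key(unsigned char kbuf[DEMO_BLOCKSIZE],
			       const unsigned char *key, size_t keylen)
{
	memset(kbuf, 0, DEMO_BLOCKSIZE);
	if (keylen > DEMO_BLOCKSIZE) {
		sha256(key, keylen, kbuf);	/* K' = H(K) */
		return DEMO_BLOCKSIZE;	/* zero-padded digest fills a block */
	}
	memcpy(kbuf, key, keylen);
	return keylen;
}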
2265 enum artpec6_crypto_variant variant = ac->variant; in artpec6_crypto_init_hash()
2271 req_ctx->hash_flags = HASH_FLAG_INIT_CTX; in artpec6_crypto_init_hash()
2273 req_ctx->hash_flags |= (HASH_FLAG_HMAC | HASH_FLAG_UPDATE_KEY); in artpec6_crypto_init_hash()
2284 return -EINVAL; in artpec6_crypto_init_hash()
2288 req_ctx->hash_md = FIELD_PREP(A6_CRY_MD_OPER, oper); in artpec6_crypto_init_hash()
2290 req_ctx->hash_md = FIELD_PREP(A7_CRY_MD_OPER, oper); in artpec6_crypto_init_hash()
2300 if (!req_ctx->common.dma) { in artpec6_crypto_prepare_submit_hash()
2301 ret = artpec6_crypto_common_init(&req_ctx->common, in artpec6_crypto_prepare_submit_hash()
2302 &req->base, in artpec6_crypto_prepare_submit_hash()
2313 ret = artpec6_crypto_submit(&req_ctx->common); in artpec6_crypto_prepare_submit_hash()
2321 artpec6_crypto_common_destroy(&req_ctx->common); in artpec6_crypto_prepare_submit_hash()
2332 req_ctx->hash_flags |= HASH_FLAG_FINALIZE; in artpec6_crypto_hash_final()
2341 req_ctx->hash_flags |= HASH_FLAG_UPDATE; in artpec6_crypto_hash_update()
2357 req_ctx->hash_flags |= HASH_FLAG_UPDATE | HASH_FLAG_FINALIZE; in artpec6_crypto_sha1_digest()
2372 req_ctx->hash_flags |= HASH_FLAG_UPDATE | HASH_FLAG_FINALIZE; in artpec6_crypto_sha256_digest()
2387 req_ctx->hash_flags |= HASH_FLAG_UPDATE | HASH_FLAG_FINALIZE; in artpec6_crypto_hmac_sha256_digest()
2410 tfm_ctx->child_hash = child; in artpec6_crypto_ahash_init_common()
2430 if (tfm_ctx->child_hash) in artpec6_crypto_ahash_exit()
2431 crypto_free_shash(tfm_ctx->child_hash); in artpec6_crypto_ahash_exit()
2433 memset(tfm_ctx->hmac_key, 0, sizeof(tfm_ctx->hmac_key)); in artpec6_crypto_ahash_exit()
2434 tfm_ctx->hmac_key_length = 0; in artpec6_crypto_ahash_exit()
2442 enum artpec6_crypto_variant variant = ac->variant; in artpec6_crypto_hash_export()
2444 BUILD_BUG_ON(sizeof(state->partial_buffer) != in artpec6_crypto_hash_export()
2445 sizeof(ctx->partial_buffer)); in artpec6_crypto_hash_export()
2446 BUILD_BUG_ON(sizeof(state->digeststate) != sizeof(ctx->digeststate)); in artpec6_crypto_hash_export()
2448 state->digcnt = ctx->digcnt; in artpec6_crypto_hash_export()
2449 state->partial_bytes = ctx->partial_bytes; in artpec6_crypto_hash_export()
2450 state->hash_flags = ctx->hash_flags; in artpec6_crypto_hash_export()
2453 state->oper = FIELD_GET(A6_CRY_MD_OPER, ctx->hash_md); in artpec6_crypto_hash_export()
2455 state->oper = FIELD_GET(A7_CRY_MD_OPER, ctx->hash_md); in artpec6_crypto_hash_export()
2457 memcpy(state->partial_buffer, ctx->partial_buffer, in artpec6_crypto_hash_export()
2458 sizeof(state->partial_buffer)); in artpec6_crypto_hash_export()
2459 memcpy(state->digeststate, ctx->digeststate, in artpec6_crypto_hash_export()
2460 sizeof(state->digeststate)); in artpec6_crypto_hash_export()
2470 enum artpec6_crypto_variant variant = ac->variant; in artpec6_crypto_hash_import()
2474 ctx->digcnt = state->digcnt; in artpec6_crypto_hash_import()
2475 ctx->partial_bytes = state->partial_bytes; in artpec6_crypto_hash_import()
2476 ctx->hash_flags = state->hash_flags; in artpec6_crypto_hash_import()
2479 ctx->hash_md = FIELD_PREP(A6_CRY_MD_OPER, state->oper); in artpec6_crypto_hash_import()
2481 ctx->hash_md = FIELD_PREP(A7_CRY_MD_OPER, state->oper); in artpec6_crypto_hash_import()
2483 memcpy(ctx->partial_buffer, state->partial_buffer, in artpec6_crypto_hash_import()
2484 sizeof(state->partial_buffer)); in artpec6_crypto_hash_import()
2485 memcpy(ctx->digeststate, state->digeststate, in artpec6_crypto_hash_import()
2486 sizeof(state->digeststate)); in artpec6_crypto_hash_import()
2493 enum artpec6_crypto_variant variant = ac->variant; in init_crypto_hw()
2494 void __iomem *base = ac->base; in init_crypto_hw()
2503	 * The PDMA unit contains 1984 bytes of internal memory for the OUT in init_crypto_hw()
2504	 * channels and 1024 bytes for the IN channel. This is an elastic in init_crypto_hw()
	 * memory used for both descriptors and data, partitioned by the
	 * buffer-size fields below.
2509 out_data_buf_size = 16; /* 1024 bytes for data */ in init_crypto_hw()
2510 out_descr_buf_size = 15; /* 960 bytes for descriptors */ in init_crypto_hw()
2511 in_data_buf_size = 8; /* 512 bytes for data */ in init_crypto_hw()
2512 in_descr_buf_size = 4; /* 256 bytes for descriptors */ in init_crypto_hw()
2513 in_stat_buf_size = 4; /* 256 bytes for stat descrs */ in init_crypto_hw()
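	/* Sanity check of the partitioning above, assuming the register
	 * fields count 64-byte units (as the per-line comments indicate):
	 *   OUT: 16*64 + 15*64        = 1024 + 960       = 1984 bytes
	 *   IN:   8*64 +  4*64 + 4*64 =  512 + 256 + 256 = 1024 bytes
	 * matching the internal memory sizes quoted in the comment.
	 */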
2553 enum artpec6_crypto_variant variant = ac->variant; in artpec6_crypto_disable_hw()
2554 void __iomem *base = ac->base; in artpec6_crypto_disable_hw()
2573 enum artpec6_crypto_variant variant = ac->variant; in artpec6_crypto_irq()
2574 void __iomem *base = ac->base; in artpec6_crypto_irq()
2599		 * we request a status flush command to write the per-job in artpec6_crypto_irq()
		 * status to its status vector, so that the
2601		 * tasklet can detect exactly how many submitted jobs in artpec6_crypto_irq()
		 * have finished.
2615 tasklet_schedule(&ac->task); in artpec6_crypto_irq()
2620 /*------------------- Algorithm definitions ----------------------------------*/
2624 /* SHA-1 */
2636 .cra_driver_name = "artpec-sha1",
2647 /* SHA-256 */
2659 .cra_driver_name = "artpec-sha256",
2670 /* HMAC SHA-256 */
2683 .cra_driver_name = "artpec-hmac-sha256",
2698 /* AES - ECB */
2702 .cra_driver_name = "artpec6-ecb-aes",
2719 /* AES - CTR */
2723 .cra_driver_name = "artpec6-ctr-aes",
2742 /* AES - CBC */
2746 .cra_driver_name = "artpec6-cbc-aes",
2764 /* AES - XTS */
2768 .cra_driver_name = "artpec6-xts-aes",
2799 .cra_driver_name = "artpec-gcm-aes",
2837 { .compatible = "axis,artpec6-crypto", .data = (void *)ARTPEC6_CRYPTO },
2838 { .compatible = "axis,artpec7-crypto", .data = (void *)ARTPEC7_CRYPTO },
2848 struct device *dev = &pdev->dev; in artpec6_crypto_probe()
2854 return -ENODEV; in artpec6_crypto_probe()
2856 match = of_match_node(artpec6_crypto_of_match, dev->of_node); in artpec6_crypto_probe()
2858 return -EINVAL; in artpec6_crypto_probe()
2860 variant = (enum artpec6_crypto_variant)match->data; in artpec6_crypto_probe()
2868 return -ENODEV; in artpec6_crypto_probe()
2870 ac = devm_kzalloc(&pdev->dev, sizeof(struct artpec6_crypto), in artpec6_crypto_probe()
2873 return -ENOMEM; in artpec6_crypto_probe()
2876 ac->variant = variant; in artpec6_crypto_probe()
2878 spin_lock_init(&ac->queue_lock); in artpec6_crypto_probe()
2879 INIT_LIST_HEAD(&ac->queue); in artpec6_crypto_probe()
2880 INIT_LIST_HEAD(&ac->pending); in artpec6_crypto_probe()
2881 timer_setup(&ac->timer, artpec6_crypto_timeout, 0); in artpec6_crypto_probe()
2883 ac->base = base; in artpec6_crypto_probe()
2885 ac->dma_cache = kmem_cache_create("artpec6_crypto_dma", in artpec6_crypto_probe()
2890 if (!ac->dma_cache) in artpec6_crypto_probe()
2891 return -ENOMEM; in artpec6_crypto_probe()
2897 tasklet_init(&ac->task, artpec6_crypto_task, in artpec6_crypto_probe()
2900 ac->pad_buffer = devm_kcalloc(&pdev->dev, 2, ARTPEC_CACHE_LINE_MAX, in artpec6_crypto_probe()
2902 if (!ac->pad_buffer) in artpec6_crypto_probe()
2903 return -ENOMEM; in artpec6_crypto_probe()
2904 ac->pad_buffer = PTR_ALIGN(ac->pad_buffer, ARTPEC_CACHE_LINE_MAX); in artpec6_crypto_probe()
2906 ac->zero_buffer = devm_kcalloc(&pdev->dev, 2, ARTPEC_CACHE_LINE_MAX, in artpec6_crypto_probe()
2908 if (!ac->zero_buffer) in artpec6_crypto_probe()
2909 return -ENOMEM; in artpec6_crypto_probe()
2910 ac->zero_buffer = PTR_ALIGN(ac->zero_buffer, ARTPEC_CACHE_LINE_MAX); in artpec6_crypto_probe()
2916 err = devm_request_irq(&pdev->dev, irq, artpec6_crypto_irq, 0, in artpec6_crypto_probe()
2917 "artpec6-crypto", ac); in artpec6_crypto_probe()
2921 artpec6_crypto_dev = &pdev->dev; in artpec6_crypto_probe()
2950 kmem_cache_destroy(ac->dma_cache); in artpec6_crypto_probe()
2963 tasklet_disable(&ac->task); in artpec6_crypto_remove()
2964 devm_free_irq(&pdev->dev, irq, ac); in artpec6_crypto_remove()
2965 tasklet_kill(&ac->task); in artpec6_crypto_remove()
2966 timer_delete_sync(&ac->timer); in artpec6_crypto_remove()
2970 kmem_cache_destroy(ac->dma_cache); in artpec6_crypto_remove()
2980 .name = "artpec6-crypto",
2988 MODULE_DESCRIPTION("ARTPEC-6 Crypto driver");