Lines Matching +full:op +full:-mode in drivers/crypto/nx/nx-aes-xcbc.c (AES-XCBC-MAC on the IBM Power NX crypto accelerator)
1 // SPDX-License-Identifier: GPL-2.0-only
5 * Copyright (C) 2011-2012 International Business Machines Inc.
33 struct nx_csbcpb *csbcpb = nx_ctx->csbcpb; in nx_xcbc_set_key()
37 nx_ctx->ap = &nx_ctx->props[NX_PROPS_AES_128]; in nx_xcbc_set_key()
40 return -EINVAL; in nx_xcbc_set_key()
43 memcpy(csbcpb->cpb.aes_xcbc.key, in_key, key_len); in nx_xcbc_set_key()
49 * Based on RFC 3566, for a zero-length message:
61 struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(&desc->tfm->base); in nx_xcbc_empty()
62 struct nx_csbcpb *csbcpb = nx_ctx->csbcpb; in nx_xcbc_empty()
69 /* Change to ECB mode */ in nx_xcbc_empty()
70 csbcpb->cpb.hdr.mode = NX_MODE_AES_ECB; in nx_xcbc_empty()
71 memcpy(key, csbcpb->cpb.aes_xcbc.key, AES_BLOCK_SIZE); in nx_xcbc_empty()
72 memcpy(csbcpb->cpb.aes_ecb.key, key, AES_BLOCK_SIZE); in nx_xcbc_empty()
81 in_sg = nx_build_sg_list(nx_ctx->in_sg, (u8 *) keys, &len, in nx_xcbc_empty()
82 nx_ctx->ap->sglen); in nx_xcbc_empty()
85 return -EINVAL; in nx_xcbc_empty()
87 out_sg = nx_build_sg_list(nx_ctx->out_sg, (u8 *) keys, &len, in nx_xcbc_empty()
88 nx_ctx->ap->sglen); in nx_xcbc_empty()
91 return -EINVAL; in nx_xcbc_empty()
93 nx_ctx->op.inlen = (nx_ctx->in_sg - in_sg) * sizeof(struct nx_sg); in nx_xcbc_empty()
94 nx_ctx->op.outlen = (nx_ctx->out_sg - out_sg) * sizeof(struct nx_sg); in nx_xcbc_empty()
96 rc = nx_hcall_sync(nx_ctx, &nx_ctx->op, 0); in nx_xcbc_empty()
99 atomic_inc(&(nx_ctx->stats->aes_ops)); in nx_xcbc_empty()
107 memcpy(csbcpb->cpb.aes_ecb.key, keys[0], AES_BLOCK_SIZE); in nx_xcbc_empty()
108 in_sg = nx_build_sg_list(nx_ctx->in_sg, (u8 *) keys[1], &len, in nx_xcbc_empty()
109 nx_ctx->ap->sglen); in nx_xcbc_empty()
112 return -EINVAL; in nx_xcbc_empty()
115 out_sg = nx_build_sg_list(nx_ctx->out_sg, out, &len, in nx_xcbc_empty()
116 nx_ctx->ap->sglen); in nx_xcbc_empty()
119 return -EINVAL; in nx_xcbc_empty()
121 nx_ctx->op.inlen = (nx_ctx->in_sg - in_sg) * sizeof(struct nx_sg); in nx_xcbc_empty()
122 nx_ctx->op.outlen = (nx_ctx->out_sg - out_sg) * sizeof(struct nx_sg); in nx_xcbc_empty()
124 rc = nx_hcall_sync(nx_ctx, &nx_ctx->op, 0); in nx_xcbc_empty()
127 atomic_inc(&(nx_ctx->stats->aes_ops)); in nx_xcbc_empty()
130 /* Restore XCBC mode */ in nx_xcbc_empty()
131 csbcpb->cpb.hdr.mode = NX_MODE_AES_XCBC_MAC; in nx_xcbc_empty()
132 memcpy(csbcpb->cpb.aes_xcbc.key, key, AES_BLOCK_SIZE); in nx_xcbc_empty()
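The nx_xcbc_empty() lines above implement the RFC 3566 rule for a zero-length message by borrowing the engine's ECB mode: encrypt the constants 0x01..01 and 0x03..03 with the base key to derive K1 and K3, XOR K3 with the 10* padding of an empty block, and encrypt the result with K1. A minimal software sketch of the same computation, using the kernel's generic AES library instead of the NX unit (the helper name is illustrative, not part of the driver):

#include <crypto/aes.h>
#include <linux/string.h>

static int xcbc_empty_msg_sketch(const u8 *base_key, u8 *mac)
{
	struct crypto_aes_ctx ctx;
	u8 k1[AES_BLOCK_SIZE], k3[AES_BLOCK_SIZE];
	u8 blk[AES_BLOCK_SIZE] = { 0x80 };	/* 10* padding of the empty block */
	int i, rc;

	rc = aes_expandkey(&ctx, base_key, AES_KEYSIZE_128);
	if (rc)
		return rc;

	/* K1 = E(K, 0x01 x 16), K3 = E(K, 0x03 x 16), per RFC 3566 */
	memset(k1, 0x01, sizeof(k1));
	memset(k3, 0x03, sizeof(k3));
	aes_encrypt(&ctx, k1, k1);
	aes_encrypt(&ctx, k3, k3);

	/* MAC = E(K1, padding XOR K3); the chaining value is zero here */
	for (i = 0; i < AES_BLOCK_SIZE; i++)
		blk[i] ^= k3[i];

	rc = aes_expandkey(&ctx, k1, AES_KEYSIZE_128);
	if (rc)
		return rc;
	aes_encrypt(&ctx, mac, blk);

	memzero_explicit(&ctx, sizeof(ctx));
	return 0;
}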
141 struct nx_csbcpb *csbcpb = nx_ctx->csbcpb; in nx_crypto_ctx_aes_xcbc_init2()
151 csbcpb->cpb.hdr.mode = NX_MODE_AES_XCBC_MAC; in nx_crypto_ctx_aes_xcbc_init2()
170 struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(&desc->tfm->base); in nx_xcbc_update()
171 struct nx_csbcpb *csbcpb = nx_ctx->csbcpb; in nx_xcbc_update()
180 spin_lock_irqsave(&nx_ctx->lock, irq_flags); in nx_xcbc_update()
183 total = sctx->count + len; in nx_xcbc_update()
190 memcpy(sctx->buffer + sctx->count, data, len); in nx_xcbc_update()
191 sctx->count += len; in nx_xcbc_update()
195 in_sg = nx_ctx->in_sg; in nx_xcbc_update()
197 nx_ctx->ap->sglen); in nx_xcbc_update()
199 nx_ctx->ap->databytelen/NX_PAGE_SIZE); in nx_xcbc_update()
202 out_sg = nx_build_sg_list(nx_ctx->out_sg, (u8 *)sctx->state, in nx_xcbc_update()
203 &len, nx_ctx->ap->sglen); in nx_xcbc_update()
206 rc = -EINVAL; in nx_xcbc_update()
210 nx_ctx->op.outlen = (nx_ctx->out_sg - out_sg) * sizeof(struct nx_sg); in nx_xcbc_update()
213 to_process = total - to_process; in nx_xcbc_update()
214 to_process = to_process & ~(AES_BLOCK_SIZE - 1); in nx_xcbc_update()
216 leftover = total - to_process; in nx_xcbc_update()
224 to_process -= AES_BLOCK_SIZE; in nx_xcbc_update()
228 if (sctx->count) { in nx_xcbc_update()
229 data_len = sctx->count; in nx_xcbc_update()
230 in_sg = nx_build_sg_list(nx_ctx->in_sg, in nx_xcbc_update()
231 (u8 *) sctx->buffer, in nx_xcbc_update()
234 if (data_len != sctx->count) { in nx_xcbc_update()
235 rc = -EINVAL; in nx_xcbc_update()
240 data_len = to_process - sctx->count; in nx_xcbc_update()
246 if (data_len != to_process - sctx->count) { in nx_xcbc_update()
247 rc = -EINVAL; in nx_xcbc_update()
251 nx_ctx->op.inlen = (nx_ctx->in_sg - in_sg) * in nx_xcbc_update()
257 memcpy(csbcpb->cpb.aes_xcbc.cv, in nx_xcbc_update()
258 csbcpb->cpb.aes_xcbc.out_cv_mac, in nx_xcbc_update()
263 if (!nx_ctx->op.inlen || !nx_ctx->op.outlen) { in nx_xcbc_update()
264 rc = -EINVAL; in nx_xcbc_update()
268 rc = nx_hcall_sync(nx_ctx, &nx_ctx->op, 0); in nx_xcbc_update()
272 atomic_inc(&(nx_ctx->stats->aes_ops)); in nx_xcbc_update()
277 total -= to_process; in nx_xcbc_update()
278 data += to_process - sctx->count; in nx_xcbc_update()
279 sctx->count = 0; in nx_xcbc_update()
280 in_sg = nx_ctx->in_sg; in nx_xcbc_update()
284 memcpy(sctx->buffer, data, leftover); in nx_xcbc_update()
285 sctx->count = leftover; in nx_xcbc_update()
288 spin_unlock_irqrestore(&nx_ctx->lock, irq_flags); in nx_xcbc_update()
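The nx_xcbc_update() lines above buffer anything up to one block in sctx->buffer, round the remainder down to whole AES blocks, and deliberately hold back the last block when the input ends exactly on a block boundary, since the coprocessor refuses 0-byte operations and the MAC must still be finalized. (The (nx_ctx->in_sg - in_sg) differences stored in op.inlen/op.outlen come out negative on purpose: for these coprocessor hcalls a negative length marks the buffer as a scatter/gather list rather than flat data.) Below is a sketch of just that splitting arithmetic, ignoring the driver's scatter/gather and databytelen limits; the helper name is illustrative.

#include <crypto/aes.h>		/* AES_BLOCK_SIZE */

/*
 * Decide how much of (buffered + incoming) data an update pass may hand
 * to the engine and how much must stay buffered for final(). Sketch of
 * the bookkeeping in nx_xcbc_update(), not driver code.
 */
static void xcbc_split_update(unsigned int buffered, unsigned int len,
			      unsigned int *to_process,
			      unsigned int *leftover)
{
	unsigned int total = buffered + len;

	if (total <= AES_BLOCK_SIZE) {
		/* not a full block yet: everything stays in the buffer */
		*to_process = 0;
		*leftover = total;
		return;
	}

	/* process whole blocks only */
	*to_process = total & ~(AES_BLOCK_SIZE - 1);
	*leftover = total - *to_process;

	/*
	 * An update landing exactly on a block boundary keeps its last
	 * block back, so final() never has to issue a 0-byte operation.
	 */
	if (!*leftover) {
		*to_process -= AES_BLOCK_SIZE;
		*leftover = AES_BLOCK_SIZE;
	}
}

/* e.g. 4 bytes buffered + 28 new = 32 total: process 16, keep 16 back */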
295 struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(&desc->tfm->base); in nx_xcbc_final()
296 struct nx_csbcpb *csbcpb = nx_ctx->csbcpb; in nx_xcbc_final()
302 spin_lock_irqsave(&nx_ctx->lock, irq_flags); in nx_xcbc_final()
307 memcpy(csbcpb->cpb.aes_xcbc.cv, in nx_xcbc_final()
308 csbcpb->cpb.aes_xcbc.out_cv_mac, AES_BLOCK_SIZE); in nx_xcbc_final()
309 } else if (sctx->count == 0) { in nx_xcbc_final()
311 * we've never seen an update, so this is a 0 byte op. The in nx_xcbc_final()
312 * hardware cannot handle a 0 byte op, so just ECB to in nx_xcbc_final()
323 len = sctx->count; in nx_xcbc_final()
324 in_sg = nx_build_sg_list(nx_ctx->in_sg, (u8 *)sctx->buffer, in nx_xcbc_final()
325 &len, nx_ctx->ap->sglen); in nx_xcbc_final()
327 if (len != sctx->count) { in nx_xcbc_final()
328 rc = -EINVAL; in nx_xcbc_final()
333 out_sg = nx_build_sg_list(nx_ctx->out_sg, out, &len, in nx_xcbc_final()
334 nx_ctx->ap->sglen); in nx_xcbc_final()
337 rc = -EINVAL; in nx_xcbc_final()
341 nx_ctx->op.inlen = (nx_ctx->in_sg - in_sg) * sizeof(struct nx_sg); in nx_xcbc_final()
342 nx_ctx->op.outlen = (nx_ctx->out_sg - out_sg) * sizeof(struct nx_sg); in nx_xcbc_final()
344 if (!nx_ctx->op.outlen) { in nx_xcbc_final()
345 rc = -EINVAL; in nx_xcbc_final()
349 rc = nx_hcall_sync(nx_ctx, &nx_ctx->op, 0); in nx_xcbc_final()
353 atomic_inc(&(nx_ctx->stats->aes_ops)); in nx_xcbc_final()
355 memcpy(out, csbcpb->cpb.aes_xcbc.out_cv_mac, AES_BLOCK_SIZE); in nx_xcbc_final()
357 spin_unlock_irqrestore(&nx_ctx->lock, irq_flags); in nx_xcbc_final()
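nx_xcbc_final() above only has to feed the buffered tail (always between 1 and AES_BLOCK_SIZE bytes, thanks to the hold-back in update(); a zero count goes through nx_xcbc_empty() instead) to the coprocessor; the K2/K3 last-block treatment of RFC 3566 happens inside the NX unit. For reference, a software sketch of that last-block step, reusing the AES library calls from the earlier sketch (not driver code; 'cv' plays the role of csbcpb->cpb.aes_xcbc.cv):

#include <crypto/aes.h>
#include <linux/string.h>

/*
 * RFC 3566 treatment of the final block (software reference only; the
 * NX unit performs this internally when the driver finalizes).
 */
static void xcbc_final_block_sketch(const struct crypto_aes_ctx *k1_ctx,
				    const u8 k2[AES_BLOCK_SIZE],
				    const u8 k3[AES_BLOCK_SIZE],
				    const u8 *tail, unsigned int tail_len,
				    const u8 cv[AES_BLOCK_SIZE],
				    u8 mac[AES_BLOCK_SIZE])
{
	u8 blk[AES_BLOCK_SIZE] = { 0 };
	int i;

	memcpy(blk, tail, tail_len);
	if (tail_len == AES_BLOCK_SIZE) {
		/* full final block: XOR in the chaining value and K2 */
		for (i = 0; i < AES_BLOCK_SIZE; i++)
			blk[i] ^= cv[i] ^ k2[i];
	} else {
		/* short final block: 10* padding, then XOR in cv and K3 */
		blk[tail_len] = 0x80;
		for (i = 0; i < AES_BLOCK_SIZE; i++)
			blk[i] ^= cv[i] ^ k3[i];
	}
	aes_encrypt(k1_ctx, mac, blk);	/* MAC = E(K1, blk) */
}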
371 .cra_driver_name = "xcbc-aes-nx",
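The shash_alg this file registers (cra_name "xcbc(aes)", cra_driver_name "xcbc-aes-nx") is reached through the ordinary kernel shash API. A hedged usage sketch, with the wrapper name chosen for illustration and error handling kept minimal:

#include <crypto/hash.h>
#include <crypto/aes.h>
#include <linux/err.h>

/* Compute an AES-XCBC MAC over a flat buffer via whatever "xcbc(aes)"
 * implementation wins priority (the NX one above on Power hardware). */
static int xcbc_digest_sketch(const u8 key[AES_KEYSIZE_128],
			      const u8 *data, unsigned int len,
			      u8 mac[AES_BLOCK_SIZE])
{
	struct crypto_shash *tfm;
	int rc;

	tfm = crypto_alloc_shash("xcbc(aes)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	rc = crypto_shash_setkey(tfm, key, AES_KEYSIZE_128);
	if (!rc) {
		SHASH_DESC_ON_STACK(desc, tfm);

		desc->tfm = tfm;
		rc = crypto_shash_digest(desc, data, len, mac);
	}

	crypto_free_shash(tfm);
	return rc;
}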