// SPDX-License-Identifier: GPL-2.0-only
/*
 * K3 DTHE V2 crypto accelerator driver
 *
 * Copyright (C) Texas Instruments 2025 - https://www.ti.com
 * Author: T Pratham <t-pratham@ti.com>
 */

#include <crypto/aead.h>
#include <crypto/aes.h>
#include <crypto/algapi.h>
#include <crypto/engine.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/skcipher.h>

#include "dthev2-common.h"

#include <linux/delay.h>
#include <linux/dmaengine.h>
#include <linux/dma-mapping.h>
#include <linux/io.h>
#include <linux/scatterlist.h>

/* Registers */

// AES Engine
#define DTHE_P_AES_BASE		0x7000

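/*
 * The key word registers are not at linearly increasing offsets (the
 * word pairs are laid out from high offsets down: words 0-1 sit at
 * 0x38/0x3C, words 6-7 at 0x20/0x24), so each word gets an explicit
 * define rather than a base-plus-index calculation.
 */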
#define DTHE_P_AES_KEY1_0	0x0038
#define DTHE_P_AES_KEY1_1	0x003C
#define DTHE_P_AES_KEY1_2	0x0030
#define DTHE_P_AES_KEY1_3	0x0034
#define DTHE_P_AES_KEY1_4	0x0028
#define DTHE_P_AES_KEY1_5	0x002C
#define DTHE_P_AES_KEY1_6	0x0020
#define DTHE_P_AES_KEY1_7	0x0024

#define DTHE_P_AES_KEY2_0	0x0018
#define DTHE_P_AES_KEY2_1	0x001C
#define DTHE_P_AES_KEY2_2	0x0010
#define DTHE_P_AES_KEY2_3	0x0014
#define DTHE_P_AES_KEY2_4	0x0008
#define DTHE_P_AES_KEY2_5	0x000C
#define DTHE_P_AES_KEY2_6	0x0000
#define DTHE_P_AES_KEY2_7	0x0004

#define DTHE_P_AES_IV_IN_0	0x0040
#define DTHE_P_AES_IV_IN_1	0x0044
#define DTHE_P_AES_IV_IN_2	0x0048
#define DTHE_P_AES_IV_IN_3	0x004C
#define DTHE_P_AES_CTRL		0x0050
#define DTHE_P_AES_C_LENGTH_0	0x0054
#define DTHE_P_AES_C_LENGTH_1	0x0058
#define DTHE_P_AES_AUTH_LENGTH	0x005C
#define DTHE_P_AES_DATA_IN_OUT	0x0060

#define DTHE_P_AES_SYSCONFIG	0x0084
#define DTHE_P_AES_IRQSTATUS	0x008C
#define DTHE_P_AES_IRQENABLE	0x0090

/* Register write values and macros */

enum aes_ctrl_mode_masks {
	AES_CTRL_ECB_MASK = 0x00,
	AES_CTRL_CBC_MASK = BIT(5),
	AES_CTRL_XTS_MASK = BIT(12) | BIT(11),
};

#define DTHE_AES_CTRL_MODE_CLEAR_MASK		~GENMASK(28, 5)

#define DTHE_AES_CTRL_DIR_ENC			BIT(2)

#define DTHE_AES_CTRL_KEYSIZE_16B		BIT(3)
#define DTHE_AES_CTRL_KEYSIZE_24B		BIT(4)
#define DTHE_AES_CTRL_KEYSIZE_32B		(BIT(3) | BIT(4))

#define DTHE_AES_CTRL_SAVE_CTX_SET		BIT(29)

#define DTHE_AES_CTRL_OUTPUT_READY		BIT_MASK(0)
#define DTHE_AES_CTRL_INPUT_READY		BIT_MASK(1)
#define DTHE_AES_CTRL_SAVED_CTX_READY		BIT_MASK(30)
#define DTHE_AES_CTRL_CTX_READY			BIT_MASK(31)

#define DTHE_AES_SYSCONFIG_DMA_DATA_IN_OUT_EN	GENMASK(6, 5)
#define DTHE_AES_IRQENABLE_EN_ALL		GENMASK(3, 0)

/* Misc */
#define AES_IV_SIZE				AES_BLOCK_SIZE
#define AES_BLOCK_WORDS				(AES_BLOCK_SIZE / sizeof(u32))
#define AES_IV_WORDS				AES_BLOCK_WORDS

static int dthe_cipher_init_tfm(struct crypto_skcipher *tfm)
{
	struct dthe_tfm_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct dthe_data *dev_data = dthe_get_dev(ctx);

	ctx->dev_data = dev_data;
	ctx->keylen = 0;

	return 0;
}

static int dthe_cipher_xts_init_tfm(struct crypto_skcipher *tfm)
{
	struct dthe_tfm_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct dthe_data *dev_data = dthe_get_dev(ctx);

	ctx->dev_data = dev_data;
	ctx->keylen = 0;

	ctx->skcipher_fb = crypto_alloc_sync_skcipher("xts(aes)", 0,
						      CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(ctx->skcipher_fb)) {
		dev_err(dev_data->dev, "fallback driver xts(aes) couldn't be loaded\n");
		return PTR_ERR(ctx->skcipher_fb);
	}

	return 0;
}

static void dthe_cipher_xts_exit_tfm(struct crypto_skcipher *tfm)
{
	struct dthe_tfm_ctx *ctx = crypto_skcipher_ctx(tfm);

	crypto_free_sync_skcipher(ctx->skcipher_fb);
}

static int dthe_aes_setkey(struct crypto_skcipher *tfm, const u8 *key, unsigned int keylen)
{
	struct dthe_tfm_ctx *ctx = crypto_skcipher_ctx(tfm);

	if (keylen != AES_KEYSIZE_128 && keylen != AES_KEYSIZE_192 && keylen != AES_KEYSIZE_256)
		return -EINVAL;

	ctx->keylen = keylen;
	memcpy(ctx->key, key, keylen);

	return 0;
}

static int dthe_aes_ecb_setkey(struct crypto_skcipher *tfm, const u8 *key, unsigned int keylen)
{
	struct dthe_tfm_ctx *ctx = crypto_skcipher_ctx(tfm);

	ctx->aes_mode = DTHE_AES_ECB;

	return dthe_aes_setkey(tfm, key, keylen);
}

static int dthe_aes_cbc_setkey(struct crypto_skcipher *tfm, const u8 *key, unsigned int keylen)
{
	struct dthe_tfm_ctx *ctx = crypto_skcipher_ctx(tfm);

	ctx->aes_mode = DTHE_AES_CBC;

	return dthe_aes_setkey(tfm, key, keylen);
}

static int dthe_aes_xts_setkey(struct crypto_skcipher *tfm, const u8 *key, unsigned int keylen)
{
	struct dthe_tfm_ctx *ctx = crypto_skcipher_ctx(tfm);

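	/* XTS takes two concatenated, equal-size AES keys (key1 || key2) */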
	if (keylen != 2 * AES_KEYSIZE_128 &&
	    keylen != 2 * AES_KEYSIZE_192 &&
	    keylen != 2 * AES_KEYSIZE_256)
		return -EINVAL;

	ctx->aes_mode = DTHE_AES_XTS;
	ctx->keylen = keylen / 2;
	memcpy(ctx->key, key, keylen);

	crypto_sync_skcipher_clear_flags(ctx->skcipher_fb, CRYPTO_TFM_REQ_MASK);
	crypto_sync_skcipher_set_flags(ctx->skcipher_fb,
				  crypto_skcipher_get_flags(tfm) &
				  CRYPTO_TFM_REQ_MASK);

	return crypto_sync_skcipher_setkey(ctx->skcipher_fb, key, keylen);
}

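/*
 * Program the AES engine for one request: load KEY1 (and KEY2 for XTS),
 * then build the control word from direction, key size and cipher mode,
 * optionally loading the IV and requesting context save-out.
 */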
static void dthe_aes_set_ctrl_key(struct dthe_tfm_ctx *ctx,
				  struct dthe_aes_req_ctx *rctx,
				  u32 *iv_in)
{
	struct dthe_data *dev_data = dthe_get_dev(ctx);
	void __iomem *aes_base_reg = dev_data->regs + DTHE_P_AES_BASE;
	u32 ctrl_val = 0;

	writel_relaxed(ctx->key[0], aes_base_reg + DTHE_P_AES_KEY1_0);
	writel_relaxed(ctx->key[1], aes_base_reg + DTHE_P_AES_KEY1_1);
	writel_relaxed(ctx->key[2], aes_base_reg + DTHE_P_AES_KEY1_2);
	writel_relaxed(ctx->key[3], aes_base_reg + DTHE_P_AES_KEY1_3);

	if (ctx->keylen > AES_KEYSIZE_128) {
		writel_relaxed(ctx->key[4], aes_base_reg + DTHE_P_AES_KEY1_4);
		writel_relaxed(ctx->key[5], aes_base_reg + DTHE_P_AES_KEY1_5);
	}
	if (ctx->keylen == AES_KEYSIZE_256) {
		writel_relaxed(ctx->key[6], aes_base_reg + DTHE_P_AES_KEY1_6);
		writel_relaxed(ctx->key[7], aes_base_reg + DTHE_P_AES_KEY1_7);
	}

	if (ctx->aes_mode == DTHE_AES_XTS) {
		size_t key2_offset = ctx->keylen / sizeof(u32);

		writel_relaxed(ctx->key[key2_offset + 0], aes_base_reg + DTHE_P_AES_KEY2_0);
		writel_relaxed(ctx->key[key2_offset + 1], aes_base_reg + DTHE_P_AES_KEY2_1);
		writel_relaxed(ctx->key[key2_offset + 2], aes_base_reg + DTHE_P_AES_KEY2_2);
		writel_relaxed(ctx->key[key2_offset + 3], aes_base_reg + DTHE_P_AES_KEY2_3);

		if (ctx->keylen > AES_KEYSIZE_128) {
			writel_relaxed(ctx->key[key2_offset + 4], aes_base_reg + DTHE_P_AES_KEY2_4);
			writel_relaxed(ctx->key[key2_offset + 5], aes_base_reg + DTHE_P_AES_KEY2_5);
		}
		if (ctx->keylen == AES_KEYSIZE_256) {
			writel_relaxed(ctx->key[key2_offset + 6], aes_base_reg + DTHE_P_AES_KEY2_6);
			writel_relaxed(ctx->key[key2_offset + 7], aes_base_reg + DTHE_P_AES_KEY2_7);
		}
	}

	if (rctx->enc)
		ctrl_val |= DTHE_AES_CTRL_DIR_ENC;

	if (ctx->keylen == AES_KEYSIZE_128)
		ctrl_val |= DTHE_AES_CTRL_KEYSIZE_16B;
	else if (ctx->keylen == AES_KEYSIZE_192)
		ctrl_val |= DTHE_AES_CTRL_KEYSIZE_24B;
	else
		ctrl_val |= DTHE_AES_CTRL_KEYSIZE_32B;

	// Write AES mode
	ctrl_val &= DTHE_AES_CTRL_MODE_CLEAR_MASK;
	switch (ctx->aes_mode) {
	case DTHE_AES_ECB:
		ctrl_val |= AES_CTRL_ECB_MASK;
		break;
	case DTHE_AES_CBC:
		ctrl_val |= AES_CTRL_CBC_MASK;
		break;
	case DTHE_AES_XTS:
		ctrl_val |= AES_CTRL_XTS_MASK;
		break;
	}

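	/*
	 * An IV also implies saved context: set SAVE_CTX so the updated IV
	 * can be read back from the IV registers once the operation is done.
	 */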
	if (iv_in) {
		ctrl_val |= DTHE_AES_CTRL_SAVE_CTX_SET;
		for (int i = 0; i < AES_IV_WORDS; ++i)
			writel_relaxed(iv_in[i],
				       aes_base_reg + DTHE_P_AES_IV_IN_0 + (DTHE_REG_SIZE * i));
	}

	writel_relaxed(ctrl_val, aes_base_reg + DTHE_P_AES_CTRL);
}

static void dthe_aes_dma_in_callback(void *data)
{
	struct skcipher_request *req = (struct skcipher_request *)data;
	struct dthe_aes_req_ctx *rctx = skcipher_request_ctx(req);

	complete(&rctx->aes_compl);
}

static int dthe_aes_run(struct crypto_engine *engine, void *areq)
{
	struct skcipher_request *req = container_of(areq, struct skcipher_request, base);
	struct dthe_tfm_ctx *ctx = crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));
	struct dthe_data *dev_data = dthe_get_dev(ctx);
	struct dthe_aes_req_ctx *rctx = skcipher_request_ctx(req);

	unsigned int len = req->cryptlen;
	struct scatterlist *src = req->src;
	struct scatterlist *dst = req->dst;

	int src_nents = sg_nents_for_len(src, len);
	int dst_nents;

	int src_mapped_nents;
	int dst_mapped_nents;

	bool diff_dst;
	enum dma_data_direction src_dir, dst_dir;

	struct device *tx_dev, *rx_dev;
	struct dma_async_tx_descriptor *desc_in, *desc_out;

	int ret;

	void __iomem *aes_base_reg = dev_data->regs + DTHE_P_AES_BASE;

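	/*
	 * Enable DMA requests for the data in/out register and unmask
	 * all AES engine interrupts.
	 */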
	u32 aes_irqenable_val = readl_relaxed(aes_base_reg + DTHE_P_AES_IRQENABLE);
	u32 aes_sysconfig_val = readl_relaxed(aes_base_reg + DTHE_P_AES_SYSCONFIG);

	aes_sysconfig_val |= DTHE_AES_SYSCONFIG_DMA_DATA_IN_OUT_EN;
	writel_relaxed(aes_sysconfig_val, aes_base_reg + DTHE_P_AES_SYSCONFIG);

	aes_irqenable_val |= DTHE_AES_IRQENABLE_EN_ALL;
	writel_relaxed(aes_irqenable_val, aes_base_reg + DTHE_P_AES_IRQENABLE);

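	/*
	 * For in-place requests, src and dst share one bidirectional
	 * mapping; otherwise each side is mapped in its own direction.
	 */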
	if (src == dst) {
		diff_dst = false;
		src_dir = DMA_BIDIRECTIONAL;
		dst_dir = DMA_BIDIRECTIONAL;
	} else {
		diff_dst = true;
		src_dir = DMA_TO_DEVICE;
		dst_dir = DMA_FROM_DEVICE;
	}

	tx_dev = dmaengine_get_dma_device(dev_data->dma_aes_tx);
	rx_dev = dmaengine_get_dma_device(dev_data->dma_aes_rx);

	src_mapped_nents = dma_map_sg(tx_dev, src, src_nents, src_dir);
	if (src_mapped_nents == 0) {
		ret = -EINVAL;
		goto aes_err;
	}

	if (!diff_dst) {
		dst_nents = src_nents;
		dst_mapped_nents = src_mapped_nents;
	} else {
		dst_nents = sg_nents_for_len(dst, len);
		dst_mapped_nents = dma_map_sg(rx_dev, dst, dst_nents, dst_dir);
		if (dst_mapped_nents == 0) {
			dma_unmap_sg(tx_dev, src, src_nents, src_dir);
			ret = -EINVAL;
			goto aes_err;
		}
	}

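	/*
	 * Prepare both slave descriptors: RX moves engine output to dst,
	 * TX feeds src into the engine.
	 */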
	desc_in = dmaengine_prep_slave_sg(dev_data->dma_aes_rx, dst, dst_mapped_nents,
					  DMA_DEV_TO_MEM, DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
	if (!desc_in) {
		dev_err(dev_data->dev, "IN prep_slave_sg() failed\n");
		ret = -EINVAL;
		goto aes_prep_err;
	}

	desc_out = dmaengine_prep_slave_sg(dev_data->dma_aes_tx, src, src_mapped_nents,
					   DMA_MEM_TO_DEV, DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
	if (!desc_out) {
		dev_err(dev_data->dev, "OUT prep_slave_sg() failed\n");
		ret = -EINVAL;
		goto aes_prep_err;
	}

	desc_in->callback = dthe_aes_dma_in_callback;
	desc_in->callback_param = req;

	init_completion(&rctx->aes_compl);

	if (ctx->aes_mode == DTHE_AES_ECB)
		dthe_aes_set_ctrl_key(ctx, rctx, NULL);
	else
		dthe_aes_set_ctrl_key(ctx, rctx, (u32 *)req->iv);

	writel_relaxed(lower_32_bits(req->cryptlen), aes_base_reg + DTHE_P_AES_C_LENGTH_0);
	writel_relaxed(upper_32_bits(req->cryptlen), aes_base_reg + DTHE_P_AES_C_LENGTH_1);

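	/* Start both channels; completion is signalled from the RX callback */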
	dmaengine_submit(desc_in);
	dmaengine_submit(desc_out);

	dma_async_issue_pending(dev_data->dma_aes_rx);
	dma_async_issue_pending(dev_data->dma_aes_tx);

	// Use a timeout so the request still gets finalized if the DMA callback never fires
	ret = wait_for_completion_timeout(&rctx->aes_compl, msecs_to_jiffies(DTHE_DMA_TIMEOUT_MS));
	if (!ret) {
		ret = -ETIMEDOUT;
		dmaengine_terminate_sync(dev_data->dma_aes_rx);
		dmaengine_terminate_sync(dev_data->dma_aes_tx);

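		/* Drain any residual block from the data register */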
		for (int i = 0; i < AES_BLOCK_WORDS; ++i)
			readl_relaxed(aes_base_reg + DTHE_P_AES_DATA_IN_OUT + (DTHE_REG_SIZE * i));
	} else {
		ret = 0;
	}

	// For modes other than ECB, read IV_OUT
	if (ctx->aes_mode != DTHE_AES_ECB) {
		u32 *iv_out = (u32 *)req->iv;

		for (int i = 0; i < AES_IV_WORDS; ++i)
			iv_out[i] = readl_relaxed(aes_base_reg +
						  DTHE_P_AES_IV_IN_0 +
						  (DTHE_REG_SIZE * i));
	}

aes_prep_err:
	dma_unmap_sg(tx_dev, src, src_nents, src_dir);
	if (dst_dir != DMA_BIDIRECTIONAL)
		dma_unmap_sg(rx_dev, dst, dst_nents, dst_dir);

aes_err:
	local_bh_disable();
	crypto_finalize_skcipher_request(dev_data->engine, req, ret);
	local_bh_enable();
	return 0;
}

static int dthe_aes_crypt(struct skcipher_request *req)
{
	struct dthe_tfm_ctx *ctx = crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));
	struct dthe_aes_req_ctx *rctx = skcipher_request_ctx(req);
	struct dthe_data *dev_data = dthe_get_dev(ctx);
	struct crypto_engine *engine;

	/*
	 * If the data length is not a multiple of AES_BLOCK_SIZE:
	 * - ECB and CBC are block ciphers, so return -EINVAL
	 * - XTS must fall back to software, as the H/W doesn't support
	 *   ciphertext stealing
	 */
	if (req->cryptlen % AES_BLOCK_SIZE) {
		if (ctx->aes_mode == DTHE_AES_XTS) {
			SYNC_SKCIPHER_REQUEST_ON_STACK(subreq, ctx->skcipher_fb);

			skcipher_request_set_callback(subreq, skcipher_request_flags(req),
						      req->base.complete, req->base.data);
			skcipher_request_set_crypt(subreq, req->src, req->dst,
						   req->cryptlen, req->iv);

			return rctx->enc ? crypto_skcipher_encrypt(subreq) :
				crypto_skcipher_decrypt(subreq);
		}
		return -EINVAL;
	}

	/*
	 * A zero-length request is a no-op, except in XTS mode,
	 * which requires a non-zero data length.
	 */
	if (req->cryptlen == 0) {
		if (ctx->aes_mode == DTHE_AES_XTS)
			return -EINVAL;
		return 0;
	}

	engine = dev_data->engine;
	return crypto_transfer_skcipher_request_to_engine(engine, req);
}

static int dthe_aes_encrypt(struct skcipher_request *req)
{
	struct dthe_aes_req_ctx *rctx = skcipher_request_ctx(req);

	rctx->enc = 1;
	return dthe_aes_crypt(req);
}

static int dthe_aes_decrypt(struct skcipher_request *req)
{
	struct dthe_aes_req_ctx *rctx = skcipher_request_ctx(req);

	rctx->enc = 0;
	return dthe_aes_crypt(req);
}

static struct skcipher_engine_alg cipher_algs[] = {
	{
		.base.init			= dthe_cipher_init_tfm,
		.base.setkey			= dthe_aes_ecb_setkey,
		.base.encrypt			= dthe_aes_encrypt,
		.base.decrypt			= dthe_aes_decrypt,
		.base.min_keysize		= AES_MIN_KEY_SIZE,
		.base.max_keysize		= AES_MAX_KEY_SIZE,
		.base.base = {
			.cra_name		= "ecb(aes)",
			.cra_driver_name	= "ecb-aes-dthev2",
			.cra_priority		= 299,
			.cra_flags		= CRYPTO_ALG_TYPE_SKCIPHER |
						  CRYPTO_ALG_ASYNC |
						  CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_alignmask		= AES_BLOCK_SIZE - 1,
			.cra_blocksize		= AES_BLOCK_SIZE,
			.cra_ctxsize		= sizeof(struct dthe_tfm_ctx),
			.cra_reqsize		= sizeof(struct dthe_aes_req_ctx),
			.cra_module		= THIS_MODULE,
		},
		.op.do_one_request = dthe_aes_run,
	}, /* ECB AES */
	{
		.base.init			= dthe_cipher_init_tfm,
		.base.setkey			= dthe_aes_cbc_setkey,
		.base.encrypt			= dthe_aes_encrypt,
		.base.decrypt			= dthe_aes_decrypt,
		.base.min_keysize		= AES_MIN_KEY_SIZE,
		.base.max_keysize		= AES_MAX_KEY_SIZE,
		.base.ivsize			= AES_IV_SIZE,
		.base.base = {
			.cra_name		= "cbc(aes)",
			.cra_driver_name	= "cbc-aes-dthev2",
			.cra_priority		= 299,
			.cra_flags		= CRYPTO_ALG_TYPE_SKCIPHER |
						  CRYPTO_ALG_ASYNC |
						  CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_alignmask		= AES_BLOCK_SIZE - 1,
			.cra_blocksize		= AES_BLOCK_SIZE,
			.cra_ctxsize		= sizeof(struct dthe_tfm_ctx),
			.cra_reqsize		= sizeof(struct dthe_aes_req_ctx),
			.cra_module		= THIS_MODULE,
		},
		.op.do_one_request = dthe_aes_run,
	}, /* CBC AES */
	{
		.base.init			= dthe_cipher_xts_init_tfm,
		.base.exit			= dthe_cipher_xts_exit_tfm,
		.base.setkey			= dthe_aes_xts_setkey,
		.base.encrypt			= dthe_aes_encrypt,
		.base.decrypt			= dthe_aes_decrypt,
		.base.min_keysize		= AES_MIN_KEY_SIZE * 2,
		.base.max_keysize		= AES_MAX_KEY_SIZE * 2,
		.base.ivsize			= AES_IV_SIZE,
		.base.base = {
			.cra_name		= "xts(aes)",
			.cra_driver_name	= "xts-aes-dthev2",
			.cra_priority		= 299,
			.cra_flags		= CRYPTO_ALG_TYPE_SKCIPHER |
						  CRYPTO_ALG_ASYNC |
						  CRYPTO_ALG_KERN_DRIVER_ONLY |
						  CRYPTO_ALG_NEED_FALLBACK,
			.cra_alignmask		= AES_BLOCK_SIZE - 1,
			.cra_blocksize		= AES_BLOCK_SIZE,
			.cra_ctxsize		= sizeof(struct dthe_tfm_ctx),
			.cra_reqsize		= sizeof(struct dthe_aes_req_ctx),
			.cra_module		= THIS_MODULE,
		},
		.op.do_one_request = dthe_aes_run,
	}, /* XTS AES */
};

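/*
 * Usage sketch (illustrative only, not part of this driver): once
 * registered, these algorithms are reached through the generic crypto
 * API by cra_name; whether this driver or another "cbc(aes)"
 * implementation services a request depends on cra_priority. A minimal
 * in-place CBC encryption, with error handling elided, would look
 * roughly like:
 *
 *	DECLARE_CRYPTO_WAIT(wait);
 *	struct crypto_skcipher *tfm = crypto_alloc_skcipher("cbc(aes)", 0, 0);
 *	struct skcipher_request *req = skcipher_request_alloc(tfm, GFP_KERNEL);
 *
 *	crypto_skcipher_setkey(tfm, key, AES_KEYSIZE_128);
 *	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG |
 *				      CRYPTO_TFM_REQ_MAY_SLEEP,
 *				      crypto_req_done, &wait);
 *	skcipher_request_set_crypt(req, sg, sg, len, iv);
 *	crypto_wait_req(crypto_skcipher_encrypt(req), &wait);
 */
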
int dthe_register_aes_algs(void)
{
	return crypto_engine_register_skciphers(cipher_algs, ARRAY_SIZE(cipher_algs));
}

void dthe_unregister_aes_algs(void)
{
	crypto_engine_unregister_skciphers(cipher_algs, ARRAY_SIZE(cipher_algs));
}