// SPDX-License-Identifier: GPL-2.0-only
/*
 * K3 DTHE V2 crypto accelerator driver
 *
 * Copyright (C) Texas Instruments 2025 - https://www.ti.com
 * Author: T Pratham <t-pratham@ti.com>
 */

#include <crypto/aead.h>
#include <crypto/aes.h>
#include <crypto/algapi.h>
#include <crypto/engine.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/skcipher.h>

#include "dthev2-common.h"

#include <linux/delay.h>
#include <linux/dmaengine.h>
#include <linux/dma-mapping.h>
#include <linux/io.h>
#include <linux/scatterlist.h>

/* Registers */

// AES Engine
#define DTHE_P_AES_BASE		0x7000
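/*
 * Note: the KEY1 word registers are not laid out in ascending word order.
 * Each successive pair of key words sits at a lower offset (words 0-1 at
 * 0x38/0x3C, words 2-3 at 0x30/0x34, and so on), so the offsets below are
 * intentionally non-monotonic.
 */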
#define DTHE_P_AES_KEY1_0	0x0038
#define DTHE_P_AES_KEY1_1	0x003C
#define DTHE_P_AES_KEY1_2	0x0030
#define DTHE_P_AES_KEY1_3	0x0034
#define DTHE_P_AES_KEY1_4	0x0028
#define DTHE_P_AES_KEY1_5	0x002C
#define DTHE_P_AES_KEY1_6	0x0020
#define DTHE_P_AES_KEY1_7	0x0024
#define DTHE_P_AES_IV_IN_0	0x0040
#define DTHE_P_AES_IV_IN_1	0x0044
#define DTHE_P_AES_IV_IN_2	0x0048
#define DTHE_P_AES_IV_IN_3	0x004C
#define DTHE_P_AES_CTRL		0x0050
#define DTHE_P_AES_C_LENGTH_0	0x0054
#define DTHE_P_AES_C_LENGTH_1	0x0058
#define DTHE_P_AES_AUTH_LENGTH	0x005C
#define DTHE_P_AES_DATA_IN_OUT	0x0060

#define DTHE_P_AES_SYSCONFIG	0x0084
#define DTHE_P_AES_IRQSTATUS	0x008C
#define DTHE_P_AES_IRQENABLE	0x0090

/* Register write values and macros */

enum aes_ctrl_mode_masks {
	AES_CTRL_ECB_MASK = 0x00,
	AES_CTRL_CBC_MASK = BIT(5),
};

#define DTHE_AES_CTRL_MODE_CLEAR_MASK		~GENMASK(28, 5)

#define DTHE_AES_CTRL_DIR_ENC			BIT(2)

#define DTHE_AES_CTRL_KEYSIZE_16B		BIT(3)
#define DTHE_AES_CTRL_KEYSIZE_24B		BIT(4)
#define DTHE_AES_CTRL_KEYSIZE_32B		(BIT(3) | BIT(4))

#define DTHE_AES_CTRL_SAVE_CTX_SET		BIT(29)

#define DTHE_AES_CTRL_OUTPUT_READY		BIT_MASK(0)
#define DTHE_AES_CTRL_INPUT_READY		BIT_MASK(1)
#define DTHE_AES_CTRL_SAVED_CTX_READY		BIT_MASK(30)
#define DTHE_AES_CTRL_CTX_READY			BIT_MASK(31)

#define DTHE_AES_SYSCONFIG_DMA_DATA_IN_OUT_EN	GENMASK(6, 5)
#define DTHE_AES_IRQENABLE_EN_ALL		GENMASK(3, 0)

/* Misc */
#define AES_IV_SIZE				AES_BLOCK_SIZE
#define AES_BLOCK_WORDS				(AES_BLOCK_SIZE / sizeof(u32))
#define AES_IV_WORDS				AES_BLOCK_WORDS

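/*
 * Cache the device handle in the tfm context at init time so the request
 * handlers do not have to look it up on every operation.
 */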
static int dthe_cipher_init_tfm(struct crypto_skcipher *tfm)
{
	struct dthe_tfm_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct dthe_data *dev_data = dthe_get_dev(ctx);

	ctx->dev_data = dev_data;
	ctx->keylen = 0;

	return 0;
}

static int dthe_aes_setkey(struct crypto_skcipher *tfm, const u8 *key, unsigned int keylen)
{
	struct dthe_tfm_ctx *ctx = crypto_skcipher_ctx(tfm);

	if (keylen != AES_KEYSIZE_128 && keylen != AES_KEYSIZE_192 && keylen != AES_KEYSIZE_256)
		return -EINVAL;

	ctx->keylen = keylen;
	memcpy(ctx->key, key, keylen);

	return 0;
}

static int dthe_aes_ecb_setkey(struct crypto_skcipher *tfm, const u8 *key, unsigned int keylen)
{
	struct dthe_tfm_ctx *ctx = crypto_skcipher_ctx(tfm);

	ctx->aes_mode = DTHE_AES_ECB;

	return dthe_aes_setkey(tfm, key, keylen);
}

static int dthe_aes_cbc_setkey(struct crypto_skcipher *tfm, const u8 *key, unsigned int keylen)
{
	struct dthe_tfm_ctx *ctx = crypto_skcipher_ctx(tfm);

	ctx->aes_mode = DTHE_AES_CBC;

	return dthe_aes_setkey(tfm, key, keylen);
}

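/*
 * Program the key, direction, key size, block mode and (for modes that use
 * one) the IV into the AES engine, finishing with a single write to the
 * CTRL register. Passing iv_in == NULL skips IV programming and context
 * saving; this is the ECB case.
 */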
static void dthe_aes_set_ctrl_key(struct dthe_tfm_ctx *ctx,
				  struct dthe_aes_req_ctx *rctx,
				  u32 *iv_in)
{
	struct dthe_data *dev_data = dthe_get_dev(ctx);
	void __iomem *aes_base_reg = dev_data->regs + DTHE_P_AES_BASE;
	u32 ctrl_val = 0;

	writel_relaxed(ctx->key[0], aes_base_reg + DTHE_P_AES_KEY1_0);
	writel_relaxed(ctx->key[1], aes_base_reg + DTHE_P_AES_KEY1_1);
	writel_relaxed(ctx->key[2], aes_base_reg + DTHE_P_AES_KEY1_2);
	writel_relaxed(ctx->key[3], aes_base_reg + DTHE_P_AES_KEY1_3);

	if (ctx->keylen > AES_KEYSIZE_128) {
		writel_relaxed(ctx->key[4], aes_base_reg + DTHE_P_AES_KEY1_4);
		writel_relaxed(ctx->key[5], aes_base_reg + DTHE_P_AES_KEY1_5);
	}
	if (ctx->keylen == AES_KEYSIZE_256) {
		writel_relaxed(ctx->key[6], aes_base_reg + DTHE_P_AES_KEY1_6);
		writel_relaxed(ctx->key[7], aes_base_reg + DTHE_P_AES_KEY1_7);
	}

	if (rctx->enc)
		ctrl_val |= DTHE_AES_CTRL_DIR_ENC;

	if (ctx->keylen == AES_KEYSIZE_128)
		ctrl_val |= DTHE_AES_CTRL_KEYSIZE_16B;
	else if (ctx->keylen == AES_KEYSIZE_192)
		ctrl_val |= DTHE_AES_CTRL_KEYSIZE_24B;
	else
		ctrl_val |= DTHE_AES_CTRL_KEYSIZE_32B;

	// Write AES mode
	ctrl_val &= DTHE_AES_CTRL_MODE_CLEAR_MASK;
	switch (ctx->aes_mode) {
	case DTHE_AES_ECB:
		ctrl_val |= AES_CTRL_ECB_MASK;
		break;
	case DTHE_AES_CBC:
		ctrl_val |= AES_CTRL_CBC_MASK;
		break;
	}

	if (iv_in) {
		ctrl_val |= DTHE_AES_CTRL_SAVE_CTX_SET;
		for (int i = 0; i < AES_IV_WORDS; ++i)
			writel_relaxed(iv_in[i],
				       aes_base_reg + DTHE_P_AES_IV_IN_0 + (DTHE_REG_SIZE * i));
	}

	writel_relaxed(ctrl_val, aes_base_reg + DTHE_P_AES_CTRL);
}

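/*
 * Completion callback on the RX (engine-to-memory) DMA channel: all output
 * data has landed in memory, so wake up dthe_aes_run().
 */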
static void dthe_aes_dma_in_callback(void *data)
{
	struct skcipher_request *req = (struct skcipher_request *)data;
	struct dthe_aes_req_ctx *rctx = skcipher_request_ctx(req);

	complete(&rctx->aes_compl);
}

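/*
 * Process one skcipher request on the engine: enable DMA and interrupts,
 * map the source/destination scatterlists, prepare one DMA descriptor per
 * direction, program key/IV/length into the engine, kick both channels and
 * wait for the RX completion before finalizing the request.
 */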
static int dthe_aes_run(struct crypto_engine *engine, void *areq)
{
	struct skcipher_request *req = container_of(areq, struct skcipher_request, base);
	struct dthe_tfm_ctx *ctx = crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));
	struct dthe_data *dev_data = dthe_get_dev(ctx);
	struct dthe_aes_req_ctx *rctx = skcipher_request_ctx(req);

	unsigned int len = req->cryptlen;
	struct scatterlist *src = req->src;
	struct scatterlist *dst = req->dst;

	int src_nents = sg_nents_for_len(src, len);
	int dst_nents;

	int src_mapped_nents;
	int dst_mapped_nents;

	bool diff_dst;
	enum dma_data_direction src_dir, dst_dir;

	struct device *tx_dev, *rx_dev;
	struct dma_async_tx_descriptor *desc_in, *desc_out;

	int ret;

	void __iomem *aes_base_reg = dev_data->regs + DTHE_P_AES_BASE;

	u32 aes_irqenable_val = readl_relaxed(aes_base_reg + DTHE_P_AES_IRQENABLE);
	u32 aes_sysconfig_val = readl_relaxed(aes_base_reg + DTHE_P_AES_SYSCONFIG);

	aes_sysconfig_val |= DTHE_AES_SYSCONFIG_DMA_DATA_IN_OUT_EN;
	writel_relaxed(aes_sysconfig_val, aes_base_reg + DTHE_P_AES_SYSCONFIG);

	aes_irqenable_val |= DTHE_AES_IRQENABLE_EN_ALL;
	writel_relaxed(aes_irqenable_val, aes_base_reg + DTHE_P_AES_IRQENABLE);

	if (src == dst) {
		diff_dst = false;
		src_dir = DMA_BIDIRECTIONAL;
		dst_dir = DMA_BIDIRECTIONAL;
	} else {
		diff_dst = true;
		src_dir = DMA_TO_DEVICE;
		dst_dir = DMA_FROM_DEVICE;
	}

	tx_dev = dmaengine_get_dma_device(dev_data->dma_aes_tx);
	rx_dev = dmaengine_get_dma_device(dev_data->dma_aes_rx);

	src_mapped_nents = dma_map_sg(tx_dev, src, src_nents, src_dir);
	if (src_mapped_nents == 0) {
		ret = -EINVAL;
		goto aes_err;
	}

	if (!diff_dst) {
		dst_nents = src_nents;
		dst_mapped_nents = src_mapped_nents;
	} else {
		dst_nents = sg_nents_for_len(dst, len);
		dst_mapped_nents = dma_map_sg(rx_dev, dst, dst_nents, dst_dir);
		if (dst_mapped_nents == 0) {
			dma_unmap_sg(tx_dev, src, src_nents, src_dir);
			ret = -EINVAL;
			goto aes_err;
		}
	}

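	/*
	 * desc_in drains engine output into dst on the RX channel
	 * (dev-to-mem); desc_out feeds src into the engine on the TX
	 * channel (mem-to-dev).
	 */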
	desc_in = dmaengine_prep_slave_sg(dev_data->dma_aes_rx, dst, dst_mapped_nents,
					  DMA_DEV_TO_MEM, DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
	if (!desc_in) {
		dev_err(dev_data->dev, "IN prep_slave_sg() failed\n");
		ret = -EINVAL;
		goto aes_prep_err;
	}

	desc_out = dmaengine_prep_slave_sg(dev_data->dma_aes_tx, src, src_mapped_nents,
					   DMA_MEM_TO_DEV, DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
	if (!desc_out) {
		dev_err(dev_data->dev, "OUT prep_slave_sg() failed\n");
		ret = -EINVAL;
		goto aes_prep_err;
	}

	desc_in->callback = dthe_aes_dma_in_callback;
	desc_in->callback_param = req;

	init_completion(&rctx->aes_compl);

	if (ctx->aes_mode == DTHE_AES_ECB)
		dthe_aes_set_ctrl_key(ctx, rctx, NULL);
	else
		dthe_aes_set_ctrl_key(ctx, rctx, (u32 *)req->iv);

	writel_relaxed(lower_32_bits(req->cryptlen), aes_base_reg + DTHE_P_AES_C_LENGTH_0);
	writel_relaxed(upper_32_bits(req->cryptlen), aes_base_reg + DTHE_P_AES_C_LENGTH_1);

	dmaengine_submit(desc_in);
	dmaengine_submit(desc_out);

	dma_async_issue_pending(dev_data->dma_aes_rx);
	dma_async_issue_pending(dev_data->dma_aes_tx);

	// Wait with a timeout so the request still gets finalized if the DMA callback fails to fire for any reason
	ret = wait_for_completion_timeout(&rctx->aes_compl, msecs_to_jiffies(DTHE_DMA_TIMEOUT_MS));
	if (!ret) {
		ret = -ETIMEDOUT;
		dmaengine_terminate_sync(dev_data->dma_aes_rx);
		dmaengine_terminate_sync(dev_data->dma_aes_tx);

		for (int i = 0; i < AES_BLOCK_WORDS; ++i)
			readl_relaxed(aes_base_reg + DTHE_P_AES_DATA_IN_OUT + (DTHE_REG_SIZE * i));
	} else {
		ret = 0;
	}

	// For modes other than ECB, SAVE_CTX was set, so the IV registers now hold IV_OUT; read it back for chaining
	if (ctx->aes_mode != DTHE_AES_ECB) {
		u32 *iv_out = (u32 *)req->iv;

		for (int i = 0; i < AES_IV_WORDS; ++i)
			iv_out[i] = readl_relaxed(aes_base_reg +
						  DTHE_P_AES_IV_IN_0 +
						  (DTHE_REG_SIZE * i));
	}

aes_prep_err:
	dma_unmap_sg(tx_dev, src, src_nents, src_dir);
	if (dst_dir != DMA_BIDIRECTIONAL)
		dma_unmap_sg(rx_dev, dst, dst_nents, dst_dir);

aes_err:
	local_bh_disable();
	crypto_finalize_skcipher_request(dev_data->engine, req, ret);
	local_bh_enable();
	return ret;
}

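/*
 * Common entry point for encryption and decryption: validate the request
 * length, then hand the request off to the crypto engine queue.
 */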
static int dthe_aes_crypt(struct skcipher_request *req)
{
	struct dthe_tfm_ctx *ctx = crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));
	struct dthe_data *dev_data = dthe_get_dev(ctx);
	struct crypto_engine *engine;

	/*
	 * The engine operates on whole blocks: return -EINVAL if the length
	 * is not a multiple of AES_BLOCK_SIZE. A zero-length request needs
	 * no operation at all.
	 */
	if (req->cryptlen % AES_BLOCK_SIZE)
		return -EINVAL;

	if (req->cryptlen == 0)
		return 0;

	engine = dev_data->engine;
	return crypto_transfer_skcipher_request_to_engine(engine, req);
}

static int dthe_aes_encrypt(struct skcipher_request *req)
{
	struct dthe_aes_req_ctx *rctx = skcipher_request_ctx(req);

	rctx->enc = 1;
	return dthe_aes_crypt(req);
}

static int dthe_aes_decrypt(struct skcipher_request *req)
{
	struct dthe_aes_req_ctx *rctx = skcipher_request_ctx(req);

	rctx->enc = 0;
	return dthe_aes_crypt(req);
}

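/*
 * cra_priority 299 is meant to rank these algorithms above the generic C
 * implementations (priority 100) while staying just below CPU-instruction
 * based AES implementations, which conventionally register at 300.
 */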
static struct skcipher_engine_alg cipher_algs[] = {
	{
		.base.init			= dthe_cipher_init_tfm,
		.base.setkey			= dthe_aes_ecb_setkey,
		.base.encrypt			= dthe_aes_encrypt,
		.base.decrypt			= dthe_aes_decrypt,
		.base.min_keysize		= AES_MIN_KEY_SIZE,
		.base.max_keysize		= AES_MAX_KEY_SIZE,
		.base.base = {
			.cra_name		= "ecb(aes)",
			.cra_driver_name	= "ecb-aes-dthev2",
			.cra_priority		= 299,
			.cra_flags		= CRYPTO_ALG_TYPE_SKCIPHER |
						  CRYPTO_ALG_ASYNC |
						  CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_alignmask		= AES_BLOCK_SIZE - 1,
			.cra_blocksize		= AES_BLOCK_SIZE,
			.cra_ctxsize		= sizeof(struct dthe_tfm_ctx),
			.cra_reqsize		= sizeof(struct dthe_aes_req_ctx),
			.cra_module		= THIS_MODULE,
		},
		.op.do_one_request = dthe_aes_run,
	}, /* ECB AES */
	{
		.base.init			= dthe_cipher_init_tfm,
		.base.setkey			= dthe_aes_cbc_setkey,
		.base.encrypt			= dthe_aes_encrypt,
		.base.decrypt			= dthe_aes_decrypt,
		.base.min_keysize		= AES_MIN_KEY_SIZE,
		.base.max_keysize		= AES_MAX_KEY_SIZE,
		.base.ivsize			= AES_IV_SIZE,
		.base.base = {
			.cra_name		= "cbc(aes)",
			.cra_driver_name	= "cbc-aes-dthev2",
			.cra_priority		= 299,
			.cra_flags		= CRYPTO_ALG_TYPE_SKCIPHER |
						  CRYPTO_ALG_ASYNC |
						  CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_alignmask		= AES_BLOCK_SIZE - 1,
			.cra_blocksize		= AES_BLOCK_SIZE,
			.cra_ctxsize		= sizeof(struct dthe_tfm_ctx),
			.cra_reqsize		= sizeof(struct dthe_aes_req_ctx),
			.cra_module		= THIS_MODULE,
		},
		.op.do_one_request = dthe_aes_run,
	} /* CBC AES */
};

int dthe_register_aes_algs(void)
{
	return crypto_engine_register_skciphers(cipher_algs, ARRAY_SIZE(cipher_algs));
}

void dthe_unregister_aes_algs(void)
{
	crypto_engine_unregister_skciphers(cipher_algs, ARRAY_SIZE(cipher_algs));
}
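
/*
 * Illustrative usage sketch (not part of the driver): how a kernel caller
 * might exercise the "cbc(aes)" skcipher registered above through the
 * generic crypto API. Error handling is trimmed, and key/iv/buf/len are
 * hypothetical caller-owned variables; len must be a multiple of
 * AES_BLOCK_SIZE, as enforced by dthe_aes_crypt().
 *
 *	struct crypto_skcipher *tfm = crypto_alloc_skcipher("cbc(aes)", 0, 0);
 *	struct skcipher_request *req = skcipher_request_alloc(tfm, GFP_KERNEL);
 *	struct scatterlist sg;
 *	DECLARE_CRYPTO_WAIT(wait);
 *
 *	crypto_skcipher_setkey(tfm, key, AES_KEYSIZE_128);
 *	sg_init_one(&sg, buf, len);
 *	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
 *				      crypto_req_done, &wait);
 *	skcipher_request_set_crypt(req, &sg, &sg, len, iv);
 *	crypto_wait_req(crypto_skcipher_encrypt(req), &wait);
 *
 *	skcipher_request_free(req);
 *	crypto_free_skcipher(tfm);
 */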