// SPDX-License-Identifier: GPL-2.0
/*
 * sun8i-ss-cipher.c - hardware cryptographic offloader for
 * Allwinner A80/A83T SoC
 *
 * Copyright (C) 2016-2019 Corentin LABBE <clabbe.montjoie@gmail.com>
 *
 * This file adds support for the AES cipher with 128/192/256-bit keys in
 * CBC and ECB mode.
 *
 * You can find a link to the datasheet in Documentation/arm/sunxi.rst
 */

#include <linux/bottom_half.h>
#include <linux/crypto.h>
#include <linux/dma-mapping.h>
#include <linux/io.h>
#include <linux/pm_runtime.h>
#include <crypto/scatterwalk.h>
#include <crypto/internal/skcipher.h>
#include "sun8i-ss.h"

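/*
 * sun8i_ss_need_fallback() - check whether a request fits the hardware.
 *
 * The SS only handles lengths that are non-zero multiples of 16 bytes,
 * spread over at most 8 scatterlist entries per direction, with each
 * entry 16-byte aligned and a multiple of 16 bytes long, and with source
 * and destination layouts matching entry for entry. Anything else goes
 * to the software fallback.
 */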
static bool sun8i_ss_need_fallback(struct skcipher_request *areq)
{
	struct scatterlist *in_sg = areq->src;
	struct scatterlist *out_sg = areq->dst;
	struct scatterlist *sg;

	if (areq->cryptlen == 0 || areq->cryptlen % 16)
		return true;

	if (sg_nents(areq->src) > 8 || sg_nents(areq->dst) > 8)
		return true;

	sg = areq->src;
	while (sg) {
		if ((sg->length % 16) != 0)
			return true;
		if ((sg_dma_len(sg) % 16) != 0)
			return true;
		if (!IS_ALIGNED(sg->offset, 16))
			return true;
		sg = sg_next(sg);
	}
	sg = areq->dst;
	while (sg) {
		if ((sg->length % 16) != 0)
			return true;
		if ((sg_dma_len(sg) % 16) != 0)
			return true;
		if (!IS_ALIGNED(sg->offset, 16))
			return true;
		sg = sg_next(sg);
	}

	/* The SS needs the same number of SGs, with matching lengths, for source and destination */
	in_sg = areq->src;
	out_sg = areq->dst;
	while (in_sg && out_sg) {
		if (in_sg->length != out_sg->length)
			return true;
		in_sg = sg_next(in_sg);
		out_sg = sg_next(out_sg);
	}
	if (in_sg || out_sg)
		return true;
	return false;
}

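/*
 * sun8i_ss_cipher_fallback() - process the request with the software
 * fallback transform, reusing the original request's flags, completion
 * callback and direction.
 */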
static int sun8i_ss_cipher_fallback(struct skcipher_request *areq)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
	struct sun8i_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm);
	struct sun8i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
	int err;

#ifdef CONFIG_CRYPTO_DEV_SUN8I_SS_DEBUG
	struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
	struct sun8i_ss_alg_template *algt;

	algt = container_of(alg, struct sun8i_ss_alg_template, alg.skcipher);
	algt->stat_fb++;
#endif
	skcipher_request_set_tfm(&rctx->fallback_req, op->fallback_tfm);
	skcipher_request_set_callback(&rctx->fallback_req, areq->base.flags,
				      areq->base.complete, areq->base.data);
	skcipher_request_set_crypt(&rctx->fallback_req, areq->src, areq->dst,
				   areq->cryptlen, areq->iv);
	if (rctx->op_dir & SS_DECRYPTION)
		err = crypto_skcipher_decrypt(&rctx->fallback_req);
	else
		err = crypto_skcipher_encrypt(&rctx->fallback_req);
	return err;
}

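/*
 * sun8i_ss_cipher() - run one request on the SS.
 *
 * Maps the key, the IV (if any) and the source/destination scatterlists
 * for DMA, fills the task descriptors and hands the request to
 * sun8i_ss_run_task(), then unwinds every mapping in reverse order.
 */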
static int sun8i_ss_cipher(struct skcipher_request *areq)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
	struct sun8i_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm);
	struct sun8i_ss_dev *ss = op->ss;
	struct sun8i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
	struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
	struct sun8i_ss_alg_template *algt;
	struct scatterlist *sg;
	unsigned int todo, len, offset, ivsize;
	void *backup_iv = NULL;
	int nr_sgs = 0;
	int nr_sgd = 0;
	int err = 0;
	int i;

	algt = container_of(alg, struct sun8i_ss_alg_template, alg.skcipher);

	dev_dbg(ss->dev, "%s %s %u %x IV(%p %u) key=%u\n", __func__,
		crypto_tfm_alg_name(areq->base.tfm),
		areq->cryptlen,
		rctx->op_dir, areq->iv, crypto_skcipher_ivsize(tfm),
		op->keylen);

#ifdef CONFIG_CRYPTO_DEV_SUN8I_SS_DEBUG
	algt->stat_req++;
#endif

	rctx->op_mode = ss->variant->op_mode[algt->ss_blockmode];
	rctx->method = ss->variant->alg_cipher[algt->ss_algo_id];
	rctx->keylen = op->keylen;

	rctx->p_key = dma_map_single(ss->dev, op->key, op->keylen, DMA_TO_DEVICE);
	if (dma_mapping_error(ss->dev, rctx->p_key)) {
		dev_err(ss->dev, "Cannot DMA MAP KEY\n");
		err = -EFAULT;
		goto theend;
	}

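	/*
	 * For CBC, keep a DMA-able copy of the IV. On decryption, save the
	 * last ciphertext block before the operation runs: in-place requests
	 * overwrite the source, and that block must be returned in areq->iv
	 * afterwards for chaining.
	 */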
	ivsize = crypto_skcipher_ivsize(tfm);
	if (areq->iv && ivsize > 0) {
		rctx->ivlen = ivsize;
		rctx->biv = kzalloc(ivsize, GFP_KERNEL | GFP_DMA);
		if (!rctx->biv) {
			err = -ENOMEM;
			goto theend_key;
		}
		if (rctx->op_dir & SS_DECRYPTION) {
			backup_iv = kzalloc(ivsize, GFP_KERNEL);
			if (!backup_iv) {
				err = -ENOMEM;
				/* do not leak the IV buffer allocated above */
				kfree(rctx->biv);
				rctx->biv = NULL;
				goto theend_key;
			}
			offset = areq->cryptlen - ivsize;
			scatterwalk_map_and_copy(backup_iv, areq->src, offset,
						 ivsize, 0);
		}
		memcpy(rctx->biv, areq->iv, ivsize);
		rctx->p_iv = dma_map_single(ss->dev, rctx->biv, rctx->ivlen,
					    DMA_TO_DEVICE);
		if (dma_mapping_error(ss->dev, rctx->p_iv)) {
			dev_err(ss->dev, "Cannot DMA MAP IV\n");
			/* clear the error cookie so the unwind path does not unmap it */
			rctx->p_iv = 0;
			err = -ENOMEM;
			goto theend_iv;
		}
	}
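	/*
	 * In-place requests use a single bidirectional mapping; otherwise
	 * source and destination are mapped separately. The SS handles at
	 * most 8 entries per direction.
	 */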
	if (areq->src == areq->dst) {
		nr_sgs = dma_map_sg(ss->dev, areq->src, sg_nents(areq->src),
				    DMA_BIDIRECTIONAL);
		if (nr_sgs <= 0 || nr_sgs > 8) {
			dev_err(ss->dev, "Invalid sg number %d\n", nr_sgs);
			err = -EINVAL;
			goto theend_iv;
		}
		nr_sgd = nr_sgs;
	} else {
		nr_sgs = dma_map_sg(ss->dev, areq->src, sg_nents(areq->src),
				    DMA_TO_DEVICE);
		if (nr_sgs <= 0 || nr_sgs > 8) {
			dev_err(ss->dev, "Invalid sg number %d\n", nr_sgs);
			err = -EINVAL;
			goto theend_iv;
		}
		nr_sgd = dma_map_sg(ss->dev, areq->dst, sg_nents(areq->dst),
				    DMA_FROM_DEVICE);
		if (nr_sgd <= 0 || nr_sgd > 8) {
			dev_err(ss->dev, "Invalid sg number %d\n", nr_sgd);
			err = -EINVAL;
			goto theend_sgs;
		}
	}

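	/*
	 * Fill the task descriptors from the mapped scatterlists. The SS
	 * takes buffer lengths in 32-bit words, hence the division by 4.
	 */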
	len = areq->cryptlen;
	i = 0;
	sg = areq->src;
	while (i < nr_sgs && sg && len) {
		if (sg_dma_len(sg) == 0)
			goto sgs_next;
		rctx->t_src[i].addr = sg_dma_address(sg);
		todo = min(len, sg_dma_len(sg));
		rctx->t_src[i].len = todo / 4;
		dev_dbg(ss->dev, "%s total=%u SGS(%d %u off=%d) todo=%u\n", __func__,
			areq->cryptlen, i, rctx->t_src[i].len, sg->offset, todo);
		len -= todo;
		i++;
sgs_next:
		sg = sg_next(sg);
	}
	if (len > 0) {
		dev_err(ss->dev, "remaining len %u\n", len);
		err = -EINVAL;
		goto theend_sgs;
	}

	len = areq->cryptlen;
	i = 0;
	sg = areq->dst;
	while (i < nr_sgd && sg && len) {
		if (sg_dma_len(sg) == 0)
			goto sgd_next;
		rctx->t_dst[i].addr = sg_dma_address(sg);
		todo = min(len, sg_dma_len(sg));
		rctx->t_dst[i].len = todo / 4;
		dev_dbg(ss->dev, "%s total=%u SGD(%d %u off=%d) todo=%u\n", __func__,
			areq->cryptlen, i, rctx->t_dst[i].len, sg->offset, todo);
		len -= todo;
		i++;
sgd_next:
		sg = sg_next(sg);
	}
	if (len > 0) {
		dev_err(ss->dev, "remaining len %u\n", len);
		err = -EINVAL;
		goto theend_sgs;
	}

	err = sun8i_ss_run_task(ss, rctx, crypto_tfm_alg_name(areq->base.tfm));

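	/* Cleanup labels fall through: data mappings, then IV, then key. */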
theend_sgs:
	if (areq->src == areq->dst) {
		dma_unmap_sg(ss->dev, areq->src, sg_nents(areq->src),
			     DMA_BIDIRECTIONAL);
	} else {
		dma_unmap_sg(ss->dev, areq->src, sg_nents(areq->src),
			     DMA_TO_DEVICE);
		dma_unmap_sg(ss->dev, areq->dst, sg_nents(areq->dst),
			     DMA_FROM_DEVICE);
	}

theend_iv:
	if (rctx->p_iv)
		dma_unmap_single(ss->dev, rctx->p_iv, rctx->ivlen,
				 DMA_TO_DEVICE);

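	/*
	 * The skcipher API expects areq->iv to hold the IV for chaining on
	 * return: the last ciphertext block, taken from the saved backup on
	 * decryption (the source may have been overwritten in place) or from
	 * the destination on encryption.
	 */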
	if (areq->iv && ivsize > 0) {
		if (rctx->biv) {
			offset = areq->cryptlen - ivsize;
			if (rctx->op_dir & SS_DECRYPTION) {
				memcpy(areq->iv, backup_iv, ivsize);
				kfree_sensitive(backup_iv);
			} else {
				scatterwalk_map_and_copy(areq->iv, areq->dst, offset,
							 ivsize, 0);
			}
			kfree(rctx->biv);
		}
	}

theend_key:
	dma_unmap_single(ss->dev, rctx->p_key, op->keylen, DMA_TO_DEVICE);

theend:
	return err;
}

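/*
 * sun8i_ss_handle_cipher_request() - crypto_engine worker callback.
 *
 * Runs the cipher and finalizes the request with bottom halves disabled,
 * so the completion callback sees the context it expects.
 */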
static int sun8i_ss_handle_cipher_request(struct crypto_engine *engine, void *areq)
{
	int err;
	struct skcipher_request *breq = container_of(areq, struct skcipher_request, base);

	err = sun8i_ss_cipher(breq);
	local_bh_disable();
	crypto_finalize_skcipher_request(engine, breq, err);
	local_bh_enable();

	return 0;
}

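/*
 * sun8i_ss_skdecrypt()/sun8i_ss_skencrypt() - skcipher entry points.
 *
 * Reset the request context, take the software fallback when the request
 * does not fit the hardware, otherwise pick a flow and queue the request
 * on its crypto_engine.
 */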
int sun8i_ss_skdecrypt(struct skcipher_request *areq)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
	struct sun8i_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm);
	struct sun8i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
	struct crypto_engine *engine;
	int e;

	memset(rctx, 0, sizeof(struct sun8i_cipher_req_ctx));
	rctx->op_dir = SS_DECRYPTION;

	if (sun8i_ss_need_fallback(areq))
		return sun8i_ss_cipher_fallback(areq);

	e = sun8i_ss_get_engine_number(op->ss);
	engine = op->ss->flows[e].engine;
	rctx->flow = e;

	return crypto_transfer_skcipher_request_to_engine(engine, areq);
}

int sun8i_ss_skencrypt(struct skcipher_request *areq)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
	struct sun8i_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm);
	struct sun8i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
	struct crypto_engine *engine;
	int e;

	memset(rctx, 0, sizeof(struct sun8i_cipher_req_ctx));
	rctx->op_dir = SS_ENCRYPTION;

	if (sun8i_ss_need_fallback(areq))
		return sun8i_ss_cipher_fallback(areq);

	e = sun8i_ss_get_engine_number(op->ss);
	engine = op->ss->flows[e].engine;
	rctx->flow = e;

	return crypto_transfer_skcipher_request_to_engine(engine, areq);
}

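/*
 * sun8i_ss_cipher_init() - set up a transform.
 *
 * Allocates the software fallback, sizes the request context so it can
 * embed a fallback request, and takes a runtime PM reference that is
 * dropped in sun8i_ss_cipher_exit().
 */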
int sun8i_ss_cipher_init(struct crypto_tfm *tfm)
{
	struct sun8i_cipher_tfm_ctx *op = crypto_tfm_ctx(tfm);
	struct sun8i_ss_alg_template *algt;
	const char *name = crypto_tfm_alg_name(tfm);
	struct crypto_skcipher *sktfm = __crypto_skcipher_cast(tfm);
	struct skcipher_alg *alg = crypto_skcipher_alg(sktfm);
	int err;

	memset(op, 0, sizeof(struct sun8i_cipher_tfm_ctx));

	algt = container_of(alg, struct sun8i_ss_alg_template, alg.skcipher);
	op->ss = algt->ss;

	op->fallback_tfm = crypto_alloc_skcipher(name, 0, CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(op->fallback_tfm)) {
		dev_err(op->ss->dev, "ERROR: Cannot allocate fallback for %s %ld\n",
			name, PTR_ERR(op->fallback_tfm));
		return PTR_ERR(op->fallback_tfm);
	}

	sktfm->reqsize = sizeof(struct sun8i_cipher_req_ctx) +
			 crypto_skcipher_reqsize(op->fallback_tfm);

	dev_info(op->ss->dev, "Fallback for %s is %s\n",
		 crypto_tfm_alg_driver_name(&sktfm->base),
		 crypto_tfm_alg_driver_name(crypto_skcipher_tfm(op->fallback_tfm)));

	op->enginectx.op.do_one_request = sun8i_ss_handle_cipher_request;
	op->enginectx.op.prepare_request = NULL;
	op->enginectx.op.unprepare_request = NULL;

	err = pm_runtime_resume_and_get(op->ss->dev);
	if (err < 0) {
		dev_err(op->ss->dev, "pm error %d\n", err);
		goto error_pm;
	}

	return 0;
error_pm:
	crypto_free_skcipher(op->fallback_tfm);
	return err;
}

void sun8i_ss_cipher_exit(struct crypto_tfm *tfm)
{
	struct sun8i_cipher_tfm_ctx *op = crypto_tfm_ctx(tfm);

	kfree_sensitive(op->key);
	crypto_free_skcipher(op->fallback_tfm);
	pm_runtime_put_sync(op->ss->dev);
}

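/*
 * The key is duplicated with GFP_DMA so it can be mapped for the SS, and
 * it is also installed into the fallback transform so both paths accept
 * the same key material.
 */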
int sun8i_ss_aes_setkey(struct crypto_skcipher *tfm, const u8 *key,
			unsigned int keylen)
{
	struct sun8i_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm);
	struct sun8i_ss_dev *ss = op->ss;

	switch (keylen) {
	case 128 / 8:
	case 192 / 8:
	case 256 / 8:
		break;
	default:
		dev_dbg(ss->dev, "ERROR: Invalid keylen %u\n", keylen);
		return -EINVAL;
	}
	kfree_sensitive(op->key);
	op->keylen = keylen;
	op->key = kmemdup(key, keylen, GFP_KERNEL | GFP_DMA);
	if (!op->key)
		return -ENOMEM;

	crypto_skcipher_clear_flags(op->fallback_tfm, CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(op->fallback_tfm, tfm->base.crt_flags & CRYPTO_TFM_REQ_MASK);

	return crypto_skcipher_setkey(op->fallback_tfm, key, keylen);
}

int sun8i_ss_des3_setkey(struct crypto_skcipher *tfm, const u8 *key,
			 unsigned int keylen)
{
	struct sun8i_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm);
	struct sun8i_ss_dev *ss = op->ss;

	if (unlikely(keylen != 3 * DES_KEY_SIZE)) {
		dev_dbg(ss->dev, "Invalid keylen %u\n", keylen);
		return -EINVAL;
	}

	kfree_sensitive(op->key);
	op->keylen = keylen;
	op->key = kmemdup(key, keylen, GFP_KERNEL | GFP_DMA);
	if (!op->key)
		return -ENOMEM;

	crypto_skcipher_clear_flags(op->fallback_tfm, CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(op->fallback_tfm, tfm->base.crt_flags & CRYPTO_TFM_REQ_MASK);

	return crypto_skcipher_setkey(op->fallback_tfm, key, keylen);
}