// SPDX-License-Identifier: GPL-2.0
/*
 * HCTR2 length-preserving encryption mode
 *
 * Copyright 2021 Google LLC
 */

/*
 * HCTR2 is a length-preserving encryption mode that is efficient on
 * processors with instructions to accelerate AES and carryless
 * multiplication, e.g. x86 processors with AES-NI and CLMUL, and ARM
 * processors with the ARMv8 crypto extensions.
 *
 * For more details, see the paper: "Length-preserving encryption with HCTR2"
 * (https://eprint.iacr.org/2021/1441.pdf)
 */
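
/*
 * Sketch of the encryption flow, in the paper's notation (illustrative; see
 * the inline comments in hctr2_crypt() below).  M is the first 16-byte block
 * of the plaintext, N is the remainder, T is the tweak, E is the block
 * cipher, H is POLYVAL keyed with E(0) (see hctr2_setkey()), and L = E(1):
 *
 *	MM = M  ^ H(T || N)
 *	UU = E(MM)
 *	S  = MM ^ UU ^ L
 *	V  = XCTR_S(N)
 *	U  = UU ^ H(T || V)
 *
 * The ciphertext is U || V.  Decryption runs the same steps with E replaced
 * by its inverse for the single-block step.
 */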

#include <crypto/internal/cipher.h>
#include <crypto/internal/skcipher.h>
#include <crypto/polyval.h>
#include <crypto/scatterwalk.h>
#include <linux/module.h>

#define BLOCKCIPHER_BLOCK_SIZE		16

/*
 * The specification allows variable-length tweaks, but Linux's crypto API
 * currently only allows algorithms to support a single length.  The "natural"
 * tweak length for HCTR2 is 16, since that fits into one POLYVAL block for
 * the best performance.  But longer tweaks are useful for fscrypt, to avoid
 * needing to derive per-file keys.  So instead we use two blocks, or 32 bytes.
 */
#define TWEAK_SIZE		32

struct hctr2_instance_ctx {
	struct crypto_cipher_spawn blockcipher_spawn;
	struct crypto_skcipher_spawn xctr_spawn;
};

struct hctr2_tfm_ctx {
	struct crypto_cipher *blockcipher;
	struct crypto_skcipher *xctr;
	struct polyval_key poly_key;
	struct polyval_elem hashed_tweaklens[2];
	u8 L[BLOCKCIPHER_BLOCK_SIZE];
};

struct hctr2_request_ctx {
	u8 first_block[BLOCKCIPHER_BLOCK_SIZE];
	u8 xctr_iv[BLOCKCIPHER_BLOCK_SIZE];
	struct scatterlist *bulk_part_dst;
	struct scatterlist *bulk_part_src;
	struct scatterlist sg_src[2];
	struct scatterlist sg_dst[2];
	struct polyval_elem hashed_tweak;
	/*
	 * skcipher sub-request size is unknown at compile-time, so it needs to
	 * go after the members with known sizes.
	 *
	 * The union is safe because the two members are never live at the
	 * same time: the POLYVAL state is exported to hashed_tweak before the
	 * XCTR sub-request is built, and re-imported from it afterwards in
	 * hctr2_finish().
	 */
	union {
		struct polyval_ctx poly_ctx;
		struct skcipher_request xctr_req;
	} u;
};

/*
 * The input data for each HCTR2 hash step begins with a 16-byte block that
 * contains the tweak length and a flag indicating whether the input is
 * evenly divisible into blocks.  Since this implementation only supports one
 * tweak length, we precompute the two hash states resulting from hashing the
 * two possible values of this initial block.  This reduces by one block the
 * amount of data that needs to be hashed for each encryption/decryption.
 *
 * These precomputed hashes are stored in hctr2_tfm_ctx.
 */
static void hctr2_hash_tweaklens(struct hctr2_tfm_ctx *tctx)
{
	struct polyval_ctx ctx;

	for (int has_remainder = 0; has_remainder < 2; has_remainder++) {
		/*
		 * The first block encodes twice the tweak length in bits,
		 * plus 2, plus 1 more if the message has a partial final
		 * block.
		 */
		const __le64 tweak_length_block[2] = {
			cpu_to_le64(TWEAK_SIZE * 8 * 2 + 2 + has_remainder),
		};

		polyval_init(&ctx, &tctx->poly_key);
		polyval_update(&ctx, (const u8 *)&tweak_length_block,
			       sizeof(tweak_length_block));
		static_assert(sizeof(tweak_length_block) == POLYVAL_BLOCK_SIZE);
		polyval_export_blkaligned(
			&ctx, &tctx->hashed_tweaklens[has_remainder]);
	}
	memzero_explicit(&ctx, sizeof(ctx));
}
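
/*
 * With TWEAK_SIZE == 32, the two precomputed length blocks above work out to
 * le64(32 * 8 * 2 + 2) = le64(514) and le64(515), zero-padded to 16 bytes.
 */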

static int hctr2_setkey(struct crypto_skcipher *tfm, const u8 *key,
			unsigned int keylen)
{
	struct hctr2_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);
	u8 hbar[BLOCKCIPHER_BLOCK_SIZE];
	int err;

	crypto_cipher_clear_flags(tctx->blockcipher, CRYPTO_TFM_REQ_MASK);
	crypto_cipher_set_flags(tctx->blockcipher,
				crypto_skcipher_get_flags(tfm) &
				CRYPTO_TFM_REQ_MASK);
	err = crypto_cipher_setkey(tctx->blockcipher, key, keylen);
	if (err)
		return err;

	crypto_skcipher_clear_flags(tctx->xctr, CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(tctx->xctr,
				  crypto_skcipher_get_flags(tfm) &
				  CRYPTO_TFM_REQ_MASK);
	err = crypto_skcipher_setkey(tctx->xctr, key, keylen);
	if (err)
		return err;

	/* hbar = E_K(0), the POLYVAL hash key */
	memset(hbar, 0, sizeof(hbar));
	crypto_cipher_encrypt_one(tctx->blockcipher, hbar, hbar);

	/* L = E_K(1), with 1 encoded as a little-endian block */
	memset(tctx->L, 0, sizeof(tctx->L));
	tctx->L[0] = 0x01;
	crypto_cipher_encrypt_one(tctx->blockcipher, tctx->L, tctx->L);

	static_assert(sizeof(hbar) == POLYVAL_BLOCK_SIZE);
	polyval_preparekey(&tctx->poly_key, hbar);
	memzero_explicit(hbar, sizeof(hbar));

	hctr2_hash_tweaklens(tctx);
	return 0;
}

static void hctr2_hash_tweak(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	const struct hctr2_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);
	struct hctr2_request_ctx *rctx = skcipher_request_ctx(req);
	struct polyval_ctx *poly_ctx = &rctx->u.poly_ctx;
	bool has_remainder = req->cryptlen % POLYVAL_BLOCK_SIZE;

	polyval_import_blkaligned(poly_ctx, &tctx->poly_key,
				  &tctx->hashed_tweaklens[has_remainder]);
	polyval_update(poly_ctx, req->iv, TWEAK_SIZE);

	// Store the hashed tweak, since we need it when computing both
	// H(T || N) and H(T || V).
	static_assert(TWEAK_SIZE % POLYVAL_BLOCK_SIZE == 0);
	polyval_export_blkaligned(poly_ctx, &rctx->hashed_tweak);
}

static void hctr2_hash_message(struct skcipher_request *req,
			       struct scatterlist *sgl,
			       u8 digest[POLYVAL_DIGEST_SIZE])
{
	static const u8 padding = 0x1;
	struct hctr2_request_ctx *rctx = skcipher_request_ctx(req);
	struct polyval_ctx *poly_ctx = &rctx->u.poly_ctx;
	const unsigned int bulk_len = req->cryptlen - BLOCKCIPHER_BLOCK_SIZE;
	struct sg_mapping_iter miter;
	int i;
	int n = 0;

	sg_miter_start(&miter, sgl, sg_nents(sgl),
		       SG_MITER_FROM_SG | SG_MITER_ATOMIC);
	for (i = 0; i < bulk_len; i += n) {
		sg_miter_next(&miter);
		n = min_t(unsigned int, miter.length, bulk_len - i);
		polyval_update(poly_ctx, miter.addr, n);
	}
	sg_miter_stop(&miter);

	// Append the 0x01 padding byte for a partial final block, as the
	// HCTR2 hash requires.
	if (req->cryptlen % BLOCKCIPHER_BLOCK_SIZE)
		polyval_update(poly_ctx, &padding, 1);
	polyval_final(poly_ctx, digest);
}

static int hctr2_finish(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	const struct hctr2_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);
	struct hctr2_request_ctx *rctx = skcipher_request_ctx(req);
	struct polyval_ctx *poly_ctx = &rctx->u.poly_ctx;
	u8 digest[POLYVAL_DIGEST_SIZE];

	// U = UU ^ H(T || V)
	// or M = MM ^ H(T || N)
	polyval_import_blkaligned(poly_ctx, &tctx->poly_key,
				  &rctx->hashed_tweak);
	hctr2_hash_message(req, rctx->bulk_part_dst, digest);
	crypto_xor(rctx->first_block, digest, BLOCKCIPHER_BLOCK_SIZE);

	// Copy U (or M) into dst scatterlist
	scatterwalk_map_and_copy(rctx->first_block, req->dst,
				 0, BLOCKCIPHER_BLOCK_SIZE, 1);
	return 0;
}

static void hctr2_xctr_done(void *data, int err)
{
	struct skcipher_request *req = data;

	if (!err)
		err = hctr2_finish(req);

	skcipher_request_complete(req, err);
}

static int hctr2_crypt(struct skcipher_request *req, bool enc)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	const struct hctr2_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);
	struct hctr2_request_ctx *rctx = skcipher_request_ctx(req);
	u8 digest[POLYVAL_DIGEST_SIZE];
	int bulk_len = req->cryptlen - BLOCKCIPHER_BLOCK_SIZE;

	// Requests must be at least one block
	if (req->cryptlen < BLOCKCIPHER_BLOCK_SIZE)
		return -EINVAL;

	// Copy M (or U) into a temporary buffer
	scatterwalk_map_and_copy(rctx->first_block, req->src,
				 0, BLOCKCIPHER_BLOCK_SIZE, 0);

	// Create scatterlists for N and V
	rctx->bulk_part_src = scatterwalk_ffwd(rctx->sg_src, req->src,
					       BLOCKCIPHER_BLOCK_SIZE);
	rctx->bulk_part_dst = scatterwalk_ffwd(rctx->sg_dst, req->dst,
					       BLOCKCIPHER_BLOCK_SIZE);

	// MM = M ^ H(T || N)
	// or UU = U ^ H(T || V)
	hctr2_hash_tweak(req);
	hctr2_hash_message(req, rctx->bulk_part_src, digest);
	crypto_xor(digest, rctx->first_block, BLOCKCIPHER_BLOCK_SIZE);

	// UU = E(MM)
	// or MM = D(UU)
	if (enc)
		crypto_cipher_encrypt_one(tctx->blockcipher, rctx->first_block,
					  digest);
	else
		crypto_cipher_decrypt_one(tctx->blockcipher, rctx->first_block,
					  digest);

	// S = MM ^ UU ^ L
	crypto_xor(digest, rctx->first_block, BLOCKCIPHER_BLOCK_SIZE);
	crypto_xor_cpy(rctx->xctr_iv, digest, tctx->L, BLOCKCIPHER_BLOCK_SIZE);

	// V = XCTR(S, N)
	// or N = XCTR(S, V)
	// XCTR encryption and decryption are the same operation, so the
	// encrypt path of the sub-request serves both directions.
	skcipher_request_set_tfm(&rctx->u.xctr_req, tctx->xctr);
	skcipher_request_set_crypt(&rctx->u.xctr_req, rctx->bulk_part_src,
				   rctx->bulk_part_dst, bulk_len,
				   rctx->xctr_iv);
	skcipher_request_set_callback(&rctx->u.xctr_req,
				      req->base.flags,
				      hctr2_xctr_done, req);
	// If the sub-request completes synchronously (returns 0), finish
	// inline; an async sub-request completes via hctr2_xctr_done().
	return crypto_skcipher_encrypt(&rctx->u.xctr_req) ?:
		hctr2_finish(req);
}

static int hctr2_encrypt(struct skcipher_request *req)
{
	return hctr2_crypt(req, true);
}

static int hctr2_decrypt(struct skcipher_request *req)
{
	return hctr2_crypt(req, false);
}

static int hctr2_init_tfm(struct crypto_skcipher *tfm)
{
	struct skcipher_instance *inst = skcipher_alg_instance(tfm);
	struct hctr2_instance_ctx *ictx = skcipher_instance_ctx(inst);
	struct hctr2_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);
	struct crypto_skcipher *xctr;
	struct crypto_cipher *blockcipher;
	int err;

	xctr = crypto_spawn_skcipher(&ictx->xctr_spawn);
	if (IS_ERR(xctr))
		return PTR_ERR(xctr);

	blockcipher = crypto_spawn_cipher(&ictx->blockcipher_spawn);
	if (IS_ERR(blockcipher)) {
		err = PTR_ERR(blockcipher);
		goto err_free_xctr;
	}

	tctx->xctr = xctr;
	tctx->blockcipher = blockcipher;

	/*
	 * The request context must be large enough for both union members:
	 * the fixed-size POLYVAL state, and the XCTR sub-request plus its
	 * variable-size sub-request context.
	 */
	BUILD_BUG_ON(offsetofend(struct hctr2_request_ctx, u) !=
				 sizeof(struct hctr2_request_ctx));
	crypto_skcipher_set_reqsize(
		tfm, max(sizeof(struct hctr2_request_ctx),
			 offsetofend(struct hctr2_request_ctx, u.xctr_req) +
				 crypto_skcipher_reqsize(xctr)));
	return 0;

err_free_xctr:
	crypto_free_skcipher(xctr);
	return err;
}

static void hctr2_exit_tfm(struct crypto_skcipher *tfm)
{
	struct hctr2_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);

	crypto_free_cipher(tctx->blockcipher);
	crypto_free_skcipher(tctx->xctr);
}

static void hctr2_free_instance(struct skcipher_instance *inst)
{
	struct hctr2_instance_ctx *ictx = skcipher_instance_ctx(inst);

	crypto_drop_cipher(&ictx->blockcipher_spawn);
	crypto_drop_skcipher(&ictx->xctr_spawn);
	kfree(inst);
}

static int hctr2_create_common(struct crypto_template *tmpl, struct rtattr **tb,
			       const char *xctr_name)
{
	struct skcipher_alg_common *xctr_alg;
	u32 mask;
	struct skcipher_instance *inst;
	struct hctr2_instance_ctx *ictx;
	struct crypto_alg *blockcipher_alg;
	char blockcipher_name[CRYPTO_MAX_ALG_NAME];
	int len;
	int err;

	err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_SKCIPHER, &mask);
	if (err)
		return err;

	inst = kzalloc(sizeof(*inst) + sizeof(*ictx), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;
	ictx = skcipher_instance_ctx(inst);

	/* Stream cipher, xctr(block_cipher) */
	err = crypto_grab_skcipher(&ictx->xctr_spawn,
				   skcipher_crypto_instance(inst),
				   xctr_name, 0, mask);
	if (err)
		goto err_free_inst;
	xctr_alg = crypto_spawn_skcipher_alg_common(&ictx->xctr_spawn);

	err = -EINVAL;
	if (strncmp(xctr_alg->base.cra_name, "xctr(", 5))
		goto err_free_inst;
	len = strscpy(blockcipher_name, xctr_alg->base.cra_name + 5,
		      sizeof(blockcipher_name));
	if (len < 1)
		goto err_free_inst;
	if (blockcipher_name[len - 1] != ')')
		goto err_free_inst;
	blockcipher_name[len - 1] = 0;

	/* Block cipher, e.g. "aes" */
	err = crypto_grab_cipher(&ictx->blockcipher_spawn,
				 skcipher_crypto_instance(inst),
				 blockcipher_name, 0, mask);
	if (err)
		goto err_free_inst;
	blockcipher_alg = crypto_spawn_cipher_alg(&ictx->blockcipher_spawn);

	/* Require blocksize of 16 bytes */
	err = -EINVAL;
	if (blockcipher_alg->cra_blocksize != BLOCKCIPHER_BLOCK_SIZE)
		goto err_free_inst;

	/* Instance fields */

	err = -ENAMETOOLONG;
	if (snprintf(inst->alg.base.cra_name, CRYPTO_MAX_ALG_NAME, "hctr2(%s)",
		     blockcipher_alg->cra_name) >= CRYPTO_MAX_ALG_NAME)
		goto err_free_inst;
	if (snprintf(inst->alg.base.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "hctr2_base(%s,polyval-lib)",
		     xctr_alg->base.cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
		goto err_free_inst;

	inst->alg.base.cra_blocksize = BLOCKCIPHER_BLOCK_SIZE;
	inst->alg.base.cra_ctxsize = sizeof(struct hctr2_tfm_ctx);
	inst->alg.base.cra_alignmask = xctr_alg->base.cra_alignmask;
	inst->alg.base.cra_priority = (2 * xctr_alg->base.cra_priority +
				       blockcipher_alg->cra_priority) / 3;

	inst->alg.setkey = hctr2_setkey;
	inst->alg.encrypt = hctr2_encrypt;
	inst->alg.decrypt = hctr2_decrypt;
	inst->alg.init = hctr2_init_tfm;
	inst->alg.exit = hctr2_exit_tfm;
	inst->alg.min_keysize = xctr_alg->min_keysize;
	inst->alg.max_keysize = xctr_alg->max_keysize;
	inst->alg.ivsize = TWEAK_SIZE;

	inst->free = hctr2_free_instance;

	err = skcipher_register_instance(tmpl, inst);
	if (err) {
err_free_inst:
		hctr2_free_instance(inst);
	}
	return err;
}

static int hctr2_create_base(struct crypto_template *tmpl, struct rtattr **tb)
{
	const char *xctr_name;
	const char *polyval_name;

	xctr_name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(xctr_name))
		return PTR_ERR(xctr_name);

	polyval_name = crypto_attr_alg_name(tb[2]);
	if (IS_ERR(polyval_name))
		return PTR_ERR(polyval_name);
	if (strcmp(polyval_name, "polyval") != 0 &&
	    strcmp(polyval_name, "polyval-lib") != 0)
		return -ENOENT;

	return hctr2_create_common(tmpl, tb, xctr_name);
}

static int hctr2_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	const char *blockcipher_name;
	char xctr_name[CRYPTO_MAX_ALG_NAME];

	blockcipher_name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(blockcipher_name))
		return PTR_ERR(blockcipher_name);

	if (snprintf(xctr_name, CRYPTO_MAX_ALG_NAME, "xctr(%s)",
		     blockcipher_name) >= CRYPTO_MAX_ALG_NAME)
		return -ENAMETOOLONG;

	return hctr2_create_common(tmpl, tb, xctr_name);
}
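
/*
 * For example (illustrative), instantiating "hctr2(aes)" makes hctr2_create()
 * request "xctr(aes)" as the stream cipher; hctr2_create_common() then parses
 * the block cipher name "aes" back out of the resolved XCTR algorithm's
 * cra_name and grabs it separately for the single-block steps.
 */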

static struct crypto_template hctr2_tmpls[] = {
	{
		/* hctr2_base(xctr_name, polyval_name) */
		.name = "hctr2_base",
		.create = hctr2_create_base,
		.module = THIS_MODULE,
	}, {
		/* hctr2(blockcipher_name) */
		.name = "hctr2",
		.create = hctr2_create,
		.module = THIS_MODULE,
	}
};

static int __init hctr2_module_init(void)
{
	return crypto_register_templates(hctr2_tmpls, ARRAY_SIZE(hctr2_tmpls));
}

static void __exit hctr2_module_exit(void)
{
	return crypto_unregister_templates(hctr2_tmpls,
					   ARRAY_SIZE(hctr2_tmpls));
}

module_init(hctr2_module_init);
module_exit(hctr2_module_exit);

MODULE_DESCRIPTION("HCTR2 length-preserving encryption mode");
MODULE_LICENSE("GPL v2");
MODULE_ALIAS_CRYPTO("hctr2");
MODULE_IMPORT_NS("CRYPTO_INTERNAL");
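
/*
 * Usage sketch (illustrative only, not part of this file): encrypting one
 * 4096-byte sector with a 32-byte tweak through the skcipher API.  Error
 * handling is omitted, and `key`/`data` are assumed to be set up by the
 * caller; "hctr2(aes)" is resolved by the templates registered above.
 *
 *	struct crypto_skcipher *tfm;
 *	struct skcipher_request *req;
 *	struct scatterlist sg;
 *	u8 iv[32];			// the HCTR2 tweak
 *	DECLARE_CRYPTO_WAIT(wait);
 *	int err;
 *
 *	tfm = crypto_alloc_skcipher("hctr2(aes)", 0, 0);
 *	crypto_skcipher_setkey(tfm, key, 32);
 *	req = skcipher_request_alloc(tfm, GFP_KERNEL);
 *	sg_init_one(&sg, data, 4096);
 *	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_SLEEP,
 *				      crypto_req_done, &wait);
 *	skcipher_request_set_crypt(req, &sg, &sg, 4096, iv);
 *	err = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);
 *	skcipher_request_free(req);
 *	crypto_free_skcipher(tfm);
 */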