xref: /linux/drivers/nvme/common/auth.c (revision f047daed179a451657d1e66b5fe4030a593a000c)
1 // SPDX-License-Identifier: GPL-2.0
2 /*
3  * Copyright (c) 2020 Hannes Reinecke, SUSE Linux
4  */
5 
6 #include <linux/module.h>
7 #include <linux/crc32.h>
8 #include <linux/base64.h>
9 #include <linux/prandom.h>
10 #include <linux/scatterlist.h>
11 #include <asm/unaligned.h>
12 #include <crypto/hash.h>
13 #include <crypto/dh.h>
14 #include <linux/nvme.h>
15 #include <linux/nvme-auth.h>
16 
/* Last DH-HMAC-CHAP sequence number handed out; lazily seeded from RNG. */
static u32 nvme_dhchap_seqnum;
/* Serializes all access to nvme_dhchap_seqnum. */
static DEFINE_MUTEX(nvme_dhchap_mutex);
19 
20 u32 nvme_auth_get_seqnum(void)
21 {
22 	u32 seqnum;
23 
24 	mutex_lock(&nvme_dhchap_mutex);
25 	if (!nvme_dhchap_seqnum)
26 		nvme_dhchap_seqnum = get_random_u32();
27 	else {
28 		nvme_dhchap_seqnum++;
29 		if (!nvme_dhchap_seqnum)
30 			nvme_dhchap_seqnum++;
31 	}
32 	seqnum = nvme_dhchap_seqnum;
33 	mutex_unlock(&nvme_dhchap_mutex);
34 	return seqnum;
35 }
36 EXPORT_SYMBOL_GPL(nvme_auth_get_seqnum);
37 
/*
 * Map of NVMe DH-HMAC-CHAP Diffie-Hellman group identifiers
 * (NVME_AUTH_DHGROUP_*) to their spec names and the corresponding
 * kernel crypto KPP algorithm names.  Indexed by group id.
 */
static struct nvme_auth_dhgroup_map {
	const char name[16];
	const char kpp[16];
} dhgroup_map[] = {
	[NVME_AUTH_DHGROUP_NULL] = {
		.name = "null", .kpp = "null" },
	[NVME_AUTH_DHGROUP_2048] = {
		.name = "ffdhe2048", .kpp = "ffdhe2048(dh)" },
	[NVME_AUTH_DHGROUP_3072] = {
		.name = "ffdhe3072", .kpp = "ffdhe3072(dh)" },
	[NVME_AUTH_DHGROUP_4096] = {
		.name = "ffdhe4096", .kpp = "ffdhe4096(dh)" },
	[NVME_AUTH_DHGROUP_6144] = {
		.name = "ffdhe6144", .kpp = "ffdhe6144(dh)" },
	[NVME_AUTH_DHGROUP_8192] = {
		.name = "ffdhe8192", .kpp = "ffdhe8192(dh)" },
};
55 
56 const char *nvme_auth_dhgroup_name(u8 dhgroup_id)
57 {
58 	if (dhgroup_id >= ARRAY_SIZE(dhgroup_map))
59 		return NULL;
60 	return dhgroup_map[dhgroup_id].name;
61 }
62 EXPORT_SYMBOL_GPL(nvme_auth_dhgroup_name);
63 
64 const char *nvme_auth_dhgroup_kpp(u8 dhgroup_id)
65 {
66 	if (dhgroup_id >= ARRAY_SIZE(dhgroup_map))
67 		return NULL;
68 	return dhgroup_map[dhgroup_id].kpp;
69 }
70 EXPORT_SYMBOL_GPL(nvme_auth_dhgroup_kpp);
71 
72 u8 nvme_auth_dhgroup_id(const char *dhgroup_name)
73 {
74 	int i;
75 
76 	if (!dhgroup_name || !strlen(dhgroup_name))
77 		return NVME_AUTH_DHGROUP_INVALID;
78 	for (i = 0; i < ARRAY_SIZE(dhgroup_map); i++) {
79 		if (!strlen(dhgroup_map[i].name))
80 			continue;
81 		if (!strncmp(dhgroup_map[i].name, dhgroup_name,
82 			     strlen(dhgroup_map[i].name)))
83 			return i;
84 	}
85 	return NVME_AUTH_DHGROUP_INVALID;
86 }
87 EXPORT_SYMBOL_GPL(nvme_auth_dhgroup_id);
88 
/*
 * Map of NVMe DH-HMAC-CHAP hash identifiers (NVME_AUTH_HASH_*) to the
 * digest length in bytes and the kernel crypto names of the HMAC and
 * plain digest algorithms.  Indexed by hash id.
 */
static struct nvme_dhchap_hash_map {
	int len;
	const char hmac[15];
	const char digest[8];
} hash_map[] = {
	[NVME_AUTH_HASH_SHA256] = {
		.len = 32,
		.hmac = "hmac(sha256)",
		.digest = "sha256",
	},
	[NVME_AUTH_HASH_SHA384] = {
		.len = 48,
		.hmac = "hmac(sha384)",
		.digest = "sha384",
	},
	[NVME_AUTH_HASH_SHA512] = {
		.len = 64,
		.hmac = "hmac(sha512)",
		.digest = "sha512",
	},
};
110 
111 const char *nvme_auth_hmac_name(u8 hmac_id)
112 {
113 	if (hmac_id >= ARRAY_SIZE(hash_map))
114 		return NULL;
115 	return hash_map[hmac_id].hmac;
116 }
117 EXPORT_SYMBOL_GPL(nvme_auth_hmac_name);
118 
119 const char *nvme_auth_digest_name(u8 hmac_id)
120 {
121 	if (hmac_id >= ARRAY_SIZE(hash_map))
122 		return NULL;
123 	return hash_map[hmac_id].digest;
124 }
125 EXPORT_SYMBOL_GPL(nvme_auth_digest_name);
126 
127 u8 nvme_auth_hmac_id(const char *hmac_name)
128 {
129 	int i;
130 
131 	if (!hmac_name || !strlen(hmac_name))
132 		return NVME_AUTH_HASH_INVALID;
133 
134 	for (i = 0; i < ARRAY_SIZE(hash_map); i++) {
135 		if (!strlen(hash_map[i].hmac))
136 			continue;
137 		if (!strncmp(hash_map[i].hmac, hmac_name,
138 			     strlen(hash_map[i].hmac)))
139 			return i;
140 	}
141 	return NVME_AUTH_HASH_INVALID;
142 }
143 EXPORT_SYMBOL_GPL(nvme_auth_hmac_id);
144 
145 size_t nvme_auth_hmac_hash_len(u8 hmac_id)
146 {
147 	if (hmac_id >= ARRAY_SIZE(hash_map))
148 		return 0;
149 	return hash_map[hmac_id].len;
150 }
151 EXPORT_SYMBOL_GPL(nvme_auth_hmac_hash_len);
152 
/*
 * Total allocation size for a struct nvme_dhchap_key holding @key_len
 * bytes of key material in its flexible key[] array (overflow-checked
 * via struct_size()).
 */
u32 nvme_auth_key_struct_size(u32 key_len)
{
	struct nvme_dhchap_key key;

	return struct_size(&key, key, key_len);
}
EXPORT_SYMBOL_GPL(nvme_auth_key_struct_size);
160 
/**
 * nvme_auth_extract_key - decode and validate a base64-encoded DH-HMAC-CHAP key
 * @secret: NUL-terminated base64 key string; anything from the last ':' on
 *	is ignored
 * @key_hash: expected hash id; when non-zero the decoded key length must
 *	match that hash's digest size
 *
 * Decodes the secret, checks the allowed lengths (32/48/64 key bytes plus
 * a 4-byte CRC32 trailer) and verifies the CRC.  On success returns the
 * allocated key (caller must free with nvme_auth_free_key()); on failure
 * returns an ERR_PTR().
 */
struct nvme_dhchap_key *nvme_auth_extract_key(unsigned char *secret,
					      u8 key_hash)
{
	struct nvme_dhchap_key *key;
	unsigned char *p;
	u32 crc;
	int ret, key_len;
	size_t allocated_len = strlen(secret);

	/* Secret might be affixed with a ':' */
	p = strrchr(secret, ':');
	if (p)
		allocated_len = p - secret;
	/* Decoded data is always shorter than the base64 text, so this fits */
	key = nvme_auth_alloc_key(allocated_len, 0);
	if (!key)
		return ERR_PTR(-ENOMEM);

	key_len = base64_decode(secret, allocated_len, key->key);
	if (key_len < 0) {
		pr_debug("base64 key decoding error %d\n",
			 key_len);
		ret = key_len;
		goto out_free_secret;
	}

	/* Only 32-, 48- or 64-byte keys (each plus 4 CRC bytes) are valid */
	if (key_len != 36 && key_len != 52 &&
	    key_len != 68) {
		pr_err("Invalid key len %d\n", key_len);
		ret = -EINVAL;
		goto out_free_secret;
	}

	/* Key length (minus CRC) must match the requested hash's digest size */
	if (key_hash > 0 &&
	    (key_len - 4) != nvme_auth_hmac_hash_len(key_hash)) {
		pr_err("Mismatched key len %d for %s\n", key_len,
		       nvme_auth_hmac_name(key_hash));
		ret = -EINVAL;
		goto out_free_secret;
	}

	/* The last four bytes is the CRC in little-endian format */
	key_len -= 4;
	/*
	 * The linux implementation doesn't do pre- and post-increments,
	 * so we have to do it manually.
	 */
	crc = ~crc32(~0, key->key, key_len);

	if (get_unaligned_le32(key->key + key_len) != crc) {
		pr_err("key crc mismatch (key %08x, crc %08x)\n",
		       get_unaligned_le32(key->key + key_len), crc);
		ret = -EKEYREJECTED;
		goto out_free_secret;
	}
	key->len = key_len;
	key->hash = key_hash;
	return key;
out_free_secret:
	nvme_auth_free_key(key);
	return ERR_PTR(ret);
}
EXPORT_SYMBOL_GPL(nvme_auth_extract_key);
223 
224 struct nvme_dhchap_key *nvme_auth_alloc_key(u32 len, u8 hash)
225 {
226 	u32 num_bytes = nvme_auth_key_struct_size(len);
227 	struct nvme_dhchap_key *key = kzalloc(num_bytes, GFP_KERNEL);
228 
229 	if (key) {
230 		key->len = len;
231 		key->hash = hash;
232 	}
233 	return key;
234 }
235 EXPORT_SYMBOL_GPL(nvme_auth_alloc_key);
236 
/**
 * nvme_auth_free_key - securely free a DH-HMAC-CHAP key
 * @key: key to free; may be NULL
 *
 * Zeroizes the key material before freeing.  kfree_sensitive() is a
 * no-op for NULL, so no explicit guard is needed.
 */
void nvme_auth_free_key(struct nvme_dhchap_key *key)
{
	kfree_sensitive(key);
}
EXPORT_SYMBOL_GPL(nvme_auth_free_key);
244 
245 struct nvme_dhchap_key *nvme_auth_transform_key(
246 		struct nvme_dhchap_key *key, char *nqn)
247 {
248 	const char *hmac_name;
249 	struct crypto_shash *key_tfm;
250 	struct shash_desc *shash;
251 	struct nvme_dhchap_key *transformed_key;
252 	int ret, key_len;
253 
254 	if (!key) {
255 		pr_warn("No key specified\n");
256 		return ERR_PTR(-ENOKEY);
257 	}
258 	if (key->hash == 0) {
259 		key_len = nvme_auth_key_struct_size(key->len);
260 		transformed_key = kmemdup(key, key_len, GFP_KERNEL);
261 		if (!transformed_key)
262 			return ERR_PTR(-ENOMEM);
263 		return transformed_key;
264 	}
265 	hmac_name = nvme_auth_hmac_name(key->hash);
266 	if (!hmac_name) {
267 		pr_warn("Invalid key hash id %d\n", key->hash);
268 		return ERR_PTR(-EINVAL);
269 	}
270 
271 	key_tfm = crypto_alloc_shash(hmac_name, 0, 0);
272 	if (IS_ERR(key_tfm))
273 		return ERR_CAST(key_tfm);
274 
275 	shash = kmalloc(sizeof(struct shash_desc) +
276 			crypto_shash_descsize(key_tfm),
277 			GFP_KERNEL);
278 	if (!shash) {
279 		ret = -ENOMEM;
280 		goto out_free_key;
281 	}
282 
283 	key_len = crypto_shash_digestsize(key_tfm);
284 	transformed_key = nvme_auth_alloc_key(key_len, key->hash);
285 	if (!transformed_key) {
286 		ret = -ENOMEM;
287 		goto out_free_shash;
288 	}
289 
290 	shash->tfm = key_tfm;
291 	ret = crypto_shash_setkey(key_tfm, key->key, key->len);
292 	if (ret < 0)
293 		goto out_free_transformed_key;
294 	ret = crypto_shash_init(shash);
295 	if (ret < 0)
296 		goto out_free_transformed_key;
297 	ret = crypto_shash_update(shash, nqn, strlen(nqn));
298 	if (ret < 0)
299 		goto out_free_transformed_key;
300 	ret = crypto_shash_update(shash, "NVMe-over-Fabrics", 17);
301 	if (ret < 0)
302 		goto out_free_transformed_key;
303 	ret = crypto_shash_final(shash, transformed_key->key);
304 	if (ret < 0)
305 		goto out_free_transformed_key;
306 
307 	kfree(shash);
308 	crypto_free_shash(key_tfm);
309 
310 	return transformed_key;
311 
312 out_free_transformed_key:
313 	nvme_auth_free_key(transformed_key);
314 out_free_shash:
315 	kfree(shash);
316 out_free_key:
317 	crypto_free_shash(key_tfm);
318 
319 	return ERR_PTR(ret);
320 }
321 EXPORT_SYMBOL_GPL(nvme_auth_transform_key);
322 
323 static int nvme_auth_hash_skey(int hmac_id, u8 *skey, size_t skey_len, u8 *hkey)
324 {
325 	const char *digest_name;
326 	struct crypto_shash *tfm;
327 	int ret;
328 
329 	digest_name = nvme_auth_digest_name(hmac_id);
330 	if (!digest_name) {
331 		pr_debug("%s: failed to get digest for %d\n", __func__,
332 			 hmac_id);
333 		return -EINVAL;
334 	}
335 	tfm = crypto_alloc_shash(digest_name, 0, 0);
336 	if (IS_ERR(tfm))
337 		return -ENOMEM;
338 
339 	ret = crypto_shash_tfm_digest(tfm, skey, skey_len, hkey);
340 	if (ret < 0)
341 		pr_debug("%s: Failed to hash digest len %zu\n", __func__,
342 			 skey_len);
343 
344 	crypto_free_shash(tfm);
345 	return ret;
346 }
347 
/**
 * nvme_auth_augmented_challenge - compute an augmented CHAP challenge
 * @hmac_id: hash id selecting the digest/HMAC algorithms
 * @skey: DH session key
 * @skey_len: length of @skey in bytes
 * @challenge: original challenge, @hlen bytes
 * @aug: output buffer for the augmented challenge, @hlen bytes
 * @hlen: digest length of the selected hash
 *
 * Computes HMAC(hash(skey), challenge) and stores the result in @aug.
 * Intermediate key material is scrubbed with kfree_sensitive().
 * Returns 0 on success or a negative error code.
 */
int nvme_auth_augmented_challenge(u8 hmac_id, u8 *skey, size_t skey_len,
		u8 *challenge, u8 *aug, size_t hlen)
{
	struct crypto_shash *tfm;
	struct shash_desc *desc;
	u8 *hashed_key;
	const char *hmac_name;
	int ret;

	hashed_key = kmalloc(hlen, GFP_KERNEL);
	if (!hashed_key)
		return -ENOMEM;

	/* hashed_key = hash(skey); used below as the HMAC key */
	ret = nvme_auth_hash_skey(hmac_id, skey,
				  skey_len, hashed_key);
	if (ret < 0)
		goto out_free_key;

	hmac_name = nvme_auth_hmac_name(hmac_id);
	if (!hmac_name) {
		pr_warn("%s: invalid hash algorithm %d\n",
			__func__, hmac_id);
		ret = -EINVAL;
		goto out_free_key;
	}

	tfm = crypto_alloc_shash(hmac_name, 0, 0);
	if (IS_ERR(tfm)) {
		ret = PTR_ERR(tfm);
		goto out_free_key;
	}

	desc = kmalloc(sizeof(struct shash_desc) + crypto_shash_descsize(tfm),
		       GFP_KERNEL);
	if (!desc) {
		ret = -ENOMEM;
		goto out_free_hash;
	}
	desc->tfm = tfm;

	ret = crypto_shash_setkey(tfm, hashed_key, hlen);
	if (ret)
		goto out_free_desc;

	ret = crypto_shash_init(desc);
	if (ret)
		goto out_free_desc;

	ret = crypto_shash_update(desc, challenge, hlen);
	if (ret)
		goto out_free_desc;

	ret = crypto_shash_final(desc, aug);
out_free_desc:
	kfree_sensitive(desc);
out_free_hash:
	crypto_free_shash(tfm);
out_free_key:
	kfree_sensitive(hashed_key);
	return ret;
}
EXPORT_SYMBOL_GPL(nvme_auth_augmented_challenge);
410 
411 int nvme_auth_gen_privkey(struct crypto_kpp *dh_tfm, u8 dh_gid)
412 {
413 	int ret;
414 
415 	ret = crypto_kpp_set_secret(dh_tfm, NULL, 0);
416 	if (ret)
417 		pr_debug("failed to set private key, error %d\n", ret);
418 
419 	return ret;
420 }
421 EXPORT_SYMBOL_GPL(nvme_auth_gen_privkey);
422 
423 int nvme_auth_gen_pubkey(struct crypto_kpp *dh_tfm,
424 		u8 *host_key, size_t host_key_len)
425 {
426 	struct kpp_request *req;
427 	struct crypto_wait wait;
428 	struct scatterlist dst;
429 	int ret;
430 
431 	req = kpp_request_alloc(dh_tfm, GFP_KERNEL);
432 	if (!req)
433 		return -ENOMEM;
434 
435 	crypto_init_wait(&wait);
436 	kpp_request_set_input(req, NULL, 0);
437 	sg_init_one(&dst, host_key, host_key_len);
438 	kpp_request_set_output(req, &dst, host_key_len);
439 	kpp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
440 				 crypto_req_done, &wait);
441 
442 	ret = crypto_wait_req(crypto_kpp_generate_public_key(req), &wait);
443 	kpp_request_free(req);
444 	return ret;
445 }
446 EXPORT_SYMBOL_GPL(nvme_auth_gen_pubkey);
447 
448 int nvme_auth_gen_shared_secret(struct crypto_kpp *dh_tfm,
449 		u8 *ctrl_key, size_t ctrl_key_len,
450 		u8 *sess_key, size_t sess_key_len)
451 {
452 	struct kpp_request *req;
453 	struct crypto_wait wait;
454 	struct scatterlist src, dst;
455 	int ret;
456 
457 	req = kpp_request_alloc(dh_tfm, GFP_KERNEL);
458 	if (!req)
459 		return -ENOMEM;
460 
461 	crypto_init_wait(&wait);
462 	sg_init_one(&src, ctrl_key, ctrl_key_len);
463 	kpp_request_set_input(req, &src, ctrl_key_len);
464 	sg_init_one(&dst, sess_key, sess_key_len);
465 	kpp_request_set_output(req, &dst, sess_key_len);
466 	kpp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
467 				 crypto_req_done, &wait);
468 
469 	ret = crypto_wait_req(crypto_kpp_compute_shared_secret(req), &wait);
470 
471 	kpp_request_free(req);
472 	return ret;
473 }
474 EXPORT_SYMBOL_GPL(nvme_auth_gen_shared_secret);
475 
476 int nvme_auth_generate_key(u8 *secret, struct nvme_dhchap_key **ret_key)
477 {
478 	struct nvme_dhchap_key *key;
479 	u8 key_hash;
480 
481 	if (!secret) {
482 		*ret_key = NULL;
483 		return 0;
484 	}
485 
486 	if (sscanf(secret, "DHHC-1:%hhd:%*s:", &key_hash) != 1)
487 		return -EINVAL;
488 
489 	/* Pass in the secret without the 'DHHC-1:XX:' prefix */
490 	key = nvme_auth_extract_key(secret + 10, key_hash);
491 	if (IS_ERR(key)) {
492 		*ret_key = NULL;
493 		return PTR_ERR(key);
494 	}
495 
496 	*ret_key = key;
497 	return 0;
498 }
499 EXPORT_SYMBOL_GPL(nvme_auth_generate_key);
500 
/* Matches the SPDX-License-Identifier (GPL-2.0) at the top of the file. */
MODULE_LICENSE("GPL v2");