// SPDX-License-Identifier: GPL-2.0
/*
 * Copyright (c) 2020 Hannes Reinecke, SUSE Linux
 */

#include <linux/module.h>
#include <linux/crc32.h>
#include <linux/base64.h>
#include <linux/prandom.h>
#include <linux/scatterlist.h>
#include <asm/unaligned.h>
#include <crypto/hash.h>
#include <crypto/dh.h>
#include <linux/nvme.h>
#include <linux/nvme-auth.h>

static u32 nvme_dhchap_seqnum;
static DEFINE_MUTEX(nvme_dhchap_mutex);

u32 nvme_auth_get_seqnum(void)
{
	u32 seqnum;

	mutex_lock(&nvme_dhchap_mutex);
	if (!nvme_dhchap_seqnum)
		nvme_dhchap_seqnum = prandom_u32();
	else {
		nvme_dhchap_seqnum++;
		if (!nvme_dhchap_seqnum)
			nvme_dhchap_seqnum++;
	}
	seqnum = nvme_dhchap_seqnum;
	mutex_unlock(&nvme_dhchap_mutex);
	return seqnum;
}
EXPORT_SYMBOL_GPL(nvme_auth_get_seqnum);

static struct nvme_auth_dhgroup_map {
	const char name[16];
	const char kpp[16];
} dhgroup_map[] = {
	[NVME_AUTH_DHGROUP_NULL] = {
		.name = "null", .kpp = "null" },
	[NVME_AUTH_DHGROUP_2048] = {
		.name = "ffdhe2048", .kpp = "ffdhe2048(dh)" },
	[NVME_AUTH_DHGROUP_3072] = {
		.name = "ffdhe3072", .kpp = "ffdhe3072(dh)" },
	[NVME_AUTH_DHGROUP_4096] = {
		.name = "ffdhe4096", .kpp = "ffdhe4096(dh)" },
	[NVME_AUTH_DHGROUP_6144] = {
		.name = "ffdhe6144", .kpp = "ffdhe6144(dh)" },
	[NVME_AUTH_DHGROUP_8192] = {
		.name = "ffdhe8192", .kpp = "ffdhe8192(dh)" },
};

const char *nvme_auth_dhgroup_name(u8 dhgroup_id)
{
	if (dhgroup_id >= ARRAY_SIZE(dhgroup_map))
		return NULL;
	return dhgroup_map[dhgroup_id].name;
}
EXPORT_SYMBOL_GPL(nvme_auth_dhgroup_name);

const char *nvme_auth_dhgroup_kpp(u8 dhgroup_id)
{
	if (dhgroup_id >= ARRAY_SIZE(dhgroup_map))
		return NULL;
	return dhgroup_map[dhgroup_id].kpp;
}
EXPORT_SYMBOL_GPL(nvme_auth_dhgroup_kpp);

u8 nvme_auth_dhgroup_id(const char *dhgroup_name)
{
	int i;

	if (!dhgroup_name || !strlen(dhgroup_name))
		return NVME_AUTH_DHGROUP_INVALID;
	for (i = 0; i < ARRAY_SIZE(dhgroup_map); i++) {
		if (!strlen(dhgroup_map[i].name))
			continue;
		if (!strncmp(dhgroup_map[i].name, dhgroup_name,
			     strlen(dhgroup_map[i].name)))
			return i;
	}
	return NVME_AUTH_DHGROUP_INVALID;
}
EXPORT_SYMBOL_GPL(nvme_auth_dhgroup_id);

static struct nvme_dhchap_hash_map {
	int len;
	const char hmac[15];
	const char digest[8];
} hash_map[] = {
	[NVME_AUTH_HASH_SHA256] = {
		.len = 32,
		.hmac = "hmac(sha256)",
		.digest = "sha256",
	},
	[NVME_AUTH_HASH_SHA384] = {
		.len = 48,
		.hmac = "hmac(sha384)",
		.digest = "sha384",
	},
	[NVME_AUTH_HASH_SHA512] = {
		.len = 64,
		.hmac = "hmac(sha512)",
		.digest = "sha512",
	},
};

const char *nvme_auth_hmac_name(u8 hmac_id)
{
	if (hmac_id >= ARRAY_SIZE(hash_map))
		return NULL;
	return hash_map[hmac_id].hmac;
}
EXPORT_SYMBOL_GPL(nvme_auth_hmac_name);

const char *nvme_auth_digest_name(u8 hmac_id)
{
	if (hmac_id >= ARRAY_SIZE(hash_map))
		return NULL;
	return hash_map[hmac_id].digest;
}
EXPORT_SYMBOL_GPL(nvme_auth_digest_name);

u8 nvme_auth_hmac_id(const char *hmac_name)
{
	int i;

	if (!hmac_name || !strlen(hmac_name))
		return NVME_AUTH_HASH_INVALID;

	for (i = 0; i < ARRAY_SIZE(hash_map); i++) {
		if (!strlen(hash_map[i].hmac))
			continue;
		if (!strncmp(hash_map[i].hmac, hmac_name,
			     strlen(hash_map[i].hmac)))
			return i;
	}
	return NVME_AUTH_HASH_INVALID;
}
EXPORT_SYMBOL_GPL(nvme_auth_hmac_id);
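
/**
 * nvme_auth_hmac_hash_len() - Digest length for a DH-HMAC-CHAP hash
 * @hmac_id: NVMe DH-HMAC-CHAP hash identifier (NVME_AUTH_HASH_*)
 *
 * Return: the digest length in bytes for @hmac_id, or 0 if the
 * identifier is not known.
 */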
size_t nvme_auth_hmac_hash_len(u8 hmac_id)
{
	if (hmac_id >= ARRAY_SIZE(hash_map))
		return 0;
	return hash_map[hmac_id].len;
}
EXPORT_SYMBOL_GPL(nvme_auth_hmac_hash_len);

struct nvme_dhchap_key *nvme_auth_extract_key(unsigned char *secret,
					      u8 key_hash)
{
	struct nvme_dhchap_key *key;
	unsigned char *p;
	u32 crc;
	int ret, key_len;
	size_t allocated_len = strlen(secret);

	/* Secret might be affixed with a ':' */
	p = strrchr(secret, ':');
	if (p)
		allocated_len = p - secret;
	key = kzalloc(sizeof(*key), GFP_KERNEL);
	if (!key)
		return ERR_PTR(-ENOMEM);
	key->key = kzalloc(allocated_len, GFP_KERNEL);
	if (!key->key) {
		ret = -ENOMEM;
		goto out_free_key;
	}

	key_len = base64_decode(secret, allocated_len, key->key);
	if (key_len < 0) {
		pr_debug("base64 key decoding error %d\n",
			 key_len);
		ret = key_len;
		goto out_free_secret;
	}

	if (key_len != 36 && key_len != 52 &&
	    key_len != 68) {
		pr_err("Invalid key len %d\n", key_len);
		ret = -EINVAL;
		goto out_free_secret;
	}

	if (key_hash > 0 &&
	    (key_len - 4) != nvme_auth_hmac_hash_len(key_hash)) {
		pr_err("Mismatched key len %d for %s\n", key_len,
		       nvme_auth_hmac_name(key_hash));
		ret = -EINVAL;
		goto out_free_secret;
	}

	/* The last four bytes are the CRC in little-endian format */
	key_len -= 4;
	/*
	 * The linux implementation doesn't do pre- and post-increments,
	 * so we have to do it manually.
	 */
	crc = ~crc32(~0, key->key, key_len);

	if (get_unaligned_le32(key->key + key_len) != crc) {
		pr_err("key crc mismatch (key %08x, crc %08x)\n",
		       get_unaligned_le32(key->key + key_len), crc);
		ret = -EKEYREJECTED;
		goto out_free_secret;
	}
	key->len = key_len;
	key->hash = key_hash;
	return key;
out_free_secret:
	kfree_sensitive(key->key);
out_free_key:
	kfree(key);
	return ERR_PTR(ret);
}
EXPORT_SYMBOL_GPL(nvme_auth_extract_key);

void nvme_auth_free_key(struct nvme_dhchap_key *key)
{
	if (!key)
		return;
	kfree_sensitive(key->key);
	kfree(key);
}
EXPORT_SYMBOL_GPL(nvme_auth_free_key);
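
/**
 * nvme_auth_transform_key() - Transform a DH-HMAC-CHAP key
 * @key: key to be transformed
 * @nqn: NVMe qualified name of the host or subsystem the key belongs to
 *
 * For a non-zero key hash the configured secret is transformed by
 * computing HMAC(key, @nqn || "NVMe-over-Fabrics") with the HMAC
 * algorithm selected by the key hash. A key hash of zero means the
 * secret is used as-is and is only duplicated.
 *
 * Return: the transformed key on success, or an ERR_PTR() on failure.
 */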
u8 *nvme_auth_transform_key(struct nvme_dhchap_key *key, char *nqn)
{
	const char *hmac_name;
	struct crypto_shash *key_tfm;
	struct shash_desc *shash;
	u8 *transformed_key;
	int ret;

	if (!key || !key->key) {
		pr_warn("No key specified\n");
		return ERR_PTR(-ENOKEY);
	}
	if (key->hash == 0) {
		transformed_key = kmemdup(key->key, key->len, GFP_KERNEL);
		return transformed_key ? transformed_key : ERR_PTR(-ENOMEM);
	}
	hmac_name = nvme_auth_hmac_name(key->hash);
	if (!hmac_name) {
		pr_warn("Invalid key hash id %d\n", key->hash);
		return ERR_PTR(-EINVAL);
	}

	key_tfm = crypto_alloc_shash(hmac_name, 0, 0);
	if (IS_ERR(key_tfm))
		return (u8 *)key_tfm;

	shash = kmalloc(sizeof(struct shash_desc) +
			crypto_shash_descsize(key_tfm),
			GFP_KERNEL);
	if (!shash) {
		ret = -ENOMEM;
		goto out_free_key;
	}

	transformed_key = kzalloc(crypto_shash_digestsize(key_tfm), GFP_KERNEL);
	if (!transformed_key) {
		ret = -ENOMEM;
		goto out_free_shash;
	}

	shash->tfm = key_tfm;
	ret = crypto_shash_setkey(key_tfm, key->key, key->len);
	if (ret < 0)
		goto out_free_transformed_key;
	ret = crypto_shash_init(shash);
	if (ret < 0)
		goto out_free_transformed_key;
	ret = crypto_shash_update(shash, nqn, strlen(nqn));
	if (ret < 0)
		goto out_free_transformed_key;
	ret = crypto_shash_update(shash, "NVMe-over-Fabrics", 17);
	if (ret < 0)
		goto out_free_transformed_key;
	ret = crypto_shash_final(shash, transformed_key);
	if (ret < 0)
		goto out_free_transformed_key;

	kfree(shash);
	crypto_free_shash(key_tfm);

	return transformed_key;

out_free_transformed_key:
	kfree_sensitive(transformed_key);
out_free_shash:
	kfree(shash);
out_free_key:
	crypto_free_shash(key_tfm);

	return ERR_PTR(ret);
}
EXPORT_SYMBOL_GPL(nvme_auth_transform_key);

static int nvme_auth_hash_skey(int hmac_id, u8 *skey, size_t skey_len, u8 *hkey)
{
	const char *digest_name;
	struct crypto_shash *tfm;
	int ret;

	digest_name = nvme_auth_digest_name(hmac_id);
	if (!digest_name) {
		pr_debug("%s: failed to get digest for %d\n", __func__,
			 hmac_id);
		return -EINVAL;
	}
	tfm = crypto_alloc_shash(digest_name, 0, 0);
	if (IS_ERR(tfm))
		return -ENOMEM;

	ret = crypto_shash_tfm_digest(tfm, skey, skey_len, hkey);
	if (ret < 0)
		pr_debug("%s: Failed to hash digest len %zu\n", __func__,
			 skey_len);

	crypto_free_shash(tfm);
	return ret;
}

int nvme_auth_augmented_challenge(u8 hmac_id, u8 *skey, size_t skey_len,
				  u8 *challenge, u8 *aug, size_t hlen)
{
	struct crypto_shash *tfm;
	struct shash_desc *desc;
	u8 *hashed_key;
	const char *hmac_name;
	int ret;

	hashed_key = kmalloc(hlen, GFP_KERNEL);
	if (!hashed_key)
		return -ENOMEM;

	ret = nvme_auth_hash_skey(hmac_id, skey,
				  skey_len, hashed_key);
	if (ret < 0)
		goto out_free_key;

	hmac_name = nvme_auth_hmac_name(hmac_id);
	if (!hmac_name) {
		pr_warn("%s: invalid hash algorithm %d\n",
			__func__, hmac_id);
		ret = -EINVAL;
		goto out_free_key;
	}

	tfm = crypto_alloc_shash(hmac_name, 0, 0);
	if (IS_ERR(tfm)) {
		ret = PTR_ERR(tfm);
		goto out_free_key;
	}

	desc = kmalloc(sizeof(struct shash_desc) + crypto_shash_descsize(tfm),
		       GFP_KERNEL);
	if (!desc) {
		ret = -ENOMEM;
		goto out_free_hash;
	}
	desc->tfm = tfm;

	ret = crypto_shash_setkey(tfm, hashed_key, hlen);
	if (ret)
		goto out_free_desc;

	ret = crypto_shash_init(desc);
	if (ret)
		goto out_free_desc;

	ret = crypto_shash_update(desc, challenge, hlen);
	if (ret)
		goto out_free_desc;

	ret = crypto_shash_final(desc, aug);
out_free_desc:
	kfree_sensitive(desc);
out_free_hash:
	crypto_free_shash(tfm);
out_free_key:
	kfree_sensitive(hashed_key);
	return ret;
}
EXPORT_SYMBOL_GPL(nvme_auth_augmented_challenge);
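
/**
 * nvme_auth_gen_privkey() - Generate an ephemeral DH private key
 * @dh_tfm: KPP transform allocated for the negotiated FFDHE group
 * @dh_gid: DH group identifier (currently unused)
 *
 * Passing a NULL key to crypto_kpp_set_secret() asks the ffdheXXXX(dh)
 * implementation to generate a random private key for this transform.
 *
 * Return: 0 on success, a negative errno value on failure.
 */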
int nvme_auth_gen_privkey(struct crypto_kpp *dh_tfm, u8 dh_gid)
{
	int ret;

	ret = crypto_kpp_set_secret(dh_tfm, NULL, 0);
	if (ret)
		pr_debug("failed to set private key, error %d\n", ret);

	return ret;
}
EXPORT_SYMBOL_GPL(nvme_auth_gen_privkey);

int nvme_auth_gen_pubkey(struct crypto_kpp *dh_tfm,
			 u8 *host_key, size_t host_key_len)
{
	struct kpp_request *req;
	struct crypto_wait wait;
	struct scatterlist dst;
	int ret;

	req = kpp_request_alloc(dh_tfm, GFP_KERNEL);
	if (!req)
		return -ENOMEM;

	crypto_init_wait(&wait);
	kpp_request_set_input(req, NULL, 0);
	sg_init_one(&dst, host_key, host_key_len);
	kpp_request_set_output(req, &dst, host_key_len);
	kpp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				 crypto_req_done, &wait);

	ret = crypto_wait_req(crypto_kpp_generate_public_key(req), &wait);
	kpp_request_free(req);
	return ret;
}
EXPORT_SYMBOL_GPL(nvme_auth_gen_pubkey);

int nvme_auth_gen_shared_secret(struct crypto_kpp *dh_tfm,
				u8 *ctrl_key, size_t ctrl_key_len,
				u8 *sess_key, size_t sess_key_len)
{
	struct kpp_request *req;
	struct crypto_wait wait;
	struct scatterlist src, dst;
	int ret;

	req = kpp_request_alloc(dh_tfm, GFP_KERNEL);
	if (!req)
		return -ENOMEM;

	crypto_init_wait(&wait);
	sg_init_one(&src, ctrl_key, ctrl_key_len);
	kpp_request_set_input(req, &src, ctrl_key_len);
	sg_init_one(&dst, sess_key, sess_key_len);
	kpp_request_set_output(req, &dst, sess_key_len);
	kpp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				 crypto_req_done, &wait);

	ret = crypto_wait_req(crypto_kpp_compute_shared_secret(req), &wait);

	kpp_request_free(req);
	return ret;
}
EXPORT_SYMBOL_GPL(nvme_auth_gen_shared_secret);

int nvme_auth_generate_key(u8 *secret, struct nvme_dhchap_key **ret_key)
{
	struct nvme_dhchap_key *key;
	u8 key_hash;

	if (!secret) {
		*ret_key = NULL;
		return 0;
	}

	if (sscanf(secret, "DHHC-1:%hhd:%*s:", &key_hash) != 1)
		return -EINVAL;

	/* Pass in the secret without the 'DHHC-1:XX:' prefix */
	key = nvme_auth_extract_key(secret + 10, key_hash);
	if (IS_ERR(key)) {
		*ret_key = NULL;
		return PTR_ERR(key);
	}

	*ret_key = key;
	return 0;
}
EXPORT_SYMBOL_GPL(nvme_auth_generate_key);

MODULE_LICENSE("GPL v2");