// SPDX-License-Identifier: GPL-2.0
#include <linux/crypto.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/printk.h>

#include <crypto/aes.h>
#include <crypto/skcipher.h>
#include <crypto/ctr.h>
#include <crypto/des.h>
#include <crypto/xts.h>

#include "nitrox_dev.h"
#include "nitrox_common.h"
#include "nitrox_req.h"

struct nitrox_cipher {
	const char *name;
	enum flexi_cipher value;
};

/* supported cipher list */
static const struct nitrox_cipher flexi_cipher_table[] = {
	{ "null", CIPHER_NULL },
	{ "cbc(des3_ede)", CIPHER_3DES_CBC },
	{ "ecb(des3_ede)", CIPHER_3DES_ECB },
	{ "cbc(aes)", CIPHER_AES_CBC },
	{ "ecb(aes)", CIPHER_AES_ECB },
	{ "cfb(aes)", CIPHER_AES_CFB },
	{ "rfc3686(ctr(aes))", CIPHER_AES_CTR },
	{ "xts(aes)", CIPHER_AES_XTS },
	{ "cts(cbc(aes))", CIPHER_AES_CBC_CTS },
	{ NULL, CIPHER_INVALID }
};

static enum flexi_cipher flexi_cipher_type(const char *name)
{
	const struct nitrox_cipher *cipher = flexi_cipher_table;

	while (cipher->name) {
		if (!strcmp(cipher->name, name))
			break;
		cipher++;
	}
	return cipher->value;
}

static int nitrox_skcipher_init(struct crypto_skcipher *tfm)
{
	struct nitrox_crypto_ctx *nctx = crypto_skcipher_ctx(tfm);
	struct crypto_ctx_hdr *chdr;

	/* get the first device */
	nctx->ndev = nitrox_get_first_device();
	if (!nctx->ndev)
		return -ENODEV;

	/* allocate nitrox crypto context */
	chdr = crypto_alloc_context(nctx->ndev);
	if (!chdr) {
		nitrox_put_device(nctx->ndev);
		return -ENOMEM;
	}
	nctx->chdr = chdr;
	nctx->u.ctx_handle = (uintptr_t)((u8 *)chdr->vaddr +
					 sizeof(struct ctx_hdr));
	crypto_skcipher_set_reqsize(tfm, crypto_skcipher_reqsize(tfm) +
				    sizeof(struct nitrox_kcrypt_request));
	return 0;
}

static void nitrox_skcipher_exit(struct crypto_skcipher *tfm)
{
	struct nitrox_crypto_ctx *nctx = crypto_skcipher_ctx(tfm);

	/* free the nitrox crypto context */
	if (nctx->u.ctx_handle) {
		struct flexi_crypto_context *fctx = nctx->u.fctx;

		memzero_explicit(&fctx->crypto, sizeof(struct crypto_keys));
		memzero_explicit(&fctx->auth, sizeof(struct auth_keys));
		crypto_free_context((void *)nctx->chdr);
	}
	nitrox_put_device(nctx->ndev);

	nctx->u.ctx_handle = 0;
	nctx->ndev = NULL;
}

static inline int nitrox_skcipher_setkey(struct crypto_skcipher *cipher,
					 int aes_keylen, const u8 *key,
					 unsigned int keylen)
{
	struct crypto_tfm *tfm = crypto_skcipher_tfm(cipher);
	struct nitrox_crypto_ctx *nctx = crypto_tfm_ctx(tfm);
	struct flexi_crypto_context *fctx;
	union fc_ctx_flags *flags;
	enum flexi_cipher cipher_type;
	const char *name;

	name = crypto_tfm_alg_name(tfm);
	cipher_type = flexi_cipher_type(name);
	if (unlikely(cipher_type == CIPHER_INVALID)) {
		pr_err("unsupported cipher: %s\n", name);
		return -EINVAL;
	}

	/* fill crypto context */
	fctx = nctx->u.fctx;
	flags = &fctx->flags;
	flags->f = 0;
	flags->w0.cipher_type = cipher_type;
	flags->w0.aes_keylen = aes_keylen;
	flags->w0.iv_source = IV_FROM_DPTR;
	flags->f = cpu_to_be64(*(u64 *)&flags->w0);
	/* copy the key to context */
	memcpy(fctx->crypto.u.key, key, keylen);

	return 0;
}
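
/*
 * nitrox_aes_setkey - setkey handler shared by the plain AES modes
 * (CBC, ECB, CFB, CTS). flexi_aes_keylen() rejects unsupported AES key
 * lengths; on failure the transform is flagged with
 * CRYPTO_TFM_RES_BAD_KEY_LEN and -EINVAL is returned.
 */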
static int nitrox_aes_setkey(struct crypto_skcipher *cipher, const u8 *key,
			     unsigned int keylen)
{
	int aes_keylen;

	aes_keylen = flexi_aes_keylen(keylen);
	if (aes_keylen < 0) {
		crypto_skcipher_set_flags(cipher, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}
	return nitrox_skcipher_setkey(cipher, aes_keylen, key, keylen);
}

static int alloc_src_sglist(struct skcipher_request *skreq, int ivsize)
{
	struct nitrox_kcrypt_request *nkreq = skcipher_request_ctx(skreq);
	int nents = sg_nents(skreq->src) + 1;
	int ret;

	/* Allocate buffer to hold IV and input scatterlist array */
	ret = alloc_src_req_buf(nkreq, nents, ivsize);
	if (ret)
		return ret;

	nitrox_creq_copy_iv(nkreq->src, skreq->iv, ivsize);
	nitrox_creq_set_src_sg(nkreq, nents, ivsize, skreq->src,
			       skreq->cryptlen);

	return 0;
}

static int alloc_dst_sglist(struct skcipher_request *skreq, int ivsize)
{
	struct nitrox_kcrypt_request *nkreq = skcipher_request_ctx(skreq);
	int nents = sg_nents(skreq->dst) + 3;
	int ret;

	/* Allocate buffer to hold ORH, COMPLETION and output scatterlist
	 * array
	 */
	ret = alloc_dst_req_buf(nkreq, nents);
	if (ret)
		return ret;

	nitrox_creq_set_orh(nkreq);
	nitrox_creq_set_comp(nkreq);
	nitrox_creq_set_dst_sg(nkreq, nents, ivsize, skreq->dst,
			       skreq->cryptlen);

	return 0;
}

static void free_src_sglist(struct skcipher_request *skreq)
{
	struct nitrox_kcrypt_request *nkreq = skcipher_request_ctx(skreq);

	kfree(nkreq->src);
}

static void free_dst_sglist(struct skcipher_request *skreq)
{
	struct nitrox_kcrypt_request *nkreq = skcipher_request_ctx(skreq);

	kfree(nkreq->dst);
}

static void nitrox_skcipher_callback(void *arg, int err)
{
	struct skcipher_request *skreq = arg;

	free_src_sglist(skreq);
	free_dst_sglist(skreq);
	if (err) {
		pr_err_ratelimited("request failed status 0x%x\n", err);
		err = -EINVAL;
	}

	skcipher_request_complete(skreq, err);
}
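
/*
 * nitrox_skcipher_crypt() - build an SE crypto request for a symmetric
 * cipher operation and hand it to the hardware. The IV is placed in front
 * of the source data (IV_FROM_DPTR), param0 carries the length of the data
 * to be processed and param2 the offset of that data past the IV. The
 * source/destination scatterlist buffers allocated here are released in
 * nitrox_skcipher_callback() once the request completes.
 */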
static int nitrox_skcipher_crypt(struct skcipher_request *skreq, bool enc)
{
	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(skreq);
	struct nitrox_crypto_ctx *nctx = crypto_skcipher_ctx(cipher);
	struct nitrox_kcrypt_request *nkreq = skcipher_request_ctx(skreq);
	int ivsize = crypto_skcipher_ivsize(cipher);
	struct se_crypto_request *creq;
	int ret;

	creq = &nkreq->creq;
	creq->flags = skreq->base.flags;
	creq->gfp = (skreq->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
		    GFP_KERNEL : GFP_ATOMIC;

	/* fill the request */
	creq->ctrl.value = 0;
	creq->opcode = FLEXI_CRYPTO_ENCRYPT_HMAC;
	creq->ctrl.s.arg = (enc ? ENCRYPT : DECRYPT);
	/* param0: length of the data to be encrypted */
	creq->gph.param0 = cpu_to_be16(skreq->cryptlen);
	creq->gph.param1 = 0;
	/* param2: encryption data offset */
	creq->gph.param2 = cpu_to_be16(ivsize);
	creq->gph.param3 = 0;

	creq->ctx_handle = nctx->u.ctx_handle;
	creq->ctrl.s.ctxl = sizeof(struct flexi_crypto_context);

	ret = alloc_src_sglist(skreq, ivsize);
	if (ret)
		return ret;

	ret = alloc_dst_sglist(skreq, ivsize);
	if (ret) {
		free_src_sglist(skreq);
		return ret;
	}

	/* send the crypto request */
	return nitrox_process_se_request(nctx->ndev, creq,
					 nitrox_skcipher_callback, skreq);
}

static int nitrox_aes_encrypt(struct skcipher_request *skreq)
{
	return nitrox_skcipher_crypt(skreq, true);
}

static int nitrox_aes_decrypt(struct skcipher_request *skreq)
{
	return nitrox_skcipher_crypt(skreq, false);
}

static int nitrox_3des_setkey(struct crypto_skcipher *cipher,
			      const u8 *key, unsigned int keylen)
{
	if (keylen != DES3_EDE_KEY_SIZE) {
		crypto_skcipher_set_flags(cipher, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}

	return nitrox_skcipher_setkey(cipher, 0, key, keylen);
}

static int nitrox_3des_encrypt(struct skcipher_request *skreq)
{
	return nitrox_skcipher_crypt(skreq, true);
}

static int nitrox_3des_decrypt(struct skcipher_request *skreq)
{
	return nitrox_skcipher_crypt(skreq, false);
}

static int nitrox_aes_xts_setkey(struct crypto_skcipher *cipher,
				 const u8 *key, unsigned int keylen)
{
	struct crypto_tfm *tfm = crypto_skcipher_tfm(cipher);
	struct nitrox_crypto_ctx *nctx = crypto_tfm_ctx(tfm);
	struct flexi_crypto_context *fctx;
	int aes_keylen, ret;

	ret = xts_check_key(tfm, key, keylen);
	if (ret)
		return ret;

	keylen /= 2;

	aes_keylen = flexi_aes_keylen(keylen);
	if (aes_keylen < 0) {
		crypto_skcipher_set_flags(cipher, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}

	fctx = nctx->u.fctx;
	/* copy KEY2 */
	memcpy(fctx->auth.u.key2, (key + keylen), keylen);

	return nitrox_skcipher_setkey(cipher, aes_keylen, key, keylen);
}

static int nitrox_aes_ctr_rfc3686_setkey(struct crypto_skcipher *cipher,
					 const u8 *key, unsigned int keylen)
{
	struct crypto_tfm *tfm = crypto_skcipher_tfm(cipher);
	struct nitrox_crypto_ctx *nctx = crypto_tfm_ctx(tfm);
	struct flexi_crypto_context *fctx;
	int aes_keylen;

	if (keylen < CTR_RFC3686_NONCE_SIZE)
		return -EINVAL;

	fctx = nctx->u.fctx;

	memcpy(fctx->crypto.iv, key + (keylen - CTR_RFC3686_NONCE_SIZE),
	       CTR_RFC3686_NONCE_SIZE);

	keylen -= CTR_RFC3686_NONCE_SIZE;

	aes_keylen = flexi_aes_keylen(keylen);
	if (aes_keylen < 0) {
		crypto_skcipher_set_flags(cipher, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}
	return nitrox_skcipher_setkey(cipher, aes_keylen, key, keylen);
}
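
/*
 * skcipher algorithm definitions registered with the crypto API through
 * nitrox_register_skciphers(). All entries share the same init/exit
 * handlers, and every encrypt/decrypt wrapper funnels into
 * nitrox_skcipher_crypt(); only the setkey routine and the block, IV
 * and key-size parameters differ per mode.
 */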
static struct skcipher_alg nitrox_skciphers[] = { {
	.base = {
		.cra_name = "cbc(aes)",
		.cra_driver_name = "n5_cbc(aes)",
		.cra_priority = PRIO,
		.cra_flags = CRYPTO_ALG_ASYNC,
		.cra_blocksize = AES_BLOCK_SIZE,
		.cra_ctxsize = sizeof(struct nitrox_crypto_ctx),
		.cra_alignmask = 0,
		.cra_module = THIS_MODULE,
	},
	.min_keysize = AES_MIN_KEY_SIZE,
	.max_keysize = AES_MAX_KEY_SIZE,
	.ivsize = AES_BLOCK_SIZE,
	.setkey = nitrox_aes_setkey,
	.encrypt = nitrox_aes_encrypt,
	.decrypt = nitrox_aes_decrypt,
	.init = nitrox_skcipher_init,
	.exit = nitrox_skcipher_exit,
}, {
	.base = {
		.cra_name = "ecb(aes)",
		.cra_driver_name = "n5_ecb(aes)",
		.cra_priority = PRIO,
		.cra_flags = CRYPTO_ALG_ASYNC,
		.cra_blocksize = AES_BLOCK_SIZE,
		.cra_ctxsize = sizeof(struct nitrox_crypto_ctx),
		.cra_alignmask = 0,
		.cra_module = THIS_MODULE,
	},
	.min_keysize = AES_MIN_KEY_SIZE,
	.max_keysize = AES_MAX_KEY_SIZE,
	.ivsize = AES_BLOCK_SIZE,
	.setkey = nitrox_aes_setkey,
	.encrypt = nitrox_aes_encrypt,
	.decrypt = nitrox_aes_decrypt,
	.init = nitrox_skcipher_init,
	.exit = nitrox_skcipher_exit,
}, {
	.base = {
		.cra_name = "cfb(aes)",
		.cra_driver_name = "n5_cfb(aes)",
		.cra_priority = PRIO,
		.cra_flags = CRYPTO_ALG_ASYNC,
		.cra_blocksize = AES_BLOCK_SIZE,
		.cra_ctxsize = sizeof(struct nitrox_crypto_ctx),
		.cra_alignmask = 0,
		.cra_module = THIS_MODULE,
	},
	.min_keysize = AES_MIN_KEY_SIZE,
	.max_keysize = AES_MAX_KEY_SIZE,
	.ivsize = AES_BLOCK_SIZE,
	.setkey = nitrox_aes_setkey,
	.encrypt = nitrox_aes_encrypt,
	.decrypt = nitrox_aes_decrypt,
	.init = nitrox_skcipher_init,
	.exit = nitrox_skcipher_exit,
}, {
	.base = {
		.cra_name = "xts(aes)",
		.cra_driver_name = "n5_xts(aes)",
		.cra_priority = PRIO,
		.cra_flags = CRYPTO_ALG_ASYNC,
		.cra_blocksize = AES_BLOCK_SIZE,
		.cra_ctxsize = sizeof(struct nitrox_crypto_ctx),
		.cra_alignmask = 0,
		.cra_module = THIS_MODULE,
	},
	.min_keysize = 2 * AES_MIN_KEY_SIZE,
	.max_keysize = 2 * AES_MAX_KEY_SIZE,
	.ivsize = AES_BLOCK_SIZE,
	.setkey = nitrox_aes_xts_setkey,
	.encrypt = nitrox_aes_encrypt,
	.decrypt = nitrox_aes_decrypt,
	.init = nitrox_skcipher_init,
	.exit = nitrox_skcipher_exit,
}, {
	.base = {
		.cra_name = "rfc3686(ctr(aes))",
		.cra_driver_name = "n5_rfc3686(ctr(aes))",
		.cra_priority = PRIO,
		.cra_flags = CRYPTO_ALG_ASYNC,
		.cra_blocksize = 1,
		.cra_ctxsize = sizeof(struct nitrox_crypto_ctx),
		.cra_alignmask = 0,
		.cra_module = THIS_MODULE,
	},
	.min_keysize = AES_MIN_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
	.max_keysize = AES_MAX_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
	.ivsize = CTR_RFC3686_IV_SIZE,
	.init = nitrox_skcipher_init,
	.exit = nitrox_skcipher_exit,
	.setkey = nitrox_aes_ctr_rfc3686_setkey,
	.encrypt = nitrox_aes_encrypt,
	.decrypt = nitrox_aes_decrypt,
}, {
	.base = {
		.cra_name = "cts(cbc(aes))",
		.cra_driver_name = "n5_cts(cbc(aes))",
		.cra_priority = PRIO,
		.cra_flags = CRYPTO_ALG_ASYNC,
		.cra_blocksize = AES_BLOCK_SIZE,
		.cra_ctxsize = sizeof(struct nitrox_crypto_ctx),
		.cra_alignmask = 0,
		.cra_module = THIS_MODULE,
	},
	.min_keysize = AES_MIN_KEY_SIZE,
	.max_keysize = AES_MAX_KEY_SIZE,
	.ivsize = AES_BLOCK_SIZE,
	.setkey = nitrox_aes_setkey,
	.encrypt = nitrox_aes_encrypt,
	.decrypt = nitrox_aes_decrypt,
	.init = nitrox_skcipher_init,
	.exit = nitrox_skcipher_exit,
}, {
	.base = {
		.cra_name = "cbc(des3_ede)",
		.cra_driver_name = "n5_cbc(des3_ede)",
		.cra_priority = PRIO,
		.cra_flags = CRYPTO_ALG_ASYNC,
		.cra_blocksize = DES3_EDE_BLOCK_SIZE,
		.cra_ctxsize = sizeof(struct nitrox_crypto_ctx),
		.cra_alignmask = 0,
		.cra_module = THIS_MODULE,
	},
	.min_keysize = DES3_EDE_KEY_SIZE,
	.max_keysize = DES3_EDE_KEY_SIZE,
	.ivsize = DES3_EDE_BLOCK_SIZE,
	.setkey = nitrox_3des_setkey,
	.encrypt = nitrox_3des_encrypt,
	.decrypt = nitrox_3des_decrypt,
	.init = nitrox_skcipher_init,
	.exit = nitrox_skcipher_exit,
}, {
	.base = {
		.cra_name = "ecb(des3_ede)",
		.cra_driver_name = "n5_ecb(des3_ede)",
		.cra_priority = PRIO,
		.cra_flags = CRYPTO_ALG_ASYNC,
		.cra_blocksize = DES3_EDE_BLOCK_SIZE,
		.cra_ctxsize = sizeof(struct nitrox_crypto_ctx),
		.cra_alignmask = 0,
		.cra_module = THIS_MODULE,
	},
	.min_keysize = DES3_EDE_KEY_SIZE,
	.max_keysize = DES3_EDE_KEY_SIZE,
	.ivsize = DES3_EDE_BLOCK_SIZE,
	.setkey = nitrox_3des_setkey,
	.encrypt = nitrox_3des_encrypt,
	.decrypt = nitrox_3des_decrypt,
	.init = nitrox_skcipher_init,
	.exit = nitrox_skcipher_exit,
} };

int nitrox_register_skciphers(void)
{
	return crypto_register_skciphers(nitrox_skciphers,
					 ARRAY_SIZE(nitrox_skciphers));
}

void nitrox_unregister_skciphers(void)
{
	crypto_unregister_skciphers(nitrox_skciphers,
				    ARRAY_SIZE(nitrox_skciphers));
}