/*-
 * Copyright (c) 2005-2008 Pawel Jakub Dawidek <pjd@FreeBSD.org>
 * Copyright (c) 2010 Konstantin Belousov <kib@FreeBSD.org>
 * Copyright (c) 2014,2016 The FreeBSD Foundation
 * Copyright (c) 2020 Ampere Computing
 * All rights reserved.
 *
 * Portions of this software were developed by John-Mark Gurney
 * under sponsorship of the FreeBSD Foundation and
 * Rubicon Communications, LLC (Netgate).
 *
 * This software was developed by Andrew Turner under
 * sponsorship from the FreeBSD Foundation.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHORS AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHORS OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 */

/*
 * This is based on the aesni code.
 */

#include <sys/cdefs.h>
__FBSDID("$FreeBSD$");

#include <sys/param.h>
#include <sys/systm.h>
#include <sys/kernel.h>
#include <sys/bus.h>
#include <sys/endian.h>
#include <sys/lock.h>
#include <sys/malloc.h>
#include <sys/mbuf.h>
#include <sys/module.h>
#include <sys/mutex.h>
#include <sys/queue.h>
#include <sys/rwlock.h>
#include <sys/smp.h>
#include <sys/uio.h>

#include <machine/vfp.h>

#include <opencrypto/cryptodev.h>
#include <opencrypto/gmac.h>
#include <cryptodev_if.h>
#include <crypto/armv8/armv8_crypto.h>
#include <crypto/rijndael/rijndael.h>

struct armv8_crypto_softc {
	int		dying;
	int32_t		cid;
	struct rwlock	lock;
	bool		has_pmul;
};

static struct mtx *ctx_mtx;
static struct fpu_kern_ctx **ctx_vfp;
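/*
 * The AES and PMULL instructions operate on the SIMD register file,
 * which the kernel does not save or restore by default.  Each CPU
 * therefore gets a preallocated fpu_kern_ctx protected by a per-CPU
 * mutex: a thread grabs the context for the CPU it is running on with
 * ACQUIRE_CTX, brackets the SIMD work with fpu_kern_enter() and
 * fpu_kern_leave(), and gives the context back with RELEASE_CTX.
 * Dedicated FPU kernel threads (is_fpu_kern_thread()) skip this.
 */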
#define	ACQUIRE_CTX(i, ctx)					\
	do {							\
		(i) = PCPU_GET(cpuid);				\
		mtx_lock(&ctx_mtx[(i)]);			\
		(ctx) = ctx_vfp[(i)];				\
	} while (0)
#define	RELEASE_CTX(i, ctx)					\
	do {							\
		mtx_unlock(&ctx_mtx[(i)]);			\
		(i) = -1;					\
		(ctx) = NULL;					\
	} while (0)

static int armv8_crypto_cipher_process(struct armv8_crypto_session *,
    struct cryptop *);

MALLOC_DEFINE(M_ARMV8_CRYPTO, "armv8_crypto", "ARMv8 Crypto Data");

static void
armv8_crypto_identify(driver_t *drv, device_t parent)
{

	/* NB: order 10 is so we get attached after h/w devices */
	if (device_find_child(parent, "armv8crypto", -1) == NULL &&
	    BUS_ADD_CHILD(parent, 10, "armv8crypto", -1) == 0)
		panic("ARMv8 crypto: could not attach");
}

static int
armv8_crypto_probe(device_t dev)
{
	uint64_t reg;
	int ret = ENXIO;

	reg = READ_SPECIALREG(id_aa64isar0_el1);

	switch (ID_AA64ISAR0_AES_VAL(reg)) {
	case ID_AA64ISAR0_AES_BASE:
		ret = 0;
		device_set_desc(dev, "AES-CBC,AES-XTS");
		break;
	case ID_AA64ISAR0_AES_PMULL:
		ret = 0;
		device_set_desc(dev, "AES-CBC,AES-XTS,AES-GCM");
		break;
	case ID_AA64ISAR0_AES_NONE:
		device_printf(dev, "CPU lacks AES instructions\n");
		break;
	default:
		break;
	}

	/* TODO: Check more fields as we support more features */

	return (ret);
}

static int
armv8_crypto_attach(device_t dev)
{
	struct armv8_crypto_softc *sc;
	uint64_t reg;
	int i;

	sc = device_get_softc(dev);
	sc->dying = 0;

	reg = READ_SPECIALREG(id_aa64isar0_el1);

	if (ID_AA64ISAR0_AES_VAL(reg) == ID_AA64ISAR0_AES_PMULL)
		sc->has_pmul = true;

	sc->cid = crypto_get_driverid(dev, sizeof(struct armv8_crypto_session),
	    CRYPTOCAP_F_SOFTWARE | CRYPTOCAP_F_SYNC |
	    CRYPTOCAP_F_ACCEL_SOFTWARE);
	if (sc->cid < 0) {
		device_printf(dev, "Could not get crypto driver id.\n");
		return (ENOMEM);
	}

	rw_init(&sc->lock, "armv8crypto");

	ctx_mtx = malloc(sizeof(*ctx_mtx) * (mp_maxid + 1), M_ARMV8_CRYPTO,
	    M_WAITOK|M_ZERO);
	ctx_vfp = malloc(sizeof(*ctx_vfp) * (mp_maxid + 1), M_ARMV8_CRYPTO,
	    M_WAITOK|M_ZERO);

	CPU_FOREACH(i) {
		ctx_vfp[i] = fpu_kern_alloc_ctx(0);
		mtx_init(&ctx_mtx[i], "armv8cryptoctx", NULL, MTX_DEF|MTX_NEW);
	}

	return (0);
}

static int
armv8_crypto_detach(device_t dev)
{
	struct armv8_crypto_softc *sc;
	int i;

	sc = device_get_softc(dev);

	rw_wlock(&sc->lock);
	sc->dying = 1;
	rw_wunlock(&sc->lock);
	crypto_unregister_all(sc->cid);

	rw_destroy(&sc->lock);

	CPU_FOREACH(i) {
		if (ctx_vfp[i] != NULL) {
			mtx_destroy(&ctx_mtx[i]);
			fpu_kern_free_ctx(ctx_vfp[i]);
		}
		ctx_vfp[i] = NULL;
	}
	free(ctx_mtx, M_ARMV8_CRYPTO);
	ctx_mtx = NULL;
	free(ctx_vfp, M_ARMV8_CRYPTO);
	ctx_vfp = NULL;

	return (0);
}
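/*
 * Session parameters accepted by armv8_crypto_probesession() below:
 *
 *	mode	cipher			key bits	IV length
 *	AEAD	AES-GCM (16-byte tag)	128/192/256	AES_GCM_IV_LEN
 *	CIPHER	AES-CBC			128/192/256	AES_BLOCK_LEN
 *	CIPHER	AES-XTS			256/512		AES_XTS_IV_LEN
 *
 * GCM additionally requires the PMULL instructions (sc->has_pmul).
 * The XTS key sizes are doubled because the key carries both the data
 * key and the tweak key.
 */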
#define	SUPPORTED_SES	(CSP_F_SEPARATE_OUTPUT | CSP_F_SEPARATE_AAD)

static int
armv8_crypto_probesession(device_t dev,
    const struct crypto_session_params *csp)
{
	struct armv8_crypto_softc *sc;

	sc = device_get_softc(dev);

	if ((csp->csp_flags & ~(SUPPORTED_SES)) != 0)
		return (EINVAL);

	switch (csp->csp_mode) {
	case CSP_MODE_AEAD:
		switch (csp->csp_cipher_alg) {
		case CRYPTO_AES_NIST_GCM_16:
			if (!sc->has_pmul)
				return (EINVAL);
			if (csp->csp_ivlen != AES_GCM_IV_LEN)
				return (EINVAL);
			if (csp->csp_auth_mlen != 0 &&
			    csp->csp_auth_mlen != GMAC_DIGEST_LEN)
				return (EINVAL);
			switch (csp->csp_cipher_klen * 8) {
			case 128:
			case 192:
			case 256:
				break;
			default:
				return (EINVAL);
			}
			break;
		default:
			return (EINVAL);
		}
		break;
	case CSP_MODE_CIPHER:
		switch (csp->csp_cipher_alg) {
		case CRYPTO_AES_CBC:
			if (csp->csp_ivlen != AES_BLOCK_LEN)
				return (EINVAL);
			switch (csp->csp_cipher_klen * 8) {
			case 128:
			case 192:
			case 256:
				break;
			default:
				return (EINVAL);
			}
			break;
		case CRYPTO_AES_XTS:
			if (csp->csp_ivlen != AES_XTS_IV_LEN)
				return (EINVAL);
			switch (csp->csp_cipher_klen * 8) {
			case 256:
			case 512:
				break;
			default:
				return (EINVAL);
			}
			break;
		default:
			return (EINVAL);
		}
		break;
	default:
		return (EINVAL);
	}
	return (CRYPTODEV_PROBE_ACCEL_SOFTWARE);
}

static int
armv8_crypto_cipher_setup(struct armv8_crypto_session *ses,
    const struct crypto_session_params *csp, const uint8_t *key, int keylen)
{
	__uint128_val_t H;
	struct fpu_kern_ctx *ctx;
	int kt, i;

	/* An XTS key is two AES keys: the data key then the tweak key. */
	if (csp->csp_cipher_alg == CRYPTO_AES_XTS)
		keylen /= 2;

	switch (keylen * 8) {
	case 128:
	case 192:
	case 256:
		break;
	default:
		return (EINVAL);
	}

	kt = is_fpu_kern_thread(0);
	if (!kt) {
		ACQUIRE_CTX(i, ctx);
		fpu_kern_enter(curthread, ctx,
		    FPU_KERN_NORMAL | FPU_KERN_KTHR);
	}

	aes_v8_set_encrypt_key(key,
	    keylen * 8, &ses->enc_schedule);

	if ((csp->csp_cipher_alg == CRYPTO_AES_XTS) ||
	    (csp->csp_cipher_alg == CRYPTO_AES_CBC))
		aes_v8_set_decrypt_key(key,
		    keylen * 8, &ses->dec_schedule);

	/*
	 * Expand the tweak key (the second half of the XTS key);
	 * without this schedule the XTS paths in cipher_process would
	 * use an uninitialized key.
	 */
	if (csp->csp_cipher_alg == CRYPTO_AES_XTS)
		aes_v8_set_encrypt_key(key + keylen,
		    keylen * 8, &ses->xts_schedule.aes_key);

	if (csp->csp_cipher_alg == CRYPTO_AES_NIST_GCM_16) {
		/* H = E_K(0^128), byte-swapped for the GHASH tables. */
		memset(H.c, 0, sizeof(H.c));
		aes_v8_encrypt(H.c, H.c, &ses->enc_schedule);
		H.u[0] = bswap64(H.u[0]);
		H.u[1] = bswap64(H.u[1]);
		gcm_init_v8(ses->Htable, H.u);
	}

	if (!kt) {
		fpu_kern_leave(curthread, ctx);
		RELEASE_CTX(i, ctx);
	}

	return (0);
}
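/*
 * A minimal sketch (not part of this driver) of how an in-kernel
 * consumer would reach armv8_crypto_newsession() through the
 * opencrypto API; the field values mirror what probesession accepts:
 *
 *	struct crypto_session_params csp = {
 *		.csp_mode = CSP_MODE_AEAD,
 *		.csp_cipher_alg = CRYPTO_AES_NIST_GCM_16,
 *		.csp_cipher_klen = 256 / 8,
 *		.csp_ivlen = AES_GCM_IV_LEN,
 *		.csp_cipher_key = key,	(a 32-byte key)
 *	};
 *	crypto_session_t sid;
 *	error = crypto_newsession(&sid, &csp, CRYPTOCAP_F_SOFTWARE);
 */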
static int
armv8_crypto_newsession(device_t dev, crypto_session_t cses,
    const struct crypto_session_params *csp)
{
	struct armv8_crypto_softc *sc;
	struct armv8_crypto_session *ses;
	int error;

	sc = device_get_softc(dev);
	rw_wlock(&sc->lock);
	if (sc->dying) {
		rw_wunlock(&sc->lock);
		return (EINVAL);
	}

	ses = crypto_get_driver_session(cses);
	error = armv8_crypto_cipher_setup(ses, csp, csp->csp_cipher_key,
	    csp->csp_cipher_klen);
	rw_wunlock(&sc->lock);
	return (error);
}

static int
armv8_crypto_process(device_t dev, struct cryptop *crp, int hint __unused)
{
	struct armv8_crypto_session *ses;

	ses = crypto_get_driver_session(crp->crp_session);
	crp->crp_etype = armv8_crypto_cipher_process(ses, crp);
	crypto_done(crp);
	return (0);
}

static uint8_t *
armv8_crypto_cipher_alloc(struct cryptop *crp, int start, int length,
    int *allocated)
{
	uint8_t *addr;

	addr = crypto_contiguous_subsegment(crp, start, length);
	if (addr != NULL) {
		*allocated = 0;
		return (addr);
	}
	/*
	 * Allocate 'length' bytes; this helper is used for both the
	 * payload and the AAD, so the payload length is not the right
	 * size here.
	 */
	addr = malloc(length, M_ARMV8_CRYPTO, M_NOWAIT);
	if (addr != NULL) {
		*allocated = 1;
		crypto_copydata(crp, start, length, addr);
	} else
		*allocated = 0;
	return (addr);
}
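/*
 * Request processing works on three (possibly aliased) buffers:
 * 'buf' is a contiguous view of the input payload (borrowed from the
 * request when it is already contiguous, otherwise a copy), 'authbuf'
 * is the same for the GCM AAD, and 'outbuf' is where the result must
 * end up.  'outcopy' records whether the result still has to be copied
 * back into the request with crypto_copyback() at the end.
 */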
static int
armv8_crypto_cipher_process(struct armv8_crypto_session *ses,
    struct cryptop *crp)
{
	const struct crypto_session_params *csp;
	struct fpu_kern_ctx *ctx;
	uint8_t *buf, *authbuf, *outbuf;
	uint8_t iv[AES_BLOCK_LEN], tag[GMAC_DIGEST_LEN];
	int allocated, authallocated, outallocated, i;
	int encflag;
	int kt;
	int error;
	bool outcopy;

	csp = crypto_get_params(crp->crp_session);
	encflag = CRYPTO_OP_IS_ENCRYPT(crp->crp_op);

	allocated = 0;
	outallocated = 0;
	authallocated = 0;
	authbuf = NULL;
	kt = 1;

	buf = armv8_crypto_cipher_alloc(crp, crp->crp_payload_start,
	    crp->crp_payload_length, &allocated);
	if (buf == NULL)
		return (ENOMEM);

	if (csp->csp_cipher_alg == CRYPTO_AES_NIST_GCM_16) {
		if (crp->crp_aad != NULL)
			authbuf = crp->crp_aad;
		else
			authbuf = armv8_crypto_cipher_alloc(crp,
			    crp->crp_aad_start, crp->crp_aad_length,
			    &authallocated);
		if (authbuf == NULL) {
			error = ENOMEM;
			goto out;
		}
	}

	if (CRYPTO_HAS_OUTPUT_BUFFER(crp)) {
		outbuf = crypto_buffer_contiguous_subsegment(&crp->crp_obuf,
		    crp->crp_payload_output_start, crp->crp_payload_length);
		if (outbuf == NULL) {
			outcopy = true;
			if (allocated)
				outbuf = buf;
			else {
				outbuf = malloc(crp->crp_payload_length,
				    M_ARMV8_CRYPTO, M_NOWAIT);
				if (outbuf == NULL) {
					error = ENOMEM;
					goto out;
				}
				outallocated = true;
			}
		} else
			outcopy = false;
	} else {
		outbuf = buf;
		outcopy = allocated;
	}

	kt = is_fpu_kern_thread(0);
	if (!kt) {
		ACQUIRE_CTX(i, ctx);
		fpu_kern_enter(curthread, ctx,
		    FPU_KERN_NORMAL | FPU_KERN_KTHR);
	}

	/* A per-request key overrides the key set at session creation. */
	if (crp->crp_cipher_key != NULL) {
		armv8_crypto_cipher_setup(ses, csp, crp->crp_cipher_key,
		    csp->csp_cipher_klen);
	}

	crypto_read_iv(crp, iv);

	/* Do work */
	switch (csp->csp_cipher_alg) {
	case CRYPTO_AES_CBC:
		if ((crp->crp_payload_length % AES_BLOCK_LEN) != 0) {
			error = EINVAL;
			goto out;
		}
		if (encflag)
			armv8_aes_encrypt_cbc(&ses->enc_schedule,
			    crp->crp_payload_length, buf, buf, iv);
		else
			armv8_aes_decrypt_cbc(&ses->dec_schedule,
			    crp->crp_payload_length, buf, iv);
		break;
	case CRYPTO_AES_XTS:
		if (encflag)
			armv8_aes_encrypt_xts(&ses->enc_schedule,
			    &ses->xts_schedule.aes_key, crp->crp_payload_length,
			    buf, buf, iv);
		else
			armv8_aes_decrypt_xts(&ses->dec_schedule,
			    &ses->xts_schedule.aes_key, crp->crp_payload_length,
			    buf, buf, iv);
		break;
	case CRYPTO_AES_NIST_GCM_16:
		if (encflag) {
			memset(tag, 0, sizeof(tag));
			armv8_aes_encrypt_gcm(&ses->enc_schedule,
			    crp->crp_payload_length,
			    buf, outbuf,
			    crp->crp_aad_length, authbuf,
			    tag, iv, ses->Htable);
			crypto_copyback(crp, crp->crp_digest_start,
			    sizeof(tag), tag);
		} else {
			crypto_copydata(crp, crp->crp_digest_start,
			    sizeof(tag), tag);
			if (armv8_aes_decrypt_gcm(&ses->enc_schedule,
			    crp->crp_payload_length,
			    buf, outbuf,
			    crp->crp_aad_length, authbuf,
			    tag, iv, ses->Htable) != 0) {
				error = EBADMSG;
				goto out;
			}
		}
		break;
	}

	/*
	 * The CBC and XTS paths above operate on 'buf' in place; when a
	 * distinct output buffer was found or staged, copy the result
	 * over so it is not lost.  GCM already wrote into 'outbuf'.
	 */
	if (outbuf != buf && csp->csp_cipher_alg != CRYPTO_AES_NIST_GCM_16)
		memcpy(outbuf, buf, crp->crp_payload_length);

	if (outcopy)
		crypto_copyback(crp, CRYPTO_HAS_OUTPUT_BUFFER(crp) ?
		    crp->crp_payload_output_start : crp->crp_payload_start,
		    crp->crp_payload_length, outbuf);

	error = 0;
out:
	if (!kt) {
		fpu_kern_leave(curthread, ctx);
		RELEASE_CTX(i, ctx);
	}

	if (allocated)
		zfree(buf, M_ARMV8_CRYPTO);
	if (authallocated)
		zfree(authbuf, M_ARMV8_CRYPTO);
	if (outallocated)
		zfree(outbuf, M_ARMV8_CRYPTO);
	explicit_bzero(iv, sizeof(iv));
	explicit_bzero(tag, sizeof(tag));

	return (error);
}

static device_method_t armv8_crypto_methods[] = {
	DEVMETHOD(device_identify,	armv8_crypto_identify),
	DEVMETHOD(device_probe,		armv8_crypto_probe),
	DEVMETHOD(device_attach,	armv8_crypto_attach),
	DEVMETHOD(device_detach,	armv8_crypto_detach),

	DEVMETHOD(cryptodev_probesession, armv8_crypto_probesession),
	DEVMETHOD(cryptodev_newsession,	armv8_crypto_newsession),
	DEVMETHOD(cryptodev_process,	armv8_crypto_process),

	DEVMETHOD_END,
};

static DEFINE_CLASS_0(armv8crypto, armv8_crypto_driver, armv8_crypto_methods,
    sizeof(struct armv8_crypto_softc));
static devclass_t armv8_crypto_devclass;

DRIVER_MODULE(armv8crypto, nexus, armv8_crypto_driver, armv8_crypto_devclass,
    0, 0);