/*-
 * Copyright (c) 2005-2008 Pawel Jakub Dawidek <pjd@FreeBSD.org>
 * Copyright (c) 2010 Konstantin Belousov <kib@FreeBSD.org>
 * Copyright (c) 2014,2016 The FreeBSD Foundation
 * All rights reserved.
 *
 * Portions of this software were developed by John-Mark Gurney
 * under sponsorship of the FreeBSD Foundation and
 * Rubicon Communications, LLC (Netgate).
 *
 * This software was developed by Andrew Turner under
 * sponsorship from the FreeBSD Foundation.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHORS AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHORS OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 */

/*
 * This is based on the aesni code.
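 * It uses the ARMv8 Cryptography Extensions to accelerate AES-CBC and
 * AES-XTS.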
 */

#include <sys/cdefs.h>
__FBSDID("$FreeBSD$");

#include <sys/param.h>
#include <sys/systm.h>
#include <sys/kernel.h>
#include <sys/bus.h>
#include <sys/endian.h>
#include <sys/lock.h>
#include <sys/malloc.h>
#include <sys/mbuf.h>
#include <sys/module.h>
#include <sys/mutex.h>
#include <sys/queue.h>
#include <sys/rwlock.h>
#include <sys/smp.h>
#include <sys/uio.h>

#include <machine/vfp.h>

#include <opencrypto/cryptodev.h>
#include <cryptodev_if.h>
#include <crypto/armv8/armv8_crypto.h>
#include <crypto/rijndael/rijndael.h>

struct armv8_crypto_softc {
	int		dieing;
	int32_t		cid;
	struct rwlock	lock;
};

static struct mtx *ctx_mtx;
static struct fpu_kern_ctx **ctx_vfp;

#define AQUIRE_CTX(i, ctx)					\
	do {							\
		(i) = PCPU_GET(cpuid);				\
		mtx_lock(&ctx_mtx[(i)]);			\
		(ctx) = ctx_vfp[(i)];				\
	} while (0)
#define RELEASE_CTX(i, ctx)					\
	do {							\
		mtx_unlock(&ctx_mtx[(i)]);			\
		(i) = -1;					\
		(ctx) = NULL;					\
	} while (0)

static int armv8_crypto_cipher_process(struct armv8_crypto_session *,
    struct cryptop *);

MALLOC_DEFINE(M_ARMV8_CRYPTO, "armv8_crypto", "ARMv8 Crypto Data");

static void
armv8_crypto_identify(driver_t *drv, device_t parent)
{

	/* NB: order 10 is so we get attached after h/w devices */
	if (device_find_child(parent, "armv8crypto", -1) == NULL &&
	    BUS_ADD_CHILD(parent, 10, "armv8crypto", -1) == 0)
		panic("ARMv8 crypto: could not attach");
}

static int
armv8_crypto_probe(device_t dev)
{
	uint64_t reg;
	int ret = ENXIO;

	reg = READ_SPECIALREG(id_aa64isar0_el1);

	switch (ID_AA64ISAR0_AES_VAL(reg)) {
	case ID_AA64ISAR0_AES_BASE:
	case ID_AA64ISAR0_AES_PMULL:
		ret = 0;
		break;
	}

	device_set_desc_copy(dev, "AES-CBC,AES-XTS");

	/* TODO: Check more fields as we support more features */

	return (ret);
}

static int
armv8_crypto_attach(device_t dev)
{
	struct armv8_crypto_softc *sc;
	int i;

	sc = device_get_softc(dev);
	sc->dieing = 0;

	sc->cid = crypto_get_driverid(dev, sizeof(struct armv8_crypto_session),
	    CRYPTOCAP_F_SOFTWARE | CRYPTOCAP_F_SYNC | CRYPTOCAP_F_ACCEL_SOFTWARE);
	if (sc->cid < 0) {
		device_printf(dev, "Could not get crypto driver id.\n");
		return (ENOMEM);
	}

	rw_init(&sc->lock, "armv8crypto");

	ctx_mtx = malloc(sizeof(*ctx_mtx) * (mp_maxid + 1), M_ARMV8_CRYPTO,
	    M_WAITOK|M_ZERO);
	ctx_vfp = malloc(sizeof(*ctx_vfp) * (mp_maxid + 1), M_ARMV8_CRYPTO,
	    M_WAITOK|M_ZERO);

	CPU_FOREACH(i) {
		ctx_vfp[i] = fpu_kern_alloc_ctx(0);
		mtx_init(&ctx_mtx[i], "armv8cryptoctx", NULL, MTX_DEF|MTX_NEW);
	}

	return (0);
}

static int
armv8_crypto_detach(device_t dev)
{
	struct armv8_crypto_softc *sc;
	int i;

	sc = device_get_softc(dev);

	rw_wlock(&sc->lock);
	sc->dieing = 1;
	rw_wunlock(&sc->lock);
	crypto_unregister_all(sc->cid);

	rw_destroy(&sc->lock);

	CPU_FOREACH(i) {
		if (ctx_vfp[i] != NULL) {
			mtx_destroy(&ctx_mtx[i]);
			fpu_kern_free_ctx(ctx_vfp[i]);
		}
		ctx_vfp[i] = NULL;
	}
	free(ctx_mtx, M_ARMV8_CRYPTO);
	ctx_mtx = NULL;
	free(ctx_vfp, M_ARMV8_CRYPTO);
	ctx_vfp = NULL;

	return (0);
}

static int
armv8_crypto_probesession(device_t dev,
    const struct crypto_session_params *csp)
{

	if (csp->csp_flags != 0)
		return (EINVAL);

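	/*
	 * Only plain cipher sessions are supported: AES-CBC with
	 * 128/192/256-bit keys and AES-XTS with 256- or 512-bit
	 * (double-length) keys.
	 */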
	switch (csp->csp_mode) {
	case CSP_MODE_CIPHER:
		switch (csp->csp_cipher_alg) {
		case CRYPTO_AES_CBC:
			if (csp->csp_ivlen != AES_BLOCK_LEN)
				return (EINVAL);
			switch (csp->csp_cipher_klen * 8) {
			case 128:
			case 192:
			case 256:
				break;
			default:
				return (EINVAL);
			}
			break;
		case CRYPTO_AES_XTS:
			if (csp->csp_ivlen != AES_XTS_IV_LEN)
				return (EINVAL);
			switch (csp->csp_cipher_klen * 8) {
			case 256:
			case 512:
				break;
			default:
				return (EINVAL);
			}
			break;
		default:
			return (EINVAL);
		}
		break;
	default:
		return (EINVAL);
	}
	return (CRYPTODEV_PROBE_ACCEL_SOFTWARE);
}

static void
armv8_crypto_cipher_setup(struct armv8_crypto_session *ses,
    const struct crypto_session_params *csp, const uint8_t *key, int keylen)
{
	int i;

	if (csp->csp_cipher_alg == CRYPTO_AES_XTS)
		keylen /= 2;

	switch (keylen * 8) {
	case 128:
		ses->rounds = AES128_ROUNDS;
		break;
	case 192:
		ses->rounds = AES192_ROUNDS;
		break;
	case 256:
		ses->rounds = AES256_ROUNDS;
		break;
	default:
		panic("invalid AES key length");
	}

	rijndaelKeySetupEnc(ses->enc_schedule, key, keylen * 8);
	rijndaelKeySetupDec(ses->dec_schedule, key, keylen * 8);
	if (csp->csp_cipher_alg == CRYPTO_AES_XTS)
		rijndaelKeySetupEnc(ses->xts_schedule, key + keylen, keylen * 8);

	for (i = 0; i < nitems(ses->enc_schedule); i++) {
		ses->enc_schedule[i] = bswap32(ses->enc_schedule[i]);
		ses->dec_schedule[i] = bswap32(ses->dec_schedule[i]);
		if (csp->csp_cipher_alg == CRYPTO_AES_XTS)
			ses->xts_schedule[i] = bswap32(ses->xts_schedule[i]);
	}
}

static int
armv8_crypto_newsession(device_t dev, crypto_session_t cses,
    const struct crypto_session_params *csp)
{
	struct armv8_crypto_softc *sc;
	struct armv8_crypto_session *ses;

	sc = device_get_softc(dev);
	rw_wlock(&sc->lock);
	if (sc->dieing) {
		rw_wunlock(&sc->lock);
		return (EINVAL);
	}

	ses = crypto_get_driver_session(cses);
	armv8_crypto_cipher_setup(ses, csp, csp->csp_cipher_key,
	    csp->csp_cipher_klen);
	rw_wunlock(&sc->lock);
	return (0);
}

static int
armv8_crypto_process(device_t dev, struct cryptop *crp, int hint __unused)
{
	struct armv8_crypto_session *ses;
	int error;

	/* We can only handle full blocks for now */
	if ((crp->crp_payload_length % AES_BLOCK_LEN) != 0) {
		error = EINVAL;
		goto out;
	}

	ses = crypto_get_driver_session(crp->crp_session);
	error = armv8_crypto_cipher_process(ses, crp);

out:
	crp->crp_etype = error;
	crypto_done(crp);
	return (0);
}

static uint8_t *
armv8_crypto_cipher_alloc(struct cryptop *crp, int *allocated)
{
	uint8_t *addr;

	addr = crypto_contiguous_subsegment(crp, crp->crp_payload_start,
	    crp->crp_payload_length);
	if (addr != NULL) {
		*allocated = 0;
		return (addr);
	}
	addr = malloc(crp->crp_payload_length, M_ARMV8_CRYPTO, M_NOWAIT);
	if (addr != NULL) {
		*allocated = 1;
		crypto_copydata(crp, crp->crp_payload_start,
		    crp->crp_payload_length, addr);
	} else
		*allocated = 0;
	return (addr);
}

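/*
 * Run a single cipher request. The payload is processed in a contiguous
 * buffer (borrowed from the request when possible, otherwise a temporary
 * copy), and the VFP/SIMD unit is borrowed around the AES work with
 * fpu_kern_enter()/fpu_kern_leave() unless we are already an FPU kernel
 * thread.
 */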
static int
armv8_crypto_cipher_process(struct armv8_crypto_session *ses,
    struct cryptop *crp)
{
	const struct crypto_session_params *csp;
	struct fpu_kern_ctx *ctx;
	uint8_t *buf;
	uint8_t iv[AES_BLOCK_LEN];
	int allocated, i;
	int encflag;
	int kt;

	csp = crypto_get_params(crp->crp_session);
	encflag = CRYPTO_OP_IS_ENCRYPT(crp->crp_op);

	buf = armv8_crypto_cipher_alloc(crp, &allocated);
	if (buf == NULL)
		return (ENOMEM);

	kt = is_fpu_kern_thread(0);
	if (!kt) {
		AQUIRE_CTX(i, ctx);
		fpu_kern_enter(curthread, ctx,
		    FPU_KERN_NORMAL | FPU_KERN_KTHR);
	}

	if (crp->crp_cipher_key != NULL) {
		armv8_crypto_cipher_setup(ses, csp, crp->crp_cipher_key,
		    csp->csp_cipher_klen);
	}

	crypto_read_iv(crp, iv);

	/* Do work */
	switch (csp->csp_cipher_alg) {
	case CRYPTO_AES_CBC:
		if (encflag)
			armv8_aes_encrypt_cbc(ses->rounds, ses->enc_schedule,
			    crp->crp_payload_length, buf, buf, iv);
		else
			armv8_aes_decrypt_cbc(ses->rounds, ses->dec_schedule,
			    crp->crp_payload_length, buf, iv);
		break;
	case CRYPTO_AES_XTS:
		if (encflag)
			armv8_aes_encrypt_xts(ses->rounds, ses->enc_schedule,
			    ses->xts_schedule, crp->crp_payload_length, buf,
			    buf, iv);
		else
			armv8_aes_decrypt_xts(ses->rounds, ses->dec_schedule,
			    ses->xts_schedule, crp->crp_payload_length, buf,
			    buf, iv);
		break;
	}

	if (allocated)
		crypto_copyback(crp, crp->crp_payload_start,
		    crp->crp_payload_length, buf);

	if (!kt) {
		fpu_kern_leave(curthread, ctx);
		RELEASE_CTX(i, ctx);
	}
	if (allocated)
		zfree(buf, M_ARMV8_CRYPTO);
	return (0);
}

static device_method_t armv8_crypto_methods[] = {
	DEVMETHOD(device_identify,	armv8_crypto_identify),
	DEVMETHOD(device_probe,		armv8_crypto_probe),
	DEVMETHOD(device_attach,	armv8_crypto_attach),
	DEVMETHOD(device_detach,	armv8_crypto_detach),

	DEVMETHOD(cryptodev_probesession, armv8_crypto_probesession),
	DEVMETHOD(cryptodev_newsession,	armv8_crypto_newsession),
	DEVMETHOD(cryptodev_process,	armv8_crypto_process),

	DEVMETHOD_END,
};

static DEFINE_CLASS_0(armv8crypto, armv8_crypto_driver, armv8_crypto_methods,
    sizeof(struct armv8_crypto_softc));
static devclass_t armv8_crypto_devclass;

DRIVER_MODULE(armv8crypto, nexus, armv8_crypto_driver, armv8_crypto_devclass,
    0, 0);