// SPDX-License-Identifier: GPL-2.0-only
/* Glue code for AES encryption optimized for sparc64 crypto opcodes.
 *
 * This is based largely upon arch/x86/crypto/aesni-intel_glue.c
 *
 * Copyright (C) 2008, Intel Corp.
 *    Author: Huang Ying <ying.huang@intel.com>
 *
 * Added RFC4106 AES-GCM support for 128-bit keys under the AEAD
 * interface for 64-bit kernels.
 *    Authors: Adrian Hoban <adrian.hoban@intel.com>
 *             Gabriele Paoloni <gabriele.paoloni@intel.com>
 *             Tadeusz Struk (tadeusz.struk@intel.com)
 *             Aidan O'Mahony (aidan.o.mahony@intel.com)
 *    Copyright (c) 2010, Intel Corporation.
 */

#define pr_fmt(fmt) KBUILD_MODNAME ": " fmt

#include <linux/crypto.h>
#include <linux/init.h>
#include <linux/module.h>
#include <linux/mm.h>
#include <linux/types.h>
#include <crypto/algapi.h>
#include <crypto/aes.h>
#include <crypto/internal/skcipher.h>

#include <asm/fpumacro.h>
#include <asm/opcodes.h>
#include <asm/pstate.h>
#include <asm/elf.h>

struct aes_ops {
	void (*load_encrypt_keys)(const u64 *key);
	void (*load_decrypt_keys)(const u64 *key);
	void (*ecb_encrypt)(const u64 *key, const u64 *input, u64 *output,
			    unsigned int len);
	void (*ecb_decrypt)(const u64 *key, const u64 *input, u64 *output,
			    unsigned int len);
	void (*cbc_encrypt)(const u64 *key, const u64 *input, u64 *output,
			    unsigned int len, u64 *iv);
	void (*cbc_decrypt)(const u64 *key, const u64 *input, u64 *output,
			    unsigned int len, u64 *iv);
	void (*ctr_crypt)(const u64 *key, const u64 *input, u64 *output,
			  unsigned int len, u64 *iv);
};

struct crypto_sparc64_aes_ctx {
	struct aes_ops *ops;
	u64 key[AES_MAX_KEYLENGTH / sizeof(u64)];
	u32 key_length;
	u32 expanded_key_length;
};

/*
 * Key expansion and per-key-size cipher routines, implemented in
 * aes_asm.S using the sparc64 AES opcodes.
 */
extern void aes_sparc64_key_expand(const u32 *in_key, u64 *output_key,
				   unsigned int key_len);

extern void aes_sparc64_load_encrypt_keys_128(const u64 *key);
extern void aes_sparc64_load_encrypt_keys_192(const u64 *key);
extern void aes_sparc64_load_encrypt_keys_256(const u64 *key);

extern void aes_sparc64_load_decrypt_keys_128(const u64 *key);
extern void aes_sparc64_load_decrypt_keys_192(const u64 *key);
extern void aes_sparc64_load_decrypt_keys_256(const u64 *key);

extern void aes_sparc64_ecb_encrypt_128(const u64 *key, const u64 *input,
					u64 *output, unsigned int len);
extern void aes_sparc64_ecb_encrypt_192(const u64 *key, const u64 *input,
					u64 *output, unsigned int len);
extern void aes_sparc64_ecb_encrypt_256(const u64 *key, const u64 *input,
					u64 *output, unsigned int len);

extern void aes_sparc64_ecb_decrypt_128(const u64 *key, const u64 *input,
					u64 *output, unsigned int len);
extern void aes_sparc64_ecb_decrypt_192(const u64 *key, const u64 *input,
					u64 *output, unsigned int len);
extern void aes_sparc64_ecb_decrypt_256(const u64 *key, const u64 *input,
					u64 *output, unsigned int len);

extern void aes_sparc64_cbc_encrypt_128(const u64 *key, const u64 *input,
					u64 *output, unsigned int len,
					u64 *iv);
extern void aes_sparc64_cbc_encrypt_192(const u64 *key, const u64 *input,
					u64 *output, unsigned int len,
					u64 *iv);
extern void aes_sparc64_cbc_encrypt_256(const u64 *key, const u64 *input,
					u64 *output, unsigned int len,
					u64 *iv);

extern void aes_sparc64_cbc_decrypt_128(const u64 *key, const u64 *input,
					u64 *output, unsigned int len,
					u64 *iv);
extern void aes_sparc64_cbc_decrypt_192(const u64 *key, const u64 *input,
					u64 *output, unsigned int len,
					u64 *iv);
extern void aes_sparc64_cbc_decrypt_256(const u64 *key, const u64 *input,
					u64 *output, unsigned int len,
					u64 *iv);

extern void aes_sparc64_ctr_crypt_128(const u64 *key, const u64 *input,
				      u64 *output, unsigned int len,
				      u64 *iv);
extern void aes_sparc64_ctr_crypt_192(const u64 *key, const u64 *input,
				      u64 *output, unsigned int len,
				      u64 *iv);
extern void aes_sparc64_ctr_crypt_256(const u64 *key, const u64 *input,
				      u64 *output, unsigned int len,
				      u64 *iv);

static struct aes_ops aes128_ops = {
	.load_encrypt_keys = aes_sparc64_load_encrypt_keys_128,
	.load_decrypt_keys = aes_sparc64_load_decrypt_keys_128,
	.ecb_encrypt = aes_sparc64_ecb_encrypt_128,
	.ecb_decrypt = aes_sparc64_ecb_decrypt_128,
	.cbc_encrypt = aes_sparc64_cbc_encrypt_128,
	.cbc_decrypt = aes_sparc64_cbc_decrypt_128,
	.ctr_crypt = aes_sparc64_ctr_crypt_128,
};

static struct aes_ops aes192_ops = {
	.load_encrypt_keys = aes_sparc64_load_encrypt_keys_192,
	.load_decrypt_keys = aes_sparc64_load_decrypt_keys_192,
	.ecb_encrypt = aes_sparc64_ecb_encrypt_192,
	.ecb_decrypt = aes_sparc64_ecb_decrypt_192,
	.cbc_encrypt = aes_sparc64_cbc_encrypt_192,
	.cbc_decrypt = aes_sparc64_cbc_decrypt_192,
	.ctr_crypt = aes_sparc64_ctr_crypt_192,
};

static struct aes_ops aes256_ops = {
	.load_encrypt_keys = aes_sparc64_load_encrypt_keys_256,
	.load_decrypt_keys = aes_sparc64_load_decrypt_keys_256,
	.ecb_encrypt = aes_sparc64_ecb_encrypt_256,
	.ecb_decrypt = aes_sparc64_ecb_decrypt_256,
	.cbc_encrypt = aes_sparc64_cbc_encrypt_256,
	.cbc_decrypt = aes_sparc64_cbc_decrypt_256,
	.ctr_crypt = aes_sparc64_ctr_crypt_256,
};

static int aes_set_key_skcipher(struct crypto_skcipher *tfm, const u8 *in_key,
				unsigned int key_len)
{
	struct crypto_sparc64_aes_ctx *ctx = crypto_skcipher_ctx(tfm);

	switch (key_len) {
	case AES_KEYSIZE_128:
		ctx->expanded_key_length = 0xb0;
		ctx->ops = &aes128_ops;
		break;

	case AES_KEYSIZE_192:
		ctx->expanded_key_length = 0xd0;
		ctx->ops = &aes192_ops;
		break;

	case AES_KEYSIZE_256:
		ctx->expanded_key_length = 0xf0;
		ctx->ops = &aes256_ops;
		break;

	default:
		return -EINVAL;
	}

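	/*
	 * Expand the user key into the full AES key schedule.  The
	 * expanded_key_length values chosen above (0xb0, 0xd0 and 0xf0
	 * bytes) correspond to the 11, 13 and 15 16-byte round keys of
	 * AES-128, AES-192 and AES-256 respectively.
	 */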
	aes_sparc64_key_expand((const u32 *)in_key, &ctx->key[0], key_len);
	ctx->key_length = key_len;

	return 0;
}

static int ecb_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	const struct crypto_sparc64_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, true);
	if (err)
		return err;

	ctx->ops->load_encrypt_keys(&ctx->key[0]);
	while ((nbytes = walk.nbytes) != 0) {
		ctx->ops->ecb_encrypt(&ctx->key[0], walk.src.virt.addr,
				      walk.dst.virt.addr,
				      round_down(nbytes, AES_BLOCK_SIZE));
		err = skcipher_walk_done(&walk, nbytes % AES_BLOCK_SIZE);
	}
	fprs_write(0);
	return err;
}

static int ecb_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	const struct crypto_sparc64_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	const u64 *key_end;
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, true);
	if (err)
		return err;

	ctx->ops->load_decrypt_keys(&ctx->key[0]);
	key_end = &ctx->key[ctx->expanded_key_length / sizeof(u64)];
	while ((nbytes = walk.nbytes) != 0) {
		ctx->ops->ecb_decrypt(key_end, walk.src.virt.addr,
				      walk.dst.virt.addr,
				      round_down(nbytes, AES_BLOCK_SIZE));
		err = skcipher_walk_done(&walk, nbytes % AES_BLOCK_SIZE);
	}
	fprs_write(0);

	return err;
}

static int cbc_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	const struct crypto_sparc64_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, true);
	if (err)
		return err;

	ctx->ops->load_encrypt_keys(&ctx->key[0]);
	while ((nbytes = walk.nbytes) != 0) {
		ctx->ops->cbc_encrypt(&ctx->key[0], walk.src.virt.addr,
				      walk.dst.virt.addr,
				      round_down(nbytes, AES_BLOCK_SIZE),
				      walk.iv);
		err = skcipher_walk_done(&walk, nbytes % AES_BLOCK_SIZE);
	}
	fprs_write(0);
	return err;
}

static int cbc_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	const struct crypto_sparc64_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	const u64 *key_end;
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, true);
	if (err)
		return err;

	ctx->ops->load_decrypt_keys(&ctx->key[0]);
	key_end = &ctx->key[ctx->expanded_key_length / sizeof(u64)];
	while ((nbytes = walk.nbytes) != 0) {
		ctx->ops->cbc_decrypt(key_end, walk.src.virt.addr,
				      walk.dst.virt.addr,
				      round_down(nbytes, AES_BLOCK_SIZE),
				      walk.iv);
		err = skcipher_walk_done(&walk, nbytes % AES_BLOCK_SIZE);
	}
	fprs_write(0);

	return err;
}

static void ctr_crypt_final(const struct crypto_sparc64_aes_ctx *ctx,
			    struct skcipher_walk *walk)
{
	u8 *ctrblk = walk->iv;
	u64 keystream[AES_BLOCK_SIZE / sizeof(u64)];
	const u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	unsigned int nbytes = walk->nbytes;

	ctx->ops->ecb_encrypt(&ctx->key[0], (const u64 *)ctrblk,
			      keystream, AES_BLOCK_SIZE);
	crypto_xor_cpy(dst, (u8 *) keystream, src, nbytes);
	crypto_inc(ctrblk, AES_BLOCK_SIZE);
}

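/*
 * ctr_crypt() below processes full blocks with the opcode-accelerated
 * ctr_crypt routine and hands any trailing partial block to
 * ctr_crypt_final() above, which XORs the remaining bytes with one
 * encrypted counter block, so requests need not be block-aligned.
 */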
static int ctr_crypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	const struct crypto_sparc64_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, true);
	if (err)
		return err;

	ctx->ops->load_encrypt_keys(&ctx->key[0]);
	while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) {
		ctx->ops->ctr_crypt(&ctx->key[0], walk.src.virt.addr,
				    walk.dst.virt.addr,
				    round_down(nbytes, AES_BLOCK_SIZE),
				    walk.iv);
		err = skcipher_walk_done(&walk, nbytes % AES_BLOCK_SIZE);
	}
	if (walk.nbytes) {
		ctr_crypt_final(ctx, &walk);
		err = skcipher_walk_done(&walk, 0);
	}
	fprs_write(0);
	return err;
}

static struct skcipher_alg skcipher_algs[] = {
	{
		.base.cra_name = "ecb(aes)",
		.base.cra_driver_name = "ecb-aes-sparc64",
		.base.cra_priority = SPARC_CR_OPCODE_PRIORITY,
		.base.cra_blocksize = AES_BLOCK_SIZE,
		.base.cra_ctxsize = sizeof(struct crypto_sparc64_aes_ctx),
		.base.cra_alignmask = 7,
		.base.cra_module = THIS_MODULE,
		.min_keysize = AES_MIN_KEY_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE,
		.setkey = aes_set_key_skcipher,
		.encrypt = ecb_encrypt,
		.decrypt = ecb_decrypt,
	}, {
		.base.cra_name = "cbc(aes)",
		.base.cra_driver_name = "cbc-aes-sparc64",
		.base.cra_priority = SPARC_CR_OPCODE_PRIORITY,
		.base.cra_blocksize = AES_BLOCK_SIZE,
		.base.cra_ctxsize = sizeof(struct crypto_sparc64_aes_ctx),
		.base.cra_alignmask = 7,
		.base.cra_module = THIS_MODULE,
		.min_keysize = AES_MIN_KEY_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE,
		.ivsize = AES_BLOCK_SIZE,
		.setkey = aes_set_key_skcipher,
		.encrypt = cbc_encrypt,
		.decrypt = cbc_decrypt,
	}, {
		.base.cra_name = "ctr(aes)",
		.base.cra_driver_name = "ctr-aes-sparc64",
		.base.cra_priority = SPARC_CR_OPCODE_PRIORITY,
		.base.cra_blocksize = 1,
		.base.cra_ctxsize = sizeof(struct crypto_sparc64_aes_ctx),
		.base.cra_alignmask = 7,
		.base.cra_module = THIS_MODULE,
		.min_keysize = AES_MIN_KEY_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE,
		.ivsize = AES_BLOCK_SIZE,
		.setkey = aes_set_key_skcipher,
		.encrypt = ctr_crypt,
		.decrypt = ctr_crypt,
		.chunksize = AES_BLOCK_SIZE,
	}
};

static bool __init sparc64_has_aes_opcode(void)
{
	unsigned long cfr;

	if (!(sparc64_elf_hwcap & HWCAP_SPARC_CRYPTO))
		return false;

	__asm__ __volatile__("rd %%asr26, %0" : "=r" (cfr));
	if (!(cfr & CFR_AES))
		return false;

	return true;
}

static int __init aes_sparc64_mod_init(void)
{
	if (!sparc64_has_aes_opcode()) {
		pr_info("sparc64 aes opcodes not available.\n");
		return -ENODEV;
	}
	pr_info("Using sparc64 aes opcodes optimized AES implementation\n");
	return crypto_register_skciphers(skcipher_algs,
					 ARRAY_SIZE(skcipher_algs));
}

static void __exit aes_sparc64_mod_fini(void)
{
	crypto_unregister_skciphers(skcipher_algs, ARRAY_SIZE(skcipher_algs));
}

module_init(aes_sparc64_mod_init);
module_exit(aes_sparc64_mod_fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm, sparc64 aes opcode accelerated");

MODULE_ALIAS_CRYPTO("aes");

#include "crop_devid.c"