/*
 * Cryptographic API.
 *
 * s390 implementation of the AES Cipher Algorithm.
 *
 * s390 Version:
 *   Copyright IBM Corp. 2005,2007
 *   Author(s): Jan Glauber (jang@de.ibm.com)
 *
 * Derived from "crypto/aes.c"
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <crypto/algapi.h>
#include <linux/module.h>
#include <linux/init.h>
#include "crypt_s390.h"

#define AES_MIN_KEY_SIZE	16
#define AES_MAX_KEY_SIZE	32

/* data block size for all key lengths */
#define AES_BLOCK_SIZE		16

#define AES_KEYLEN_128		1
#define AES_KEYLEN_192		2
#define AES_KEYLEN_256		4

/* bit mask of the key lengths the CPACF hardware supports, set at init */
static char keylen_flag;

struct s390_aes_ctx {
	/*
	 * iv must stay directly in front of key: for the chaining
	 * (KMC) functions the hardware parameter block is the
	 * chaining value followed by the key, so &iv is handed to
	 * the instruction as one contiguous block.
	 */
	u8 iv[AES_BLOCK_SIZE];
	u8 key[AES_MAX_KEY_SIZE];
	long enc;
	long dec;
	int key_len;
};

static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
		       unsigned int key_len)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
	u32 *flags = &tfm->crt_flags;

	/* reject key lengths the hardware does not support */
	switch (key_len) {
	case 16:
		if (!(keylen_flag & AES_KEYLEN_128))
			goto fail;
		break;
	case 24:
		if (!(keylen_flag & AES_KEYLEN_192))
			goto fail;
		break;
	case 32:
		if (!(keylen_flag & AES_KEYLEN_256))
			goto fail;
		break;
	default:
		goto fail;
	}

	sctx->key_len = key_len;
	memcpy(sctx->key, in_key, key_len);
	return 0;
fail:
	*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
	return -EINVAL;
}

static void aes_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
	const struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	switch (sctx->key_len) {
	case 16:
		crypt_s390_km(KM_AES_128_ENCRYPT, &sctx->key, out, in,
			      AES_BLOCK_SIZE);
		break;
	case 24:
		crypt_s390_km(KM_AES_192_ENCRYPT, &sctx->key, out, in,
			      AES_BLOCK_SIZE);
		break;
	case 32:
		crypt_s390_km(KM_AES_256_ENCRYPT, &sctx->key, out, in,
			      AES_BLOCK_SIZE);
		break;
	}
}

static void aes_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
	const struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	switch (sctx->key_len) {
	case 16:
		crypt_s390_km(KM_AES_128_DECRYPT, &sctx->key, out, in,
			      AES_BLOCK_SIZE);
		break;
	case 24:
		crypt_s390_km(KM_AES_192_DECRYPT, &sctx->key, out, in,
			      AES_BLOCK_SIZE);
		break;
	case 32:
		crypt_s390_km(KM_AES_256_DECRYPT, &sctx->key, out, in,
			      AES_BLOCK_SIZE);
		break;
	}
}

static struct crypto_alg aes_alg = {
	.cra_name		= "aes",
	.cra_driver_name	= "aes-s390",
	.cra_priority		= CRYPT_S390_PRIORITY,
	.cra_flags		= CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct s390_aes_ctx),
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(aes_alg.cra_list),
	.cra_u			= {
		.cipher = {
			.cia_min_keysize	= AES_MIN_KEY_SIZE,
			.cia_max_keysize	= AES_MAX_KEY_SIZE,
			.cia_setkey		= aes_set_key,
			.cia_encrypt		= aes_encrypt,
			.cia_decrypt		= aes_decrypt,
		}
	}
};
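/*
 * Illustrative sketch (not part of this driver): a kernel caller would
 * reach the single-block cipher above through the generic crypto API,
 * e.g.:
 *
 *	struct crypto_cipher *tfm;
 *	u8 key[16], in[AES_BLOCK_SIZE], out[AES_BLOCK_SIZE];
 *
 *	tfm = crypto_alloc_cipher("aes", 0, 0);
 *	if (!IS_ERR(tfm)) {
 *		crypto_cipher_setkey(tfm, key, sizeof(key));
 *		crypto_cipher_encrypt_one(tfm, out, in);
 *		crypto_free_cipher(tfm);
 *	}
 *
 * The crypto core prefers "aes-s390" over the generic C implementation
 * because of its higher cra_priority.
 */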
static int ecb_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
			   unsigned int key_len)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	switch (key_len) {
	case 16:
		sctx->enc = KM_AES_128_ENCRYPT;
		sctx->dec = KM_AES_128_DECRYPT;
		break;
	case 24:
		sctx->enc = KM_AES_192_ENCRYPT;
		sctx->dec = KM_AES_192_DECRYPT;
		break;
	case 32:
		sctx->enc = KM_AES_256_ENCRYPT;
		sctx->dec = KM_AES_256_DECRYPT;
		break;
	}

	/* any other key length falls through and is rejected here */
	return aes_set_key(tfm, in_key, key_len);
}

static int ecb_aes_crypt(struct blkcipher_desc *desc, long func, void *param,
			 struct blkcipher_walk *walk)
{
	int ret = blkcipher_walk_virt(desc, walk);
	unsigned int nbytes;

	while ((nbytes = walk->nbytes)) {
		/* only use complete blocks */
		unsigned int n = nbytes & ~(AES_BLOCK_SIZE - 1);
		u8 *out = walk->dst.virt.addr;
		u8 *in = walk->src.virt.addr;

		ret = crypt_s390_km(func, param, out, in, n);
		BUG_ON((ret < 0) || (ret != n));

		nbytes &= AES_BLOCK_SIZE - 1;
		ret = blkcipher_walk_done(desc, walk, nbytes);
	}

	return ret;
}

static int ecb_aes_encrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ecb_aes_crypt(desc, sctx->enc, sctx->key, &walk);
}

static int ecb_aes_decrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ecb_aes_crypt(desc, sctx->dec, sctx->key, &walk);
}

static struct crypto_alg ecb_aes_alg = {
	.cra_name		= "ecb(aes)",
	.cra_driver_name	= "ecb-aes-s390",
	.cra_priority		= CRYPT_S390_COMPOSITE_PRIORITY,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct s390_aes_ctx),
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(ecb_aes_alg.cra_list),
	.cra_u			= {
		.blkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.setkey		= ecb_aes_set_key,
			.encrypt	= ecb_aes_encrypt,
			.decrypt	= ecb_aes_decrypt,
		}
	}
};

static int cbc_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
			   unsigned int key_len)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	switch (key_len) {
	case 16:
		sctx->enc = KMC_AES_128_ENCRYPT;
		sctx->dec = KMC_AES_128_DECRYPT;
		break;
	case 24:
		sctx->enc = KMC_AES_192_ENCRYPT;
		sctx->dec = KMC_AES_192_DECRYPT;
		break;
	case 32:
		sctx->enc = KMC_AES_256_ENCRYPT;
		sctx->dec = KMC_AES_256_DECRYPT;
		break;
	}

	return aes_set_key(tfm, in_key, key_len);
}

static int cbc_aes_crypt(struct blkcipher_desc *desc, long func, void *param,
			 struct blkcipher_walk *walk)
{
	int ret = blkcipher_walk_virt(desc, walk);
	unsigned int nbytes = walk->nbytes;

	if (!nbytes)
		goto out;

	memcpy(param, walk->iv, AES_BLOCK_SIZE);
	do {
		/* only use complete blocks */
		unsigned int n = nbytes & ~(AES_BLOCK_SIZE - 1);
		u8 *out = walk->dst.virt.addr;
		u8 *in = walk->src.virt.addr;

		ret = crypt_s390_kmc(func, param, out, in, n);
		BUG_ON((ret < 0) || (ret != n));

		nbytes &= AES_BLOCK_SIZE - 1;
		ret = blkcipher_walk_done(desc, walk, nbytes);
	} while ((nbytes = walk->nbytes));
	memcpy(walk->iv, param, AES_BLOCK_SIZE);

out:
	return ret;
}
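/*
 * Note on cbc_aes_crypt() above: the parameter block passed to
 * crypt_s390_kmc() starts with the 16-byte chaining value, which the
 * hardware updates as it consumes blocks (see the layout note on
 * struct s390_aes_ctx). The chaining value is seeded from walk->iv
 * before the loop and copied back afterwards, so the IV the caller
 * sees ends up as the last ciphertext block, as CBC requires.
 */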
static int cbc_aes_encrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return cbc_aes_crypt(desc, sctx->enc, sctx->iv, &walk);
}

static int cbc_aes_decrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return cbc_aes_crypt(desc, sctx->dec, sctx->iv, &walk);
}

static struct crypto_alg cbc_aes_alg = {
	.cra_name		= "cbc(aes)",
	.cra_driver_name	= "cbc-aes-s390",
	.cra_priority		= CRYPT_S390_COMPOSITE_PRIORITY,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct s390_aes_ctx),
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(cbc_aes_alg.cra_list),
	.cra_u			= {
		.blkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.ivsize		= AES_BLOCK_SIZE,
			.setkey		= cbc_aes_set_key,
			.encrypt	= cbc_aes_encrypt,
			.decrypt	= cbc_aes_decrypt,
		}
	}
};

static int __init aes_init(void)
{
	int ret;

	/* probe which key lengths the CPACF hardware supports */
	if (crypt_s390_func_available(KM_AES_128_ENCRYPT))
		keylen_flag |= AES_KEYLEN_128;
	if (crypt_s390_func_available(KM_AES_192_ENCRYPT))
		keylen_flag |= AES_KEYLEN_192;
	if (crypt_s390_func_available(KM_AES_256_ENCRYPT))
		keylen_flag |= AES_KEYLEN_256;

	if (!keylen_flag)
		return -EOPNOTSUPP;

	/* z9 109 and z9 BC/EC only support 128 bit key length */
	if (keylen_flag == AES_KEYLEN_128)
		printk(KERN_INFO
		       "aes_s390: hardware acceleration only available for "
		       "128-bit keys\n");

	ret = crypto_register_alg(&aes_alg);
	if (ret)
		goto aes_err;

	ret = crypto_register_alg(&ecb_aes_alg);
	if (ret)
		goto ecb_aes_err;

	ret = crypto_register_alg(&cbc_aes_alg);
	if (ret)
		goto cbc_aes_err;

out:
	return ret;

cbc_aes_err:
	crypto_unregister_alg(&ecb_aes_alg);
ecb_aes_err:
	crypto_unregister_alg(&aes_alg);
aes_err:
	goto out;
}

static void __exit aes_fini(void)
{
	crypto_unregister_alg(&cbc_aes_alg);
	crypto_unregister_alg(&ecb_aes_alg);
	crypto_unregister_alg(&aes_alg);
}

module_init(aes_init);
module_exit(aes_fini);

MODULE_ALIAS("aes");

MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm");
MODULE_LICENSE("GPL");
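/*
 * Illustrative sketch (not part of this driver): how a caller of this
 * kernel era might exercise the "cbc(aes)" mode registered above via
 * the generic blkcipher API. buf holds exactly one block here.
 *
 *	struct crypto_blkcipher *tfm;
 *	struct blkcipher_desc desc;
 *	struct scatterlist sg;
 *	u8 key[16], iv[AES_BLOCK_SIZE], buf[AES_BLOCK_SIZE];
 *
 *	tfm = crypto_alloc_blkcipher("cbc(aes)", 0, CRYPTO_ALG_ASYNC);
 *	if (!IS_ERR(tfm)) {
 *		desc.tfm = tfm;
 *		desc.flags = 0;
 *		crypto_blkcipher_setkey(tfm, key, sizeof(key));
 *		crypto_blkcipher_set_iv(tfm, iv, AES_BLOCK_SIZE);
 *		sg_init_one(&sg, buf, sizeof(buf));
 *		crypto_blkcipher_encrypt(&desc, &sg, &sg, sizeof(buf));
 *		crypto_free_blkcipher(tfm);
 *	}
 */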