// SPDX-License-Identifier: GPL-2.0+
/*
 * Copyright IBM Corp. 2024
 *
 * s390 specific HMAC support.
 */

#define pr_fmt(fmt)	"hmac_s390: " fmt

#include <asm/cpacf.h>
#include <crypto/internal/hash.h>
#include <crypto/hmac.h>
#include <crypto/sha2.h>
#include <linux/cpufeature.h>
#include <linux/errno.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/string.h>

/*
 * KMAC param block layout for sha2 function codes:
 * The layout of the param block for the KMAC instruction depends on the
 * blocksize of the used hashing sha2-algorithm function codes. The param block
 * contains the hash chaining value (cv), the input message bit-length (imbl)
 * and the hmac-secret (key). To prevent code duplication, the sizes of all
 * these are calculated based on the blocksize.
 *
 * param-block:
 * +-------+
 * | cv    |
 * +-------+
 * | imbl  |
 * +-------+
 * | key   |
 * +-------+
 *
 * sizes:
 * part | sha2-alg | calculation | size | type
 * -----+----------+-------------+------+--------
 * cv   | 224/256  | blocksize/2 |   32 |  u32[8]
 *      | 384/512  |             |   64 |  u64[8]
 * imbl | 224/256  | blocksize/8 |    8 |     u64
 *      | 384/512  |             |   16 |    u128
 * key  | 224/256  | blocksize   |   64 |  u8[64]
 *      | 384/512  |             |  128 | u8[128]
 */

#define MAX_DIGEST_SIZE		SHA512_DIGEST_SIZE
#define MAX_IMBL_SIZE		sizeof(u128)
#define MAX_BLOCK_SIZE		SHA512_BLOCK_SIZE

#define SHA2_CV_SIZE(bs)	((bs) >> 1)
#define SHA2_IMBL_SIZE(bs)	((bs) >> 3)

#define SHA2_IMBL_OFFSET(bs)	(SHA2_CV_SIZE(bs))
#define SHA2_KEY_OFFSET(bs)	(SHA2_CV_SIZE(bs) + SHA2_IMBL_SIZE(bs))
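
/*
 * Concrete layout derived from the macros above: for the sha-224/256
 * function codes (blocksize 64) the cv occupies bytes 0-31, the imbl
 * bytes 32-39 and the key bytes 40-103; for sha-384/512 (blocksize 128)
 * the cv occupies bytes 0-63, the imbl bytes 64-79 and the key bytes
 * 80-207.
 */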

struct s390_hmac_ctx {
	u8 key[MAX_BLOCK_SIZE];
};

union s390_kmac_gr0 {
	unsigned long reg;
	struct {
		unsigned long		: 48;
		unsigned long ikp	: 1;
		unsigned long iimp	: 1;
		unsigned long ccup	: 1;
		unsigned long		: 6;
		unsigned long fc	: 7;
	};
};

struct s390_kmac_sha2_ctx {
	u8 param[MAX_DIGEST_SIZE + MAX_IMBL_SIZE + MAX_BLOCK_SIZE];
	union s390_kmac_gr0 gr0;
	u64 buflen[2];
};

/*
 * kmac_sha2_set_imbl - sets the input message bit-length based on the blocksize
 */
static inline void kmac_sha2_set_imbl(u8 *param, u64 buflen_lo,
				      u64 buflen_hi, unsigned int blocksize)
{
	u8 *imbl = param + SHA2_IMBL_OFFSET(blocksize);

	switch (blocksize) {
	case SHA256_BLOCK_SIZE:
		*(u64 *)imbl = buflen_lo * BITS_PER_BYTE;
		break;
	case SHA512_BLOCK_SIZE:
		*(u128 *)imbl = (((u128)buflen_hi << 64) + buflen_lo) << 3;
		break;
	default:
		break;
	}
}
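
/*
 * hash_data() - hash a data blob with plain sha2 via CPACF
 *
 * Initializes a fresh sha-256 or sha-512 parameter block, selects the
 * last-message-digest (final == true) or intermediate-message-digest
 * (final == false) function code and runs @in through CPACF. The first
 * @digestsize bytes of the resulting hash state are copied to @digest; for
 * sha-224/384 with final == false the full sha-256/512 chaining value is
 * copied instead, since a truncated digest is not a usable intermediate
 * state.
 */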
static int hash_data(const u8 *in, unsigned int inlen,
		     u8 *digest, unsigned int digestsize, bool final)
{
	unsigned long func;
	union {
		struct sha256_paramblock {
			u32 h[8];
			u64 mbl;
		} sha256;
		struct sha512_paramblock {
			u64 h[8];
			u128 mbl;
		} sha512;
	} __packed param;

#define PARAM_INIT(x, y, z)		   \
	param.sha##x.h[0] = SHA##y ## _H0; \
	param.sha##x.h[1] = SHA##y ## _H1; \
	param.sha##x.h[2] = SHA##y ## _H2; \
	param.sha##x.h[3] = SHA##y ## _H3; \
	param.sha##x.h[4] = SHA##y ## _H4; \
	param.sha##x.h[5] = SHA##y ## _H5; \
	param.sha##x.h[6] = SHA##y ## _H6; \
	param.sha##x.h[7] = SHA##y ## _H7; \
	param.sha##x.mbl = (z)

	switch (digestsize) {
	case SHA224_DIGEST_SIZE:
		func = final ? CPACF_KLMD_SHA_256 : CPACF_KIMD_SHA_256;
		PARAM_INIT(256, 224, inlen * 8);
		if (!final)
			digestsize = SHA256_DIGEST_SIZE;
		break;
	case SHA256_DIGEST_SIZE:
		func = final ? CPACF_KLMD_SHA_256 : CPACF_KIMD_SHA_256;
		PARAM_INIT(256, 256, inlen * 8);
		break;
	case SHA384_DIGEST_SIZE:
		func = final ? CPACF_KLMD_SHA_512 : CPACF_KIMD_SHA_512;
		PARAM_INIT(512, 384, inlen * 8);
		if (!final)
			digestsize = SHA512_DIGEST_SIZE;
		break;
	case SHA512_DIGEST_SIZE:
		func = final ? CPACF_KLMD_SHA_512 : CPACF_KIMD_SHA_512;
		PARAM_INIT(512, 512, inlen * 8);
		break;
	default:
		return -EINVAL;
	}

#undef PARAM_INIT

	cpacf_klmd(func, &param, in, inlen);

	memcpy(digest, &param, digestsize);

	return 0;
}

static int hash_key(const u8 *in, unsigned int inlen,
		    u8 *digest, unsigned int digestsize)
{
	return hash_data(in, inlen, digest, digestsize, true);
}

static int s390_hmac_sha2_setkey(struct crypto_shash *tfm,
				 const u8 *key, unsigned int keylen)
{
	struct s390_hmac_ctx *tfm_ctx = crypto_shash_ctx(tfm);
	unsigned int ds = crypto_shash_digestsize(tfm);
	unsigned int bs = crypto_shash_blocksize(tfm);

	memset(tfm_ctx, 0, sizeof(*tfm_ctx));

	if (keylen > bs)
		return hash_key(key, keylen, tfm_ctx->key, ds);

	memcpy(tfm_ctx->key, key, keylen);
	return 0;
}

static int s390_hmac_sha2_init(struct shash_desc *desc)
{
	struct s390_hmac_ctx *tfm_ctx = crypto_shash_ctx(desc->tfm);
	struct s390_kmac_sha2_ctx *ctx = shash_desc_ctx(desc);
	unsigned int bs = crypto_shash_blocksize(desc->tfm);

	memcpy(ctx->param + SHA2_KEY_OFFSET(bs),
	       tfm_ctx->key, bs);

	ctx->buflen[0] = 0;
	ctx->buflen[1] = 0;
	ctx->gr0.reg = 0;
	switch (crypto_shash_digestsize(desc->tfm)) {
	case SHA224_DIGEST_SIZE:
		ctx->gr0.fc = CPACF_KMAC_HMAC_SHA_224;
		break;
	case SHA256_DIGEST_SIZE:
		ctx->gr0.fc = CPACF_KMAC_HMAC_SHA_256;
		break;
	case SHA384_DIGEST_SIZE:
		ctx->gr0.fc = CPACF_KMAC_HMAC_SHA_384;
		break;
	case SHA512_DIGEST_SIZE:
		ctx->gr0.fc = CPACF_KMAC_HMAC_SHA_512;
		break;
	default:
		return -EINVAL;
	}

	return 0;
}

static int s390_hmac_sha2_update(struct shash_desc *desc,
				 const u8 *data, unsigned int len)
{
	struct s390_kmac_sha2_ctx *ctx = shash_desc_ctx(desc);
	unsigned int bs = crypto_shash_blocksize(desc->tfm);
	unsigned int n = round_down(len, bs);

	ctx->buflen[0] += n;
	if (ctx->buflen[0] < n)
		ctx->buflen[1]++;

	/* process as many blocks as possible */
	ctx->gr0.iimp = 1;
	_cpacf_kmac(&ctx->gr0.reg, ctx->param, data, n);
	return len - n;
}

static int s390_hmac_sha2_finup(struct shash_desc *desc, const u8 *src,
				unsigned int len, u8 *out)
{
	struct s390_kmac_sha2_ctx *ctx = shash_desc_ctx(desc);
	unsigned int bs = crypto_shash_blocksize(desc->tfm);

	ctx->buflen[0] += len;
	if (ctx->buflen[0] < len)
		ctx->buflen[1]++;

	ctx->gr0.iimp = 0;
	kmac_sha2_set_imbl(ctx->param, ctx->buflen[0], ctx->buflen[1], bs);
	_cpacf_kmac(&ctx->gr0.reg, ctx->param, src, len);
	memcpy(out, ctx->param, crypto_shash_digestsize(desc->tfm));

	return 0;
}

static int s390_hmac_sha2_digest(struct shash_desc *desc,
				 const u8 *data, unsigned int len, u8 *out)
{
	struct s390_kmac_sha2_ctx *ctx = shash_desc_ctx(desc);
	unsigned int ds = crypto_shash_digestsize(desc->tfm);
	int rc;

	rc = s390_hmac_sha2_init(desc);
	if (rc)
		return rc;

	ctx->gr0.iimp = 0;
	kmac_sha2_set_imbl(ctx->param, len, 0,
			   crypto_shash_blocksize(desc->tfm));
	_cpacf_kmac(&ctx->gr0.reg, ctx->param, data, len);
	memcpy(out, ctx->param, ds);

	return 0;
}
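
/*
 * Export/import of the partial hash state.
 *
 * The exported state is the current chaining value (half a block, i.e. the
 * full sha-256/512 digest) followed by the number of message bytes processed
 * so far, stored as one u64 for sha-224/256 and as two u64s (low part first)
 * for sha-384/512; this mirrors the layout advertised via .statesize below
 * (struct crypto_sha256_state resp. SHA512_STATE_SIZE). The count includes
 * the initial ipad block, hence one blocksize is added on export and
 * subtracted again on import. If KMAC has not processed any data yet
 * (gr0.ikp is still zero), the chaining value is obtained by hashing the key
 * xor'ed with HMAC_IPAD_VALUE, i.e. the inner hash state after the ipad
 * block.
 */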
static int s390_hmac_export_zero(struct shash_desc *desc, void *out)
{
	struct crypto_shash *tfm = desc->tfm;
	u8 ipad[SHA512_BLOCK_SIZE];
	struct s390_hmac_ctx *ctx;
	unsigned int bs;
	int err, i;

	ctx = crypto_shash_ctx(tfm);
	bs = crypto_shash_blocksize(tfm);
	for (i = 0; i < bs; i++)
		ipad[i] = ctx->key[i] ^ HMAC_IPAD_VALUE;

	err = hash_data(ipad, bs, out, crypto_shash_digestsize(tfm), false);
	memzero_explicit(ipad, sizeof(ipad));
	return err;
}

static int s390_hmac_export(struct shash_desc *desc, void *out)
{
	struct s390_kmac_sha2_ctx *ctx = shash_desc_ctx(desc);
	unsigned int bs = crypto_shash_blocksize(desc->tfm);
	unsigned int ds = bs / 2;
	u64 lo = ctx->buflen[0];
	union {
		u8 *u8;
		u64 *u64;
	} p = { .u8 = out };
	int err = 0;

	if (!ctx->gr0.ikp)
		err = s390_hmac_export_zero(desc, out);
	else
		memcpy(p.u8, ctx->param, ds);
	p.u8 += ds;
	lo += bs;
	put_unaligned(lo, p.u64++);
	if (ds == SHA512_DIGEST_SIZE)
		put_unaligned(ctx->buflen[1] + (lo < bs), p.u64);
	return err;
}

static int s390_hmac_import(struct shash_desc *desc, const void *in)
{
	struct s390_kmac_sha2_ctx *ctx = shash_desc_ctx(desc);
	unsigned int bs = crypto_shash_blocksize(desc->tfm);
	unsigned int ds = bs / 2;
	union {
		const u8 *u8;
		const u64 *u64;
	} p = { .u8 = in };
	u64 lo;
	int err;

	err = s390_hmac_sha2_init(desc);
	memcpy(ctx->param, p.u8, ds);
	p.u8 += ds;
	lo = get_unaligned(p.u64++);
	ctx->buflen[0] = lo - bs;
	if (ds == SHA512_DIGEST_SIZE)
		ctx->buflen[1] = get_unaligned(p.u64) - (lo < bs);
	if (ctx->buflen[0] | ctx->buflen[1])
		ctx->gr0.ikp = 1;
	return err;
}

#define S390_HMAC_SHA2_ALG(x, ss) {					\
	.fc = CPACF_KMAC_HMAC_SHA_##x,					\
	.alg = {							\
		.init = s390_hmac_sha2_init,				\
		.update = s390_hmac_sha2_update,			\
		.finup = s390_hmac_sha2_finup,				\
		.digest = s390_hmac_sha2_digest,			\
		.setkey = s390_hmac_sha2_setkey,			\
		.export = s390_hmac_export,				\
		.import = s390_hmac_import,				\
		.descsize = sizeof(struct s390_kmac_sha2_ctx),		\
		.halg = {						\
			.statesize = ss,				\
			.digestsize = SHA##x##_DIGEST_SIZE,		\
			.base = {					\
				.cra_name = "hmac(sha" #x ")",		\
				.cra_driver_name = "hmac_s390_sha" #x,	\
				.cra_blocksize = SHA##x##_BLOCK_SIZE,	\
				.cra_priority = 400,			\
				.cra_flags = CRYPTO_AHASH_ALG_BLOCK_ONLY | \
					     CRYPTO_AHASH_ALG_FINUP_MAX, \
				.cra_ctxsize = sizeof(struct s390_hmac_ctx), \
				.cra_module = THIS_MODULE,		\
			},						\
		},							\
	},								\
}

static struct s390_hmac_alg {
	bool registered;
	unsigned int fc;
	struct shash_alg alg;
} s390_hmac_algs[] = {
	S390_HMAC_SHA2_ALG(224, sizeof(struct crypto_sha256_state)),
	S390_HMAC_SHA2_ALG(256, sizeof(struct crypto_sha256_state)),
	S390_HMAC_SHA2_ALG(384, SHA512_STATE_SIZE),
	S390_HMAC_SHA2_ALG(512, SHA512_STATE_SIZE),
};

static __always_inline void _s390_hmac_algs_unregister(void)
{
	struct s390_hmac_alg *hmac;
	int i;

	for (i = ARRAY_SIZE(s390_hmac_algs) - 1; i >= 0; i--) {
		hmac = &s390_hmac_algs[i];
		if (!hmac->registered)
			continue;
		crypto_unregister_shash(&hmac->alg);
	}
}

static int __init hmac_s390_init(void)
{
	struct s390_hmac_alg *hmac;
	int i, rc = -ENODEV;

	if (!cpacf_query_func(CPACF_KLMD, CPACF_KLMD_SHA_256))
		return -ENODEV;
	if (!cpacf_query_func(CPACF_KLMD, CPACF_KLMD_SHA_512))
		return -ENODEV;

	for (i = 0; i < ARRAY_SIZE(s390_hmac_algs); i++) {
		hmac = &s390_hmac_algs[i];
		if (!cpacf_query_func(CPACF_KMAC, hmac->fc))
			continue;

		rc = crypto_register_shash(&hmac->alg);
		if (rc) {
			pr_err("unable to register %s\n",
			       hmac->alg.halg.base.cra_name);
			goto out;
		}
		hmac->registered = true;
		pr_debug("registered %s\n", hmac->alg.halg.base.cra_name);
	}
	return rc;
out:
	_s390_hmac_algs_unregister();
	return rc;
}

static void __exit hmac_s390_exit(void)
{
	_s390_hmac_algs_unregister();
}

module_cpu_feature_match(S390_CPU_FEATURE_MSA, hmac_s390_init);
module_exit(hmac_s390_exit);

MODULE_DESCRIPTION("S390 HMAC driver");
MODULE_LICENSE("GPL");