// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * CMAC: Cipher Block Mode for Authentication
 *
 * Copyright © 2013 Jussi Kivilinna <jussi.kivilinna@iki.fi>
 *
 * Based on work by:
 *  Copyright © 2013 Tom St Denis <tstdenis@elliptictech.com>
 * Based on crypto/xcbc.c:
 *  Copyright © 2006 USAGI/WIDE Project,
 *  Author: Kazunori Miyazawa <miyazawa@linux-ipv6.org>
 */

#include <crypto/internal/cipher.h>
#include <crypto/internal/hash.h>
#include <linux/err.h>
#include <linux/kernel.h>
#include <linux/module.h>

/*
 * +------------------------
 * | <parent tfm>
 * +------------------------
 * | cmac_tfm_ctx
 * +------------------------
 * | consts (block size * 2)
 * +------------------------
 */
struct cmac_tfm_ctx {
	struct crypto_cipher *child;
	__be64 consts[];
};

/*
 * +------------------------
 * | <shash desc>
 * +------------------------
 * | cmac_desc_ctx
 * +------------------------
 * | odds (block size)
 * +------------------------
 * | prev (block size)
 * +------------------------
 */
struct cmac_desc_ctx {
	unsigned int len;
	u8 odds[];
};

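/*
 * Derive the two CMAC subkeys (NIST SP 800-38B / RFC 4493) and cache them in
 * ctx->consts: encrypt the all-zero block with the freshly set key to obtain
 * L, then compute K1 = L * u and K2 = L * u^2 in GF(2^128) (or GF(2^64) for
 * 64-bit block ciphers).  Each step is a left shift by one bit with a
 * conditional XOR of the reduction polynomial (0x87 or 0x1B) into the low
 * byte.  consts[0..bs-1] holds K1 and consts[bs..2*bs-1] holds K2.
 */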
static int crypto_cmac_digest_setkey(struct crypto_shash *parent,
				     const u8 *inkey, unsigned int keylen)
{
	struct cmac_tfm_ctx *ctx = crypto_shash_ctx(parent);
	unsigned int bs = crypto_shash_blocksize(parent);
	__be64 *consts = ctx->consts;
	u64 _const[2];
	int i, err = 0;
	u8 msb_mask, gfmask;

	err = crypto_cipher_setkey(ctx->child, inkey, keylen);
	if (err)
		return err;

	/* encrypt the zero block */
	memset(consts, 0, bs);
	crypto_cipher_encrypt_one(ctx->child, (u8 *)consts, (u8 *)consts);

	switch (bs) {
	case 16:
		gfmask = 0x87;
		_const[0] = be64_to_cpu(consts[1]);
		_const[1] = be64_to_cpu(consts[0]);

		/* gf(2^128) multiply zero-ciphertext with u and u^2 */
		for (i = 0; i < 4; i += 2) {
			msb_mask = ((s64)_const[1] >> 63) & gfmask;
			_const[1] = (_const[1] << 1) | (_const[0] >> 63);
			_const[0] = (_const[0] << 1) ^ msb_mask;

			consts[i + 0] = cpu_to_be64(_const[1]);
			consts[i + 1] = cpu_to_be64(_const[0]);
		}

		break;
	case 8:
		gfmask = 0x1B;
		_const[0] = be64_to_cpu(consts[0]);

		/* gf(2^64) multiply zero-ciphertext with u and u^2 */
		for (i = 0; i < 2; i++) {
			msb_mask = ((s64)_const[0] >> 63) & gfmask;
			_const[0] = (_const[0] << 1) ^ msb_mask;

			consts[i] = cpu_to_be64(_const[0]);
		}

		break;
	}

	return 0;
}

static int crypto_cmac_digest_init(struct shash_desc *pdesc)
{
	struct cmac_desc_ctx *ctx = shash_desc_ctx(pdesc);
	int bs = crypto_shash_blocksize(pdesc->tfm);
	u8 *prev = &ctx->odds[bs];

	ctx->len = 0;
	memset(prev, 0, bs);

	return 0;
}

static int crypto_cmac_digest_update(struct shash_desc *pdesc, const u8 *p,
				     unsigned int len)
{
	struct crypto_shash *parent = pdesc->tfm;
	struct cmac_tfm_ctx *tctx = crypto_shash_ctx(parent);
	struct cmac_desc_ctx *ctx = shash_desc_ctx(pdesc);
	struct crypto_cipher *tfm = tctx->child;
	int bs = crypto_shash_blocksize(parent);
	u8 *odds = ctx->odds;
	u8 *prev = odds + bs;

	/* everything still fits in one block; keep buffering, since the
	 * final block must be handled in final() */
	if ((ctx->len + len) <= bs) {
		memcpy(odds + ctx->len, p, len);
		ctx->len += len;
		return 0;
	}

	/* complete the buffered block with new data and encrypt it */
	memcpy(odds + ctx->len, p, bs - ctx->len);
	len -= bs - ctx->len;
	p += bs - ctx->len;

	crypto_xor(prev, odds, bs);
	crypto_cipher_encrypt_one(tfm, prev, prev);

	/* the buffered block has been consumed */
	ctx->len = 0;

	/* encrypt the remaining full blocks, always holding back the last
	 * block (full or partial) for final() - hence "len > bs", not ">=" */
	while (len > bs) {
		crypto_xor(prev, p, bs);
		crypto_cipher_encrypt_one(tfm, prev, prev);
		p += bs;
		len -= bs;
	}

	/* buffer the trailing bytes (at most one full block) */
	if (len) {
		memcpy(odds, p, len);
		ctx->len = len;
	}

	return 0;
}

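/*
 * Finalize: an incomplete last block is padded with a single 0x80 byte
 * followed by zeroes and XORed with K2 (consts + bs); a complete last block
 * is XORed with K1 (consts, offset 0).  One more encryption of the chaining
 * value then yields the MAC.
 */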
static int crypto_cmac_digest_final(struct shash_desc *pdesc, u8 *out)
{
	struct crypto_shash *parent = pdesc->tfm;
	struct cmac_tfm_ctx *tctx = crypto_shash_ctx(parent);
	struct cmac_desc_ctx *ctx = shash_desc_ctx(pdesc);
	struct crypto_cipher *tfm = tctx->child;
	int bs = crypto_shash_blocksize(parent);
	u8 *odds = ctx->odds;
	u8 *prev = odds + bs;
	unsigned int offset = 0;

	if (ctx->len != bs) {
		unsigned int rlen;
		u8 *p = odds + ctx->len;

		*p = 0x80;
		p++;

		rlen = bs - ctx->len - 1;
		if (rlen)
			memset(p, 0, rlen);

		offset += bs;
	}

	crypto_xor(prev, odds, bs);
	crypto_xor(prev, (const u8 *)tctx->consts + offset, bs);

	crypto_cipher_encrypt_one(tfm, out, prev);

	return 0;
}

static int cmac_init_tfm(struct crypto_shash *tfm)
{
	struct shash_instance *inst = shash_alg_instance(tfm);
	struct cmac_tfm_ctx *ctx = crypto_shash_ctx(tfm);
	struct crypto_cipher_spawn *spawn;
	struct crypto_cipher *cipher;

	spawn = shash_instance_ctx(inst);
	cipher = crypto_spawn_cipher(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = cipher;

	return 0;
}

static int cmac_clone_tfm(struct crypto_shash *tfm, struct crypto_shash *otfm)
{
	struct cmac_tfm_ctx *octx = crypto_shash_ctx(otfm);
	struct cmac_tfm_ctx *ctx = crypto_shash_ctx(tfm);
	struct crypto_cipher *cipher;

	cipher = crypto_clone_cipher(octx->child);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = cipher;

	return 0;
}

static void cmac_exit_tfm(struct crypto_shash *tfm)
{
	struct cmac_tfm_ctx *ctx = crypto_shash_ctx(tfm);
	crypto_free_cipher(ctx->child);
}

static int cmac_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct shash_instance *inst;
	struct crypto_cipher_spawn *spawn;
	struct crypto_alg *alg;
	u32 mask;
	int err;

	err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_SHASH, &mask);
	if (err)
		return err;

	inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;
	spawn = shash_instance_ctx(inst);

	err = crypto_grab_cipher(spawn, shash_crypto_instance(inst),
				 crypto_attr_alg_name(tb[1]), 0, mask);
	if (err)
		goto err_free_inst;
	alg = crypto_spawn_cipher_alg(spawn);

	switch (alg->cra_blocksize) {
	case 16:
	case 8:
		break;
	default:
		err = -EINVAL;
		goto err_free_inst;
	}

	err = crypto_inst_setname(shash_crypto_instance(inst), tmpl->name, alg);
	if (err)
		goto err_free_inst;

	inst->alg.base.cra_priority = alg->cra_priority;
	inst->alg.base.cra_blocksize = alg->cra_blocksize;
	inst->alg.base.cra_ctxsize = sizeof(struct cmac_tfm_ctx) +
				     alg->cra_blocksize * 2;

	inst->alg.digestsize = alg->cra_blocksize;
	inst->alg.descsize = sizeof(struct cmac_desc_ctx) +
			     alg->cra_blocksize * 2;
	inst->alg.init = crypto_cmac_digest_init;
	inst->alg.update = crypto_cmac_digest_update;
	inst->alg.final = crypto_cmac_digest_final;
	inst->alg.setkey = crypto_cmac_digest_setkey;
	inst->alg.init_tfm = cmac_init_tfm;
	inst->alg.clone_tfm = cmac_clone_tfm;
	inst->alg.exit_tfm = cmac_exit_tfm;

	inst->free = shash_free_singlespawn_instance;

	err = shash_register_instance(tmpl, inst);
	if (err) {
err_free_inst:
		shash_free_singlespawn_instance(inst);
	}
	return err;
}

static struct crypto_template crypto_cmac_tmpl = {
	.name = "cmac",
	.create = cmac_create,
	.module = THIS_MODULE,
};

static int __init crypto_cmac_module_init(void)
{
	return crypto_register_template(&crypto_cmac_tmpl);
}

static void __exit crypto_cmac_module_exit(void)
{
	crypto_unregister_template(&crypto_cmac_tmpl);
}

subsys_initcall(crypto_cmac_module_init);
module_exit(crypto_cmac_module_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("CMAC keyed hash algorithm");
MODULE_ALIAS_CRYPTO("cmac");
MODULE_IMPORT_NS(CRYPTO_INTERNAL);
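/*
 * Illustrative usage sketch (not part of this module): computing a CMAC-AES
 * tag from kernel code through the shash API declared in <crypto/hash.h>.
 * "key", "keylen", "data" and "data_len" are assumed to be provided by the
 * caller; error handling is abbreviated.
 *
 *	struct crypto_shash *tfm;
 *	u8 mac[16];
 *	int err;
 *
 *	tfm = crypto_alloc_shash("cmac(aes)", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *
 *	err = crypto_shash_setkey(tfm, key, keylen);
 *	if (!err)
 *		err = crypto_shash_tfm_digest(tfm, data, data_len, mac);
 *
 *	crypto_free_shash(tfm);
 *	return err;
 */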