/*
 * Copyright (C) 2006 USAGI/WIDE Project
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 *
 * Author:
 * 	Kazunori Miyazawa <miyazawa@linux-ipv6.org>
 */

#include <linux/crypto.h>
#include <linux/err.h>
#include <linux/kernel.h>
#include <linux/mm.h>
#include <linux/rtnetlink.h>
#include <linux/slab.h>
#include <linux/scatterlist.h>
#include "internal.h"

static u_int32_t ks[12] = {0x01010101, 0x01010101, 0x01010101, 0x01010101,
			   0x02020202, 0x02020202, 0x02020202, 0x02020202,
			   0x03030303, 0x03030303, 0x03030303, 0x03030303};

/*
 * +------------------------
 * | <parent tfm>
 * +------------------------
 * | crypto_xcbc_ctx
 * +------------------------
 * | odds (block size)
 * +------------------------
 * | prev (block size)
 * +------------------------
 * | key (block size)
 * +------------------------
 * | consts (block size * 3)
 * +------------------------
 */
struct crypto_xcbc_ctx {
	struct crypto_tfm *child;
	u8 *odds;
	u8 *prev;
	u8 *key;
	u8 *consts;
	void (*xor)(u8 *a, const u8 *b, unsigned int bs);
	unsigned int keylen;
	unsigned int len;
};

static void xor_128(u8 *a, const u8 *b, unsigned int bs)
{
	((u32 *)a)[0] ^= ((u32 *)b)[0];
	((u32 *)a)[1] ^= ((u32 *)b)[1];
	((u32 *)a)[2] ^= ((u32 *)b)[2];
	((u32 *)a)[3] ^= ((u32 *)b)[3];
}

static int _crypto_xcbc_digest_setkey(struct crypto_hash *parent,
				      struct crypto_xcbc_ctx *ctx)
{
	int bs = crypto_hash_blocksize(parent);
	int err = 0;
	u8 key1[bs];

	if ((err = crypto_cipher_setkey(ctx->child, ctx->key, ctx->keylen)))
		return err;

	ctx->child->__crt_alg->cra_cipher.cia_encrypt(ctx->child, key1,
						      ctx->consts);

	return crypto_cipher_setkey(ctx->child, key1, bs);
}

static int crypto_xcbc_digest_setkey(struct crypto_hash *parent,
				     const u8 *inkey, unsigned int keylen)
{
	struct crypto_xcbc_ctx *ctx = crypto_hash_ctx_aligned(parent);

	if (keylen != crypto_tfm_alg_blocksize(ctx->child))
		return -EINVAL;

	ctx->keylen = keylen;
	memcpy(ctx->key, inkey, keylen);
	ctx->consts = (u8 *)ks;

	return _crypto_xcbc_digest_setkey(parent, ctx);
}

static int crypto_xcbc_digest_init(struct hash_desc *pdesc)
{
	struct crypto_xcbc_ctx *ctx = crypto_hash_ctx_aligned(pdesc->tfm);
	int bs = crypto_hash_blocksize(pdesc->tfm);

	ctx->len = 0;
	memset(ctx->odds, 0, bs);
	memset(ctx->prev, 0, bs);

	return 0;
}
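/*
 * XCBC-MAC keying (RFC 3566): the user key K never keys the CBC chain
 * directly.  _crypto_xcbc_digest_setkey() above re-keys the child cipher
 * with K1 = E_K(0x0101..01), derived from the first constant block of
 * ks[]; crypto_xcbc_digest_final() later derives K2 = E_K(0x0202..02) or
 * K3 = E_K(0x0303..03) to whiten the last block, depending on whether
 * that block is complete.  The update step below only buffers partial
 * blocks in ctx->odds and CBC-chains full blocks into ctx->prev.
 */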
static int crypto_xcbc_digest_update(struct hash_desc *pdesc,
				     struct scatterlist *sg,
				     unsigned int nbytes)
{
	struct crypto_hash *parent = pdesc->tfm;
	struct crypto_xcbc_ctx *ctx = crypto_hash_ctx_aligned(parent);
	struct crypto_tfm *tfm = ctx->child;
	int bs = crypto_hash_blocksize(parent);
	unsigned int i = 0;

	do {
		struct page *pg = sg[i].page;
		unsigned int offset = sg[i].offset;
		unsigned int slen = sg[i].length;

		while (slen > 0) {
			unsigned int len = min(slen, ((unsigned int)(PAGE_SIZE)) - offset);
			char *p = crypto_kmap(pg, 0) + offset;

			/* checking the data can fill the block */
			if ((ctx->len + len) <= bs) {
				memcpy(ctx->odds + ctx->len, p, len);
				ctx->len += len;
				slen -= len;

				/* checking the rest of the page */
				if (len + offset >= PAGE_SIZE) {
					offset = 0;
					pg++;
				} else
					offset += len;

				crypto_kunmap(p, 0);
				crypto_yield(tfm->crt_flags);
				continue;
			}

			/* filling odds with new data and encrypting it */
			memcpy(ctx->odds + ctx->len, p, bs - ctx->len);
			len -= bs - ctx->len;
			p += bs - ctx->len;

			ctx->xor(ctx->prev, ctx->odds, bs);
			tfm->__crt_alg->cra_cipher.cia_encrypt(tfm, ctx->prev, ctx->prev);

			/* clearing the length */
			ctx->len = 0;

			/* encrypting the rest of data */
			while (len > bs) {
				ctx->xor(ctx->prev, p, bs);
				tfm->__crt_alg->cra_cipher.cia_encrypt(tfm, ctx->prev, ctx->prev);
				p += bs;
				len -= bs;
			}

			/* keeping the surplus of blocksize */
			if (len) {
				memcpy(ctx->odds, p, len);
				ctx->len = len;
			}
			crypto_kunmap(p, 0);
			crypto_yield(tfm->crt_flags);
			slen -= min(slen, ((unsigned int)(PAGE_SIZE)) - offset);
			offset = 0;
			pg++;
		}
		nbytes -= sg[i].length;
		i++;
	} while (nbytes > 0);

	return 0;
}

static int crypto_xcbc_digest_final(struct hash_desc *pdesc, u8 *out)
{
	struct crypto_hash *parent = pdesc->tfm;
	struct crypto_xcbc_ctx *ctx = crypto_hash_ctx_aligned(parent);
	struct crypto_tfm *tfm = ctx->child;
	int bs = crypto_hash_blocksize(parent);
	int err = 0;

	if (ctx->len == bs) {
		u8 key2[bs];

		if ((err = crypto_cipher_setkey(tfm, ctx->key, ctx->keylen)) != 0)
			return err;

		tfm->__crt_alg->cra_cipher.cia_encrypt(tfm, key2,
						       (const u8 *)(ctx->consts + bs));

		ctx->xor(ctx->prev, ctx->odds, bs);
		ctx->xor(ctx->prev, key2, bs);
		_crypto_xcbc_digest_setkey(parent, ctx);

		tfm->__crt_alg->cra_cipher.cia_encrypt(tfm, out, ctx->prev);
	} else {
		u8 key3[bs];
		unsigned int rlen;
		u8 *p = ctx->odds + ctx->len;
		*p = 0x80;
		p++;

		rlen = bs - ctx->len - 1;
		if (rlen)
			memset(p, 0, rlen);

		if ((err = crypto_cipher_setkey(tfm, ctx->key, ctx->keylen)) != 0)
			return err;

		tfm->__crt_alg->cra_cipher.cia_encrypt(tfm, key3,
						       (const u8 *)(ctx->consts + bs * 2));

		ctx->xor(ctx->prev, ctx->odds, bs);
		ctx->xor(ctx->prev, key3, bs);

		_crypto_xcbc_digest_setkey(parent, ctx);

		tfm->__crt_alg->cra_cipher.cia_encrypt(tfm, out, ctx->prev);
	}

	return 0;
}

static int crypto_xcbc_digest(struct hash_desc *pdesc,
			      struct scatterlist *sg, unsigned int nbytes, u8 *out)
{
	crypto_xcbc_digest_init(pdesc);
	crypto_xcbc_digest_update(pdesc, sg, nbytes);
	return crypto_xcbc_digest_final(pdesc, out);
}

static int xcbc_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_instance *inst = (void *)tfm->__crt_alg;
	struct crypto_spawn *spawn = crypto_instance_ctx(inst);
	struct crypto_xcbc_ctx *ctx = crypto_hash_ctx_aligned(__crypto_hash_cast(tfm));
	int bs = crypto_hash_blocksize(__crypto_hash_cast(tfm));

	tfm = crypto_spawn_tfm(spawn);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	switch (bs) {
	case 16:
		ctx->xor = xor_128;
		break;
	default:
		return -EINVAL;
	}

	ctx->child = crypto_cipher_cast(tfm);
	ctx->odds = (u8 *)(ctx + 1);
	ctx->prev = ctx->odds + bs;
	ctx->key = ctx->prev + bs;

	return 0;
}

static void xcbc_exit_tfm(struct crypto_tfm *tfm)
{
	struct crypto_xcbc_ctx *ctx = crypto_hash_ctx_aligned(__crypto_hash_cast(tfm));
	crypto_free_cipher(ctx->child);
}
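/*
 * Template glue: xcbc_alloc() below instantiates "xcbc(<cipher>)" for any
 * block cipher with a 16-byte block (e.g. "xcbc(aes)").  cra_ctxsize is
 * chosen so that the odds/prev/key scratch blocks wired up in
 * xcbc_init_tfm() follow struct crypto_xcbc_ctx in memory, matching the
 * layout diagram at the top of this file.
 */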
static struct crypto_instance *xcbc_alloc(void *param, unsigned int len)
{
	struct crypto_instance *inst;
	struct crypto_alg *alg;

	alg = crypto_get_attr_alg(param, len, CRYPTO_ALG_TYPE_CIPHER,
				  CRYPTO_ALG_TYPE_HASH_MASK | CRYPTO_ALG_ASYNC);
	if (IS_ERR(alg))
		return ERR_PTR(PTR_ERR(alg));

	switch (alg->cra_blocksize) {
	case 16:
		break;
	default:
		/* unsupported block size: drop the reference and fail */
		inst = ERR_PTR(-EINVAL);
		goto out_put_alg;
	}

	inst = crypto_alloc_instance("xcbc", alg);
	if (IS_ERR(inst))
		goto out_put_alg;

	inst->alg.cra_flags = CRYPTO_ALG_TYPE_HASH;
	inst->alg.cra_priority = alg->cra_priority;
	inst->alg.cra_blocksize = alg->cra_blocksize;
	inst->alg.cra_alignmask = alg->cra_alignmask;
	inst->alg.cra_type = &crypto_hash_type;

	inst->alg.cra_hash.digestsize =
		(alg->cra_flags & CRYPTO_ALG_TYPE_MASK) ==
		CRYPTO_ALG_TYPE_HASH ? alg->cra_hash.digestsize :
				       alg->cra_blocksize;
	inst->alg.cra_ctxsize = sizeof(struct crypto_xcbc_ctx) +
				ALIGN(inst->alg.cra_blocksize * 3, sizeof(void *));
	inst->alg.cra_init = xcbc_init_tfm;
	inst->alg.cra_exit = xcbc_exit_tfm;

	inst->alg.cra_hash.init = crypto_xcbc_digest_init;
	inst->alg.cra_hash.update = crypto_xcbc_digest_update;
	inst->alg.cra_hash.final = crypto_xcbc_digest_final;
	inst->alg.cra_hash.digest = crypto_xcbc_digest;
	inst->alg.cra_hash.setkey = crypto_xcbc_digest_setkey;

out_put_alg:
	crypto_mod_put(alg);
	return inst;
}

static void xcbc_free(struct crypto_instance *inst)
{
	crypto_drop_spawn(crypto_instance_ctx(inst));
	kfree(inst);
}

static struct crypto_template crypto_xcbc_tmpl = {
	.name = "xcbc",
	.alloc = xcbc_alloc,
	.free = xcbc_free,
	.module = THIS_MODULE,
};

static int __init crypto_xcbc_module_init(void)
{
	return crypto_register_template(&crypto_xcbc_tmpl);
}

static void __exit crypto_xcbc_module_exit(void)
{
	crypto_unregister_template(&crypto_xcbc_tmpl);
}

module_init(crypto_xcbc_module_init);
module_exit(crypto_xcbc_module_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("XCBC keyed hash algorithm");
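/*
 * Usage sketch (not part of this file): computing an XCBC-AES MAC through
 * the crypto_hash interface that this template registers.  This is a
 * minimal example, assuming "xcbc(aes)" is available; `data' and
 * `data_len' are caller-supplied and hypothetical here.  The key must be
 * exactly one cipher block (16 bytes), as enforced by
 * crypto_xcbc_digest_setkey().
 *
 *	struct crypto_hash *tfm;
 *	struct hash_desc desc;
 *	struct scatterlist sg;
 *	u8 key[16], mac[16];
 *	int err;
 *
 *	tfm = crypto_alloc_hash("xcbc(aes)", 0, CRYPTO_ALG_ASYNC);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	desc.tfm = tfm;
 *	desc.flags = 0;
 *
 *	sg_init_one(&sg, data, data_len);
 *	err = crypto_hash_setkey(tfm, key, sizeof(key));
 *	if (!err)
 *		err = crypto_hash_digest(&desc, &sg, data_len, mac);
 *	crypto_free_hash(tfm);
 *	return err;
 */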