/* XTS: as defined in IEEE1619/D16
 *	http://grouper.ieee.org/groups/1619/email/pdf00086.pdf
 *	(sector sizes which are not a multiple of 16 bytes are,
 *	however, currently unsupported)
 *
 * Copyright (c) 2007 Rik Snel <rsnel@cube.dyndns.org>
 *
 * Based on ecb.c
 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 */
#include <crypto/algapi.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>

#include <crypto/xts.h>
#include <crypto/b128ops.h>
#include <crypto/gf128mul.h>

struct priv {
	struct crypto_cipher *child;
	struct crypto_cipher *tweak;
};

static int setkey(struct crypto_tfm *parent, const u8 *key,
		  unsigned int keylen)
{
	struct priv *ctx = crypto_tfm_ctx(parent);
	struct crypto_cipher *child = ctx->tweak;
	u32 *flags = &parent->crt_flags;
	int err;

	/* key consists of keys of equal size concatenated, therefore
	 * the length must be even */
	if (keylen % 2) {
		/* tell the user why there was an error */
		*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}

	/* we need two cipher instances: one to compute the initial 'tweak'
	 * by encrypting the IV (usually the 'plain' iv) and the other
	 * one to encrypt and decrypt the data */

	/* tweak cipher, uses Key2 i.e. the second half of *key */
	crypto_cipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_cipher_set_flags(child, crypto_tfm_get_flags(parent) &
				       CRYPTO_TFM_REQ_MASK);
	err = crypto_cipher_setkey(child, key + keylen/2, keylen/2);
	if (err)
		return err;

	crypto_tfm_set_flags(parent, crypto_cipher_get_flags(child) &
				     CRYPTO_TFM_RES_MASK);

	child = ctx->child;

	/* data cipher, uses Key1 i.e. the first half of *key */
	crypto_cipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_cipher_set_flags(child, crypto_tfm_get_flags(parent) &
				       CRYPTO_TFM_REQ_MASK);
	err = crypto_cipher_setkey(child, key, keylen/2);
	if (err)
		return err;

	crypto_tfm_set_flags(parent, crypto_cipher_get_flags(child) &
				     CRYPTO_TFM_RES_MASK);

	return 0;
}
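/*
 * For reference, the XTS computation implemented below (an illustrative
 * summary of the comments in this file, not a restatement of IEEE 1619):
 *
 *	T_0 = E(Key2, IV)	// IV is typically the sector number
 *	T_j = T_{j-1} * alpha	// alpha = x in GF(2^128), see gf128mul_x_ble()
 *	C_j = E(Key1, P_j xor T_j) xor T_j
 *
 * Decryption replaces the data-cipher call E(Key1, ...) with D(Key1, ...);
 * the tweak chain always uses encryption with Key2.
 */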
struct sinfo {
	be128 *t;
	struct crypto_tfm *tfm;
	void (*fn)(struct crypto_tfm *, u8 *, const u8 *);
};

static inline void xts_round(struct sinfo *s, void *dst, const void *src)
{
	be128_xor(dst, s->t, src);		/* PP <- T xor P */
	s->fn(s->tfm, dst, dst);		/* CC <- E(Key1,PP) */
	be128_xor(dst, dst, s->t);		/* C <- T xor CC */
}

static int crypt(struct blkcipher_desc *d,
		 struct blkcipher_walk *w, struct priv *ctx,
		 void (*tw)(struct crypto_tfm *, u8 *, const u8 *),
		 void (*fn)(struct crypto_tfm *, u8 *, const u8 *))
{
	int err;
	unsigned int avail;
	const int bs = XTS_BLOCK_SIZE;
	struct sinfo s = {
		.tfm = crypto_cipher_tfm(ctx->child),
		.fn = fn
	};
	u8 *wsrc;
	u8 *wdst;

	err = blkcipher_walk_virt(d, w);
	if (!w->nbytes)
		return err;

	s.t = (be128 *)w->iv;
	avail = w->nbytes;

	wsrc = w->src.virt.addr;
	wdst = w->dst.virt.addr;

	/* calculate first value of T */
	tw(crypto_cipher_tfm(ctx->tweak), w->iv, w->iv);

	goto first;

	for (;;) {
		do {
			gf128mul_x_ble(s.t, s.t);

first:
			xts_round(&s, wdst, wsrc);

			wsrc += bs;
			wdst += bs;
		} while ((avail -= bs) >= bs);

		err = blkcipher_walk_done(d, w, avail);
		if (!w->nbytes)
			break;

		avail = w->nbytes;

		wsrc = w->src.virt.addr;
		wdst = w->dst.virt.addr;
	}

	return err;
}

static int encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		   struct scatterlist *src, unsigned int nbytes)
{
	struct priv *ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk w;

	blkcipher_walk_init(&w, dst, src, nbytes);
	return crypt(desc, &w, ctx, crypto_cipher_alg(ctx->tweak)->cia_encrypt,
		     crypto_cipher_alg(ctx->child)->cia_encrypt);
}

static int decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		   struct scatterlist *src, unsigned int nbytes)
{
	struct priv *ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk w;

	blkcipher_walk_init(&w, dst, src, nbytes);
	return crypt(desc, &w, ctx, crypto_cipher_alg(ctx->tweak)->cia_encrypt,
		     crypto_cipher_alg(ctx->child)->cia_decrypt);
}
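/*
 * xts_crypt() is an exported bulk variant for implementations that can
 * process several blocks per call (presumably parallel or SIMD cipher
 * glue code; this file itself does not call it).  The caller provides a
 * tweak buffer of req->tbuflen bytes: up to req->tbuflen / XTS_BLOCK_SIZE
 * tweak values are precomputed into req->tbuf, and a single
 * req->crypt_fn() call then encrypts or decrypts that many blocks at once.
 */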
int xts_crypt(struct blkcipher_desc *desc, struct scatterlist *sdst,
	      struct scatterlist *ssrc, unsigned int nbytes,
	      struct xts_crypt_req *req)
{
	const unsigned int bsize = XTS_BLOCK_SIZE;
	const unsigned int max_blks = req->tbuflen / bsize;
	struct blkcipher_walk walk;
	unsigned int nblocks;
	be128 *src, *dst, *t;
	be128 *t_buf = req->tbuf;
	int err, i;

	BUG_ON(max_blks < 1);

	blkcipher_walk_init(&walk, sdst, ssrc, nbytes);

	err = blkcipher_walk_virt(desc, &walk);
	nbytes = walk.nbytes;
	if (!nbytes)
		return err;

	nblocks = min(nbytes / bsize, max_blks);
	src = (be128 *)walk.src.virt.addr;
	dst = (be128 *)walk.dst.virt.addr;

	/* calculate first value of T */
	req->tweak_fn(req->tweak_ctx, (u8 *)&t_buf[0], walk.iv);

	i = 0;
	goto first;

	for (;;) {
		do {
			for (i = 0; i < nblocks; i++) {
				gf128mul_x_ble(&t_buf[i], t);
first:
				t = &t_buf[i];

				/* PP <- T xor P */
				be128_xor(dst + i, t, src + i);
			}

			/* CC <- E(Key1,PP) */
			req->crypt_fn(req->crypt_ctx, (u8 *)dst,
				      nblocks * bsize);

			/* C <- T xor CC */
			for (i = 0; i < nblocks; i++)
				be128_xor(dst + i, dst + i, &t_buf[i]);

			src += nblocks;
			dst += nblocks;
			nbytes -= nblocks * bsize;
			nblocks = min(nbytes / bsize, max_blks);
		} while (nblocks > 0);

		*(be128 *)walk.iv = *t;

		err = blkcipher_walk_done(desc, &walk, nbytes);
		nbytes = walk.nbytes;
		if (!nbytes)
			break;

		nblocks = min(nbytes / bsize, max_blks);
		src = (be128 *)walk.src.virt.addr;
		dst = (be128 *)walk.dst.virt.addr;
	}

	return err;
}
EXPORT_SYMBOL_GPL(xts_crypt);

static int init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_cipher *cipher;
	struct crypto_instance *inst = (void *)tfm->__crt_alg;
	struct crypto_spawn *spawn = crypto_instance_ctx(inst);
	struct priv *ctx = crypto_tfm_ctx(tfm);
	u32 *flags = &tfm->crt_flags;

	cipher = crypto_spawn_cipher(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	if (crypto_cipher_blocksize(cipher) != XTS_BLOCK_SIZE) {
		*flags |= CRYPTO_TFM_RES_BAD_BLOCK_LEN;
		crypto_free_cipher(cipher);
		return -EINVAL;
	}

	ctx->child = cipher;

	cipher = crypto_spawn_cipher(spawn);
	if (IS_ERR(cipher)) {
		crypto_free_cipher(ctx->child);
		return PTR_ERR(cipher);
	}

	/* this check isn't really needed, leave it here just in case */
	if (crypto_cipher_blocksize(cipher) != XTS_BLOCK_SIZE) {
		crypto_free_cipher(cipher);
		crypto_free_cipher(ctx->child);
		*flags |= CRYPTO_TFM_RES_BAD_BLOCK_LEN;
		return -EINVAL;
	}

	ctx->tweak = cipher;

	return 0;
}

static void exit_tfm(struct crypto_tfm *tfm)
{
	struct priv *ctx = crypto_tfm_ctx(tfm);
	crypto_free_cipher(ctx->child);
	crypto_free_cipher(ctx->tweak);
}
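/*
 * Template instantiation: alloc() below wraps a single-block cipher such
 * as "aes" into an instance named "xts(aes)".  The instance advertises
 * twice the key size of the underlying cipher (the key is Key1 || Key2)
 * and uses one cipher block as the IV/tweak.
 */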
static struct crypto_instance *alloc(struct rtattr **tb)
{
	struct crypto_instance *inst;
	struct crypto_alg *alg;
	int err;

	err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_BLKCIPHER);
	if (err)
		return ERR_PTR(err);

	alg = crypto_get_attr_alg(tb, CRYPTO_ALG_TYPE_CIPHER,
				  CRYPTO_ALG_TYPE_MASK);
	if (IS_ERR(alg))
		return ERR_CAST(alg);

	inst = crypto_alloc_instance("xts", alg);
	if (IS_ERR(inst))
		goto out_put_alg;

	inst->alg.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER;
	inst->alg.cra_priority = alg->cra_priority;
	inst->alg.cra_blocksize = alg->cra_blocksize;

	if (alg->cra_alignmask < 7)
		inst->alg.cra_alignmask = 7;
	else
		inst->alg.cra_alignmask = alg->cra_alignmask;

	inst->alg.cra_type = &crypto_blkcipher_type;

	inst->alg.cra_blkcipher.ivsize = alg->cra_blocksize;
	inst->alg.cra_blkcipher.min_keysize =
		2 * alg->cra_cipher.cia_min_keysize;
	inst->alg.cra_blkcipher.max_keysize =
		2 * alg->cra_cipher.cia_max_keysize;

	inst->alg.cra_ctxsize = sizeof(struct priv);

	inst->alg.cra_init = init_tfm;
	inst->alg.cra_exit = exit_tfm;

	inst->alg.cra_blkcipher.setkey = setkey;
	inst->alg.cra_blkcipher.encrypt = encrypt;
	inst->alg.cra_blkcipher.decrypt = decrypt;

out_put_alg:
	crypto_mod_put(alg);
	return inst;
}

static void free(struct crypto_instance *inst)
{
	crypto_drop_spawn(crypto_instance_ctx(inst));
	kfree(inst);
}

static struct crypto_template crypto_tmpl = {
	.name = "xts",
	.alloc = alloc,
	.free = free,
	.module = THIS_MODULE,
};

static int __init crypto_module_init(void)
{
	return crypto_register_template(&crypto_tmpl);
}

static void __exit crypto_module_exit(void)
{
	crypto_unregister_template(&crypto_tmpl);
}

module_init(crypto_module_init);
module_exit(crypto_module_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("XTS block cipher mode");
MODULE_ALIAS_CRYPTO("xts");
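/*
 * Example usage via the blkcipher API -- a minimal sketch, assuming the
 * "aes" cipher is available; error handling is omitted, and sg_src/sg_dst
 * are hypothetical scatterlists prepared by the caller:
 *
 *	struct crypto_blkcipher *tfm;
 *	struct blkcipher_desc desc;
 *	u8 key[64];		// Key1 || Key2, i.e. AES-256-XTS
 *	u8 iv[16] = { 0 };	// typically the little-endian sector number
 *
 *	tfm = crypto_alloc_blkcipher("xts(aes)", 0, 0);
 *	crypto_blkcipher_setkey(tfm, key, sizeof(key));
 *	crypto_blkcipher_set_iv(tfm, iv, sizeof(iv));
 *	desc.tfm = tfm;
 *	desc.flags = 0;
 *	crypto_blkcipher_encrypt(&desc, &sg_dst, &sg_src, nbytes);
 *	crypto_free_blkcipher(tfm);
 */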