/* XTS: as defined in IEEE1619/D16
 * http://grouper.ieee.org/groups/1619/email/pdf00086.pdf
 * (sector sizes which are not a multiple of 16 bytes are,
 * however, currently unsupported)
 *
 * Copyright (c) 2007 Rik Snel <rsnel@cube.dyndns.org>
 *
 * Based on ecb.c
 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 */
#include <crypto/algapi.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>

#include <crypto/b128ops.h>
#include <crypto/gf128mul.h>

struct priv {
        struct crypto_cipher *child;
        struct crypto_cipher *tweak;
};

static int setkey(struct crypto_tfm *parent, const u8 *key,
                  unsigned int keylen)
{
        struct priv *ctx = crypto_tfm_ctx(parent);
        struct crypto_cipher *child = ctx->tweak;
        u32 *flags = &parent->crt_flags;
        int err;

        /* the key consists of two keys of equal size concatenated,
         * therefore the length must be even */
        if (keylen % 2) {
                /* tell the user why there was an error */
                *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
                return -EINVAL;
        }

        /* we need two cipher instances: one to compute the initial 'tweak'
         * by encrypting the IV (usually the 'plain' IV) and the other
         * one to encrypt and decrypt the data */

        /* tweak cipher, uses Key2 i.e. the second half of *key */
        crypto_cipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
        crypto_cipher_set_flags(child, crypto_tfm_get_flags(parent) &
                                       CRYPTO_TFM_REQ_MASK);
        err = crypto_cipher_setkey(child, key + keylen/2, keylen/2);
        if (err)
                return err;

        crypto_tfm_set_flags(parent, crypto_cipher_get_flags(child) &
                                     CRYPTO_TFM_RES_MASK);

        child = ctx->child;

        /* data cipher, uses Key1 i.e. the first half of *key */
        crypto_cipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
        crypto_cipher_set_flags(child, crypto_tfm_get_flags(parent) &
                                       CRYPTO_TFM_REQ_MASK);
        err = crypto_cipher_setkey(child, key, keylen/2);
        if (err)
                return err;

        crypto_tfm_set_flags(parent, crypto_cipher_get_flags(child) &
                                     CRYPTO_TFM_RES_MASK);

        return 0;
}
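
/*
 * Key layout, for illustration (the concrete sizes assume an "xts(aes)"
 * instantiation): a 32-byte key is accepted as
 *
 *      key[0..15]  -> Key1, programmed into ctx->child (data cipher)
 *      key[16..31] -> Key2, programmed into ctx->tweak (tweak cipher)
 *
 * and a 64-byte key yields the corresponding AES-256 pair.
 */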
struct sinfo {
        be128 *t;
        struct crypto_tfm *tfm;
        void (*fn)(struct crypto_tfm *, u8 *, const u8 *);
};

static inline void xts_round(struct sinfo *s, void *dst, const void *src)
{
        be128_xor(dst, s->t, src);              /* PP <- T xor P */
        s->fn(s->tfm, dst, dst);                /* CC <- E(Key1,PP) */
        be128_xor(dst, dst, s->t);              /* C <- T xor CC */
}

static int crypt(struct blkcipher_desc *d,
                 struct blkcipher_walk *w, struct priv *ctx,
                 void (*tw)(struct crypto_tfm *, u8 *, const u8 *),
                 void (*fn)(struct crypto_tfm *, u8 *, const u8 *))
{
        int err;
        unsigned int avail;
        const int bs = crypto_cipher_blocksize(ctx->child);
        struct sinfo s = {
                .tfm = crypto_cipher_tfm(ctx->child),
                .fn = fn
        };
        u8 *wsrc;
        u8 *wdst;

        err = blkcipher_walk_virt(d, w);
        if (!w->nbytes)
                return err;

        s.t = (be128 *)w->iv;
        avail = w->nbytes;

        wsrc = w->src.virt.addr;
        wdst = w->dst.virt.addr;

        /* calculate first value of T */
        tw(crypto_cipher_tfm(ctx->tweak), w->iv, w->iv);

        goto first;

        for (;;) {
                do {
                        gf128mul_x_ble(s.t, s.t);

first:
                        xts_round(&s, wdst, wsrc);

                        wsrc += bs;
                        wdst += bs;
                } while ((avail -= bs) >= bs);

                err = blkcipher_walk_done(d, w, avail);
                if (!w->nbytes)
                        break;

                avail = w->nbytes;

                wsrc = w->src.virt.addr;
                wdst = w->dst.virt.addr;
        }

        return err;
}

static int encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
                   struct scatterlist *src, unsigned int nbytes)
{
        struct priv *ctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk w;

        blkcipher_walk_init(&w, dst, src, nbytes);
        return crypt(desc, &w, ctx, crypto_cipher_alg(ctx->tweak)->cia_encrypt,
                     crypto_cipher_alg(ctx->child)->cia_encrypt);
}

static int decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
                   struct scatterlist *src, unsigned int nbytes)
{
        struct priv *ctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk w;

        blkcipher_walk_init(&w, dst, src, nbytes);
        return crypt(desc, &w, ctx, crypto_cipher_alg(ctx->tweak)->cia_encrypt,
                     crypto_cipher_alg(ctx->child)->cia_decrypt);
}
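
/*
 * For reference, crypt() above implements the per-block XTS recipe from
 * IEEE 1619, with the tweak handled as a little-endian 128-bit value:
 *
 *      T_0     = E_Key2(IV)                    (the 'tw' call)
 *      C_j     = E_Key1(P_j ^ T_j) ^ T_j       (xts_round via 'fn')
 *      T_(j+1) = T_j * x in GF(2^128)          (gf128mul_x_ble)
 *
 * Decryption reuses the same walk and the same tweak encryption; only
 * 'fn' switches from cia_encrypt to cia_decrypt.
 */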
static int init_tfm(struct crypto_tfm *tfm)
{
        struct crypto_cipher *cipher;
        struct crypto_instance *inst = (void *)tfm->__crt_alg;
        struct crypto_spawn *spawn = crypto_instance_ctx(inst);
        struct priv *ctx = crypto_tfm_ctx(tfm);
        u32 *flags = &tfm->crt_flags;

        cipher = crypto_spawn_cipher(spawn);
        if (IS_ERR(cipher))
                return PTR_ERR(cipher);

        if (crypto_cipher_blocksize(cipher) != 16) {
                *flags |= CRYPTO_TFM_RES_BAD_BLOCK_LEN;
                crypto_free_cipher(cipher);
                return -EINVAL;
        }

        ctx->child = cipher;

        cipher = crypto_spawn_cipher(spawn);
        if (IS_ERR(cipher)) {
                crypto_free_cipher(ctx->child);
                return PTR_ERR(cipher);
        }

        /* this check isn't really needed, leave it here just in case */
        if (crypto_cipher_blocksize(cipher) != 16) {
                crypto_free_cipher(cipher);
                crypto_free_cipher(ctx->child);
                *flags |= CRYPTO_TFM_RES_BAD_BLOCK_LEN;
                return -EINVAL;
        }

        ctx->tweak = cipher;

        return 0;
}

static void exit_tfm(struct crypto_tfm *tfm)
{
        struct priv *ctx = crypto_tfm_ctx(tfm);
        crypto_free_cipher(ctx->child);
        crypto_free_cipher(ctx->tweak);
}

static struct crypto_instance *alloc(struct rtattr **tb)
{
        struct crypto_instance *inst;
        struct crypto_alg *alg;
        int err;

        err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_BLKCIPHER);
        if (err)
                return ERR_PTR(err);

        alg = crypto_get_attr_alg(tb, CRYPTO_ALG_TYPE_CIPHER,
                                  CRYPTO_ALG_TYPE_MASK);
        if (IS_ERR(alg))
                return ERR_PTR(PTR_ERR(alg));

        inst = crypto_alloc_instance("xts", alg);
        if (IS_ERR(inst))
                goto out_put_alg;

        inst->alg.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER;
        inst->alg.cra_priority = alg->cra_priority;
        inst->alg.cra_blocksize = alg->cra_blocksize;

        if (alg->cra_alignmask < 7)
                inst->alg.cra_alignmask = 7;
        else
                inst->alg.cra_alignmask = alg->cra_alignmask;

        inst->alg.cra_type = &crypto_blkcipher_type;

        inst->alg.cra_blkcipher.ivsize = alg->cra_blocksize;
        inst->alg.cra_blkcipher.min_keysize =
                2 * alg->cra_cipher.cia_min_keysize;
        inst->alg.cra_blkcipher.max_keysize =
                2 * alg->cra_cipher.cia_max_keysize;

        inst->alg.cra_ctxsize = sizeof(struct priv);

        inst->alg.cra_init = init_tfm;
        inst->alg.cra_exit = exit_tfm;

        inst->alg.cra_blkcipher.setkey = setkey;
        inst->alg.cra_blkcipher.encrypt = encrypt;
        inst->alg.cra_blkcipher.decrypt = decrypt;

out_put_alg:
        crypto_mod_put(alg);
        return inst;
}

static void free(struct crypto_instance *inst)
{
        crypto_drop_spawn(crypto_instance_ctx(inst));
        kfree(inst);
}

static struct crypto_template crypto_tmpl = {
        .name = "xts",
        .alloc = alloc,
        .free = free,
        .module = THIS_MODULE,
};

static int __init crypto_module_init(void)
{
        return crypto_register_template(&crypto_tmpl);
}

static void __exit crypto_module_exit(void)
{
        crypto_unregister_template(&crypto_tmpl);
}

module_init(crypto_module_init);
module_exit(crypto_module_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("XTS block cipher mode");
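
/*
 * Usage sketch (illustrative): once the template is registered, callers
 * instantiate it by name through the crypto API, e.g. with the blkcipher
 * interface this implementation targets:
 *
 *      struct crypto_blkcipher *tfm = crypto_alloc_blkcipher("xts(aes)", 0, 0);
 *
 *      if (IS_ERR(tfm))
 *              return PTR_ERR(tfm);
 *      crypto_blkcipher_setkey(tfm, key, 64);  (key is Key1 || Key2)
 *
 * where "xts(aes)" wraps this template around the aes cipher and the key is
 * the concatenation described in setkey() above.
 */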