Lines matching refs: dmreq

Cross-reference of the dmreq symbol in the Linux kernel's drivers/md/dm-crypt.c. The leading number on each match is the source line; the trailing "in foo() argument/local" annotation names the enclosing function and whether dmreq is bound there as a parameter or a local variable.
113 struct dm_crypt_request *dmreq);
115 struct dm_crypt_request *dmreq);
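The two fragments at 113/115 are the continuation lines of the generator and post members of struct crypt_iv_operations, the per-IV-mode ops table that every crypt_iv_*_gen()/_post() match below implements. A sketch of that table, reconstructed from mainline (the exact member set may differ slightly across kernel versions):

struct crypt_iv_operations {
        int (*ctr)(struct crypt_config *cc, struct dm_target *ti,
                   const char *opts);
        void (*dtr)(struct crypt_config *cc);
        int (*init)(struct crypt_config *cc);
        int (*wipe)(struct crypt_config *cc);
        int (*generator)(struct crypt_config *cc, u8 *iv,
                         struct dm_crypt_request *dmreq);  /* match at 113 */
        int (*post)(struct crypt_config *cc, u8 *iv,
                    struct dm_crypt_request *dmreq);       /* match at 115 */
};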
361 struct dm_crypt_request *dmreq) in crypt_iv_plain_gen() argument
364 *(__le32 *)iv = cpu_to_le32(dmreq->iv_sector & 0xffffffff); in crypt_iv_plain_gen()
370 struct dm_crypt_request *dmreq) in crypt_iv_plain64_gen() argument
373 *(__le64 *)iv = cpu_to_le64(dmreq->iv_sector); in crypt_iv_plain64_gen()
379 struct dm_crypt_request *dmreq) in crypt_iv_plain64be_gen() argument
383 *(__be64 *)&iv[cc->iv_size - sizeof(u64)] = cpu_to_be64(dmreq->iv_sector); in crypt_iv_plain64be_gen()
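The plain family encodes the sector number directly into the IV buffer: plain truncates it to 32 bits (361-364), plain64 stores all 64 bits little-endian (370-373), and plain64be stores it big-endian at the tail of the IV (379-383). The full generator around the first match, reconstructed from mainline:

static int crypt_iv_plain_gen(struct crypt_config *cc, u8 *iv,
                              struct dm_crypt_request *dmreq)
{
        /* Zero the IV, then place the 32-bit truncated sector at its head. */
        memset(iv, 0, cc->iv_size);
        *(__le32 *)iv = cpu_to_le32(dmreq->iv_sector & 0xffffffff);

        return 0;
}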
389 struct dm_crypt_request *dmreq) in crypt_iv_essiv_gen() argument
396 *(__le64 *)iv = cpu_to_le64(dmreq->iv_sector); in crypt_iv_essiv_gen()
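The essiv generator (389-396) writes the same plain64-style sector number; the actual encryption of the IV with the hashed key is delegated to the crypto API's essiv template selected at cipher construction, so the body is short. Reconstructed from mainline:

static int crypt_iv_essiv_gen(struct crypt_config *cc, u8 *iv,
                              struct dm_crypt_request *dmreq)
{
        /*
         * ESSIV encryption of the IV is handled by the essiv crypto
         * template; only the plain sector number is written here.
         */
        memset(iv, 0, cc->iv_size);
        *(__le64 *)iv = cpu_to_le64(dmreq->iv_sector);

        return 0;
}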
437 struct dm_crypt_request *dmreq) in crypt_iv_benbi_gen() argument
443 val = cpu_to_be64(((u64)dmreq->iv_sector << cc->iv_gen_private.benbi.shift) + 1); in crypt_iv_benbi_gen()
450 struct dm_crypt_request *dmreq) in crypt_iv_null_gen() argument
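benbi (loop-AES compatible) converts the sector into a big-endian narrow-block count, starting at 1, placed at the end of the IV; crypt_iv_null_gen (450) simply zeroes the buffer. A sketch of the benbi generator around the match at 443, assuming mainline's put_unaligned() placement:

static int crypt_iv_benbi_gen(struct crypt_config *cc, u8 *iv,
                              struct dm_crypt_request *dmreq)
{
        __be64 val;

        memset(iv, 0, cc->iv_size - sizeof(u64)); /* final u64 written below */

        val = cpu_to_be64(((u64)dmreq->iv_sector << cc->iv_gen_private.benbi.shift) + 1);
        put_unaligned(val, (__be64 *)(iv + cc->iv_size - sizeof(u64)));

        return 0;
}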
520 struct dm_crypt_request *dmreq, u8 *data) in crypt_iv_lmk_one() argument
535 buf[0] = cpu_to_le32(dmreq->iv_sector & 0xFFFFFFFF); in crypt_iv_lmk_one()
536 buf[1] = cpu_to_le32((((u64)dmreq->iv_sector >> 32) & 0x00FFFFFF) | 0x80000000); in crypt_iv_lmk_one()
547 struct dm_crypt_request *dmreq) in crypt_iv_lmk_gen() argument
552 if (bio_data_dir(dmreq->ctx->bio_in) == WRITE) { in crypt_iv_lmk_gen()
553 sg = crypt_get_sg_data(cc, dmreq->sg_in); in crypt_iv_lmk_gen()
555 crypt_iv_lmk_one(cc, iv, dmreq, src + sg->offset); in crypt_iv_lmk_gen()
563 struct dm_crypt_request *dmreq) in crypt_iv_lmk_post() argument
568 if (bio_data_dir(dmreq->ctx->bio_in) == WRITE) in crypt_iv_lmk_post()
571 sg = crypt_get_sg_data(cc, dmreq->sg_out); in crypt_iv_lmk_post()
573 crypt_iv_lmk_one(cc, iv, dmreq, dst + sg->offset); in crypt_iv_lmk_post()
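lmk (loop-AES multikey compat) derives the IV from a hash over the sector payload plus the sector number encoded at 535-536 (note the 0x80000000 marker or'd into the high word). The gen/post pair is asymmetric: on writes the IV comes from the plaintext before encryption (552-555); on reads it is recomputed from the decrypted data and XORed back into the first block (571-573). A sketch of the read-side hook, assuming the kmap_local_page() API of current mainline:

static int crypt_iv_lmk_post(struct crypt_config *cc, u8 *iv,
                             struct dm_crypt_request *dmreq)
{
        struct scatterlist *sg;
        u8 *dst;
        int r;

        if (bio_data_dir(dmreq->ctx->bio_in) == WRITE)
                return 0;

        sg = crypt_get_sg_data(cc, dmreq->sg_out);
        dst = kmap_local_page(sg_page(sg));
        r = crypt_iv_lmk_one(cc, iv, dmreq, dst + sg->offset);

        /* Tweak the first block of the plaintext sector */
        if (!r)
                crypto_xor(dst + sg->offset, iv, cc->iv_size);

        kunmap_local(dst);
        return r;
}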
641 struct dm_crypt_request *dmreq, u8 *data) in crypt_iv_tcw_whitening() argument
644 __le64 sector = cpu_to_le64(dmreq->iv_sector); in crypt_iv_tcw_whitening()
665 struct dm_crypt_request *dmreq) in crypt_iv_tcw_gen() argument
669 __le64 sector = cpu_to_le64(dmreq->iv_sector); in crypt_iv_tcw_gen()
673 if (bio_data_dir(dmreq->ctx->bio_in) != WRITE) { in crypt_iv_tcw_gen()
674 sg = crypt_get_sg_data(cc, dmreq->sg_in); in crypt_iv_tcw_gen()
676 crypt_iv_tcw_whitening(cc, dmreq, src + sg->offset); in crypt_iv_tcw_gen()
690 struct dm_crypt_request *dmreq) in crypt_iv_tcw_post() argument
695 if (bio_data_dir(dmreq->ctx->bio_in) != WRITE) in crypt_iv_tcw_post()
699 sg = crypt_get_sg_data(cc, dmreq->sg_out); in crypt_iv_tcw_post()
701 crypt_iv_tcw_whitening(cc, dmreq, dst + sg->offset); in crypt_iv_tcw_post()
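tcw (TrueCrypt-compatible whitening) mirrors that asymmetry with the direction tests inverted: reads strip the per-sector whitening from the ciphertext before decryption (673-676), writes apply it to the ciphertext afterwards (695-701), and crypt_iv_tcw_whitening() mixes the sector value from 644 into the whitening block. The IV computed in the gen path is the stored seed XORed with the sector, repeated across the IV width; a fragment reconstructed from mainline:

        struct iv_tcw_private *tcw = &cc->iv_gen_private.tcw;

        /* IV = iv_seed XOR sector, the sector pattern repeated to iv_size */
        memcpy(iv, tcw->iv_seed, cc->iv_size);
        crypto_xor(iv, (u8 *)&sector, 8);
        if (cc->iv_size > 8)
                crypto_xor(&iv[8], (u8 *)&sector, cc->iv_size - 8);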
708 struct dm_crypt_request *dmreq) in crypt_iv_random_gen() argument
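crypt_iv_random_gen (708) never dereferences dmreq, which is why the cross-referencer tags it only as an argument; the IV is pure randomness and must be persisted in the integrity metadata, so it is only usable with AEAD modes. The whole body, reconstructed from mainline:

static int crypt_iv_random_gen(struct crypt_config *cc, u8 *iv,
                               struct dm_crypt_request *dmreq)
{
        /* Used only for writes; the random IV is stored alongside the tag. */
        get_random_bytes(iv, cc->iv_size);
        return 0;
}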
732 struct dm_crypt_request *dmreq) in crypt_iv_eboiv_gen() argument
753 *(__le64 *)buf = cpu_to_le64(dmreq->iv_sector * cc->sector_size); in crypt_iv_eboiv_gen()
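eboiv (BitLocker compat) encrypts the sector's byte offset, built at 753, with the volume key to produce the IV. A simplified sketch of the core step as in one mainline version, with request allocation, completion waiting, and error paths elided:

        /* buf holds the little-endian byte offset of the sector, zero-padded */
        memset(buf, 0, cc->iv_size);
        *(__le64 *)buf = cpu_to_le64(dmreq->iv_sector * cc->sector_size);

        /* CBC-encrypt a zero block with IV = offset; the output is E_K(offset) */
        sg_init_one(&src, page_address(ZERO_PAGE(0)), cc->iv_size);
        sg_init_one(&dst, iv, cc->iv_size);
        skcipher_request_set_crypt(req, &src, &dst, cc->iv_size, buf);
        err = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);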
937 static int crypt_iv_elephant(struct crypt_config *cc, struct dm_crypt_request *dmreq) in crypt_iv_elephant() argument
955 *(__le64 *)es = cpu_to_le64(dmreq->iv_sector * cc->sector_size); in crypt_iv_elephant()
973 sg = crypt_get_sg_data(cc, dmreq->sg_out); in crypt_iv_elephant()
978 if (bio_data_dir(dmreq->ctx->bio_in) == WRITE) { in crypt_iv_elephant()
979 sg2 = crypt_get_sg_data(cc, dmreq->sg_in); in crypt_iv_elephant()
985 if (bio_data_dir(dmreq->ctx->bio_in) != WRITE) { in crypt_iv_elephant()
995 if (bio_data_dir(dmreq->ctx->bio_in) == WRITE) { in crypt_iv_elephant()
1011 struct dm_crypt_request *dmreq) in crypt_iv_elephant_gen() argument
1015 if (bio_data_dir(dmreq->ctx->bio_in) == WRITE) { in crypt_iv_elephant_gen()
1016 r = crypt_iv_elephant(cc, dmreq); in crypt_iv_elephant_gen()
1021 return crypt_iv_eboiv_gen(cc, iv, dmreq); in crypt_iv_elephant_gen()
1025 struct dm_crypt_request *dmreq) in crypt_iv_elephant_post() argument
1027 if (bio_data_dir(dmreq->ctx->bio_in) != WRITE) in crypt_iv_elephant_post()
1028 return crypt_iv_elephant(cc, dmreq); in crypt_iv_elephant_post()
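The elephant pieces (937-1028) implement BitLocker's CBC-with-diffuser mode: crypt_iv_elephant() derives a whitening key from the byte offset (955), copies the bio payload into sg_out on writes (978-979), runs the diffuser decrypt rounds on reads (985), XORs the whitening over the sector, and runs the diffuser encrypt rounds on writes (995). The wrappers at 1011-1028 route it around the cipher and reuse eboiv for the IV itself; they are short enough to quote in full (reconstructed from mainline):

static int crypt_iv_elephant_gen(struct crypt_config *cc, u8 *iv,
                                 struct dm_crypt_request *dmreq)
{
        int r;

        /* Writes run the diffuser before encryption ... */
        if (bio_data_dir(dmreq->ctx->bio_in) == WRITE) {
                r = crypt_iv_elephant(cc, dmreq);
                if (r)
                        return r;
        }

        return crypt_iv_eboiv_gen(cc, iv, dmreq);
}

static int crypt_iv_elephant_post(struct crypt_config *cc, u8 *iv,
                                  struct dm_crypt_request *dmreq)
{
        /* ... reads run it after decryption. */
        if (bio_data_dir(dmreq->ctx->bio_in) != WRITE)
                return crypt_iv_elephant(cc, dmreq);

        return 0;
}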
1229 static void *req_of_dmreq(struct crypt_config *cc, struct dm_crypt_request *dmreq) in req_of_dmreq() argument
1231 return (void *)((char *)dmreq - cc->dmreq_start); in req_of_dmreq()
1235 struct dm_crypt_request *dmreq) in iv_of_dmreq() argument
1238 return (u8 *)ALIGN((unsigned long)(dmreq + 1), in iv_of_dmreq()
1241 return (u8 *)ALIGN((unsigned long)(dmreq + 1), in iv_of_dmreq()
1246 struct dm_crypt_request *dmreq) in org_iv_of_dmreq() argument
1248 return iv_of_dmreq(cc, dmreq) + cc->iv_size; in org_iv_of_dmreq()
1252 struct dm_crypt_request *dmreq) in org_sector_of_dmreq() argument
1254 u8 *ptr = iv_of_dmreq(cc, dmreq) + cc->iv_size + cc->iv_size; in org_sector_of_dmreq()
1260 struct dm_crypt_request *dmreq) in org_tag_of_dmreq() argument
1262 u8 *ptr = iv_of_dmreq(cc, dmreq) + cc->iv_size + in org_tag_of_dmreq()
1269 struct dm_crypt_request *dmreq) in tag_from_dmreq() argument
1271 struct convert_context *ctx = dmreq->ctx; in tag_from_dmreq()
1274 return &io->integrity_metadata[*org_tag_of_dmreq(cc, dmreq) * in tag_from_dmreq()
1279 struct dm_crypt_request *dmreq) in iv_tag_from_dmreq() argument
1281 return tag_from_dmreq(cc, dmreq) + cc->integrity_tag_size; in iv_tag_from_dmreq()
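The helpers at 1229-1281 decode the single allocation behind each crypto request. Chaining their offsets implies this per-request layout (a reconstruction; the padding comes from the ALIGN() calls at 1238/1241, which round up to the crypto driver's alignment requirement):

/*
 *  <------ cc->dmreq_start ------>
 * | skcipher/aead request | dm_crypt_request | pad | iv | org_iv | org_sector | tag offset |
 *                           ^dmreq                  ^cc->iv_size  ^__le64      ^unsigned int
 *                                                         ^cc->iv_size
 *
 * req_of_dmreq() steps back by cc->dmreq_start (1231); org_sector_of_dmreq()
 * skips both IV copies, hence "+ cc->iv_size + cc->iv_size" at 1254; and
 * tag_from_dmreq() turns the stored tag offset into a pointer into the io's
 * integrity_metadata buffer (1274).
 */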
1291 struct dm_crypt_request *dmreq; in crypt_convert_block_aead() local
1302 dmreq = dmreq_of_req(cc, req); in crypt_convert_block_aead()
1303 dmreq->iv_sector = ctx->cc_sector; in crypt_convert_block_aead()
1305 dmreq->iv_sector >>= cc->sector_shift; in crypt_convert_block_aead()
1306 dmreq->ctx = ctx; in crypt_convert_block_aead()
1308 *org_tag_of_dmreq(cc, dmreq) = tag_offset; in crypt_convert_block_aead()
1310 sector = org_sector_of_dmreq(cc, dmreq); in crypt_convert_block_aead()
1313 iv = iv_of_dmreq(cc, dmreq); in crypt_convert_block_aead()
1314 org_iv = org_iv_of_dmreq(cc, dmreq); in crypt_convert_block_aead()
1315 tag = tag_from_dmreq(cc, dmreq); in crypt_convert_block_aead()
1316 tag_iv = iv_tag_from_dmreq(cc, dmreq); in crypt_convert_block_aead()
1323 sg_init_table(dmreq->sg_in, 4); in crypt_convert_block_aead()
1324 sg_set_buf(&dmreq->sg_in[0], sector, sizeof(uint64_t)); in crypt_convert_block_aead()
1325 sg_set_buf(&dmreq->sg_in[1], org_iv, cc->iv_size); in crypt_convert_block_aead()
1326 sg_set_page(&dmreq->sg_in[2], bv_in.bv_page, cc->sector_size, bv_in.bv_offset); in crypt_convert_block_aead()
1327 sg_set_buf(&dmreq->sg_in[3], tag, cc->integrity_tag_size); in crypt_convert_block_aead()
1329 sg_init_table(dmreq->sg_out, 4); in crypt_convert_block_aead()
1330 sg_set_buf(&dmreq->sg_out[0], sector, sizeof(uint64_t)); in crypt_convert_block_aead()
1331 sg_set_buf(&dmreq->sg_out[1], org_iv, cc->iv_size); in crypt_convert_block_aead()
1332 sg_set_page(&dmreq->sg_out[2], bv_out.bv_page, cc->sector_size, bv_out.bv_offset); in crypt_convert_block_aead()
1333 sg_set_buf(&dmreq->sg_out[3], tag, cc->integrity_tag_size); in crypt_convert_block_aead()
1340 r = cc->iv_gen_ops->generator(cc, org_iv, dmreq); in crypt_convert_block_aead()
1353 aead_request_set_crypt(req, dmreq->sg_in, dmreq->sg_out, in crypt_convert_block_aead()
1360 aead_request_set_crypt(req, dmreq->sg_in, dmreq->sg_out, in crypt_convert_block_aead()
1378 r = cc->iv_gen_ops->post(cc, org_iv, dmreq); in crypt_convert_block_aead()
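crypt_convert_block_aead() packs each sector into 4-entry scatterlists (1323-1333): the on-disk sector number and the original IV come first as authenticated-but-unencrypted data, then the payload, then the integrity tag. The two aead_request_set_crypt() matches at 1353/1360 differ only in length, because on decryption the tag travels with the input. A hedged sketch of that step, reconstructed from mainline:

        /* sector + org_iv form the AAD: authenticated, never encrypted */
        aead_request_set_ad(req, sizeof(uint64_t) + cc->iv_size);
        if (bio_data_dir(ctx->bio_in) == WRITE) {
                aead_request_set_crypt(req, dmreq->sg_in, dmreq->sg_out,
                                       cc->sector_size, iv);
                r = crypto_aead_encrypt(req);
        } else {
                aead_request_set_crypt(req, dmreq->sg_in, dmreq->sg_out,
                                       cc->sector_size + cc->integrity_tag_size,
                                       iv);
                r = crypto_aead_decrypt(req);
        }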
1394 struct dm_crypt_request *dmreq; in crypt_convert_block_skcipher() local
1403 dmreq = dmreq_of_req(cc, req); in crypt_convert_block_skcipher()
1404 dmreq->iv_sector = ctx->cc_sector; in crypt_convert_block_skcipher()
1406 dmreq->iv_sector >>= cc->sector_shift; in crypt_convert_block_skcipher()
1407 dmreq->ctx = ctx; in crypt_convert_block_skcipher()
1409 *org_tag_of_dmreq(cc, dmreq) = tag_offset; in crypt_convert_block_skcipher()
1411 iv = iv_of_dmreq(cc, dmreq); in crypt_convert_block_skcipher()
1412 org_iv = org_iv_of_dmreq(cc, dmreq); in crypt_convert_block_skcipher()
1413 tag_iv = iv_tag_from_dmreq(cc, dmreq); in crypt_convert_block_skcipher()
1415 sector = org_sector_of_dmreq(cc, dmreq); in crypt_convert_block_skcipher()
1419 sg_in = &dmreq->sg_in[0]; in crypt_convert_block_skcipher()
1420 sg_out = &dmreq->sg_out[0]; in crypt_convert_block_skcipher()
1433 r = cc->iv_gen_ops->generator(cc, org_iv, dmreq); in crypt_convert_block_skcipher()
1455 r = cc->iv_gen_ops->post(cc, org_iv, dmreq); in crypt_convert_block_skcipher()
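The skcipher path is simpler: one scatterlist entry per side (1419-1420), pointing straight at the bio pages, with the direction picked at the end. A sketch of the tail of crypt_convert_block_skcipher(), reconstructed from mainline:

        sg_init_table(sg_in, 1);
        sg_set_page(sg_in, bv_in.bv_page, cc->sector_size, bv_in.bv_offset);

        sg_init_table(sg_out, 1);
        sg_set_page(sg_out, bv_out.bv_page, cc->sector_size, bv_out.bv_offset);

        /* org_iv keeps the generated IV; iv is the working copy (cf. 1433) */
        skcipher_request_set_crypt(req, sg_in, sg_out, cc->sector_size, iv);

        if (bio_data_dir(ctx->bio_in) == WRITE)
                r = crypto_skcipher_encrypt(req);
        else
                r = crypto_skcipher_decrypt(req);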
2203 struct dm_crypt_request *dmreq = data; in kcryptd_async_done() local
2204 struct convert_context *ctx = dmreq->ctx; in kcryptd_async_done()
2219 error = cc->iv_gen_ops->post(cc, org_iv_of_dmreq(cc, dmreq), dmreq); in kcryptd_async_done()
2222 sector_t s = le64_to_cpu(*org_sector_of_dmreq(cc, dmreq)); in kcryptd_async_done()
2235 crypt_free_req(cc, req_of_dmreq(cc, dmreq), io->base_bio); in kcryptd_async_done()
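kcryptd_async_done() (2203-2235) is the completion callback for the async crypto path: -EINPROGRESS only signals that a backlogged request has started, reads get the IV post hook at 2219, -EBADMSG is an integrity failure logged with the original sector recovered at 2222, and the request is released through req_of_dmreq() before the per-context pending count drops. A hedged sketch of its shape, with the completion tail abbreviated:

static void kcryptd_async_done(void *data, int error)
{
        struct dm_crypt_request *dmreq = data;
        struct convert_context *ctx = dmreq->ctx;
        struct dm_crypt_io *io = container_of(ctx, struct dm_crypt_io, ctx);
        struct crypt_config *cc = io->cc;

        /* Backlogged request is now being processed; unblock crypt_convert() */
        if (error == -EINPROGRESS) {
                complete(&ctx->restart);
                return;
        }

        if (!error && cc->iv_gen_ops && cc->iv_gen_ops->post)
                error = cc->iv_gen_ops->post(cc, org_iv_of_dmreq(cc, dmreq), dmreq);

        if (error == -EBADMSG) {
                sector_t s = le64_to_cpu(*org_sector_of_dmreq(cc, dmreq));

                DMERR_LIMIT("INTEGRITY AEAD ERROR, sector %llu",
                            (unsigned long long)s);
                io->error = BLK_STS_PROTECTION;
        } else if (error < 0)
                io->error = BLK_STS_IOERR;

        crypt_free_req(cc, req_of_dmreq(cc, dmreq), io->base_bio);

        if (!atomic_dec_and_test(&ctx->cc_pending))
                return;
        /* ... hand off to the read/write completion paths ... */
}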