Lines matching refs:dmreq in drivers/md/dm-crypt.c
103 struct dm_crypt_request *dmreq);
105 struct dm_crypt_request *dmreq);
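Lines 103 and 105 are the generator and post members of struct crypt_iv_operations, the per-IV-mode vtable that every group of matches below implements. For orientation, a trimmed sketch of that table (the ctr/dtr/init/wipe members are elided):

    struct crypt_iv_operations {
            int (*generator)(struct crypt_config *cc, u8 *iv,
                             struct dm_crypt_request *dmreq);
            int (*post)(struct crypt_config *cc, u8 *iv,
                        struct dm_crypt_request *dmreq);
    };

generator runs before the cipher operation to produce the IV; post runs after it, which the LMK, TCW and Elephant modes below use to touch the sector data a second time.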
314 struct dm_crypt_request *dmreq) in crypt_iv_plain_gen() argument
317 *(__le32 *)iv = cpu_to_le32(dmreq->iv_sector & 0xffffffff); in crypt_iv_plain_gen()
323 struct dm_crypt_request *dmreq) in crypt_iv_plain64_gen() argument
326 *(__le64 *)iv = cpu_to_le64(dmreq->iv_sector); in crypt_iv_plain64_gen()
332 struct dm_crypt_request *dmreq) in crypt_iv_plain64be_gen() argument
336 *(__be64 *)&iv[cc->iv_size - sizeof(u64)] = cpu_to_be64(dmreq->iv_sector); in crypt_iv_plain64be_gen()
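The three plain variants differ only in width, byte order and placement of the sector number inside the otherwise-zeroed IV. A minimal userspace sketch mirroring lines 317, 326 and 336 (helper names are illustrative; iv_size stands in for cc->iv_size):

    #include <stdint.h>
    #include <string.h>
    #include <endian.h>             /* htole32/htole64/htobe64 (glibc) */

    /* plain: low 32 bits of the sector, little-endian, at the front */
    static void iv_plain(uint8_t *iv, size_t iv_size, uint64_t sector)
    {
            uint32_t v = htole32((uint32_t)(sector & 0xffffffff));
            memset(iv, 0, iv_size);
            memcpy(iv, &v, sizeof(v));
    }

    /* plain64: the full 64-bit sector number, little-endian */
    static void iv_plain64(uint8_t *iv, size_t iv_size, uint64_t sector)
    {
            uint64_t v = htole64(sector);
            memset(iv, 0, iv_size);
            memcpy(iv, &v, sizeof(v));
    }

    /* plain64be: 64-bit sector, big-endian, at the *end* of the IV */
    static void iv_plain64be(uint8_t *iv, size_t iv_size, uint64_t sector)
    {
            uint64_t v = htobe64(sector);
            memset(iv, 0, iv_size);
            memcpy(iv + iv_size - sizeof(v), &v, sizeof(v));
    }

plain wraps every 2^32 sectors, which is why plain64 exists; plain64be exists for compatibility with implementations that store the sector number big-endian.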
342 struct dm_crypt_request *dmreq) in crypt_iv_essiv_gen() argument
349 *(__le64 *)iv = cpu_to_le64(dmreq->iv_sector); in crypt_iv_essiv_gen()
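crypt_iv_essiv_gen (line 342) looks surprisingly plain because, in current kernels, the actual ESSIV transform (encrypting the sector number under a hash of the volume key) is delegated to the crypto API's essiv template when the cipher is constructed; the generator only supplies the little-endian sector number. The full upstream body is essentially:

    static int crypt_iv_essiv_gen(struct crypt_config *cc, u8 *iv,
                                  struct dm_crypt_request *dmreq)
    {
            /*
             * ESSIV encryption of the IV is handled by the crypto API,
             * so just pass the plain sector number here.
             */
            memset(iv, 0, cc->iv_size);
            *(__le64 *)iv = cpu_to_le64(dmreq->iv_sector);
            return 0;
    }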
389 struct dm_crypt_request *dmreq) in crypt_iv_benbi_gen() argument
395 val = cpu_to_be64(((u64)dmreq->iv_sector << cc->iv_gen_private.benbi.shift) + 1); in crypt_iv_benbi_gen()
402 struct dm_crypt_request *dmreq) in crypt_iv_null_gen() argument
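crypt_iv_benbi_gen (line 389) stores a 1-based, big-endian count of narrow cipher blocks at the tail of the IV; benbi.shift converts the 512-byte sector number into that block count (9 minus log2 of the cipher block size, per the upstream ctr). crypt_iv_null_gen (line 402) simply zeroes the IV. A userspace sketch of the benbi layout, mirroring line 395 (names illustrative):

    #include <stdint.h>
    #include <string.h>
    #include <endian.h>

    static void iv_benbi(uint8_t *iv, size_t iv_size,
                         uint64_t sector, int shift)
    {
            uint64_t v = htobe64((sector << shift) + 1);  /* 1-based, BE */
            memset(iv, 0, iv_size);
            memcpy(iv + iv_size - sizeof(v), &v, sizeof(v));
    }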
478 struct dm_crypt_request *dmreq, in crypt_iv_lmk_one() argument
505 buf[0] = cpu_to_le32(dmreq->iv_sector & 0xFFFFFFFF); in crypt_iv_lmk_one()
506 buf[1] = cpu_to_le32((((u64)dmreq->iv_sector >> 32) & 0x00FFFFFF) | 0x80000000); in crypt_iv_lmk_one()
526 struct dm_crypt_request *dmreq) in crypt_iv_lmk_gen() argument
532 if (bio_data_dir(dmreq->ctx->bio_in) == WRITE) { in crypt_iv_lmk_gen()
533 sg = crypt_get_sg_data(cc, dmreq->sg_in); in crypt_iv_lmk_gen()
535 r = crypt_iv_lmk_one(cc, iv, dmreq, src + sg->offset); in crypt_iv_lmk_gen()
544 struct dm_crypt_request *dmreq) in crypt_iv_lmk_post() argument
550 if (bio_data_dir(dmreq->ctx->bio_in) == WRITE) in crypt_iv_lmk_post()
553 sg = crypt_get_sg_data(cc, dmreq->sg_out); in crypt_iv_lmk_post()
555 r = crypt_iv_lmk_one(cc, iv, dmreq, dst + sg->offset); in crypt_iv_lmk_post()
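LMK (loop-AES compatible) derives the IV from the sector's own content, so the two hooks are direction-dependent. crypt_iv_lmk_one (line 478) MD5-hashes an optional seed, the bulk of the 512-byte sector data, and the sector number encoded as at lines 505-506 (note the 0x80000000 marker OR-ed into the high word). Condensed from the matches above, with kmap and error handling elided:

    /* gen (532-535): WRITE hashes the plaintext, READ gets a zero IV */
    if (bio_data_dir(dmreq->ctx->bio_in) == WRITE)
            r = crypt_iv_lmk_one(cc, iv, dmreq, src + sg->offset);
    else
            memset(iv, 0, cc->iv_size);

    /* post (550-555): WRITE returns early; READ recomputes the IV
     * from the decrypted data and XORs it back into the first block */
    r = crypt_iv_lmk_one(cc, iv, dmreq, dst + sg->offset);
    if (!r)
            crypto_xor(dst + sg->offset, iv, cc->iv_size);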
635 struct dm_crypt_request *dmreq, in crypt_iv_tcw_whitening() argument
639 __le64 sector = cpu_to_le64(dmreq->iv_sector); in crypt_iv_tcw_whitening()
673 struct dm_crypt_request *dmreq) in crypt_iv_tcw_gen() argument
677 __le64 sector = cpu_to_le64(dmreq->iv_sector); in crypt_iv_tcw_gen()
682 if (bio_data_dir(dmreq->ctx->bio_in) != WRITE) { in crypt_iv_tcw_gen()
683 sg = crypt_get_sg_data(cc, dmreq->sg_in); in crypt_iv_tcw_gen()
685 r = crypt_iv_tcw_whitening(cc, dmreq, src + sg->offset); in crypt_iv_tcw_gen()
699 struct dm_crypt_request *dmreq) in crypt_iv_tcw_post() argument
705 if (bio_data_dir(dmreq->ctx->bio_in) != WRITE) in crypt_iv_tcw_post()
709 sg = crypt_get_sg_data(cc, dmreq->sg_out); in crypt_iv_tcw_post()
711 r = crypt_iv_tcw_whitening(cc, dmreq, dst + sg->offset); in crypt_iv_tcw_post()
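TCW (TrueCrypt-compatible) is the mirror image of LMK: the on-disk ciphertext carries an extra whitening layer derived from the sector number, so on READ the generator strips the whitening from the ciphertext before decryption (hence `!= WRITE` at line 682), while on WRITE the post hook applies it after encryption (lines 705-711). The IV itself is, per upstream, the per-volume iv_seed XORed with the little-endian sector number built at line 677. Condensed dispatch:

    /* gen: READ un-whitens the ciphertext first ... */
    if (bio_data_dir(dmreq->ctx->bio_in) != WRITE)
            r = crypt_iv_tcw_whitening(cc, dmreq, src + sg->offset);
    /* ... then derive iv = iv_seed XOR le64(sector) */

    /* post: nothing left to do on READ; WRITE whitens the fresh
     * ciphertext */
    if (bio_data_dir(dmreq->ctx->bio_in) != WRITE)
            return 0;
    r = crypt_iv_tcw_whitening(cc, dmreq, dst + sg->offset);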
718 struct dm_crypt_request *dmreq) in crypt_iv_random_gen() argument
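crypt_iv_random_gen (line 718) is the one generator that ignores dmreq->iv_sector entirely; its upstream body is essentially:

    get_random_bytes(iv, cc->iv_size);   /* plus return 0 */

A random IV cannot be recomputed on read, so this mode is only usable when the IV is persisted per sector in the integrity metadata, which is what iv_tag_from_dmreq (line 1280 below) provides.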
743 struct dm_crypt_request *dmreq) in crypt_iv_eboiv_gen() argument
756 *(__le64 *)buf = cpu_to_le64(dmreq->iv_sector * cc->sector_size); in crypt_iv_eboiv_gen()
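EBOIV (used for BitLocker-compatible setups) encrypts the sector's byte offset with the bulk key: IV = E_K(sector * sector_size); note dmreq->iv_sector is already in units of cc->sector_size, so the product at line 756 is the byte offset. Upstream feeds that offset as the chaining IV while encrypting one zero block, which with a CBC bulk cipher (the intended use) comes out to exactly E_K(offset). Sketch of the setup, with the single-block encryption left as a hypothetical helper:

    uint8_t buf[16] = { 0 };
    uint64_t off = htole64(sector * sector_size);   /* line 756 */
    memcpy(buf, &off, sizeof(off));
    /* iv = encrypt_zero_block_cbc(bulk_key, buf)  ==  E_K(offset) */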
940 static int crypt_iv_elephant(struct crypt_config *cc, struct dm_crypt_request *dmreq) in crypt_iv_elephant() argument
958 *(__le64 *)es = cpu_to_le64(dmreq->iv_sector * cc->sector_size); in crypt_iv_elephant()
976 sg = crypt_get_sg_data(cc, dmreq->sg_out); in crypt_iv_elephant()
981 if (bio_data_dir(dmreq->ctx->bio_in) == WRITE) { in crypt_iv_elephant()
982 sg2 = crypt_get_sg_data(cc, dmreq->sg_in); in crypt_iv_elephant()
988 if (bio_data_dir(dmreq->ctx->bio_in) != WRITE) { in crypt_iv_elephant()
998 if (bio_data_dir(dmreq->ctx->bio_in) == WRITE) { in crypt_iv_elephant()
1014 struct dm_crypt_request *dmreq) in crypt_iv_elephant_gen() argument
1018 if (bio_data_dir(dmreq->ctx->bio_in) == WRITE) { in crypt_iv_elephant_gen()
1019 r = crypt_iv_elephant(cc, dmreq); in crypt_iv_elephant_gen()
1024 return crypt_iv_eboiv_gen(cc, iv, dmreq); in crypt_iv_elephant_gen()
1028 struct dm_crypt_request *dmreq) in crypt_iv_elephant_post() argument
1030 if (bio_data_dir(dmreq->ctx->bio_in) != WRITE) in crypt_iv_elephant_post()
1031 return crypt_iv_elephant(cc, dmreq); in crypt_iv_elephant_post()
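Elephant (BitLocker compatibility) reuses EBOIV for the IV itself (line 1024) and layers the Elephant diffuser over the data through the direction-dependent hooks above: crypt_iv_elephant runs before encryption on WRITE (line 1018) and after decryption on READ (line 1030). Inside crypt_iv_elephant, line 958 seeds the sector-key derivation with the little-endian byte offset, and because the original bio must not be modified, WRITE first copies sg_in to sg_out (lines 981-982) and transforms the copy. Condensed order of operations, reconstructed from the upstream code around the matches:

    /*
     * WRITE (981, 998): copy sg_in -> sg_out, XOR in the 32-byte
     *        sector key ks, then diffuser A, then diffuser B, then
     *        the bulk cipher encrypts as usual.
     * READ  (988): the bulk cipher has already decrypted; undo
     *        diffuser B, undo diffuser A, then XOR out ks.
     */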
1232 static void *req_of_dmreq(struct crypt_config *cc, struct dm_crypt_request *dmreq) in req_of_dmreq() argument
1234 return (void *)((char *)dmreq - cc->dmreq_start); in req_of_dmreq()
1238 struct dm_crypt_request *dmreq) in iv_of_dmreq() argument
1241 return (u8 *)ALIGN((unsigned long)(dmreq + 1), in iv_of_dmreq()
1244 return (u8 *)ALIGN((unsigned long)(dmreq + 1), in iv_of_dmreq()
1249 struct dm_crypt_request *dmreq) in org_iv_of_dmreq() argument
1251 return iv_of_dmreq(cc, dmreq) + cc->iv_size; in org_iv_of_dmreq()
1255 struct dm_crypt_request *dmreq) in org_sector_of_dmreq() argument
1257 u8 *ptr = iv_of_dmreq(cc, dmreq) + cc->iv_size + cc->iv_size; in org_sector_of_dmreq()
1262 struct dm_crypt_request *dmreq) in org_tag_of_dmreq() argument
1264 u8 *ptr = iv_of_dmreq(cc, dmreq) + cc->iv_size + in org_tag_of_dmreq()
1270 struct dm_crypt_request *dmreq) in tag_from_dmreq() argument
1272 struct convert_context *ctx = dmreq->ctx; in tag_from_dmreq()
1275 return &io->integrity_metadata[*org_tag_of_dmreq(cc, dmreq) * in tag_from_dmreq()
1280 struct dm_crypt_request *dmreq) in iv_tag_from_dmreq() argument
1282 return tag_from_dmreq(cc, dmreq) + cc->integrity_tag_size; in iv_tag_from_dmreq()
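Lines 1232-1282 are the accessors for the single per-request allocation dm-crypt carves up: the crypto request (skcipher or aead) plus its tfm context comes first, cc->dmreq_start bytes before struct dm_crypt_request, followed by an alignment gap and the trailing fields. The IV is kept twice because the crypto API may mangle its working copy, while org_iv must survive for the post hooks and for IVs stored in integrity metadata. A userspace model of the pointer math (align is the cipher's alignmask + 1; the struct is a stand-in, so this is illustrative only):

    #include <stdint.h>
    #include <stddef.h>

    #define ALIGN_UP(x, a) (((x) + (a) - 1) & ~((uintptr_t)(a) - 1))

    struct dm_crypt_request_model { char opaque[64]; };  /* stand-in */

    /* [ crypto request + tfm ctx ] <- req_of_dmreq(): dmreq - dmreq_start
     * [ struct dm_crypt_request  ] <- dmreq
     * [ pad to cipher alignment  ]
     * [ iv         (iv_size)     ] <- iv_of_dmreq(), working copy
     * [ org_iv     (iv_size)     ] <- org_iv_of_dmreq(), pristine IV
     * [ org_sector (__le64)      ] <- org_sector_of_dmreq()
     * [ org_tag    (unsigned)    ] <- org_tag_of_dmreq(), a tag offset
     *                                 indexing io->integrity_metadata   */
    static void layout(struct dm_crypt_request_model *dmreq,
                       uintptr_t align, size_t iv_size)
    {
            uint8_t *iv         = (uint8_t *)ALIGN_UP((uintptr_t)(dmreq + 1), align);
            uint8_t *org_iv     = iv + iv_size;                   /* 1251 */
            uint8_t *org_sector = iv + 2 * iv_size;               /* 1257 */
            uint8_t *org_tag    = org_sector + sizeof(uint64_t);  /* 1264 */
            (void)org_iv; (void)org_tag;
    }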
1292 struct dm_crypt_request *dmreq; in crypt_convert_block_aead() local
1303 dmreq = dmreq_of_req(cc, req); in crypt_convert_block_aead()
1304 dmreq->iv_sector = ctx->cc_sector; in crypt_convert_block_aead()
1306 dmreq->iv_sector >>= cc->sector_shift; in crypt_convert_block_aead()
1307 dmreq->ctx = ctx; in crypt_convert_block_aead()
1309 *org_tag_of_dmreq(cc, dmreq) = tag_offset; in crypt_convert_block_aead()
1311 sector = org_sector_of_dmreq(cc, dmreq); in crypt_convert_block_aead()
1314 iv = iv_of_dmreq(cc, dmreq); in crypt_convert_block_aead()
1315 org_iv = org_iv_of_dmreq(cc, dmreq); in crypt_convert_block_aead()
1316 tag = tag_from_dmreq(cc, dmreq); in crypt_convert_block_aead()
1317 tag_iv = iv_tag_from_dmreq(cc, dmreq); in crypt_convert_block_aead()
1324 sg_init_table(dmreq->sg_in, 4); in crypt_convert_block_aead()
1325 sg_set_buf(&dmreq->sg_in[0], sector, sizeof(uint64_t)); in crypt_convert_block_aead()
1326 sg_set_buf(&dmreq->sg_in[1], org_iv, cc->iv_size); in crypt_convert_block_aead()
1327 sg_set_page(&dmreq->sg_in[2], bv_in.bv_page, cc->sector_size, bv_in.bv_offset); in crypt_convert_block_aead()
1328 sg_set_buf(&dmreq->sg_in[3], tag, cc->integrity_tag_size); in crypt_convert_block_aead()
1330 sg_init_table(dmreq->sg_out, 4); in crypt_convert_block_aead()
1331 sg_set_buf(&dmreq->sg_out[0], sector, sizeof(uint64_t)); in crypt_convert_block_aead()
1332 sg_set_buf(&dmreq->sg_out[1], org_iv, cc->iv_size); in crypt_convert_block_aead()
1333 sg_set_page(&dmreq->sg_out[2], bv_out.bv_page, cc->sector_size, bv_out.bv_offset); in crypt_convert_block_aead()
1334 sg_set_buf(&dmreq->sg_out[3], tag, cc->integrity_tag_size); in crypt_convert_block_aead()
1341 r = cc->iv_gen_ops->generator(cc, org_iv, dmreq); in crypt_convert_block_aead()
1354 aead_request_set_crypt(req, dmreq->sg_in, dmreq->sg_out, in crypt_convert_block_aead()
1361 aead_request_set_crypt(req, dmreq->sg_in, dmreq->sg_out, in crypt_convert_block_aead()
1376 r = cc->iv_gen_ops->post(cc, org_iv, dmreq); in crypt_convert_block_aead()
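crypt_convert_block_aead first normalizes iv_sector into units of cc->sector_size (lines 1304-1306), then builds four-entry scatterlists whose first two entries are authenticated but not encrypted: upstream sets the AAD length to sizeof(uint64_t) + cc->iv_size via aead_request_set_ad(), i.e. exactly sg[0] and sg[1]. Layout as assembled at lines 1324-1334:

    /*
     * sg[0]  org_sector (__le64)   \  AAD: authenticated only
     * sg[1]  org_iv     (iv_size)  /
     * sg[2]  sector data           -- encrypted and authenticated
     * sg[3]  integrity tag         -- produced on encrypt,
     *                                 verified on decrypt
     */

The asymmetric lengths at lines 1354 and 1361 follow from that: encryption processes cc->sector_size bytes and emits the tag, while decryption must consume cc->sector_size + cc->integrity_tag_size so the tag is verified. The sector number and IV live in the request block (org_sector_of_dmreq/org_iv_of_dmreq) so they remain stable while the cipher runs asynchronously.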
1392 struct dm_crypt_request *dmreq; in crypt_convert_block_skcipher() local
1401 dmreq = dmreq_of_req(cc, req); in crypt_convert_block_skcipher()
1402 dmreq->iv_sector = ctx->cc_sector; in crypt_convert_block_skcipher()
1404 dmreq->iv_sector >>= cc->sector_shift; in crypt_convert_block_skcipher()
1405 dmreq->ctx = ctx; in crypt_convert_block_skcipher()
1407 *org_tag_of_dmreq(cc, dmreq) = tag_offset; in crypt_convert_block_skcipher()
1409 iv = iv_of_dmreq(cc, dmreq); in crypt_convert_block_skcipher()
1410 org_iv = org_iv_of_dmreq(cc, dmreq); in crypt_convert_block_skcipher()
1411 tag_iv = iv_tag_from_dmreq(cc, dmreq); in crypt_convert_block_skcipher()
1413 sector = org_sector_of_dmreq(cc, dmreq); in crypt_convert_block_skcipher()
1417 sg_in = &dmreq->sg_in[0]; in crypt_convert_block_skcipher()
1418 sg_out = &dmreq->sg_out[0]; in crypt_convert_block_skcipher()
1431 r = cc->iv_gen_ops->generator(cc, org_iv, dmreq); in crypt_convert_block_skcipher()
1453 r = cc->iv_gen_ops->post(cc, org_iv, dmreq); in crypt_convert_block_skcipher()
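The skcipher path is simpler: the lines elided between 1418 and 1431 initialize sg_in/sg_out as single-entry scatterlists covering only the sector data, and the IV travels out of band. Condensed from upstream:

    sg_init_table(sg_in, 1);
    sg_set_page(sg_in, bv_in.bv_page, cc->sector_size, bv_in.bv_offset);

    sg_init_table(sg_out, 1);
    sg_set_page(sg_out, bv_out.bv_page, cc->sector_size, bv_out.bv_offset);

    /* later, once the generator has filled org_iv and it has been
     * copied into the working iv: */
    skcipher_request_set_crypt(req, sg_in, sg_out, cc->sector_size, iv);

Note that sg_in/sg_out here are the same embedded dmreq arrays the IV code reaches through crypt_get_sg_data() in the LMK/TCW/Elephant hooks above.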
2153 struct dm_crypt_request *dmreq = async_req->data; in kcryptd_async_done() local
2154 struct convert_context *ctx = dmreq->ctx; in kcryptd_async_done()
2169 error = cc->iv_gen_ops->post(cc, org_iv_of_dmreq(cc, dmreq), dmreq); in kcryptd_async_done()
2172 sector_t s = le64_to_cpu(*org_sector_of_dmreq(cc, dmreq)); in kcryptd_async_done()
2182 crypt_free_req(cc, req_of_dmreq(cc, dmreq), io->base_bio); in kcryptd_async_done()
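kcryptd_async_done runs when the crypto API completes a request asynchronously; dmreq (line 2153) is enough to recover everything else. The read-side post hook (line 2169) runs first, since modes like LMK/TCW/Elephant still need to touch the decrypted data. An AEAD authentication failure then surfaces as -EBADMSG: it is reported with the failing sector (line 2172) and mapped to BLK_STS_PROTECTION rather than a generic I/O error, before the request returns to the mempool (line 2182). Condensed from upstream around the matches:

    if (error == -EBADMSG) {
            /* AEAD tag mismatch: log the failing sector, then ... */
            sector_t s = le64_to_cpu(*org_sector_of_dmreq(cc, dmreq));
            ...
            io->error = BLK_STS_PROTECTION;
    } else if (error < 0)
            io->error = BLK_STS_IOERR;

    crypt_free_req(cc, req_of_dmreq(cc, dmreq), io->base_bio);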