Lines Matching refs:dmreq

96 			 struct dm_crypt_request *dmreq);
98 struct dm_crypt_request *dmreq);
291 struct dm_crypt_request *dmreq) in crypt_iv_plain_gen() argument
294 *(__le32 *)iv = cpu_to_le32(dmreq->iv_sector & 0xffffffff); in crypt_iv_plain_gen()
300 struct dm_crypt_request *dmreq) in crypt_iv_plain64_gen() argument
303 *(__le64 *)iv = cpu_to_le64(dmreq->iv_sector); in crypt_iv_plain64_gen()
309 struct dm_crypt_request *dmreq) in crypt_iv_plain64be_gen() argument
313 *(__be64 *)&iv[cc->iv_size - sizeof(u64)] = cpu_to_be64(dmreq->iv_sector); in crypt_iv_plain64be_gen()
319 struct dm_crypt_request *dmreq) in crypt_iv_essiv_gen() argument
326 *(__le64 *)iv = cpu_to_le64(dmreq->iv_sector); in crypt_iv_essiv_gen()
360 struct dm_crypt_request *dmreq) in crypt_iv_benbi_gen() argument
366 val = cpu_to_be64(((u64)dmreq->iv_sector << cc->iv_gen_private.benbi.shift) + 1); in crypt_iv_benbi_gen()
373 struct dm_crypt_request *dmreq) in crypt_iv_null_gen() argument
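
The generators matched above (plain, plain64, plain64be) derive the IV directly from dmreq->iv_sector. A minimal userspace sketch of that encoding, assuming a 16-byte IV and illustrative helper names (this is not the kernel code itself):

#include <stdint.h>
#include <string.h>
#include <stdio.h>

static void put_le32(uint8_t *p, uint32_t v)
{
	for (int i = 0; i < 4; i++)
		p[i] = (uint8_t)(v >> (8 * i));
}

static void put_le64(uint8_t *p, uint64_t v)
{
	for (int i = 0; i < 8; i++)
		p[i] = (uint8_t)(v >> (8 * i));
}

static void put_be64(uint8_t *p, uint64_t v)
{
	for (int i = 0; i < 8; i++)
		p[i] = (uint8_t)(v >> (8 * (7 - i)));
}

/* plain: low 32 bits of the sector, little-endian, rest of the IV zeroed */
static void iv_plain(uint8_t *iv, size_t iv_size, uint64_t sector)
{
	memset(iv, 0, iv_size);
	put_le32(iv, (uint32_t)(sector & 0xffffffff));
}

/* plain64: full 64-bit sector, little-endian */
static void iv_plain64(uint8_t *iv, size_t iv_size, uint64_t sector)
{
	memset(iv, 0, iv_size);
	put_le64(iv, sector);
}

/* plain64be: full 64-bit sector, big-endian, right-aligned in the IV */
static void iv_plain64be(uint8_t *iv, size_t iv_size, uint64_t sector)
{
	memset(iv, 0, iv_size);
	put_be64(iv + iv_size - sizeof(uint64_t), sector);
}

int main(void)
{
	uint8_t iv[16];
	uint64_t sector = 0x0102030405060708ULL;

	iv_plain(iv, sizeof(iv), sector);
	iv_plain64(iv, sizeof(iv), sector);
	iv_plain64be(iv, sizeof(iv), sector);

	/* prints the plain64be encoding, the last one written */
	for (size_t i = 0; i < sizeof(iv); i++)
		printf("%02x", iv[i]);
	printf("\n");
	return 0;
}
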
448 struct dm_crypt_request *dmreq, in crypt_iv_lmk_one() argument
475 buf[0] = cpu_to_le32(dmreq->iv_sector & 0xFFFFFFFF); in crypt_iv_lmk_one()
476 buf[1] = cpu_to_le32((((u64)dmreq->iv_sector >> 32) & 0x00FFFFFF) | 0x80000000); in crypt_iv_lmk_one()
496 struct dm_crypt_request *dmreq) in crypt_iv_lmk_gen() argument
502 if (bio_data_dir(dmreq->ctx->bio_in) == WRITE) { in crypt_iv_lmk_gen()
503 sg = crypt_get_sg_data(cc, dmreq->sg_in); in crypt_iv_lmk_gen()
505 r = crypt_iv_lmk_one(cc, iv, dmreq, src + sg->offset); in crypt_iv_lmk_gen()
514 struct dm_crypt_request *dmreq) in crypt_iv_lmk_post() argument
520 if (bio_data_dir(dmreq->ctx->bio_in) == WRITE) in crypt_iv_lmk_post()
523 sg = crypt_get_sg_data(cc, dmreq->sg_out); in crypt_iv_lmk_post()
525 r = crypt_iv_lmk_one(cc, iv, dmreq, dst + sg->offset); in crypt_iv_lmk_post()
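
The two buf[] assignments in crypt_iv_lmk_one() above pack the sector number into the words fed to the LMK hash: the low 32 bits, then 24 of the high bits with the top bit forced on. A small sketch of just that encoding with an arbitrary sector value (the surrounding hash step is omitted):

#include <stdint.h>
#include <stdio.h>

int main(void)
{
	uint64_t sector = 0x123456789abcdef0ULL;
	uint32_t buf[2];

	/* Stored as __le32 in the driver; shown here as host-order values. */
	buf[0] = (uint32_t)(sector & 0xFFFFFFFF);                        /* low 32 bits */
	buf[1] = (uint32_t)(((sector >> 32) & 0x00FFFFFF) | 0x80000000); /* 24 high bits, top bit set */

	printf("buf[0]=0x%08x buf[1]=0x%08x\n", buf[0], buf[1]);
	return 0;
}
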
604 struct dm_crypt_request *dmreq, in crypt_iv_tcw_whitening() argument
608 __le64 sector = cpu_to_le64(dmreq->iv_sector); in crypt_iv_tcw_whitening()
642 struct dm_crypt_request *dmreq) in crypt_iv_tcw_gen() argument
646 __le64 sector = cpu_to_le64(dmreq->iv_sector); in crypt_iv_tcw_gen()
651 if (bio_data_dir(dmreq->ctx->bio_in) != WRITE) { in crypt_iv_tcw_gen()
652 sg = crypt_get_sg_data(cc, dmreq->sg_in); in crypt_iv_tcw_gen()
654 r = crypt_iv_tcw_whitening(cc, dmreq, src + sg->offset); in crypt_iv_tcw_gen()
668 struct dm_crypt_request *dmreq) in crypt_iv_tcw_post() argument
674 if (bio_data_dir(dmreq->ctx->bio_in) != WRITE) in crypt_iv_tcw_post()
678 sg = crypt_get_sg_data(cc, dmreq->sg_out); in crypt_iv_tcw_post()
680 r = crypt_iv_tcw_whitening(cc, dmreq, dst + sg->offset); in crypt_iv_tcw_post()
687 struct dm_crypt_request *dmreq) in crypt_iv_random_gen() argument
712 struct dm_crypt_request *dmreq) in crypt_iv_eboiv_gen() argument
725 *(__le64 *)buf = cpu_to_le64(dmreq->iv_sector * cc->sector_size); in crypt_iv_eboiv_gen()
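
The benbi and eboiv matches show how the IV material is formed before any cipher work: benbi a big-endian narrow-block counter stored at the end of a zeroed IV, eboiv the little-endian byte offset of the sector, which the driver then encrypts with the volume key (that encryption step is only noted in a comment here). A userspace sketch under assumed shift and sector-size values:

#include <stdint.h>
#include <string.h>
#include <stdio.h>

static void put_be64(uint8_t *p, uint64_t v)
{
	for (int i = 0; i < 8; i++)
		p[i] = (uint8_t)(v >> (8 * (7 - i)));
}

static void put_le64(uint8_t *p, uint64_t v)
{
	for (int i = 0; i < 8; i++)
		p[i] = (uint8_t)(v >> (8 * i));
}

int main(void)
{
	uint64_t sector = 7;
	unsigned iv_size = 16, benbi_shift = 5, sector_size = 512;  /* assumed values */
	uint8_t iv[16], buf[16];

	/* benbi: ((sector << shift) + 1), big-endian, at the end of a zeroed IV */
	memset(iv, 0, iv_size);
	put_be64(iv + iv_size - sizeof(uint64_t), ((uint64_t)sector << benbi_shift) + 1);

	/* eboiv: little-endian byte offset (sector * sector_size); the driver
	 * then encrypts this block with the volume key to get the final IV */
	memset(buf, 0, sizeof(buf));
	put_le64(buf, (uint64_t)sector * sector_size);

	for (unsigned i = 0; i < iv_size; i++)
		printf("%02x", iv[i]);
	printf("\n");
	return 0;
}
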
907 static void *req_of_dmreq(struct crypt_config *cc, struct dm_crypt_request *dmreq) in req_of_dmreq() argument
909 return (void *)((char *)dmreq - cc->dmreq_start); in req_of_dmreq()
913 struct dm_crypt_request *dmreq) in iv_of_dmreq() argument
916 return (u8 *)ALIGN((unsigned long)(dmreq + 1), in iv_of_dmreq()
919 return (u8 *)ALIGN((unsigned long)(dmreq + 1), in iv_of_dmreq()
924 struct dm_crypt_request *dmreq) in org_iv_of_dmreq() argument
926 return iv_of_dmreq(cc, dmreq) + cc->iv_size; in org_iv_of_dmreq()
930 struct dm_crypt_request *dmreq) in org_sector_of_dmreq() argument
932 u8 *ptr = iv_of_dmreq(cc, dmreq) + cc->iv_size + cc->iv_size; in org_sector_of_dmreq()
937 struct dm_crypt_request *dmreq) in org_tag_of_dmreq() argument
939 u8 *ptr = iv_of_dmreq(cc, dmreq) + cc->iv_size + in org_tag_of_dmreq()
945 struct dm_crypt_request *dmreq) in tag_from_dmreq() argument
947 struct convert_context *ctx = dmreq->ctx; in tag_from_dmreq()
950 return &io->integrity_metadata[*org_tag_of_dmreq(cc, dmreq) * in tag_from_dmreq()
955 struct dm_crypt_request *dmreq) in iv_tag_from_dmreq() argument
957 return tag_from_dmreq(cc, dmreq) + cc->integrity_tag_size; in iv_tag_from_dmreq()
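
The req_of_dmreq()/iv_of_dmreq()/org_*_of_dmreq() helpers above all carve fields out of one per-request allocation: the crypto request, then struct dm_crypt_request, then (aligned for the cipher) the working IV, the original IV, the original sector number and the tag offset. A simplified userspace model of that layout, with struct members, sizes and alignment reduced to assumptions:

#include <stdint.h>
#include <stdio.h>

#define ALIGN_UP(x, a)  (((x) + (a) - 1) & ~((uintptr_t)(a) - 1))

/* Cut-down stand-in for struct dm_crypt_request (sg_in/sg_out omitted). */
struct dm_crypt_request_sketch {
	uint64_t iv_sector;
	void *ctx;
};

/* Mirrors iv_of_dmreq(): first byte after the request, aligned for the cipher. */
static uint8_t *iv_of(struct dm_crypt_request_sketch *dmreq, unsigned align)
{
	return (uint8_t *)ALIGN_UP((uintptr_t)(dmreq + 1), align);
}

int main(void)
{
	unsigned iv_size = 16, align = 16;   /* assumed IV size and alignmask + 1 */
	_Alignas(16) uint8_t buf[256];
	struct dm_crypt_request_sketch *dmreq = (void *)buf;

	uint8_t *iv         = iv_of(dmreq, align);            /* working IV */
	uint8_t *org_iv     = iv + iv_size;                   /* org_iv_of_dmreq() */
	uint8_t *org_sector = org_iv + iv_size;               /* org_sector_of_dmreq(), __le64 */
	uint8_t *org_tag    = org_sector + sizeof(uint64_t);  /* org_tag_of_dmreq() */

	printf("iv@+%td org_iv@+%td sector@+%td tag@+%td\n",
	       iv - buf, org_iv - buf, org_sector - buf, org_tag - buf);
	return 0;
}
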
967 struct dm_crypt_request *dmreq; in crypt_convert_block_aead() local
978 dmreq = dmreq_of_req(cc, req); in crypt_convert_block_aead()
979 dmreq->iv_sector = ctx->cc_sector; in crypt_convert_block_aead()
981 dmreq->iv_sector >>= cc->sector_shift; in crypt_convert_block_aead()
982 dmreq->ctx = ctx; in crypt_convert_block_aead()
984 *org_tag_of_dmreq(cc, dmreq) = tag_offset; in crypt_convert_block_aead()
986 sector = org_sector_of_dmreq(cc, dmreq); in crypt_convert_block_aead()
989 iv = iv_of_dmreq(cc, dmreq); in crypt_convert_block_aead()
990 org_iv = org_iv_of_dmreq(cc, dmreq); in crypt_convert_block_aead()
991 tag = tag_from_dmreq(cc, dmreq); in crypt_convert_block_aead()
992 tag_iv = iv_tag_from_dmreq(cc, dmreq); in crypt_convert_block_aead()
999 sg_init_table(dmreq->sg_in, 4); in crypt_convert_block_aead()
1000 sg_set_buf(&dmreq->sg_in[0], sector, sizeof(uint64_t)); in crypt_convert_block_aead()
1001 sg_set_buf(&dmreq->sg_in[1], org_iv, cc->iv_size); in crypt_convert_block_aead()
1002 sg_set_page(&dmreq->sg_in[2], bv_in.bv_page, cc->sector_size, bv_in.bv_offset); in crypt_convert_block_aead()
1003 sg_set_buf(&dmreq->sg_in[3], tag, cc->integrity_tag_size); in crypt_convert_block_aead()
1005 sg_init_table(dmreq->sg_out, 4); in crypt_convert_block_aead()
1006 sg_set_buf(&dmreq->sg_out[0], sector, sizeof(uint64_t)); in crypt_convert_block_aead()
1007 sg_set_buf(&dmreq->sg_out[1], org_iv, cc->iv_size); in crypt_convert_block_aead()
1008 sg_set_page(&dmreq->sg_out[2], bv_out.bv_page, cc->sector_size, bv_out.bv_offset); in crypt_convert_block_aead()
1009 sg_set_buf(&dmreq->sg_out[3], tag, cc->integrity_tag_size); in crypt_convert_block_aead()
1016 r = cc->iv_gen_ops->generator(cc, org_iv, dmreq); in crypt_convert_block_aead()
1029 aead_request_set_crypt(req, dmreq->sg_in, dmreq->sg_out, in crypt_convert_block_aead()
1036 aead_request_set_crypt(req, dmreq->sg_in, dmreq->sg_out, in crypt_convert_block_aead()
1048 r = cc->iv_gen_ops->post(cc, org_iv, dmreq); in crypt_convert_block_aead()
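
In crypt_convert_block_aead() the four scatterlist entries built above define the AEAD layout: the 64-bit sector number and the original IV are authenticated-only data, entry 2 is the sector payload and entry 3 the integrity tag, so the associated-data length is sizeof(uint64_t) + iv_size and the tag length is added to cryptlen on decryption. A small model of those sizes, with the concrete values as assumptions for illustration:

#include <stdint.h>
#include <stdio.h>

struct sg_entry { const char *what; size_t len; };

int main(void)
{
	size_t iv_size = 12, sector_size = 512, tag_size = 4;  /* assumed sizes */

	/* Same ordering as dmreq->sg_in / dmreq->sg_out above. */
	struct sg_entry sg[4] = {
		{ "sector number (__le64)", sizeof(uint64_t) },
		{ "original IV",            iv_size          },
		{ "sector data",            sector_size      },
		{ "integrity tag",          tag_size         },
	};

	size_t assoclen = sg[0].len + sg[1].len;  /* authenticated-only data */
	size_t cryptlen = sg[2].len;              /* + tag_size when decrypting */

	for (int i = 0; i < 4; i++)
		printf("sg[%d]: %-22s %zu bytes\n", i, sg[i].what, sg[i].len);
	printf("assoclen=%zu cryptlen(encrypt)=%zu cryptlen(decrypt)=%zu\n",
	       assoclen, cryptlen, cryptlen + tag_size);
	return 0;
}
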
1064 struct dm_crypt_request *dmreq; in crypt_convert_block_skcipher() local
1073 dmreq = dmreq_of_req(cc, req); in crypt_convert_block_skcipher()
1074 dmreq->iv_sector = ctx->cc_sector; in crypt_convert_block_skcipher()
1076 dmreq->iv_sector >>= cc->sector_shift; in crypt_convert_block_skcipher()
1077 dmreq->ctx = ctx; in crypt_convert_block_skcipher()
1079 *org_tag_of_dmreq(cc, dmreq) = tag_offset; in crypt_convert_block_skcipher()
1081 iv = iv_of_dmreq(cc, dmreq); in crypt_convert_block_skcipher()
1082 org_iv = org_iv_of_dmreq(cc, dmreq); in crypt_convert_block_skcipher()
1083 tag_iv = iv_tag_from_dmreq(cc, dmreq); in crypt_convert_block_skcipher()
1085 sector = org_sector_of_dmreq(cc, dmreq); in crypt_convert_block_skcipher()
1089 sg_in = &dmreq->sg_in[0]; in crypt_convert_block_skcipher()
1090 sg_out = &dmreq->sg_out[0]; in crypt_convert_block_skcipher()
1103 r = cc->iv_gen_ops->generator(cc, org_iv, dmreq); in crypt_convert_block_skcipher()
1122 r = cc->iv_gen_ops->post(cc, org_iv, dmreq); in crypt_convert_block_skcipher()
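
Both conversion paths set dmreq->iv_sector from ctx->cc_sector and, when the crypt sector size is larger than 512 bytes (the iv_large_sectors case), shift it down by cc->sector_shift so IVs count crypto sectors rather than 512-byte sectors. A one-step sketch with assumed values:

#include <stdint.h>
#include <stdio.h>

int main(void)
{
	uint64_t cc_sector = 4096;   /* logical 512-byte sector from the bio */
	unsigned sector_shift = 3;   /* assumed: 4096-byte crypto sectors */

	uint64_t iv_sector = cc_sector;
	iv_sector >>= sector_shift;  /* what the IV generator sees in dmreq->iv_sector */

	printf("cc_sector=%llu -> iv_sector=%llu\n",
	       (unsigned long long)cc_sector, (unsigned long long)iv_sector);
	return 0;
}
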
1669 struct dm_crypt_request *dmreq = async_req->data; in kcryptd_async_done() local
1670 struct convert_context *ctx = dmreq->ctx; in kcryptd_async_done()
1685 error = cc->iv_gen_ops->post(cc, org_iv_of_dmreq(cc, dmreq), dmreq); in kcryptd_async_done()
1690 (unsigned long long)le64_to_cpu(*org_sector_of_dmreq(cc, dmreq))); in kcryptd_async_done()
1695 crypt_free_req(cc, req_of_dmreq(cc, dmreq), io->base_bio); in kcryptd_async_done()