Lines matching refs:sector_size (references to the sector_size member in drivers/md/dm-crypt.c)

170 	unsigned short int sector_size;  member
422 if (cc->sector_size != (1 << SECTOR_SHIFT)) { in crypt_iv_lmk_ctr()
580 if (cc->sector_size != (1 << SECTOR_SHIFT)) { in crypt_iv_tcw_ctr()
752 *(__le64 *)buf = cpu_to_le64(dmreq->iv_sector * cc->sector_size); in crypt_iv_eboiv_gen()
954 *(__le64 *)es = cpu_to_le64(dmreq->iv_sector * cc->sector_size); in crypt_iv_elephant()
980 memcpy(data_offset, data2 + sg2->offset, cc->sector_size); in crypt_iv_elephant()
985 diffuser_disk_to_cpu((u32*)data_offset, cc->sector_size / sizeof(u32)); in crypt_iv_elephant()
986 diffuser_b_decrypt((u32*)data_offset, cc->sector_size / sizeof(u32)); in crypt_iv_elephant()
987 diffuser_a_decrypt((u32*)data_offset, cc->sector_size / sizeof(u32)); in crypt_iv_elephant()
988 diffuser_cpu_to_disk((__le32*)data_offset, cc->sector_size / sizeof(u32)); in crypt_iv_elephant()
991 for (i = 0; i < (cc->sector_size / 32); i++) in crypt_iv_elephant()
995 diffuser_disk_to_cpu((u32*)data_offset, cc->sector_size / sizeof(u32)); in crypt_iv_elephant()
996 diffuser_a_encrypt((u32*)data_offset, cc->sector_size / sizeof(u32)); in crypt_iv_elephant()
997 diffuser_b_encrypt((u32*)data_offset, cc->sector_size / sizeof(u32)); in crypt_iv_elephant()
998 diffuser_cpu_to_disk((__le32*)data_offset, cc->sector_size / sizeof(u32)); in crypt_iv_elephant()
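The crypt_iv_eboiv_gen() and crypt_iv_elephant() references above (lines 752 and 954) both seed the IV material with the 64-bit little-endian product of the request's IV sector and the configured sector size. A minimal userspace sketch of just that seeding step; eboiv_seed_sketch() is an illustrative name, not a kernel helper:

    #include <stdint.h>

    /* Mirrors cpu_to_le64(dmreq->iv_sector * cc->sector_size): the seed is
     * the little-endian encoding of iv_sector * sector_size. */
    void eboiv_seed_sketch(uint8_t buf[8], uint64_t iv_sector,
                           unsigned int sector_size)
    {
        uint64_t v = iv_sector * (uint64_t)sector_size;

        for (int i = 0; i < 8; i++)
            buf[i] = (uint8_t)(v >> (8 * i));   /* little-endian byte order */
    }
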
1177 if (1 << bi->interval_exp != cc->sector_size) { in crypt_integrity_ctr()
1296 if (unlikely(bv_in.bv_len & (cc->sector_size - 1))) in crypt_convert_block_aead()
1323 sg_set_page(&dmreq->sg_in[2], bv_in.bv_page, cc->sector_size, bv_in.bv_offset); in crypt_convert_block_aead()
1329 sg_set_page(&dmreq->sg_out[2], bv_out.bv_page, cc->sector_size, bv_out.bv_offset); in crypt_convert_block_aead()
1351 cc->sector_size, iv); in crypt_convert_block_aead()
1358 cc->sector_size + cc->integrity_tag_size, iv); in crypt_convert_block_aead()
1371 bio_advance_iter(ctx->bio_in, &ctx->iter_in, cc->sector_size); in crypt_convert_block_aead()
1372 bio_advance_iter(ctx->bio_out, &ctx->iter_out, cc->sector_size); in crypt_convert_block_aead()
1391 if (unlikely(bv_in.bv_len & (cc->sector_size - 1))) in crypt_convert_block_skcipher()
1414 sg_set_page(sg_in, bv_in.bv_page, cc->sector_size, bv_in.bv_offset); in crypt_convert_block_skcipher()
1417 sg_set_page(sg_out, bv_out.bv_page, cc->sector_size, bv_out.bv_offset); in crypt_convert_block_skcipher()
1438 skcipher_request_set_crypt(req, sg_in, sg_out, cc->sector_size, iv); in crypt_convert_block_skcipher()
1448 bio_advance_iter(ctx->bio_in, &ctx->iter_in, cc->sector_size); in crypt_convert_block_skcipher()
1449 bio_advance_iter(ctx->bio_out, &ctx->iter_out, cc->sector_size); in crypt_convert_block_skcipher()
1535 unsigned int sector_step = cc->sector_size >> SECTOR_SHIFT; in crypt_convert()
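Line 1535 converts the crypto sector size into a step counted in 512-byte kernel sectors (SECTOR_SHIFT is 9), which is how crypt_convert() advances through the bio. A small standalone illustration of that arithmetic, assuming only the power-of-two sizes the constructor accepts:

    #include <stdio.h>

    #define SECTOR_SHIFT 9   /* 512-byte kernel sectors */

    int main(void)
    {
        for (unsigned int sector_size = 512; sector_size <= 4096; sector_size <<= 1)
            printf("sector_size=%u -> sector_step=%u\n",
                   sector_size, sector_size >> SECTOR_SHIFT);
        return 0;
    }
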
2961 } else if (sscanf(opt_string, "sector_size:%hu%c", &cc->sector_size, &dummy) == 1) { in crypt_ctr_optional()
2962 if (cc->sector_size < (1 << SECTOR_SHIFT) || in crypt_ctr_optional()
2963 cc->sector_size > 4096 || in crypt_ctr_optional()
2964 (cc->sector_size & (cc->sector_size - 1))) { in crypt_ctr_optional()
2968 if (ti->len & ((cc->sector_size >> SECTOR_SHIFT) - 1)) { in crypt_ctr_optional()
2972 cc->sector_shift = __ffs(cc->sector_size) - SECTOR_SHIFT; in crypt_ctr_optional()
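Lines 2961-2972 parse and validate the optional sector_size argument: it must be a power of two between 512 and 4096 bytes, and the target length must be a whole number of crypto sectors. A hedged userspace restatement of just those checks; sector_size_valid() is an illustrative name, not a kernel function:

    #include <stdbool.h>
    #include <stdint.h>

    #define SECTOR_SHIFT 9

    /* ti_len is the target length in 512-byte sectors, as in ti->len. */
    bool sector_size_valid(unsigned int sector_size, uint64_t ti_len)
    {
        if (sector_size < (1u << SECTOR_SHIFT) || sector_size > 4096 ||
            (sector_size & (sector_size - 1)))
            return false;   /* not a power of two in [512, 4096] */
        if (ti_len & ((sector_size >> SECTOR_SHIFT) - 1))
            return false;   /* device is not a whole number of crypto sectors */
        return true;
    }
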
3031 cc->sector_size = (1 << SECTOR_SHIFT); in crypt_ctr()
3113 (tmpll & ((cc->sector_size >> SECTOR_SHIFT) - 1))) { in crypt_ctr()
3231 if (unlikely((bio->bi_iter.bi_sector & ((cc->sector_size >> SECTOR_SHIFT) - 1)) != 0)) in crypt_map()
3234 if (unlikely(bio->bi_iter.bi_size & (cc->sector_size - 1))) in crypt_map()
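Lines 3231 and 3234 enforce the same granularity on incoming I/O in crypt_map(): both the starting 512-byte sector and the byte length of the bio must be aligned to the crypto sector size. An illustrative restatement; io_aligned() is a made-up helper name:

    #include <stdbool.h>
    #include <stdint.h>

    #define SECTOR_SHIFT 9

    bool io_aligned(uint64_t bi_sector, unsigned int bi_size,
                    unsigned int sector_size)
    {
        if (bi_sector & ((sector_size >> SECTOR_SHIFT) - 1))
            return false;   /* start not aligned to the crypto sector */
        if (bi_size & (sector_size - 1))
            return false;   /* length not a multiple of the crypto sector */
        return true;
    }
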
3299 num_feature_args += cc->sector_size != (1 << SECTOR_SHIFT); in crypt_status()
3317 if (cc->sector_size != (1 << SECTOR_SHIFT)) in crypt_status()
3318 DMEMIT(" sector_size:%d", cc->sector_size); in crypt_status()
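Lines 3299-3318 show that table status only reports sector_size as a feature argument when it differs from the 512-byte default. A small sketch of that formatting decision; status_arg_sketch() is illustrative:

    #include <stdio.h>

    #define SECTOR_SHIFT 9

    void status_arg_sketch(char *buf, size_t len, unsigned int sector_size)
    {
        if (sector_size != (1u << SECTOR_SHIFT))
            snprintf(buf, len, " sector_size:%u", sector_size);
        else if (len)
            buf[0] = '\0';   /* default size: no feature argument emitted */
    }
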
3419 max_t(unsigned, limits->logical_block_size, cc->sector_size); in crypt_io_hints()
3421 max_t(unsigned, limits->physical_block_size, cc->sector_size); in crypt_io_hints()
3422 limits->io_min = max_t(unsigned, limits->io_min, cc->sector_size); in crypt_io_hints()
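Lines 3419-3422 raise the exposed queue limits so the logical block size, physical block size, and minimum I/O size are never smaller than the crypto sector size. A minimal restatement of that max_t() logic over a hypothetical stand-in for struct queue_limits:

    /* Illustrative stand-in for the relevant struct queue_limits fields. */
    struct limits_sketch {
        unsigned int logical_block_size;
        unsigned int physical_block_size;
        unsigned int io_min;
    };

    static unsigned int max_u(unsigned int a, unsigned int b)
    {
        return a > b ? a : b;
    }

    void io_hints_sketch(struct limits_sketch *l, unsigned int sector_size)
    {
        l->logical_block_size  = max_u(l->logical_block_size, sector_size);
        l->physical_block_size = max_u(l->physical_block_size, sector_size);
        l->io_min              = max_u(l->io_min, sector_size);
    }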