Searched refs:layout_map_count (Results 1 – 4 of 4) sorted by relevance
984 __le16 layout_map_count; /* layout maps (1 map per */ member
1017 u16 layout_map_count; member
1255 if (get_unaligned_le16(&raid_map->layout_map_count) != 2) { in pqi_validate_raid_map()
1260 if (get_unaligned_le16(&raid_map->layout_map_count) != 3) { in pqi_validate_raid_map()
1266 get_unaligned_le16(&raid_map->layout_map_count) > 1) { in pqi_validate_raid_map()
2504 rmd->layout_map_count = get_unaligned_le16(&raid_map->layout_map_count); in pci_get_aio_common_raid_map_values()
2565 rmd->stripesize = rmd->blocks_per_row * rmd->layout_map_count; in pqi_calc_aio_r5_or_r6()
2712 if (rmd->layout_map_count > 2) { in pqi_calc_aio_r1_nexus()
2717 rmd->num_it_nexus_entries = rmd->layout_map_count; in pqi_calc_aio_r1_nexus()
2757 if (next_bypass_group >= rmd.layout_map_count) in pqi_raid_bypass_submit_scsi_cmd()
2764 (rmd.layout_map_count > 1 || rmd.is_write)) { in pqi_raid_bypass_submit_scsi_cmd()
246 __le16 layout_map_count; /* layout maps (1 map per mirror/parity member
1707 le16_to_cpu(map->layout_map_count) * in hpsa_figure_phys_disk_ptrs()
1709 int nphys_disk = le16_to_cpu(map->layout_map_count) * in hpsa_figure_phys_disk_ptrs()
3266 le16_to_cpu(map_buff->layout_map_count)); in hpsa_debug_map_buff()
3274 map_cnt = le16_to_cpu(map_buff->layout_map_count); in hpsa_debug_map_buff()
5085 if (*current_group < le16_to_cpu(map->layout_map_count) - 1) { in raid_map_helper()
5263 if (le16_to_cpu(map->layout_map_count) != 2) { in hpsa_scsi_ioaccel_raid_map()
5276 if (le16_to_cpu(map->layout_map_count) != 3) { in hpsa_scsi_ioaccel_raid_map()
5287 le16_to_cpu(map->layout_map_count) - 1) in hpsa_scsi_ioaccel_raid_map()
5297 if (le16_to_cpu(map->layout_map_count) <= 1) in hpsa_scsi_ioaccel_raid_map()
5309 le16_to_cpu(map->layout_map_count); in hpsa_scsi_ioaccel_raid_map()
[all …]