Searched refs:layout_map_count (Results 1 – 4 of 4) sorted by relevance
1019  __le16 layout_map_count; /* layout maps (1 map per */  member
1052  u16 layout_map_count;  member
1375  if (get_unaligned_le16(&raid_map->layout_map_count) != 2) {  in pqi_validate_raid_map()
1380  if (get_unaligned_le16(&raid_map->layout_map_count) != 3) {  in pqi_validate_raid_map()
1386  get_unaligned_le16(&raid_map->layout_map_count) > 1) {  in pqi_validate_raid_map()
2718  rmd->layout_map_count = get_unaligned_le16(&raid_map->layout_map_count);  in pci_get_aio_common_raid_map_values()
2779  rmd->stripesize = rmd->blocks_per_row * rmd->layout_map_count;  in pqi_calc_aio_r5_or_r6()
2926  if (rmd->layout_map_count > 2) {  in pqi_calc_aio_r1_nexus()
2931  rmd->num_it_nexus_entries = rmd->layout_map_count;  in pqi_calc_aio_r1_nexus()
2971  if (next_bypass_group >= rmd.layout_map_count)  in pqi_raid_bypass_submit_scsi_cmd()
2978  (rmd.layout_map_count > 1 || rmd.is_write)) {  in pqi_raid_bypass_submit_scsi_cmd()
246 __le16 layout_map_count; /* layout maps (1 map per mirror/parity member
1711  le16_to_cpu(map->layout_map_count) *  in hpsa_figure_phys_disk_ptrs()
1713  int nphys_disk = le16_to_cpu(map->layout_map_count) *  in hpsa_figure_phys_disk_ptrs()
3270  le16_to_cpu(map_buff->layout_map_count));  in hpsa_debug_map_buff()
3278  map_cnt = le16_to_cpu(map_buff->layout_map_count);  in hpsa_debug_map_buff()
5087  if (*current_group < le16_to_cpu(map->layout_map_count) - 1) {  in raid_map_helper()
5265  if (le16_to_cpu(map->layout_map_count) != 2) {  in hpsa_scsi_ioaccel_raid_map()
5278  if (le16_to_cpu(map->layout_map_count) != 3) {  in hpsa_scsi_ioaccel_raid_map()
5289  le16_to_cpu(map->layout_map_count) - 1)  in hpsa_scsi_ioaccel_raid_map()
5299  if (le16_to_cpu(map->layout_map_count) <= 1)  in hpsa_scsi_ioaccel_raid_map()
5311  le16_to_cpu(map->layout_map_count);  in hpsa_scsi_ioaccel_raid_map()
[all …]