Searched refs: read_errors (Results 1 – 7 of 7) sorted by relevance

  79    unsigned int read_errors;                         member
  267   cw_bat->read_errors++;                            in cw_get_soc()
  268   if (cw_bat->read_errors > max_error_cycles) {     in cw_get_soc()
  272   cw_bat->read_errors = 0;                          in cw_get_soc()
  276   cw_bat->read_errors = 0;                          in cw_get_soc()
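
The cw_get_soc() hits above follow a simple consecutive-failure counter: each failed read bumps read_errors, exceeding a threshold is treated as a hard error, and the counter is cleared whenever the situation is resolved. Below is a minimal sketch of that pattern only, not the driver's actual code; struct soc_reader, soc_update() and MAX_ERROR_CYCLES are hypothetical names standing in for the driver's own.

#include <errno.h>

#define MAX_ERROR_CYCLES 5	/* assumed threshold, for illustration only */

struct soc_reader {
	unsigned int read_errors;	/* consecutive failed reads */
};

/* raw < 0 means the underlying register read failed */
static int soc_update(struct soc_reader *r, int raw)
{
	if (raw < 0) {
		r->read_errors++;
		if (r->read_errors > MAX_ERROR_CYCLES) {
			r->read_errors = 0;	/* reported once; start a new streak */
			return -EIO;
		}
		return 0;	/* transient failure: keep the previous value */
	}

	r->read_errors = 0;	/* any successful read clears the streak */
	return raw;
}
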
  2586  unsigned int read_errors = atomic_read(&rdev->read_errors);          in check_decay_read_errors() (local)
  2606  if (hours_since_last >= 8 * sizeof(read_errors))                     in check_decay_read_errors()
  2607      atomic_set(&rdev->read_errors, 0);                               in check_decay_read_errors()
  2609      atomic_set(&rdev->read_errors, read_errors >> hours_since_last); in check_decay_read_errors()
  2663  atomic_inc(&rdev->read_errors);                                      in fix_read_error()
  2664  if (atomic_read(&rdev->read_errors) > max_read_errors) {             in fix_read_error()
  2670      atomic_read(&rdev->read_errors), max_read_errors);               in fix_read_error()
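
The check_decay_read_errors() hits show how the accumulated count is aged rather than kept forever: the stored value is halved once per hour since the last check, and cleared outright when the elapsed hours reach the counter's bit width (8 * sizeof(read_errors)), since a shift of that size would be undefined. A standalone sketch of just that decay step, with a plain unsigned int standing in for the kernel's atomic_t:

static void decay_read_errors(unsigned int *read_errors,
			      unsigned int hours_since_last)
{
	if (hours_since_last >= 8 * sizeof(*read_errors))
		*read_errors = 0;			/* shift would cover the whole width */
	else
		*read_errors >>= hours_since_last;	/* halve once per elapsed hour */
}

The fix_read_error() hits then show the other half of the scheme: every repaired read error still increments the counter, and once the (decayed) total exceeds max_read_errors the device is no longer trusted.
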
  112   atomic_t read_errors;   /* number of consecutive read errors that    member

  2723  if (atomic_read(&rdev->read_errors))         in raid5_end_read_request()
  2724      atomic_set(&rdev->read_errors, 0);       in raid5_end_read_request()
  2732  atomic_inc(&rdev->read_errors);              in raid5_end_read_request()
  2754  } else if (atomic_read(&rdev->read_errors)   in raid5_end_read_request()
  2759      atomic_read(&rdev->read_errors),         in raid5_end_read_request()
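
The raid5_end_read_request() hits show the completion-side accounting: a successful read clears any accumulated count (the initial atomic_read() avoids an unnecessary store when it is already zero), while a failed read increments it, and the total is later weighed against a maximum before the device is given up on. A rough sketch of that shape, using C11 atomics as stand-ins for the kernel's atomic_t API and an assumed max_read_errors limit:

#include <stdatomic.h>
#include <stdbool.h>

static void account_read(atomic_uint *read_errors, bool success,
			 unsigned int max_read_errors, bool *give_up)
{
	if (success) {
		if (atomic_load(read_errors))		/* skip the store if already zero */
			atomic_store(read_errors, 0);
		return;
	}

	if (atomic_fetch_add(read_errors, 1) + 1 > max_read_errors)
		*give_up = true;			/* too many consecutive failures */
}
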
  3665  atomic_set(&rdev->read_errors, 0);           in md_rdev_init()

  155   __u64 read_errors;   /* # of read errors encountered (EIO) */    member

  881   sctx->stat.read_errors++;          in scrub_handle_errored_block()
  921   sctx->stat.read_errors++;          in scrub_handle_errored_block()
  932   sctx->stat.read_errors++;          in scrub_handle_errored_block()
  966   sctx->stat.read_errors++;          in scrub_handle_errored_block()
  2174  sctx->stat.read_errors++;          in scrub_missing_raid56_worker()
  2784  sctx->stat.read_errors += nbits;   in scrub_free_parity()
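
The btrfs scrub hits accumulate into the __u64 read_errors statistic declared above: each block that cannot be read adds one, while scrub_free_parity() adds the number of still-set bits in a parity stripe's error bitmap in a single step (the += nbits hit). The sketch below only illustrates that accumulation; the types are made up, not btrfs's real sctx->stat, and the compiler popcount builtin merely stands in for a bitmap-weight helper.

#include <stdint.h>

struct scrub_stat_sketch {
	uint64_t read_errors;	/* # of read errors encountered (EIO) */
};

/* a single block (or missing raid56 stripe) failed to read */
static void note_read_error(struct scrub_stat_sketch *stat)
{
	stat->read_errors++;
}

/* a parity stripe is torn down with some sectors still marked bad */
static void note_parity_read_errors(struct scrub_stat_sketch *stat,
				    uint64_t error_bitmap)
{
	stat->read_errors += (uint64_t)__builtin_popcountll(error_bitmap);
}
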