Lines matching references to `rq_state` (cross-reference search results)

34 	req->rq_state = (bio_data_dir(bio_src) == WRITE ? RQ_WRITE : 0)  in drbd_req_new()
77 const unsigned s = req->rq_state; in drbd_req_destroy()
195 const unsigned s = req->rq_state; in drbd_req_complete()
268 req->rq_state |= RQ_POSTPONED; in drbd_req_complete()
270 if (!(req->rq_state & RQ_POSTPONED)) { in drbd_req_complete()
295 D_ASSERT(device, m || (req->rq_state & RQ_POSTPONED)); in drbd_req_put_completion_ref()
307 if (req->rq_state & RQ_LOCAL_ABORTED) in drbd_req_put_completion_ref()
310 if (req->rq_state & RQ_POSTPONED) { in drbd_req_put_completion_ref()
337 const unsigned s = req->rq_state; in advance_conn_req_next()
363 const unsigned s = req->rq_state; in advance_conn_req_ack_pending()
389 const unsigned s = req->rq_state; in advance_conn_req_not_net_done()
405 unsigned s = req->rq_state; in mod_rq_state()
413 req->rq_state &= ~clear; in mod_rq_state()
414 req->rq_state |= set; in mod_rq_state()
417 if (req->rq_state == s) in mod_rq_state()
446 if (req->rq_state & RQ_NET_PENDING) in mod_rq_state()
459 D_ASSERT(device, req->rq_state & RQ_LOCAL_PENDING); in mod_rq_state()
464 if (req->rq_state & RQ_LOCAL_ABORTED) in mod_rq_state()
516 (req->rq_state & RQ_WRITE) ? "WRITE" : "READ", in drbd_report_io_error()
530 return (req->rq_state & in is_pending_write_protocol_A()
573 D_ASSERT(device, !(req->rq_state & RQ_NET_MASK)); in __req_mod()
578 req->rq_state |= in __req_mod()
586 D_ASSERT(device, !(req->rq_state & RQ_LOCAL_MASK)); in __req_mod()
591 if (req->rq_state & RQ_WRITE) in __req_mod()
643 D_ASSERT(device, req->rq_state & RQ_NET_PENDING); in __req_mod()
644 D_ASSERT(device, (req->rq_state & RQ_LOCAL_MASK) == 0); in __req_mod()
680 D_ASSERT(device, req->rq_state & RQ_NET_PENDING); in __req_mod()
746 D_ASSERT(device, req->rq_state & RQ_NET_PENDING); in __req_mod()
747 D_ASSERT(device, req->rq_state & RQ_EXP_WRITE_ACK); in __req_mod()
752 req->rq_state |= RQ_NET_SIS; in __req_mod()
763 D_ASSERT(device, req->rq_state & RQ_EXP_RECEIVE_ACK); in __req_mod()
772 D_ASSERT(device, req->rq_state & RQ_EXP_WRITE_ACK); in __req_mod()
777 D_ASSERT(device, req->rq_state & RQ_NET_PENDING); in __req_mod()
778 req->rq_state |= RQ_POSTPONED; in __req_mod()
791 if (!(req->rq_state & RQ_LOCAL_COMPLETED)) in __req_mod()
797 if (!(req->rq_state & RQ_LOCAL_COMPLETED)) in __req_mod()
816 if (!(req->rq_state & RQ_WRITE) && !req->w.cb) { in __req_mod()
826 if (!(req->rq_state & RQ_NET_OK)) { in __req_mod()
835 rv = req->rq_state & RQ_WRITE ? MR_WRITE : MR_READ; in __req_mod()
843 if (!(req->rq_state & RQ_WRITE)) in __req_mod()
846 if (req->rq_state & RQ_NET_PENDING) { in __req_mod()
857 (req->rq_state & RQ_NET_MASK) ? RQ_NET_DONE : 0); in __req_mod()
861 D_ASSERT(device, req->rq_state & RQ_NET_PENDING); in __req_mod()
1225 req->rq_state |= RQ_IN_ACT_LOG; in drbd_request_prepare()
1271 req->rq_state |= RQ_UNPLUG; in drbd_unplug()
1327 req->rq_state |= RQ_POSTPONED; in drbd_send_and_submit()
1442 req->rq_state |= RQ_IN_ACT_LOG; in submit_fast_path()
1487 req->rq_state |= RQ_IN_ACT_LOG; in send_and_submit_pending()
1628 if (net_req->rq_state & RQ_NET_PENDING) { in net_timeout_reached()