Lines matching refs: gl

57 	struct gfs2_glock *gl;		/* current glock struct        */  member
61 typedef void (*glock_examiner) (struct gfs2_glock * gl);
63 static void do_xmote(struct gfs2_glock *gl, struct gfs2_holder *gh, unsigned int target);
65 static void handle_callback(struct gfs2_glock *gl, unsigned int state,
122 static void wake_up_glock(struct gfs2_glock *gl) in wake_up_glock() argument
124 wait_queue_head_t *wq = glock_waitqueue(&gl->gl_name); in wake_up_glock()
127 __wake_up(wq, TASK_NORMAL, 1, &gl->gl_name); in wake_up_glock()
132 struct gfs2_glock *gl = container_of(rcu, struct gfs2_glock, gl_rcu); in gfs2_glock_dealloc() local
134 kfree(gl->gl_lksb.sb_lvbptr); in gfs2_glock_dealloc()
135 if (gl->gl_ops->go_flags & GLOF_ASPACE) { in gfs2_glock_dealloc()
137 container_of(gl, struct gfs2_glock_aspace, glock); in gfs2_glock_dealloc()
140 kmem_cache_free(gfs2_glock_cachep, gl); in gfs2_glock_dealloc()
156 static bool glock_blocked_by_withdraw(struct gfs2_glock *gl) in glock_blocked_by_withdraw() argument
158 struct gfs2_sbd *sdp = gl->gl_name.ln_sbd; in glock_blocked_by_withdraw()
162 if (gl->gl_ops->go_flags & GLOF_NONDISK) in glock_blocked_by_withdraw()
165 gl->gl_name.ln_number == sdp->sd_jdesc->jd_no_addr) in glock_blocked_by_withdraw()
170 void gfs2_glock_free(struct gfs2_glock *gl) in gfs2_glock_free() argument
172 struct gfs2_sbd *sdp = gl->gl_name.ln_sbd; in gfs2_glock_free()
174 gfs2_glock_assert_withdraw(gl, atomic_read(&gl->gl_revokes) == 0); in gfs2_glock_free()
175 rhashtable_remove_fast(&gl_hash_table, &gl->gl_node, ht_parms); in gfs2_glock_free()
177 wake_up_glock(gl); in gfs2_glock_free()
178 call_rcu(&gl->gl_rcu, gfs2_glock_dealloc); in gfs2_glock_free()
189 void gfs2_glock_hold(struct gfs2_glock *gl) in gfs2_glock_hold() argument
191 GLOCK_BUG_ON(gl, __lockref_is_dead(&gl->gl_lockref)); in gfs2_glock_hold()
192 lockref_get(&gl->gl_lockref); in gfs2_glock_hold()
202 static int demote_ok(const struct gfs2_glock *gl) in demote_ok() argument
204 const struct gfs2_glock_operations *glops = gl->gl_ops; in demote_ok()
206 if (gl->gl_state == LM_ST_UNLOCKED) in demote_ok()
214 if (!list_empty(&gl->gl_holders)) in demote_ok()
217 return glops->go_demote_ok(gl); in demote_ok()
222 void gfs2_glock_add_to_lru(struct gfs2_glock *gl) in gfs2_glock_add_to_lru() argument
224 if (!(gl->gl_ops->go_flags & GLOF_LRU)) in gfs2_glock_add_to_lru()
229 list_move_tail(&gl->gl_lru, &lru_list); in gfs2_glock_add_to_lru()
231 if (!test_bit(GLF_LRU, &gl->gl_flags)) { in gfs2_glock_add_to_lru()
232 set_bit(GLF_LRU, &gl->gl_flags); in gfs2_glock_add_to_lru()
239 static void gfs2_glock_remove_from_lru(struct gfs2_glock *gl) in gfs2_glock_remove_from_lru() argument
241 if (!(gl->gl_ops->go_flags & GLOF_LRU)) in gfs2_glock_remove_from_lru()
245 if (test_bit(GLF_LRU, &gl->gl_flags)) { in gfs2_glock_remove_from_lru()
246 list_del_init(&gl->gl_lru); in gfs2_glock_remove_from_lru()
248 clear_bit(GLF_LRU, &gl->gl_flags); in gfs2_glock_remove_from_lru()
257 static void __gfs2_glock_queue_work(struct gfs2_glock *gl, unsigned long delay) { in __gfs2_glock_queue_work() argument
258 if (!queue_delayed_work(glock_workqueue, &gl->gl_work, delay)) { in __gfs2_glock_queue_work()
265 GLOCK_BUG_ON(gl, gl->gl_lockref.count < 2); in __gfs2_glock_queue_work()
266 gl->gl_lockref.count--; in __gfs2_glock_queue_work()
270 static void gfs2_glock_queue_work(struct gfs2_glock *gl, unsigned long delay) { in gfs2_glock_queue_work() argument
271 spin_lock(&gl->gl_lockref.lock); in gfs2_glock_queue_work()
272 __gfs2_glock_queue_work(gl, delay); in gfs2_glock_queue_work()
273 spin_unlock(&gl->gl_lockref.lock); in gfs2_glock_queue_work()
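
A minimal sketch (not part of the listing) of the reference convention the two helpers above imply: __gfs2_glock_queue_work() expects gl->gl_lockref.lock to be held, and drops a reference itself when the work turns out to be queued already (lines 258-266 above), so call sites bump the count first -- the same pattern visible at lines 920-921, 1589-1590 and 1977-1979 below.

        spin_lock(&gl->gl_lockref.lock);
        gl->gl_lockref.count++;                 /* reference consumed by glock_work_func() */
        __gfs2_glock_queue_work(gl, 0);         /* lockref lock must be held here */
        spin_unlock(&gl->gl_lockref.lock);
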
276 static void __gfs2_glock_put(struct gfs2_glock *gl) in __gfs2_glock_put() argument
278 struct gfs2_sbd *sdp = gl->gl_name.ln_sbd; in __gfs2_glock_put()
279 struct address_space *mapping = gfs2_glock2aspace(gl); in __gfs2_glock_put()
281 lockref_mark_dead(&gl->gl_lockref); in __gfs2_glock_put()
283 gfs2_glock_remove_from_lru(gl); in __gfs2_glock_put()
284 spin_unlock(&gl->gl_lockref.lock); in __gfs2_glock_put()
285 GLOCK_BUG_ON(gl, !list_empty(&gl->gl_holders)); in __gfs2_glock_put()
289 GLOCK_BUG_ON(gl, !mapping_empty(mapping)); in __gfs2_glock_put()
291 trace_gfs2_glock_put(gl); in __gfs2_glock_put()
292 sdp->sd_lockstruct.ls_ops->lm_put_lock(gl); in __gfs2_glock_put()
298 void gfs2_glock_queue_put(struct gfs2_glock *gl) in gfs2_glock_queue_put() argument
300 gfs2_glock_queue_work(gl, 0); in gfs2_glock_queue_put()
309 void gfs2_glock_put(struct gfs2_glock *gl) in gfs2_glock_put() argument
311 if (lockref_put_or_lock(&gl->gl_lockref)) in gfs2_glock_put()
314 __gfs2_glock_put(gl); in gfs2_glock_put()
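
A minimal sketch (assumptions, not a kernel call site) of the pairing implied above: each gfs2_glock_hold() is balanced by a gfs2_glock_put(), and the final put funnels into __gfs2_glock_put(); a context that cannot drop the last reference inline can use gfs2_glock_queue_put() (line 298), which hands the drop to the glock workqueue instead.

        gfs2_glock_hold(gl);            /* lockref_get(); glock must not already be dead */
        /* ... use the glock ... */
        gfs2_glock_put(gl);             /* last reference ends up in __gfs2_glock_put() */
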
331 static inline bool may_grant(struct gfs2_glock *gl, in may_grant() argument
336 GLOCK_BUG_ON(gl, !test_bit(HIF_HOLDER, &current_gh->gh_iflags)); in may_grant()
360 if (gl->gl_state == gh->gh_state) in may_grant()
364 if (gl->gl_state == LM_ST_EXCLUSIVE) { in may_grant()
369 return gl->gl_state != LM_ST_UNLOCKED; in may_grant()
391 static void do_error(struct gfs2_glock *gl, const int ret) in do_error() argument
395 list_for_each_entry_safe(gh, tmp, &gl->gl_holders, gh_list) { in do_error()
418 static void demote_incompat_holders(struct gfs2_glock *gl, in demote_incompat_holders() argument
428 list_for_each_entry_safe(gh, tmp, &gl->gl_holders, gh_list) { in demote_incompat_holders()
438 !may_grant(gl, current_gh, gh)) { in demote_incompat_holders()
454 static inline struct gfs2_holder *find_first_holder(const struct gfs2_glock *gl) in find_first_holder() argument
458 if (!list_empty(&gl->gl_holders)) { in find_first_holder()
459 gh = list_first_entry(&gl->gl_holders, struct gfs2_holder, in find_first_holder()
474 find_first_strong_holder(struct gfs2_glock *gl) in find_first_strong_holder() argument
478 list_for_each_entry(gh, &gl->gl_holders, gh_list) { in find_first_strong_holder()
495 struct gfs2_glock *gl = gh->gh_gl; in gfs2_instantiate() local
496 const struct gfs2_glock_operations *glops = gl->gl_ops; in gfs2_instantiate()
500 if (!test_bit(GLF_INSTANTIATE_NEEDED, &gl->gl_flags)) in gfs2_instantiate()
507 if (test_and_set_bit(GLF_INSTANTIATE_IN_PROG, &gl->gl_flags)) { in gfs2_instantiate()
508 wait_on_bit(&gl->gl_flags, GLF_INSTANTIATE_IN_PROG, in gfs2_instantiate()
520 ret = glops->go_instantiate(gl); in gfs2_instantiate()
522 clear_bit(GLF_INSTANTIATE_NEEDED, &gl->gl_flags); in gfs2_instantiate()
523 clear_and_wake_up_bit(GLF_INSTANTIATE_IN_PROG, &gl->gl_flags); in gfs2_instantiate()
540 static int do_promote(struct gfs2_glock *gl) in do_promote() argument
545 current_gh = find_first_strong_holder(gl); in do_promote()
546 list_for_each_entry(gh, &gl->gl_holders, gh_list) { in do_promote()
549 if (!may_grant(gl, current_gh, gh)) { in do_promote()
556 if (list_is_first(&gh->gh_list, &gl->gl_holders)) in do_promote()
558 do_error(gl, 0); in do_promote()
566 demote_incompat_holders(gl, current_gh); in do_promote()
578 static inline struct gfs2_holder *find_first_waiter(const struct gfs2_glock *gl) in find_first_waiter() argument
582 list_for_each_entry(gh, &gl->gl_holders, gh_list) { in find_first_waiter()
595 static void state_change(struct gfs2_glock *gl, unsigned int new_state) in state_change() argument
599 held1 = (gl->gl_state != LM_ST_UNLOCKED); in state_change()
603 GLOCK_BUG_ON(gl, __lockref_is_dead(&gl->gl_lockref)); in state_change()
605 gl->gl_lockref.count++; in state_change()
607 gl->gl_lockref.count--; in state_change()
609 if (new_state != gl->gl_target) in state_change()
611 gl->gl_hold_time = max(gl->gl_hold_time - GL_GLOCK_HOLD_DECR, in state_change()
613 gl->gl_state = new_state; in state_change()
614 gl->gl_tchange = jiffies; in state_change()
617 static void gfs2_set_demote(struct gfs2_glock *gl) in gfs2_set_demote() argument
619 struct gfs2_sbd *sdp = gl->gl_name.ln_sbd; in gfs2_set_demote()
621 set_bit(GLF_DEMOTE, &gl->gl_flags); in gfs2_set_demote()
626 static void gfs2_demote_wake(struct gfs2_glock *gl) in gfs2_demote_wake() argument
628 gl->gl_demote_state = LM_ST_EXCLUSIVE; in gfs2_demote_wake()
629 clear_bit(GLF_DEMOTE, &gl->gl_flags); in gfs2_demote_wake()
631 wake_up_bit(&gl->gl_flags, GLF_DEMOTE); in gfs2_demote_wake()
641 static void finish_xmote(struct gfs2_glock *gl, unsigned int ret) in finish_xmote() argument
643 const struct gfs2_glock_operations *glops = gl->gl_ops; in finish_xmote()
647 spin_lock(&gl->gl_lockref.lock); in finish_xmote()
648 trace_gfs2_glock_state_change(gl, state); in finish_xmote()
649 state_change(gl, state); in finish_xmote()
650 gh = find_first_waiter(gl); in finish_xmote()
653 if (test_bit(GLF_DEMOTE_IN_PROGRESS, &gl->gl_flags) && in finish_xmote()
654 state != LM_ST_UNLOCKED && gl->gl_demote_state == LM_ST_UNLOCKED) in finish_xmote()
655 gl->gl_target = LM_ST_UNLOCKED; in finish_xmote()
658 if (unlikely(state != gl->gl_target)) { in finish_xmote()
661 if (gh && !test_bit(GLF_DEMOTE_IN_PROGRESS, &gl->gl_flags)) { in finish_xmote()
665 list_move_tail(&gh->gh_list, &gl->gl_holders); in finish_xmote()
666 gh = find_first_waiter(gl); in finish_xmote()
667 gl->gl_target = gh->gh_state; in finish_xmote()
673 gl->gl_target = gl->gl_state; in finish_xmote()
674 do_error(gl, ret); in finish_xmote()
682 do_xmote(gl, gh, gl->gl_target); in finish_xmote()
687 do_xmote(gl, gh, LM_ST_UNLOCKED); in finish_xmote()
690 fs_err(gl->gl_name.ln_sbd, "wanted %u got %u\n", in finish_xmote()
691 gl->gl_target, state); in finish_xmote()
692 GLOCK_BUG_ON(gl, 1); in finish_xmote()
694 spin_unlock(&gl->gl_lockref.lock); in finish_xmote()
699 if (test_and_clear_bit(GLF_DEMOTE_IN_PROGRESS, &gl->gl_flags)) in finish_xmote()
700 gfs2_demote_wake(gl); in finish_xmote()
705 spin_unlock(&gl->gl_lockref.lock); in finish_xmote()
706 rv = glops->go_xmote_bh(gl); in finish_xmote()
707 spin_lock(&gl->gl_lockref.lock); in finish_xmote()
709 do_error(gl, rv); in finish_xmote()
713 do_promote(gl); in finish_xmote()
716 clear_bit(GLF_LOCK, &gl->gl_flags); in finish_xmote()
717 spin_unlock(&gl->gl_lockref.lock); in finish_xmote()
720 static bool is_system_glock(struct gfs2_glock *gl) in is_system_glock() argument
722 struct gfs2_sbd *sdp = gl->gl_name.ln_sbd; in is_system_glock()
725 if (gl == m_ip->i_gl) in is_system_glock()
738 static void do_xmote(struct gfs2_glock *gl, struct gfs2_holder *gh, in do_xmote() argument
740 __releases(&gl->gl_lockref.lock) in do_xmote()
741 __acquires(&gl->gl_lockref.lock) in do_xmote()
743 const struct gfs2_glock_operations *glops = gl->gl_ops; in do_xmote()
744 struct gfs2_sbd *sdp = gl->gl_name.ln_sbd; in do_xmote()
748 if (target != LM_ST_UNLOCKED && glock_blocked_by_withdraw(gl) && in do_xmote()
754 GLOCK_BUG_ON(gl, gl->gl_state == target); in do_xmote()
755 GLOCK_BUG_ON(gl, gl->gl_state == gl->gl_target); in do_xmote()
764 &gl->gl_flags)) in do_xmote()
766 do_error(gl, 0); /* Fail queued try locks */ in do_xmote()
768 gl->gl_req = target; in do_xmote()
769 set_bit(GLF_BLOCKING, &gl->gl_flags); in do_xmote()
770 if ((gl->gl_req == LM_ST_UNLOCKED) || in do_xmote()
771 (gl->gl_state == LM_ST_EXCLUSIVE) || in do_xmote()
773 clear_bit(GLF_BLOCKING, &gl->gl_flags); in do_xmote()
774 spin_unlock(&gl->gl_lockref.lock); in do_xmote()
776 ret = glops->go_sync(gl); in do_xmote()
784 gfs2_dump_glock(NULL, gl, true); in do_xmote()
789 if (test_bit(GLF_INVALIDATE_IN_PROGRESS, &gl->gl_flags)) { in do_xmote()
797 if ((atomic_read(&gl->gl_ail_count) != 0) && in do_xmote()
799 gfs2_glock_assert_warn(gl, in do_xmote()
800 !atomic_read(&gl->gl_ail_count)); in do_xmote()
801 gfs2_dump_glock(NULL, gl, true); in do_xmote()
803 glops->go_inval(gl, target == LM_ST_DEFERRED ? 0 : DIO_METADATA); in do_xmote()
804 clear_bit(GLF_INVALIDATE_IN_PROGRESS, &gl->gl_flags); in do_xmote()
808 gfs2_glock_hold(gl); in do_xmote()
832 if (glock_blocked_by_withdraw(gl) && in do_xmote()
835 if (!is_system_glock(gl)) { in do_xmote()
836 handle_callback(gl, LM_ST_UNLOCKED, 0, false); /* sets demote */ in do_xmote()
843 state_change(gl, LM_ST_UNLOCKED); in do_xmote()
848 clear_bit(GLF_LOCK, &gl->gl_flags); in do_xmote()
849 clear_bit(GLF_DEMOTE_IN_PROGRESS, &gl->gl_flags); in do_xmote()
850 gfs2_glock_queue_work(gl, GL_GLOCK_DFT_HOLD); in do_xmote()
853 clear_bit(GLF_INVALIDATE_IN_PROGRESS, &gl->gl_flags); in do_xmote()
859 ret = sdp->sd_lockstruct.ls_ops->lm_lock(gl, target, lck_flags); in do_xmote()
860 if (ret == -EINVAL && gl->gl_target == LM_ST_UNLOCKED && in do_xmote()
863 finish_xmote(gl, target); in do_xmote()
864 gfs2_glock_queue_work(gl, 0); in do_xmote()
867 GLOCK_BUG_ON(gl, !gfs2_withdrawn(sdp)); in do_xmote()
870 finish_xmote(gl, target); in do_xmote()
871 gfs2_glock_queue_work(gl, 0); in do_xmote()
874 spin_lock(&gl->gl_lockref.lock); in do_xmote()
884 static void run_queue(struct gfs2_glock *gl, const int nonblock) in run_queue() argument
885 __releases(&gl->gl_lockref.lock) in run_queue()
886 __acquires(&gl->gl_lockref.lock) in run_queue()
890 if (test_and_set_bit(GLF_LOCK, &gl->gl_flags)) in run_queue()
893 GLOCK_BUG_ON(gl, test_bit(GLF_DEMOTE_IN_PROGRESS, &gl->gl_flags)); in run_queue()
895 if (test_bit(GLF_DEMOTE, &gl->gl_flags) && in run_queue()
896 gl->gl_demote_state != gl->gl_state) { in run_queue()
897 if (find_first_holder(gl)) in run_queue()
901 set_bit(GLF_DEMOTE_IN_PROGRESS, &gl->gl_flags); in run_queue()
902 GLOCK_BUG_ON(gl, gl->gl_demote_state == LM_ST_EXCLUSIVE); in run_queue()
903 gl->gl_target = gl->gl_demote_state; in run_queue()
905 if (test_bit(GLF_DEMOTE, &gl->gl_flags)) in run_queue()
906 gfs2_demote_wake(gl); in run_queue()
907 if (do_promote(gl) == 0) in run_queue()
909 gh = find_first_waiter(gl); in run_queue()
910 gl->gl_target = gh->gh_state; in run_queue()
912 do_error(gl, 0); /* Fail queued try locks */ in run_queue()
914 do_xmote(gl, gh, gl->gl_target); in run_queue()
918 clear_bit(GLF_LOCK, &gl->gl_flags); in run_queue()
920 gl->gl_lockref.count++; in run_queue()
921 __gfs2_glock_queue_work(gl, 0); in run_queue()
925 clear_bit(GLF_LOCK, &gl->gl_flags); in run_queue()
930 void gfs2_inode_remember_delete(struct gfs2_glock *gl, u64 generation) in gfs2_inode_remember_delete() argument
932 struct gfs2_inode_lvb *ri = (void *)gl->gl_lksb.sb_lvbptr; in gfs2_inode_remember_delete()
940 bool gfs2_inode_already_deleted(struct gfs2_glock *gl, u64 generation) in gfs2_inode_already_deleted() argument
942 struct gfs2_inode_lvb *ri = (void *)gl->gl_lksb.sb_lvbptr; in gfs2_inode_already_deleted()
949 static void gfs2_glock_poke(struct gfs2_glock *gl) in gfs2_glock_poke() argument
955 __gfs2_holder_init(gl, LM_ST_SHARED, flags, &gh, _RET_IP_); in gfs2_glock_poke()
962 static bool gfs2_try_evict(struct gfs2_glock *gl) in gfs2_try_evict() argument
977 spin_lock(&gl->gl_lockref.lock); in gfs2_try_evict()
978 ip = gl->gl_object; in gfs2_try_evict()
981 spin_unlock(&gl->gl_lockref.lock); in gfs2_try_evict()
985 gl->gl_no_formal_ino = ip->i_no_formal_ino; in gfs2_try_evict()
991 spin_lock(&gl->gl_lockref.lock); in gfs2_try_evict()
992 ip = gl->gl_object; in gfs2_try_evict()
998 spin_unlock(&gl->gl_lockref.lock); in gfs2_try_evict()
1011 struct gfs2_glock *gl = container_of(dwork, struct gfs2_glock, gl_delete); in delete_work_func() local
1012 struct gfs2_sbd *sdp = gl->gl_name.ln_sbd; in delete_work_func()
1014 u64 no_addr = gl->gl_name.ln_number; in delete_work_func()
1016 spin_lock(&gl->gl_lockref.lock); in delete_work_func()
1017 clear_bit(GLF_PENDING_DELETE, &gl->gl_flags); in delete_work_func()
1018 spin_unlock(&gl->gl_lockref.lock); in delete_work_func()
1020 if (test_bit(GLF_DEMOTE, &gl->gl_flags)) { in delete_work_func()
1038 if (gfs2_try_evict(gl)) { in delete_work_func()
1039 if (gfs2_queue_delete_work(gl, 5 * HZ)) in delete_work_func()
1044 inode = gfs2_lookup_by_inum(sdp, no_addr, gl->gl_no_formal_ino, in delete_work_func()
1048 (gfs2_queue_delete_work(gl, 5 * HZ))) in delete_work_func()
1054 gfs2_glock_put(gl); in delete_work_func()
1060 struct gfs2_glock *gl = container_of(work, struct gfs2_glock, gl_work.work); in glock_work_func() local
1063 if (test_and_clear_bit(GLF_REPLY_PENDING, &gl->gl_flags)) { in glock_work_func()
1064 finish_xmote(gl, gl->gl_reply); in glock_work_func()
1067 spin_lock(&gl->gl_lockref.lock); in glock_work_func()
1068 if (test_bit(GLF_PENDING_DEMOTE, &gl->gl_flags) && in glock_work_func()
1069 gl->gl_state != LM_ST_UNLOCKED && in glock_work_func()
1070 gl->gl_demote_state != LM_ST_EXCLUSIVE) { in glock_work_func()
1073 holdtime = gl->gl_tchange + gl->gl_hold_time; in glock_work_func()
1078 clear_bit(GLF_PENDING_DEMOTE, &gl->gl_flags); in glock_work_func()
1079 gfs2_set_demote(gl); in glock_work_func()
1082 run_queue(gl, 0); in glock_work_func()
1086 if (gl->gl_name.ln_type != LM_TYPE_INODE) in glock_work_func()
1088 __gfs2_glock_queue_work(gl, delay); in glock_work_func()
1096 gl->gl_lockref.count -= drop_refs; in glock_work_func()
1097 if (!gl->gl_lockref.count) { in glock_work_func()
1098 __gfs2_glock_put(gl); in glock_work_func()
1101 spin_unlock(&gl->gl_lockref.lock); in glock_work_func()
1109 struct gfs2_glock *gl; in find_insert_glock() local
1119 gl = rhashtable_lookup_get_insert_fast(&gl_hash_table, in find_insert_glock()
1121 if (IS_ERR(gl)) in find_insert_glock()
1124 gl = rhashtable_lookup_fast(&gl_hash_table, in find_insert_glock()
1127 if (gl && !lockref_get_not_dead(&gl->gl_lockref)) { in find_insert_glock()
1135 return gl; in find_insert_glock()
1159 struct gfs2_glock *gl, *tmp; in gfs2_glock_get() local
1163 gl = find_insert_glock(&name, NULL); in gfs2_glock_get()
1164 if (gl) { in gfs2_glock_get()
1165 *glp = gl; in gfs2_glock_get()
1176 gl = &gla->glock; in gfs2_glock_get()
1178 gl = kmem_cache_alloc(gfs2_glock_cachep, GFP_NOFS); in gfs2_glock_get()
1179 if (!gl) in gfs2_glock_get()
1182 memset(&gl->gl_lksb, 0, sizeof(struct dlm_lksb)); in gfs2_glock_get()
1183 gl->gl_ops = glops; in gfs2_glock_get()
1186 gl->gl_lksb.sb_lvbptr = kzalloc(GDLM_LVB_SIZE, GFP_NOFS); in gfs2_glock_get()
1187 if (!gl->gl_lksb.sb_lvbptr) { in gfs2_glock_get()
1188 gfs2_glock_dealloc(&gl->gl_rcu); in gfs2_glock_get()
1194 gl->gl_node.next = NULL; in gfs2_glock_get()
1195 gl->gl_flags = glops->go_instantiate ? BIT(GLF_INSTANTIATE_NEEDED) : 0; in gfs2_glock_get()
1196 gl->gl_name = name; in gfs2_glock_get()
1197 lockdep_set_subclass(&gl->gl_lockref.lock, glops->go_subclass); in gfs2_glock_get()
1198 gl->gl_lockref.count = 1; in gfs2_glock_get()
1199 gl->gl_state = LM_ST_UNLOCKED; in gfs2_glock_get()
1200 gl->gl_target = LM_ST_UNLOCKED; in gfs2_glock_get()
1201 gl->gl_demote_state = LM_ST_EXCLUSIVE; in gfs2_glock_get()
1202 gl->gl_dstamp = 0; in gfs2_glock_get()
1205 gl->gl_stats = this_cpu_ptr(sdp->sd_lkstats)->lkstats[glops->go_type]; in gfs2_glock_get()
1207 gl->gl_stats.stats[GFS2_LKS_DCOUNT] = 0; in gfs2_glock_get()
1208 gl->gl_stats.stats[GFS2_LKS_QCOUNT] = 0; in gfs2_glock_get()
1209 gl->gl_tchange = jiffies; in gfs2_glock_get()
1210 gl->gl_object = NULL; in gfs2_glock_get()
1211 gl->gl_hold_time = GL_GLOCK_DFT_HOLD; in gfs2_glock_get()
1212 INIT_DELAYED_WORK(&gl->gl_work, glock_work_func); in gfs2_glock_get()
1213 if (gl->gl_name.ln_type == LM_TYPE_IOPEN) in gfs2_glock_get()
1214 INIT_DELAYED_WORK(&gl->gl_delete, delete_work_func); in gfs2_glock_get()
1216 mapping = gfs2_glock2aspace(gl); in gfs2_glock_get()
1226 tmp = find_insert_glock(&name, gl); in gfs2_glock_get()
1228 *glp = gl; in gfs2_glock_get()
1238 gfs2_glock_dealloc(&gl->gl_rcu); in gfs2_glock_get()
1255 void __gfs2_holder_init(struct gfs2_glock *gl, unsigned int state, u16 flags, in __gfs2_holder_init() argument
1259 gh->gh_gl = gl; in __gfs2_holder_init()
1265 gfs2_glock_hold(gl); in __gfs2_holder_init()
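
A hedged sketch of how the holder API above is normally driven; ip->i_gl stands for any already-created glock (the names are assumed for illustration, not taken from the listing). gfs2_glock_nq_init(), visible in gfs2_glock_nq_num() at line 1763 below, folds the first two calls into one.

        struct gfs2_holder gh;
        int error;

        gfs2_holder_init(ip->i_gl, LM_ST_EXCLUSIVE, 0, &gh);    /* wraps __gfs2_holder_init() */
        error = gfs2_glock_nq(&gh);     /* queue and, without GL_ASYNC, wait for the grant */
        if (!error) {
                /* ... operate under the exclusive glock ... */
                gfs2_glock_dq(&gh);
        }
        gfs2_holder_uninit(&gh);        /* drops the reference taken at init */
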
1302 static void gfs2_glock_update_hold_time(struct gfs2_glock *gl, in gfs2_glock_update_hold_time() argument
1308 gl->gl_hold_time = min(gl->gl_hold_time + GL_GLOCK_HOLD_INCR, in gfs2_glock_update_hold_time()
1424 static void handle_callback(struct gfs2_glock *gl, unsigned int state, in handle_callback() argument
1428 set_bit(GLF_PENDING_DEMOTE, &gl->gl_flags); in handle_callback()
1430 gfs2_set_demote(gl); in handle_callback()
1431 if (gl->gl_demote_state == LM_ST_EXCLUSIVE) { in handle_callback()
1432 gl->gl_demote_state = state; in handle_callback()
1433 gl->gl_demote_time = jiffies; in handle_callback()
1434 } else if (gl->gl_demote_state != LM_ST_UNLOCKED && in handle_callback()
1435 gl->gl_demote_state != state) { in handle_callback()
1436 gl->gl_demote_state = LM_ST_UNLOCKED; in handle_callback()
1438 if (gl->gl_ops->go_callback) in handle_callback()
1439 gl->gl_ops->go_callback(gl, remote); in handle_callback()
1440 trace_gfs2_demote_rq(gl, remote); in handle_callback()
1482 __releases(&gl->gl_lockref.lock) in add_to_queue()
1483 __acquires(&gl->gl_lockref.lock) in add_to_queue()
1485 struct gfs2_glock *gl = gh->gh_gl; in add_to_queue() local
1486 struct gfs2_sbd *sdp = gl->gl_name.ln_sbd; in add_to_queue()
1491 GLOCK_BUG_ON(gl, gh->gh_owner_pid == NULL); in add_to_queue()
1493 GLOCK_BUG_ON(gl, true); in add_to_queue()
1496 if (test_bit(GLF_LOCK, &gl->gl_flags)) { in add_to_queue()
1499 current_gh = find_first_strong_holder(gl); in add_to_queue()
1500 try_futile = !may_grant(gl, current_gh, gh); in add_to_queue()
1502 if (test_bit(GLF_INVALIDATE_IN_PROGRESS, &gl->gl_flags)) in add_to_queue()
1506 list_for_each_entry(gh2, &gl->gl_holders, gh_list) { in add_to_queue()
1517 list_for_each_entry(gh2, &gl->gl_holders, gh_list) { in add_to_queue()
1531 gfs2_glstats_inc(gl, GFS2_LKS_QCOUNT); in add_to_queue()
1532 gfs2_sbstats_inc(gl, GFS2_LKS_QCOUNT); in add_to_queue()
1534 list_add_tail(&gh->gh_list, &gl->gl_holders); in add_to_queue()
1541 gh = list_first_entry(&gl->gl_holders, struct gfs2_holder, gh_list); in add_to_queue()
1543 spin_unlock(&gl->gl_lockref.lock); in add_to_queue()
1545 sdp->sd_lockstruct.ls_ops->lm_cancel(gl); in add_to_queue()
1546 spin_lock(&gl->gl_lockref.lock); in add_to_queue()
1559 gfs2_dump_glock(NULL, gl, true); in add_to_queue()
1574 struct gfs2_glock *gl = gh->gh_gl; in gfs2_glock_nq() local
1577 if (glock_blocked_by_withdraw(gl) && !(gh->gh_flags & LM_FLAG_NOEXP)) in gfs2_glock_nq()
1580 if (test_bit(GLF_LRU, &gl->gl_flags)) in gfs2_glock_nq()
1581 gfs2_glock_remove_from_lru(gl); in gfs2_glock_nq()
1584 spin_lock(&gl->gl_lockref.lock); in gfs2_glock_nq()
1587 test_and_clear_bit(GLF_FROZEN, &gl->gl_flags))) { in gfs2_glock_nq()
1588 set_bit(GLF_REPLY_PENDING, &gl->gl_flags); in gfs2_glock_nq()
1589 gl->gl_lockref.count++; in gfs2_glock_nq()
1590 __gfs2_glock_queue_work(gl, 0); in gfs2_glock_nq()
1592 run_queue(gl, 1); in gfs2_glock_nq()
1593 spin_unlock(&gl->gl_lockref.lock); in gfs2_glock_nq()
1613 static inline bool needs_demote(struct gfs2_glock *gl) in needs_demote() argument
1615 return (test_bit(GLF_DEMOTE, &gl->gl_flags) || in needs_demote()
1616 test_bit(GLF_PENDING_DEMOTE, &gl->gl_flags)); in needs_demote()
1621 struct gfs2_glock *gl = gh->gh_gl; in __gfs2_glock_dq() local
1622 struct gfs2_sbd *sdp = gl->gl_name.ln_sbd; in __gfs2_glock_dq()
1641 glock_blocked_by_withdraw(gl) && in __gfs2_glock_dq()
1644 spin_unlock(&gl->gl_lockref.lock); in __gfs2_glock_dq()
1648 spin_lock(&gl->gl_lockref.lock); in __gfs2_glock_dq()
1657 handle_callback(gl, LM_ST_UNLOCKED, 0, false); in __gfs2_glock_dq()
1667 if (!needs_demote(gl)) { in __gfs2_glock_dq()
1668 if (list_empty(&gl->gl_holders)) in __gfs2_glock_dq()
1676 if (find_first_strong_holder(gl)) in __gfs2_glock_dq()
1684 gh = find_first_holder(gl); in __gfs2_glock_dq()
1687 if (!test_bit(GLF_LFLUSH, &gl->gl_flags) && demote_ok(gl)) in __gfs2_glock_dq()
1688 gfs2_glock_add_to_lru(gl); in __gfs2_glock_dq()
1691 gl->gl_lockref.count++; in __gfs2_glock_dq()
1692 if (test_bit(GLF_PENDING_DEMOTE, &gl->gl_flags) && in __gfs2_glock_dq()
1693 !test_bit(GLF_DEMOTE, &gl->gl_flags) && in __gfs2_glock_dq()
1694 gl->gl_name.ln_type == LM_TYPE_INODE) in __gfs2_glock_dq()
1695 delay = gl->gl_hold_time; in __gfs2_glock_dq()
1696 __gfs2_glock_queue_work(gl, delay); in __gfs2_glock_dq()
1707 struct gfs2_glock *gl = gh->gh_gl; in gfs2_glock_dq() local
1709 spin_lock(&gl->gl_lockref.lock); in gfs2_glock_dq()
1710 if (list_is_first(&gh->gh_list, &gl->gl_holders) && in gfs2_glock_dq()
1712 spin_unlock(&gl->gl_lockref.lock); in gfs2_glock_dq()
1713 gl->gl_name.ln_sbd->sd_lockstruct.ls_ops->lm_cancel(gl); in gfs2_glock_dq()
1715 spin_lock(&gl->gl_lockref.lock); in gfs2_glock_dq()
1719 spin_unlock(&gl->gl_lockref.lock); in gfs2_glock_dq()
1724 struct gfs2_glock *gl = gh->gh_gl; in gfs2_glock_dq_wait() local
1727 wait_on_bit(&gl->gl_flags, GLF_DEMOTE, TASK_UNINTERRUPTIBLE); in gfs2_glock_dq_wait()
1758 struct gfs2_glock *gl; in gfs2_glock_nq_num() local
1761 error = gfs2_glock_get(sdp, number, glops, CREATE, &gl); in gfs2_glock_nq_num()
1763 error = gfs2_glock_nq_init(gl, state, flags, gh); in gfs2_glock_nq_num()
1764 gfs2_glock_put(gl); in gfs2_glock_nq_num()
1875 void gfs2_glock_cb(struct gfs2_glock *gl, unsigned int state) in gfs2_glock_cb() argument
1881 gfs2_glock_hold(gl); in gfs2_glock_cb()
1882 spin_lock(&gl->gl_lockref.lock); in gfs2_glock_cb()
1883 holdtime = gl->gl_tchange + gl->gl_hold_time; in gfs2_glock_cb()
1884 if (!list_empty(&gl->gl_holders) && in gfs2_glock_cb()
1885 gl->gl_name.ln_type == LM_TYPE_INODE) { in gfs2_glock_cb()
1888 if (test_bit(GLF_REPLY_PENDING, &gl->gl_flags)) in gfs2_glock_cb()
1889 delay = gl->gl_hold_time; in gfs2_glock_cb()
1908 if (!find_first_strong_holder(gl)) { in gfs2_glock_cb()
1910 .gh_gl = gl, in gfs2_glock_cb()
1916 demote_incompat_holders(gl, &mock_gh); in gfs2_glock_cb()
1918 handle_callback(gl, state, delay, true); in gfs2_glock_cb()
1919 __gfs2_glock_queue_work(gl, delay); in gfs2_glock_cb()
1920 spin_unlock(&gl->gl_lockref.lock); in gfs2_glock_cb()
1934 static int gfs2_should_freeze(const struct gfs2_glock *gl) in gfs2_should_freeze() argument
1938 if (gl->gl_reply & ~LM_OUT_ST_MASK) in gfs2_should_freeze()
1940 if (gl->gl_target == LM_ST_UNLOCKED) in gfs2_should_freeze()
1943 list_for_each_entry(gh, &gl->gl_holders, gh_list) { in gfs2_should_freeze()
1962 void gfs2_glock_complete(struct gfs2_glock *gl, int ret) in gfs2_glock_complete() argument
1964 struct lm_lockstruct *ls = &gl->gl_name.ln_sbd->sd_lockstruct; in gfs2_glock_complete()
1966 spin_lock(&gl->gl_lockref.lock); in gfs2_glock_complete()
1967 gl->gl_reply = ret; in gfs2_glock_complete()
1970 if (gfs2_should_freeze(gl)) { in gfs2_glock_complete()
1971 set_bit(GLF_FROZEN, &gl->gl_flags); in gfs2_glock_complete()
1972 spin_unlock(&gl->gl_lockref.lock); in gfs2_glock_complete()
1977 gl->gl_lockref.count++; in gfs2_glock_complete()
1978 set_bit(GLF_REPLY_PENDING, &gl->gl_flags); in gfs2_glock_complete()
1979 __gfs2_glock_queue_work(gl, 0); in gfs2_glock_complete()
1980 spin_unlock(&gl->gl_lockref.lock); in gfs2_glock_complete()
2017 struct gfs2_glock *gl; in gfs2_dispose_glock_lru() local
2022 gl = list_first_entry(list, struct gfs2_glock, gl_lru); in gfs2_dispose_glock_lru()
2023 list_del_init(&gl->gl_lru); in gfs2_dispose_glock_lru()
2024 clear_bit(GLF_LRU, &gl->gl_flags); in gfs2_dispose_glock_lru()
2025 if (!spin_trylock(&gl->gl_lockref.lock)) { in gfs2_dispose_glock_lru()
2027 list_add(&gl->gl_lru, &lru_list); in gfs2_dispose_glock_lru()
2028 set_bit(GLF_LRU, &gl->gl_flags); in gfs2_dispose_glock_lru()
2032 if (test_and_set_bit(GLF_LOCK, &gl->gl_flags)) { in gfs2_dispose_glock_lru()
2033 spin_unlock(&gl->gl_lockref.lock); in gfs2_dispose_glock_lru()
2036 gl->gl_lockref.count++; in gfs2_dispose_glock_lru()
2037 if (demote_ok(gl)) in gfs2_dispose_glock_lru()
2038 handle_callback(gl, LM_ST_UNLOCKED, 0, false); in gfs2_dispose_glock_lru()
2039 WARN_ON(!test_and_clear_bit(GLF_LOCK, &gl->gl_flags)); in gfs2_dispose_glock_lru()
2040 __gfs2_glock_queue_work(gl, 0); in gfs2_dispose_glock_lru()
2041 spin_unlock(&gl->gl_lockref.lock); in gfs2_dispose_glock_lru()
2057 struct gfs2_glock *gl; in gfs2_scan_glock_lru() local
2064 gl = list_first_entry(&lru_list, struct gfs2_glock, gl_lru); in gfs2_scan_glock_lru()
2067 if (!test_bit(GLF_LOCK, &gl->gl_flags)) { in gfs2_scan_glock_lru()
2068 list_move(&gl->gl_lru, &dispose); in gfs2_scan_glock_lru()
2074 list_move(&gl->gl_lru, &skipped); in gfs2_scan_glock_lru()
2116 struct gfs2_glock *gl; in glock_hash_walk() local
2124 while ((gl = rhashtable_walk_next(&iter)) && !IS_ERR(gl)) { in glock_hash_walk()
2125 if (gl->gl_name.ln_sbd == sdp) in glock_hash_walk()
2126 examiner(gl); in glock_hash_walk()
2130 } while (cond_resched(), gl == ERR_PTR(-EAGAIN)); in glock_hash_walk()
2135 bool gfs2_queue_delete_work(struct gfs2_glock *gl, unsigned long delay) in gfs2_queue_delete_work() argument
2139 spin_lock(&gl->gl_lockref.lock); in gfs2_queue_delete_work()
2141 &gl->gl_delete, delay); in gfs2_queue_delete_work()
2143 set_bit(GLF_PENDING_DELETE, &gl->gl_flags); in gfs2_queue_delete_work()
2144 spin_unlock(&gl->gl_lockref.lock); in gfs2_queue_delete_work()
2148 void gfs2_cancel_delete_work(struct gfs2_glock *gl) in gfs2_cancel_delete_work() argument
2150 if (cancel_delayed_work(&gl->gl_delete)) { in gfs2_cancel_delete_work()
2151 clear_bit(GLF_PENDING_DELETE, &gl->gl_flags); in gfs2_cancel_delete_work()
2152 gfs2_glock_put(gl); in gfs2_cancel_delete_work()
2156 bool gfs2_delete_work_queued(const struct gfs2_glock *gl) in gfs2_delete_work_queued() argument
2158 return test_bit(GLF_PENDING_DELETE, &gl->gl_flags); in gfs2_delete_work_queued()
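
The trio above (queue, cancel, query) pairs queued delete work with a glock reference: delete_work_func() drops it at line 1054 and gfs2_cancel_delete_work() at line 2152. A minimal sketch of queueing under that convention, with the 5 * HZ delay borrowed from delete_work_func(); this is an assumed usage, not a call site copied from the kernel.

        gfs2_glock_hold(gl);                            /* reference for delete_work_func() to put */
        if (!gfs2_queue_delete_work(gl, 5 * HZ))        /* false: work was already queued */
                gfs2_glock_put(gl);                     /* give the unused reference back */
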
2161 static void flush_delete_work(struct gfs2_glock *gl) in flush_delete_work() argument
2163 if (gl->gl_name.ln_type == LM_TYPE_IOPEN) { in flush_delete_work()
2164 if (cancel_delayed_work(&gl->gl_delete)) { in flush_delete_work()
2166 &gl->gl_delete, 0); in flush_delete_work()
2183 static void thaw_glock(struct gfs2_glock *gl) in thaw_glock() argument
2185 if (!test_and_clear_bit(GLF_FROZEN, &gl->gl_flags)) in thaw_glock()
2187 if (!lockref_get_not_dead(&gl->gl_lockref)) in thaw_glock()
2189 set_bit(GLF_REPLY_PENDING, &gl->gl_flags); in thaw_glock()
2190 gfs2_glock_queue_work(gl, 0); in thaw_glock()
2199 static void clear_glock(struct gfs2_glock *gl) in clear_glock() argument
2201 gfs2_glock_remove_from_lru(gl); in clear_glock()
2203 spin_lock(&gl->gl_lockref.lock); in clear_glock()
2204 if (!__lockref_is_dead(&gl->gl_lockref)) { in clear_glock()
2205 gl->gl_lockref.count++; in clear_glock()
2206 if (gl->gl_state != LM_ST_UNLOCKED) in clear_glock()
2207 handle_callback(gl, LM_ST_UNLOCKED, 0, false); in clear_glock()
2208 __gfs2_glock_queue_work(gl, 0); in clear_glock()
2210 spin_unlock(&gl->gl_lockref.lock); in clear_glock()
2224 static void dump_glock(struct seq_file *seq, struct gfs2_glock *gl, bool fsid) in dump_glock() argument
2226 spin_lock(&gl->gl_lockref.lock); in dump_glock()
2227 gfs2_dump_glock(seq, gl, fsid); in dump_glock()
2228 spin_unlock(&gl->gl_lockref.lock); in dump_glock()
2231 static void dump_glock_func(struct gfs2_glock *gl) in dump_glock_func() argument
2233 dump_glock(NULL, gl, true); in dump_glock_func()
2236 static void withdraw_dq(struct gfs2_glock *gl) in withdraw_dq() argument
2238 spin_lock(&gl->gl_lockref.lock); in withdraw_dq()
2239 if (!__lockref_is_dead(&gl->gl_lockref) && in withdraw_dq()
2240 glock_blocked_by_withdraw(gl)) in withdraw_dq()
2241 do_error(gl, LM_OUT_ERROR); /* remove pending waiters */ in withdraw_dq()
2242 spin_unlock(&gl->gl_lockref.lock); in withdraw_dq()
2349 static const char *gflags2str(char *buf, const struct gfs2_glock *gl) in gflags2str() argument
2351 const unsigned long *gflags = &gl->gl_flags; in gflags2str()
2374 if (!list_empty(&gl->gl_holders)) in gflags2str()
2378 if (gl->gl_object) in gflags2str()
2412 void gfs2_dump_glock(struct seq_file *seq, struct gfs2_glock *gl, bool fsid) in gfs2_dump_glock() argument
2414 const struct gfs2_glock_operations *glops = gl->gl_ops; in gfs2_dump_glock()
2418 struct gfs2_sbd *sdp = gl->gl_name.ln_sbd; in gfs2_dump_glock()
2422 if (gl->gl_ops->go_flags & GLOF_ASPACE) { in gfs2_dump_glock()
2423 struct address_space *mapping = gfs2_glock2aspace(gl); in gfs2_dump_glock()
2430 dtime = jiffies - gl->gl_demote_time; in gfs2_dump_glock()
2432 if (!test_bit(GLF_DEMOTE, &gl->gl_flags)) in gfs2_dump_glock()
2436 fs_id_buf, state2str(gl->gl_state), in gfs2_dump_glock()
2437 gl->gl_name.ln_type, in gfs2_dump_glock()
2438 (unsigned long long)gl->gl_name.ln_number, in gfs2_dump_glock()
2439 gflags2str(gflags_buf, gl), in gfs2_dump_glock()
2440 state2str(gl->gl_target), in gfs2_dump_glock()
2441 state2str(gl->gl_demote_state), dtime, in gfs2_dump_glock()
2442 atomic_read(&gl->gl_ail_count), in gfs2_dump_glock()
2443 atomic_read(&gl->gl_revokes), in gfs2_dump_glock()
2444 (int)gl->gl_lockref.count, gl->gl_hold_time, nrpages); in gfs2_dump_glock()
2446 list_for_each_entry(gh, &gl->gl_holders, gh_list) in gfs2_dump_glock()
2449 if (gl->gl_state != LM_ST_UNLOCKED && glops->go_dump) in gfs2_dump_glock()
2450 glops->go_dump(seq, gl, fs_id_buf); in gfs2_dump_glock()
2455 struct gfs2_glock *gl = iter_ptr; in gfs2_glstats_seq_show() local
2458 gl->gl_name.ln_type, in gfs2_glstats_seq_show()
2459 (unsigned long long)gl->gl_name.ln_number, in gfs2_glstats_seq_show()
2460 (unsigned long long)gl->gl_stats.stats[GFS2_LKS_SRTT], in gfs2_glstats_seq_show()
2461 (unsigned long long)gl->gl_stats.stats[GFS2_LKS_SRTTVAR], in gfs2_glstats_seq_show()
2462 (unsigned long long)gl->gl_stats.stats[GFS2_LKS_SRTTB], in gfs2_glstats_seq_show()
2463 (unsigned long long)gl->gl_stats.stats[GFS2_LKS_SRTTVARB], in gfs2_glstats_seq_show()
2464 (unsigned long long)gl->gl_stats.stats[GFS2_LKS_SIRT], in gfs2_glstats_seq_show()
2465 (unsigned long long)gl->gl_stats.stats[GFS2_LKS_SIRTVAR], in gfs2_glstats_seq_show()
2466 (unsigned long long)gl->gl_stats.stats[GFS2_LKS_DCOUNT], in gfs2_glstats_seq_show()
2467 (unsigned long long)gl->gl_stats.stats[GFS2_LKS_QCOUNT]); in gfs2_glstats_seq_show()
2572 struct gfs2_glock *gl = gi->gl; in gfs2_glock_iter_next() local
2574 if (gl) { in gfs2_glock_iter_next()
2577 if (!lockref_put_not_zero(&gl->gl_lockref)) in gfs2_glock_iter_next()
2578 gfs2_glock_queue_put(gl); in gfs2_glock_iter_next()
2581 gl = rhashtable_walk_next(&gi->hti); in gfs2_glock_iter_next()
2582 if (IS_ERR_OR_NULL(gl)) { in gfs2_glock_iter_next()
2583 if (gl == ERR_PTR(-EAGAIN)) { in gfs2_glock_iter_next()
2587 gl = NULL; in gfs2_glock_iter_next()
2590 if (gl->gl_name.ln_sbd != gi->sdp) in gfs2_glock_iter_next()
2593 if (!lockref_get_not_dead(&gl->gl_lockref)) in gfs2_glock_iter_next()
2597 if (__lockref_is_dead(&gl->gl_lockref)) in gfs2_glock_iter_next()
2602 gi->gl = gl; in gfs2_glock_iter_next()
2627 return gi->gl; in gfs2_glock_seq_start()
2638 return gi->gl; in gfs2_glock_seq_next()
2717 gi->gl = NULL; in __gfs2_glocks_open()
2733 if (gi->gl) in gfs2_glocks_release()
2734 gfs2_glock_put(gi->gl); in gfs2_glocks_release()
2884 struct gfs2_glock *gl; in gfs2_glockfd_seq_show() local
2887 gl = GFS2_I(inode)->i_iopen_gh.gh_gl; in gfs2_glockfd_seq_show()
2888 if (gl) { in gfs2_glockfd_seq_show()
2890 i->tgid, i->fd, gl->gl_name.ln_type, in gfs2_glockfd_seq_show()
2891 (unsigned long long)gl->gl_name.ln_number); in gfs2_glockfd_seq_show()