Searched refs:mutex_lock_killable (Results 1 – 24 of 24) sorted by relevance
35 if (mutex_lock_killable(&g_connected_mutex)) in vchiq_add_connected_callback()
67 if (mutex_lock_killable(&g_connected_mutex)) in vchiq_call_connected_callbacks()
934 mutex_lock_killable(&state->slot_mutex)) in queue_message()
1009 if (mutex_lock_killable(&state->slot_mutex)) in queue_message()
1183 mutex_lock_killable(&state->sync_mutex)) in queue_message_sync()
1824 if (mutex_lock_killable(&service->bulk_mutex)) { in parse_message()
2731 if (mutex_lock_killable(&service->bulk_mutex)) in do_abort_bulks()
3214 if (mutex_lock_killable(&service->bulk_mutex)) { in vchiq_bulk_transfer()
3228 if (mutex_lock_killable(&service->bulk_mutex)) { in vchiq_bulk_transfer()
3258 if (mutex_lock_killable(&state->slot_mutex)) { in vchiq_bulk_transfer()
674 if (mutex_lock_killable(&state->mutex)) in vchiq_shutdown()
702 if (mutex_lock_killable(&state->mutex)) { in vchiq_connect()
637 rc = mutex_lock_killable(&instance->state->mutex); in vchiq_ioctl()
189 #define mutex_lock_killable(lock) mutex_lock_killable_nested(lock, 0) macro
201 extern int __must_check mutex_lock_killable(struct mutex *lock);
206 # define mutex_lock_killable_nested(lock, subclass) mutex_lock_killable(lock)
552 int __sched mutex_lock_killable(struct mutex *lock) in mutex_lock_killable() function
556 EXPORT_SYMBOL(mutex_lock_killable);
945 int __sched mutex_lock_killable(struct mutex *lock) in mutex_lock_killable() function
954 EXPORT_SYMBOL(mutex_lock_killable);
110 err = mutex_lock_killable(&loop_validate_mutex); in loop_global_lock_killable()
114 err = mutex_lock_killable(&lo->lo_mutex); in loop_global_lock_killable()
1469 err = mutex_lock_killable(&lo->lo_mutex); in loop_clr_fd()
1506 err = mutex_lock_killable(&lo->lo_mutex); in loop_set_status()
1586 ret = mutex_lock_killable(&lo->lo_mutex); in loop_get_status()
1798 err = mutex_lock_killable(&lo->lo_mutex); in lo_simple_ioctl()
2040 err = mutex_lock_killable(&lo->lo_mutex); in lo_open()
2313 err = mutex_lock_killable(&loop_ctl_mutex); in loop_add()
2450 ret = mutex_lock_killable(&loop_ctl_mutex); in loop_control_remove()
2463 ret = mutex_lock_killable(&lo->lo_mutex); in loop_control_remove()
[all …]
192 rc = mutex_lock_killable(&mac_hid_emumouse_mutex); in mac_hid_toggle_emumouse()
180 if (mutex_lock_killable(&vpd->lock)) in pci_vpd_read()236 if (mutex_lock_killable(&vpd->lock)) in pci_vpd_write()
97 ret = mutex_lock_killable(&orangefs_request_mutex); in service_operation()
2797 if (mutex_lock_killable(&hotkey_mutex)) in hotkey_mask_store()
2893 if (mutex_lock_killable(&hotkey_mutex)) in hotkey_source_mask_store()
2941 if (mutex_lock_killable(&hotkey_mutex)) in hotkey_poll_freq_store()
4295 if (mutex_lock_killable(&hotkey_mutex)) in hotkey_read()
4333 if (mutex_lock_killable(&hotkey_mutex)) in hotkey_write()
6728 if (mutex_lock_killable(&brightness_mutex) < 0) in tpacpi_brightness_checkpoint_nvram()
6827 res = mutex_lock_killable(&brightness_mutex); in brightness_set()
6869 res = mutex_lock_killable(&brightness_mutex); in brightness_get()
7339 if (mutex_lock_killable(&volume_mutex) < 0) in tpacpi_volume_checkpoint_nvram()
7411 if (mutex_lock_killable(&volume_mutex) < 0) in __volume_set_mute_ec()
[all …]
317 err = mutex_lock_killable(&serio_raw_mutex); in serio_raw_connect()
769 if (mutex_lock_killable(&oom_lock)) in oom_killer_disable()
1774 } else if (mutex_lock_killable(&pcpu_alloc_mutex)) { in pcpu_alloc()
1568 if (mutex_lock_killable(&oom_lock)) in mem_cgroup_out_of_memory()
1848 mutex_lock_killable(&current->signal->cred_guard_mutex)) in seccomp_set_mode_filter()
1613 ret = mutex_lock_killable(&gpu->lock); in etnaviv_gpu_hw_resume()
1683 ret = mutex_lock_killable(&o.base); in ww_test_normal()
2096 ret = mutex_lock_killable(&hdev->ll_open_lock); in hid_hw_open()
638 ret = mutex_lock_killable(&vcpu->mutex); in sev_launch_update_vmsa()
3756 if (mutex_lock_killable(&vcpu->mutex)) in kvm_vcpu_ioctl()
78 return mutex_lock_killable(&rtnl_mutex); in rtnl_lock_killable()
10802 if (mutex_lock_killable(&vcpu->mutex)) in kvm_arch_vcpu_postcreate()