Searched refs:mutex_lock_killable (Results 1 – 24 of 24) sorted by relevance
44 if (mutex_lock_killable(&g_connected_mutex)) in vchiq_add_connected_callback()79 if (mutex_lock_killable(&g_connected_mutex)) in vchiq_call_connected_callbacks()
803 mutex_lock_killable(&state->slot_mutex)) in queue_message()877 if (mutex_lock_killable(&state->slot_mutex)) in queue_message()1044 mutex_lock_killable(&state->sync_mutex)) in queue_message_sync()1721 if (mutex_lock_killable(&service->bulk_mutex)) { in parse_rx_slots()2525 if (mutex_lock_killable(&service->bulk_mutex)) in do_abort_bulks()3001 if (mutex_lock_killable(&service->bulk_mutex)) { in vchiq_bulk_transfer()3015 if (mutex_lock_killable(&service->bulk_mutex)) { in vchiq_bulk_transfer()3043 if (mutex_lock_killable(&state->slot_mutex)) { in vchiq_bulk_transfer()
241 if (mutex_lock_killable(&state->mutex)) in vchiq_shutdown()283 if (mutex_lock_killable(&state->mutex)) { in vchiq_connect()851 rc = mutex_lock_killable(&instance->state->mutex); in vchiq_ioctl()
155 #define mutex_lock_killable(lock) mutex_lock_killable_nested(lock, 0) macro167 extern int __must_check mutex_lock_killable(struct mutex *lock);172 # define mutex_lock_killable_nested(lock, subclass) mutex_lock_killable(lock)
684 error = mutex_lock_killable(&loop_ctl_mutex); in loop_change_fd()953 error = mutex_lock_killable(&loop_ctl_mutex); in loop_set_fd()1199 err = mutex_lock_killable(&loop_ctl_mutex); in loop_clr_fd()1236 err = mutex_lock_killable(&loop_ctl_mutex); in loop_set_status()1355 ret = mutex_lock_killable(&loop_ctl_mutex); in loop_get_status()1561 err = mutex_lock_killable(&loop_ctl_mutex); in lo_simple_ioctl()1784 err = mutex_lock_killable(&loop_ctl_mutex); in lo_open()2150 ret = mutex_lock_killable(&loop_ctl_mutex); in loop_control_ioctl()
193 rc = mutex_lock_killable(&mac_hid_emumouse_mutex); in mac_hid_toggle_emumouse()
92 err = mutex_lock_killable(m1); in kcmp_lock()
1299 mutex_lock_killable(&current->signal->cred_guard_mutex)) in seccomp_set_mode_filter()
1224 err = mutex_lock_killable(&task->signal->cred_guard_mutex); in mm_access()
202 if (mutex_lock_killable(&vpd->lock)) in pci_vpd_read()264 if (mutex_lock_killable(&vpd->lock)) in pci_vpd_write()
2803 if (mutex_lock_killable(&hotkey_mutex)) in hotkey_mask_store()2899 if (mutex_lock_killable(&hotkey_mutex)) in hotkey_source_mask_store()2947 if (mutex_lock_killable(&hotkey_mutex)) in hotkey_poll_freq_store()4265 if (mutex_lock_killable(&hotkey_mutex)) in hotkey_read()4303 if (mutex_lock_killable(&hotkey_mutex)) in hotkey_write()6679 if (mutex_lock_killable(&brightness_mutex) < 0) in tpacpi_brightness_checkpoint_nvram()6778 res = mutex_lock_killable(&brightness_mutex); in brightness_set()6820 res = mutex_lock_killable(&brightness_mutex); in brightness_get()7285 if (mutex_lock_killable(&volume_mutex) < 0) in tpacpi_volume_checkpoint_nvram()7357 if (mutex_lock_killable(&volume_mutex) < 0) in __volume_set_mute_ec()[all …]
97 ret = mutex_lock_killable(&orangefs_request_mutex); in service_operation()
1330 int __sched mutex_lock_killable(struct mutex *lock) in mutex_lock_killable() function1339 EXPORT_SYMBOL(mutex_lock_killable);
317 err = mutex_lock_killable(&serio_raw_mutex); in serio_raw_connect()
765 if (mutex_lock_killable(&oom_lock)) in oom_killer_disable()
1629 else if (mutex_lock_killable(&pcpu_alloc_mutex)) in pcpu_alloc()
1587 if (mutex_lock_killable(&oom_lock)) in mem_cgroup_out_of_memory()
406 int err = mutex_lock_killable(&task->signal->cred_guard_mutex); in lock_trace()2773 result = mutex_lock_killable(&task->signal->cred_guard_mutex); in do_io_accounting()
1371 ret = mutex_lock_killable(&o.base); in ww_test_normal()
1551 ret = mutex_lock_killable(&gpu->lock); in etnaviv_gpu_hw_resume()
2021 ret = mutex_lock_killable(&hdev->ll_open_lock); in hid_hw_open()
2811 if (mutex_lock_killable(&vcpu->mutex)) in kvm_vcpu_ioctl()
78 return mutex_lock_killable(&rtnl_mutex); in rtnl_lock_killable()
9090 if (mutex_lock_killable(&vcpu->mutex)) in kvm_arch_vcpu_postcreate()