Searched refs:down_read_killable (Results 1 – 12 of 12) sorted by relevance
49 err = down_read_killable(&inode->i_rwsem); in nfs_start_io_read()
142 err = down_read_killable(&inode->i_rwsem); in nfs_start_io_direct()
225 extern int __must_check down_read_killable(struct rw_semaphore *sem);
302 # define down_read_killable_nested(sem, subclass) down_read_killable(sem)
462 ret = down_read_killable(&mm->mmap_lock); in mmap_read_lock_killable()
889 return down_read_killable(&inode->i_rwsem); in inode_lock_shared_killable()
561 if (down_read_killable(&client->cdev->lock)) in ssam_cdev_device_ioctl()
582 if (down_read_killable(&cdev->lock)) in ssam_cdev_read()
606 if (down_read_killable(&cdev->lock)) in ssam_cdev_read()
383 if (down_read_killable(&client->ddev->lock)) in surface_dtx_ioctl()
467 if (down_read_killable(&ddev->lock)) in surface_dtx_read()
491 if (down_read_killable(&ddev->lock)) in surface_dtx_read()
1557 int __sched down_read_killable(struct rw_semaphore *sem) in down_read_killable() function
1569 EXPORT_SYMBOL(down_read_killable);
833 ret = down_read_killable(&task->signal->exec_update_lock); in __pidfd_fget()
1352 err = down_read_killable(&task->signal->exec_update_lock); in mm_access()
566 rv = down_read_killable(&pernet_ops_rwsem); in copy_net_ns()
444 int err = down_read_killable(&task->signal->exec_update_lock); in lock_trace()
3032 result = down_read_killable(&task->signal->exec_update_lock); in do_io_accounting()
1938 r = down_read_killable(&adev->reset_domain->sem); in amdgpu_debugfs_ib_preempt()