Searched refs:sq_lock (Results 1 – 15 of 15) sorted by relevance
| /linux/drivers/net/ethernet/intel/ice/ |
| H A D | ice_controlq.c | 465 mutex_lock(&cq->sq_lock); in ice_shutdown_sq() 486 mutex_unlock(&cq->sq_lock); in ice_shutdown_sq() 788 mutex_init(&cq->sq_lock); in ice_init_ctrlq_locks() 826 mutex_destroy(&cq->sq_lock); in ice_destroy_ctrlq_locks() 1009 mutex_lock(&cq->sq_lock); in ice_sq_send_cmd() 1142 mutex_unlock(&cq->sq_lock); in ice_sq_send_cmd()
|
| H A D | ice_controlq.h | 100 struct mutex sq_lock; /* Send queue lock */ member
|
| /linux/drivers/net/ethernet/fungible/funcore/ |
| H A D | fun_dev.c | 389 spin_lock(&funq->sq_lock); in fun_submit_admin_cmd() 405 spin_unlock(&funq->sq_lock); in fun_submit_admin_cmd() 428 spin_lock(&fdev->admin_q->sq_lock); in fun_admin_stop() 430 spin_unlock(&fdev->admin_q->sq_lock); in fun_admin_stop()
|
| H A D | fun_queue.c | 414 spin_lock_init(&funq->sq_lock); in fun_alloc_queue()
|
| /linux/drivers/infiniband/sw/siw/ |
| H A D | siw_qp_tx.c | 1082 spin_lock_irqsave(&qp->sq_lock, flags); in siw_qp_sq_process() 1085 spin_unlock_irqrestore(&qp->sq_lock, flags); in siw_qp_sq_process() 1117 spin_lock_irqsave(&qp->sq_lock, flags); in siw_qp_sq_process() 1127 spin_unlock_irqrestore(&qp->sq_lock, flags); in siw_qp_sq_process()
|
| H A D | siw_verbs.c | 368 spin_lock_init(&qp->sq_lock); in siw_create_qp() 823 spin_lock_irqsave(&qp->sq_lock, flags); in siw_post_send() 970 spin_unlock_irqrestore(&qp->sq_lock, flags); in siw_post_send() 974 spin_unlock_irqrestore(&qp->sq_lock, flags); in siw_post_send()
|
| H A D | siw_qp.c | 242 spin_lock_irqsave(&qp->sq_lock, flags); in siw_qp_mpa_rts() 245 spin_unlock_irqrestore(&qp->sq_lock, flags); in siw_qp_mpa_rts() 288 spin_unlock_irqrestore(&qp->sq_lock, flags); in siw_qp_mpa_rts()
|
| H A D | siw.h | 434 spinlock_t sq_lock; member
|
| /linux/drivers/nvme/host/ |
| H A D | pci.c | 368 spinlock_t sq_lock; member 743 spin_lock(&nvmeq->sq_lock); in nvme_commit_rqs() 746 spin_unlock(&nvmeq->sq_lock); in nvme_commit_rqs() 1427 spin_lock(&nvmeq->sq_lock); in nvme_queue_rq() 1430 spin_unlock(&nvmeq->sq_lock); in nvme_queue_rq() 1441 spin_lock(&nvmeq->sq_lock); in nvme_submit_cmds() 1448 spin_unlock(&nvmeq->sq_lock); in nvme_submit_cmds() 1663 spin_lock(&nvmeq->sq_lock); in nvme_pci_submit_async_event() 1666 spin_unlock(&nvmeq->sq_lock); in nvme_pci_submit_async_event() 2119 spin_lock_init(&nvmeq->sq_lock); in nvme_alloc_queue()
|
| /linux/drivers/infiniband/hw/ionic/ |
| H A D | ionic_controlpath.c | 2172 spin_lock_init(&qp->sq_lock); in ionic_create_qp() 2418 spin_lock(&qp->sq_lock); in ionic_flush_qp() 2424 spin_unlock(&qp->sq_lock); in ionic_flush_qp() 2494 spin_lock(&qp->sq_lock); in ionic_reset_qp() 2501 spin_unlock(&qp->sq_lock); in ionic_reset_qp()
|
| /linux/drivers/infiniband/hw/bnxt_re/ |
| H A D | ib_verbs.c | 1288 spin_lock_init(&qp->sq_lock); in bnxt_re_create_shadow_qp() 1729 spin_lock_init(&qp->sq_lock); in bnxt_re_create_qp() 2855 spin_lock_irqsave(&qp->sq_lock, flags); in bnxt_re_post_send_shadow_qp() 2892 spin_unlock_irqrestore(&qp->sq_lock, flags); in bnxt_re_post_send_shadow_qp() 2916 spin_lock_irqsave(&qp->sq_lock, flags); in bnxt_re_post_send() 3000 spin_unlock_irqrestore(&qp->sq_lock, flags); in bnxt_re_post_send() 3837 spin_lock_irqsave(&qp->sq_lock, flags); in send_phantom_wqe() 3849 spin_unlock_irqrestore(&qp->sq_lock, flags); in send_phantom_wqe()
|
| /linux/drivers/infiniband/sw/rxe/ |
| H A D | rxe_verbs.c | 914 spin_lock_irqsave(&qp->sq.sq_lock, flags); in rxe_post_send_kernel() 925 spin_unlock_irqrestore(&qp->sq.sq_lock, flags); in rxe_post_send_kernel()
|
| H A D | rxe_qp.c | 223 spin_lock_init(&qp->sq.sq_lock); in rxe_qp_init_misc()
|
| /linux/include/ufs/ |
| H A D | ufshcd.h | 1136 * @sq_lock: serialize submission queue access 1155 spinlock_t sq_lock; member
|
| /linux/drivers/ufs/core/ |
| H A D | ufshcd.c | 2394 spin_lock(&hwq->sq_lock); in ufshcd_send_command() 2398 spin_unlock(&hwq->sq_lock); in ufshcd_send_command()
|