Home
last modified time | relevance | path

Searched refs:sq_lock (Results 1 – 15 of 15) sorted by relevance

/linux/drivers/net/ethernet/intel/ice/
H A Dice_controlq.c465 mutex_lock(&cq->sq_lock); in ice_shutdown_sq()
486 mutex_unlock(&cq->sq_lock); in ice_shutdown_sq()
788 mutex_init(&cq->sq_lock); in ice_init_ctrlq_locks()
826 mutex_destroy(&cq->sq_lock); in ice_destroy_ctrlq_locks()
1009 mutex_lock(&cq->sq_lock); in ice_sq_send_cmd()
1142 mutex_unlock(&cq->sq_lock); in ice_sq_send_cmd()
H A Dice_controlq.h100 struct mutex sq_lock; /* Send queue lock */ member
/linux/drivers/net/ethernet/fungible/funcore/
H A Dfun_dev.c389 spin_lock(&funq->sq_lock); in fun_submit_admin_cmd()
405 spin_unlock(&funq->sq_lock); in fun_submit_admin_cmd()
428 spin_lock(&fdev->admin_q->sq_lock); in fun_admin_stop()
430 spin_unlock(&fdev->admin_q->sq_lock); in fun_admin_stop()
H A Dfun_queue.c414 spin_lock_init(&funq->sq_lock); in fun_alloc_queue()
/linux/drivers/infiniband/sw/siw/
H A Dsiw_qp_tx.c1082 spin_lock_irqsave(&qp->sq_lock, flags); in siw_qp_sq_process()
1085 spin_unlock_irqrestore(&qp->sq_lock, flags); in siw_qp_sq_process()
1117 spin_lock_irqsave(&qp->sq_lock, flags); in siw_qp_sq_process()
1127 spin_unlock_irqrestore(&qp->sq_lock, flags); in siw_qp_sq_process()
H A Dsiw_verbs.c368 spin_lock_init(&qp->sq_lock); in siw_create_qp()
823 spin_lock_irqsave(&qp->sq_lock, flags); in siw_post_send()
970 spin_unlock_irqrestore(&qp->sq_lock, flags); in siw_post_send()
974 spin_unlock_irqrestore(&qp->sq_lock, flags); in siw_post_send()
H A Dsiw_qp.c242 spin_lock_irqsave(&qp->sq_lock, flags); in siw_qp_mpa_rts()
245 spin_unlock_irqrestore(&qp->sq_lock, flags); in siw_qp_mpa_rts()
288 spin_unlock_irqrestore(&qp->sq_lock, flags); in siw_qp_mpa_rts()
H A Dsiw.h434 spinlock_t sq_lock; member
/linux/drivers/nvme/host/
H A Dpci.c368 spinlock_t sq_lock; member
743 spin_lock(&nvmeq->sq_lock); in nvme_commit_rqs()
746 spin_unlock(&nvmeq->sq_lock); in nvme_commit_rqs()
1427 spin_lock(&nvmeq->sq_lock); in nvme_queue_rq()
1430 spin_unlock(&nvmeq->sq_lock); in nvme_queue_rq()
1441 spin_lock(&nvmeq->sq_lock); in nvme_submit_cmds()
1448 spin_unlock(&nvmeq->sq_lock); in nvme_submit_cmds()
1663 spin_lock(&nvmeq->sq_lock); in nvme_pci_submit_async_event()
1666 spin_unlock(&nvmeq->sq_lock); in nvme_pci_submit_async_event()
2119 spin_lock_init(&nvmeq->sq_lock); in nvme_alloc_queue()
/linux/drivers/infiniband/hw/ionic/
H A Dionic_controlpath.c2172 spin_lock_init(&qp->sq_lock); in ionic_create_qp()
2418 spin_lock(&qp->sq_lock); in ionic_flush_qp()
2424 spin_unlock(&qp->sq_lock); in ionic_flush_qp()
2494 spin_lock(&qp->sq_lock); in ionic_reset_qp()
2501 spin_unlock(&qp->sq_lock); in ionic_reset_qp()
/linux/drivers/infiniband/hw/bnxt_re/
H A Dib_verbs.c1288 spin_lock_init(&qp->sq_lock); in bnxt_re_create_shadow_qp()
1729 spin_lock_init(&qp->sq_lock); in bnxt_re_create_qp()
2855 spin_lock_irqsave(&qp->sq_lock, flags); in bnxt_re_post_send_shadow_qp()
2892 spin_unlock_irqrestore(&qp->sq_lock, flags); in bnxt_re_post_send_shadow_qp()
2916 spin_lock_irqsave(&qp->sq_lock, flags); in bnxt_re_post_send()
3000 spin_unlock_irqrestore(&qp->sq_lock, flags); in bnxt_re_post_send()
3837 spin_lock_irqsave(&qp->sq_lock, flags); in send_phantom_wqe()
3849 spin_unlock_irqrestore(&qp->sq_lock, flags); in send_phantom_wqe()
/linux/drivers/infiniband/sw/rxe/
H A Drxe_verbs.c914 spin_lock_irqsave(&qp->sq.sq_lock, flags); in rxe_post_send_kernel()
925 spin_unlock_irqrestore(&qp->sq.sq_lock, flags); in rxe_post_send_kernel()
H A Drxe_qp.c223 spin_lock_init(&qp->sq.sq_lock); in rxe_qp_init_misc()
/linux/include/ufs/
H A Dufshcd.h1136 * @sq_lock: serialize submission queue access
1155 spinlock_t sq_lock;
1153 spinlock_t sq_lock; member
/linux/drivers/ufs/core/
H A Dufshcd.c2394 spin_lock(&hwq->sq_lock); in ufshcd_send_command()
2398 spin_unlock(&hwq->sq_lock); in ufshcd_send_command()