Home
last modified time | relevance | path

Searched refs:send_list (Results 1 – 9 of 9) sorted by relevance

/linux/drivers/net/ethernet/mellanox/mlx5/core/steering/sws/
H A D dr_rule.c 18 struct list_head *send_list) in dr_rule_append_to_miss_list() argument
38 ste_info_last, send_list, true); in dr_rule_append_to_miss_list()
124 list_del(&ste_info->send_list); in dr_rule_handle_one_ste_in_update_list()
155 send_ste_list, send_list) { in dr_rule_send_update_list()
163 send_ste_list, send_list) { in dr_rule_send_update_list()
502 &rehash_table_send_list, send_list) { in dr_rule_rehash_htbl()
503 list_del(&del_ste_info->send_list); in dr_rule_rehash_htbl()
540 struct list_head *send_list) in dr_rule_handle_collision() argument
556 new_ste, miss_list, send_list)) { in dr_rule_handle_collision()
562 ste_info, send_list, false); in dr_rule_handle_collision()
[all …]
H A D dr_send.c 516 struct list_head *send_list, in mlx5dr_send_fill_and_append_ste_send_info() argument
530 list_add_tail(&ste_info->send_list, send_list); in mlx5dr_send_fill_and_append_ste_send_info()
H A D dr_types.h 201 struct list_head send_list; member
211 struct list_head *send_list,
/linux/fs/dlm/
H A D plock.c 20 static LIST_HEAD(send_list);
95 list_add_tail(&op->list, &send_list); in send_op()
491 if (!list_empty(&send_list)) { in dev_read()
492 op = list_first_entry(&send_list, struct plock_op, list); in dev_read()
593 if (!list_empty(&send_list)) in dev_poll()
627 WARN_ON(!list_empty(&send_list)); in dlm_plock_exit()
/linux/drivers/vdpa/vdpa_user/
H A D vduse_dev.c 123 struct list_head send_list; member
231 list_splice_init(&dev->recv_list, &dev->send_list); in vduse_dev_broken()
232 list_for_each_entry_safe(msg, tmp, &dev->send_list, list) { in vduse_dev_broken()
257 vduse_enqueue_msg(&dev->send_list, msg); in vduse_dev_msg_sync()
369 msg = vduse_dequeue_msg(&dev->send_list); in vduse_dev_read_iter()
379 !list_empty(&dev->send_list)); in vduse_dev_read_iter()
390 vduse_enqueue_msg(&dev->send_list, msg); in vduse_dev_read_iter()
453 if (!list_empty(&dev->send_list)) in vduse_dev_poll()
1633 list_splice_init(&dev->recv_list, &dev->send_list); in vduse_dev_release()
1850 INIT_LIST_HEAD(&dev->send_list); in vduse_dev_create()
/linux/drivers/infiniband/core/
H A D user_mad.c 119 struct list_head send_list; member
481 list_for_each_entry(sent_packet, &file->send_list, list) { in is_duplicate()
643 list_add_tail(&packet->list, &file->send_list); in ib_umad_write()
649 list_add_tail(&packet->list, &file->send_list); in ib_umad_write()
1030 INIT_LIST_HEAD(&file->send_list); in ib_umad_open()
H A D mad.c 415 INIT_LIST_HEAD(&mad_agent_priv->send_list); in ib_register_mad_agent()
1104 &mad_agent_priv->send_list); in handle_send_state()
1109 &mad_agent_priv->send_list); in handle_send_state()
1971 list_for_each_entry(wr, &mad_agent_priv->send_list, agent_list) { in ib_find_send_mad()
2662 &mad_agent_priv->send_list, agent_list) in cancel_mads()
2695 list_for_each_entry(mad_send_wr, &mad_agent_priv->send_list, in find_send_wr()
H A D mad_rmpp.c 744 &mad_send_wr->mad_agent_priv->send_list); in process_rmpp_ack()
/linux/drivers/nvme/host/
H A D tcp.c 150 struct list_head send_list; member
396 return !list_empty(&queue->send_list) || in nvme_tcp_queue_has_pending()
413 list_empty(&queue->send_list) && !queue->request; in nvme_tcp_queue_request()
437 list_add(&req->entry, &queue->send_list); in nvme_tcp_process_req_list()
446 req = list_first_entry_or_null(&queue->send_list, in nvme_tcp_fetch_request()
450 req = list_first_entry_or_null(&queue->send_list, in nvme_tcp_fetch_request()
1776 INIT_LIST_HEAD(&queue->send_list); in nvme_tcp_alloc_queue()