/linux/drivers/gpu/drm/amd/amdgpu/
amdgpu_fence.c
     87  seq = atomic_read(&drv->last_seq);  in amdgpu_fence_read()
    212  uint32_t seq, last_seq;  in amdgpu_fence_process() local
    215  last_seq = atomic_read(&ring->fence_drv.last_seq);  in amdgpu_fence_process()
    218  } while (atomic_cmpxchg(&drv->last_seq, last_seq, seq) != last_seq);  in amdgpu_fence_process()
    224  if (unlikely(seq == last_seq))  in amdgpu_fence_process()
    227  last_seq &= drv->num_fences_mask;  in amdgpu_fence_process()
    234  ++last_seq;  in amdgpu_fence_process()
    235  last_seq &= drv->num_fences_mask;  in amdgpu_fence_process()
    236  ptr = &drv->fences[last_seq];  in amdgpu_fence_process()
    254  } while (last_seq != seq);  in amdgpu_fence_process()
    [all …]
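The amdgpu_fence_process() excerpt shows a lock-free update: the freshly read hardware sequence number is published into fence_drv.last_seq with a compare-and-swap loop, and every slot between the old and the new value is then walked, with the index masked into a power-of-two fence ring. Below is a minimal userspace sketch of that pattern, assuming C11 atomics in place of the kernel's atomic_t; NUM_FENCES, fence_process() and the printf are illustrative stand-ins, not driver code.

    #include <stdatomic.h>
    #include <stdint.h>
    #include <stdio.h>

    #define NUM_FENCES      16u                    /* must be a power of two */
    #define NUM_FENCES_MASK (NUM_FENCES - 1u)

    static _Atomic uint32_t last_seq;              /* last sequence number already processed */
    static void *fences[NUM_FENCES];               /* one slot per in-flight fence */

    static void fence_process(uint32_t hw_seq)     /* hw_seq: value read back from the ring */
    {
        uint32_t seq, old;

        /* Publish the new sequence number; retry if another caller raced us. */
        do {
            old = atomic_load(&last_seq);
            seq = hw_seq;
            if (seq == old)
                return;                            /* nothing new has signaled */
        } while (!atomic_compare_exchange_weak(&last_seq, &old, seq));

        /* Visit every slot from old+1 up to seq, wrapping with the mask. */
        do {
            ++old;
            printf("signal fence slot %u\n", old & NUM_FENCES_MASK);
            fences[old & NUM_FENCES_MASK] = NULL;  /* drop the reference held by the ring */
        } while (old != seq);
    }

The mask trick only works because the number of fence slots is a power of two; the driver's num_fences_mask plays the same role.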
amdgpu_ring_mux.c
     80  uint32_t seq, last_seq;  in amdgpu_mux_resubmit_chunks() local
     99  last_seq = atomic_read(&e->ring->fence_drv.last_seq);  in amdgpu_mux_resubmit_chunks()
    101  if (last_seq < seq) {  in amdgpu_mux_resubmit_chunks()
    104  if (chunk->sync_seq > last_seq && chunk->sync_seq <= seq) {  in amdgpu_mux_resubmit_chunks()
    464  uint32_t last_seq = 0;  in scan_and_remove_signaled_chunk() local
    474  last_seq = atomic_read(&ring->fence_drv.last_seq);  in scan_and_remove_signaled_chunk()
    477  if (chunk->sync_seq <= last_seq) {  in scan_and_remove_signaled_chunk()
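The two functions above reduce to a pair of interval tests against the same last_seq: a chunk whose sequence is at or below last_seq has already signaled and can be dropped, while one that falls in the half-open range (last_seq, seq] still has to be replayed. A small illustrative sketch of those tests; the helper names are made up, not mux API.

    #include <stdbool.h>
    #include <stdint.h>

    /* Already signaled: the scan_and_remove_signaled_chunk() case. */
    static bool chunk_signaled(uint32_t sync_seq, uint32_t last_seq)
    {
        return sync_seq <= last_seq;
    }

    /* Falls in (last_seq, seq], the range amdgpu_mux_resubmit_chunks() replays. */
    static bool chunk_needs_resubmit(uint32_t sync_seq, uint32_t last_seq, uint32_t seq)
    {
        return sync_seq > last_seq && sync_seq <= seq;
    }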
amdgpu_job.c
    123  job->base.sched->name, atomic_read(&ring->fence_drv.last_seq),  in amdgpu_job_timedout()
amdgpu_ring.h
    121  atomic_t last_seq;  member
/linux/drivers/net/wireless/intel/iwlwifi/mld/tests/
rx.c
     26  __le16 last_seq;  member
     86  .last_seq = __cpu_to_le16(0x100),
    102  .last_seq = __cpu_to_le16(0x100),
    119  .last_seq = __cpu_to_le16(0x100),
    149  .last_seq = __cpu_to_le16(0x100),
    167  .last_seq = __cpu_to_le16(0x100),
    184  .last_seq = __cpu_to_le16(0x100),
    202  .last_seq = __cpu_to_le16(0x100),
    221  .last_seq = __cpu_to_le16(0x100),
    239  .last_seq = __cpu_to_le16(0x100),
    [all …]
/linux/drivers/gpu/drm/radeon/
radeon_fence.c
     97  seq = lower_32_bits(atomic64_read(&drv->last_seq));  in radeon_fence_read()
    177  seq = atomic64_read(&fence->rdev->fence_drv[fence->ring].last_seq);  in radeon_fence_check_signaled()
    199  uint64_t seq, last_seq, last_emitted;  in radeon_fence_activity() local
    224  last_seq = atomic64_read(&rdev->fence_drv[ring].last_seq);  in radeon_fence_activity()
    228  seq |= last_seq & 0xffffffff00000000LL;  in radeon_fence_activity()
    229  if (seq < last_seq) {  in radeon_fence_activity()
    234  if (seq <= last_seq || seq > last_emitted)  in radeon_fence_activity()
    242  last_seq = seq;  in radeon_fence_activity()
    251  } while (atomic64_xchg(&rdev->fence_drv[ring].last_seq, seq) > seq);  in radeon_fence_activity()
    300  (uint64_t)atomic64_read(&fence_drv->last_seq),  in radeon_fence_check_lockup()
    [all …]
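radeon keeps a 64-bit sequence counter, but the value it reads back from hardware is only 32 bits wide, so radeon_fence_activity() rebuilds the full number: the upper half is borrowed from the last known 64-bit sequence, the result is bumped when the low half has wrapped, and readings that go backwards or past last_emitted are discarded. A hedged stand-alone sketch of that extension; extend_seq() is an illustrative name and the wrap handling is simplified relative to the driver.

    #include <stdint.h>

    static uint64_t extend_seq(uint32_t hw_seq, uint64_t last_seq, uint64_t last_emitted)
    {
        uint64_t seq = hw_seq;

        seq |= last_seq & 0xffffffff00000000ULL;   /* reuse the known upper 32 bits */
        if (seq < last_seq)                        /* low 32 bits wrapped since the last read */
            seq += 0x100000000ULL;

        /* Ignore values that went backwards or exceed what was ever emitted,
         * e.g. a bogus read while the GPU is being reset. */
        if (seq <= last_seq || seq > last_emitted)
            return last_seq;

        return seq;
    }

In the driver the accepted value is then published with atomic64_xchg(), visible at line 251 above.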
/linux/drivers/net/wireless/marvell/mwifiex/
11n_rxreorder.c
    334  u16 last_seq = 0;  in mwifiex_11n_create_rx_reorder_tbl() local
    363  last_seq = node->rx_seq[tid];  in mwifiex_11n_create_rx_reorder_tbl()
    368  last_seq = node->rx_seq[tid];  in mwifiex_11n_create_rx_reorder_tbl()
    370  last_seq = priv->rx_seq[tid];  in mwifiex_11n_create_rx_reorder_tbl()
    376  last_seq, new_node->start_win);  in mwifiex_11n_create_rx_reorder_tbl()
    378  if (last_seq != MWIFIEX_DEF_11N_RX_SEQ_NUM &&  in mwifiex_11n_create_rx_reorder_tbl()
    379  last_seq >= new_node->start_win) {  in mwifiex_11n_create_rx_reorder_tbl()
    380  new_node->start_win = last_seq + 1;  in mwifiex_11n_create_rx_reorder_tbl()
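When the reorder table for a block-ack session is created, the code above moves the start of the receive window past frames that were already seen: if a valid last_seq is at or beyond the negotiated start_win, the window begins at last_seq + 1. A tiny sketch of that adjustment; the sentinel value is an assumption about MWIFIEX_DEF_11N_RX_SEQ_NUM, and adjust_start_win() is not a driver function.

    #include <stdint.h>

    #define DEF_11N_RX_SEQ_NUM 0xffff   /* assumed "no sequence seen yet" marker */

    static uint16_t adjust_start_win(uint16_t start_win, uint16_t last_seq)
    {
        if (last_seq != DEF_11N_RX_SEQ_NUM && last_seq >= start_win)
            start_win = last_seq + 1;   /* skip frames received before the BA setup */
        return start_win;
    }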
/linux/include/linux/netfilter/
nf_conntrack_tcp.h
     24  u_int32_t last_seq; /* Last sequence number seen in dir */  member
/linux/net/rxrpc/
conn_event.c
    170  pkt.ack.firstPacket = htonl(chan->last_seq + 1);  in rxrpc_conn_retransmit_call()
    171  pkt.ack.previousPacket = htonl(chan->last_seq);  in rxrpc_conn_retransmit_call()
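The retransmitted ACK above is built from the channel's saved last_seq: previousPacket reports the highest sequence received and firstPacket points one past it, both converted to network byte order with htonl(). A cut-down illustration of filling those two fields; the struct is a stand-in, not the real rxrpc wire header.

    #include <arpa/inet.h>
    #include <stdint.h>

    struct demo_ack {
        uint32_t firstPacket;      /* start of the ACK window: everything before it is acked */
        uint32_t previousPacket;   /* highest sequence number received on the channel */
    };

    static void fill_final_ack(struct demo_ack *ack, uint32_t last_seq)
    {
        ack->firstPacket    = htonl(last_seq + 1);
        ack->previousPacket = htonl(last_seq);
    }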
conn_object.c
    166  chan->last_seq = call->rx_highest_seq;  in __rxrpc_disconnect_call()
ar-internal.h
    547  u32 last_seq;  member
/linux/include/net/
mctp.h
    165  u8 last_seq;  member
/linux/fs/nilfs2/
sysfs.c
    464  u64 last_seq;  in nilfs_segctor_last_seg_sequence_show() local
    467  last_seq = nilfs->ns_last_seq;  in nilfs_segctor_last_seg_sequence_show()
    470  return sysfs_emit(buf, "%llu\n", last_seq);  in nilfs_segctor_last_seg_sequence_show()
/linux/fs/btrfs/
tree-checker.c
   1330  u64 last_seq = U64_MAX;  in check_extent_item() local
   1583  last_seq = U64_MAX;
   1584  if (unlikely(seq > last_seq)) {
   1588  last_type, last_seq);
   1592  last_seq = seq;
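The tree checker is enforcing an ordering invariant on the inline references inside one extent item: reference types must not decrease, and within a run of equal types the sequence value must not increase, which is why last_seq starts at U64_MAX and is reset whenever the type changes. A hedged sketch of that check over a flat array; struct demo_ref and check_ref_order() are illustrative, not btrfs structures.

    #include <stdbool.h>
    #include <stddef.h>
    #include <stdint.h>

    struct demo_ref { uint8_t type; uint64_t seq; };

    static bool check_ref_order(const struct demo_ref *refs, size_t nr)
    {
        uint8_t last_type = 0;
        uint64_t last_seq = UINT64_MAX;

        for (size_t i = 0; i < nr; i++) {
            if (refs[i].type < last_type)
                return false;                 /* types must not go backwards */
            if (refs[i].type > last_type)
                last_seq = UINT64_MAX;        /* new type: restart the sequence bound */
            else if (refs[i].seq > last_seq)
                return false;                 /* same type: seq must not increase */
            last_type = refs[i].type;
            last_seq  = refs[i].seq;
        }
        return true;
    }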
/linux/drivers/md/bcache/
bcache_ondisk.h
    353  __u64 last_seq;  member
/linux/net/netfilter/
nf_conntrack_proto_tcp.c
    704  state->last_seq == seq &&  in tcp_in_window()
    711  state->last_seq = seq;  in tcp_in_window()
   1072  ct->proto.tcp.last_seq = ntohl(th->seq);  in nf_conntrack_tcp_packet()
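tcp_in_window() compares the segment's sequence number against the last_seq recorded for that direction to spot retransmissions, and the value is stored in host order via ntohl() on the header field. A simplified sketch of that bookkeeping; the real check also compares other segment fields, so this struct and helper are only an illustration, not the conntrack API.

    #include <arpa/inet.h>
    #include <stdbool.h>
    #include <stdint.h>

    struct demo_tcp_state {
        uint32_t last_seq;    /* last sequence number seen in this direction */
        uint32_t retrans;     /* consecutive retransmissions observed */
    };

    static bool demo_track_seq(struct demo_tcp_state *state, uint32_t seq_net)
    {
        uint32_t seq = ntohl(seq_net);      /* header field arrives big-endian */
        bool retransmit = (seq == state->last_seq);

        if (retransmit)
            state->retrans++;
        else
            state->retrans = 0;
        state->last_seq = seq;
        return retransmit;
    }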
/linux/net/mctp/
route.c
    401  key->last_seq = this_seq;  in mctp_frag_queue()
    405  exp_seq = (key->last_seq + 1) & MCTP_HDR_SEQ_MASK;  in mctp_frag_queue()
    418  key->last_seq = this_seq;  in mctp_frag_queue()
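mctp_frag_queue() accepts a fragment only if its sequence number is the successor of the previous one modulo the small sequence space, which is why the expected value is computed as (last_seq + 1) & MCTP_HDR_SEQ_MASK. A minimal sketch of that check, assuming MCTP's 2-bit packet sequence number; the mask constant and helper name are illustrative.

    #include <stdbool.h>
    #include <stdint.h>

    #define DEMO_SEQ_MASK 0x3   /* MCTP carries a 2-bit packet sequence number */

    static bool demo_frag_in_order(uint8_t *last_seq, uint8_t this_seq, bool first_frag)
    {
        if (first_frag) {
            *last_seq = this_seq;                      /* start of a new reassembly */
            return true;
        }
        if (this_seq != ((*last_seq + 1) & DEMO_SEQ_MASK))
            return false;                              /* out of order: drop the reassembly */
        *last_seq = this_seq;
        return true;
    }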
/linux/drivers/scsi/lpfc/
lpfc.h
    992  u8 last_seq, u8 cr_cx_cmd);
lpfc_sli.c
  11086  u32 num_entry, u8 rctl, u8 last_seq, u8 cr_cx_cmd)  in __lpfc_sli_prep_xmit_seq64_s3() argument
  11098  if (last_seq)  in __lpfc_sli_prep_xmit_seq64_s3()
  11125  u32 full_size, u8 rctl, u8 last_seq, u8 cr_cx_cmd)  in __lpfc_sli_prep_xmit_seq64_s4() argument
  11140  bf_set(wqe_ls, &wqe->xmit_sequence.wge_ctl, last_seq);  in __lpfc_sli_prep_xmit_seq64_s4()
  11177  u32 num_entry, u8 rctl, u8 last_seq, u8 cr_cx_cmd)  in lpfc_sli_prep_xmit_seq64() argument
  11180  rctl, last_seq, cr_cx_cmd);  in lpfc_sli_prep_xmit_seq64()