Searched refs:seqno (Results 1 – 25 of 136) sorted by relevance

/linux/drivers/gpu/drm/vc4/
vc4_trace.h
18 TP_PROTO(struct drm_device *dev, uint64_t seqno, uint64_t timeout),
19 TP_ARGS(dev, seqno, timeout),
23 __field(u64, seqno)
29 __entry->seqno = seqno;
34 __entry->dev, __entry->seqno, __entry->timeout)
38 TP_PROTO(struct drm_device *dev, uint64_t seqno),
39 TP_ARGS(dev, seqno),
43 __field(u64, seqno)
48 __entry->seqno = seqno;
52 __entry->dev, __entry->seqno)
[all …]
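
The vc4_trace.h hits above all follow the standard kernel TRACE_EVENT pattern for logging a fence sequence number: the seqno arrives through TP_PROTO/TP_ARGS, is copied into a __field in TP_fast_assign, and is printed by TP_printk. Below is a minimal sketch of that pattern; the event name is hypothetical, and the TRACE_SYSTEM/TRACE_INCLUDE boilerplate and drm headers of a real trace header are omitted.

#include <linux/tracepoint.h>

TRACE_EVENT(example_wait_seqno_begin,		/* hypothetical event name */
	TP_PROTO(struct drm_device *dev, uint64_t seqno, uint64_t timeout),
	TP_ARGS(dev, seqno, timeout),

	TP_STRUCT__entry(
		__field(u32, dev)
		__field(u64, seqno)
		__field(u64, timeout)
	),

	TP_fast_assign(
		__entry->dev = dev->primary->index;	/* drm minor index */
		__entry->seqno = seqno;
		__entry->timeout = timeout;
	),

	TP_printk("dev=%u, seqno=%llu, timeout=%llu",
		  __entry->dev, __entry->seqno, __entry->timeout)
);
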
/linux/drivers/gpu/drm/v3d/
v3d_trace.h
39 uint64_t seqno,
41 TP_ARGS(dev, is_render, seqno, ctnqba, ctnqea),
46 __field(u64, seqno)
54 __entry->seqno = seqno;
62 __entry->seqno,
69 uint64_t seqno),
70 TP_ARGS(dev, seqno),
74 __field(u64, seqno)
79 __entry->seqno = seqno;
84 __entry->seqno)
[all …]
v3d_fence.c
17 fence->seqno = ++queue->emit_seqno; in v3d_fence_create()
19 queue->fence_context, fence->seqno); in v3d_fence_create()
/linux/drivers/gpu/drm/i915/selftests/
i915_syncmap.c
146 static int check_seqno(struct i915_syncmap *leaf, unsigned int idx, u32 seqno) in check_seqno() argument
154 if (__sync_seqno(leaf)[idx] != seqno) { in check_seqno()
156 __func__, idx, __sync_seqno(leaf)[idx], seqno); in check_seqno()
163 static int check_one(struct i915_syncmap **sync, u64 context, u32 seqno) in check_one() argument
167 err = i915_syncmap_set(sync, context, seqno); in check_one()
189 err = check_seqno((*sync), ilog2((*sync)->bitmap), seqno); in check_one()
193 if (!i915_syncmap_is_later(sync, context, seqno)) { in check_one()
195 context, seqno); in check_one()
238 static int check_leaf(struct i915_syncmap **sync, u64 context, u32 seqno) in check_leaf() argument
242 err = i915_syncmap_set(sync, context, seqno); in check_leaf()
[all …]
/linux/drivers/gpu/drm/radeon/
radeon_trace.h
127 TP_PROTO(struct drm_device *dev, int ring, u32 seqno),
129 TP_ARGS(dev, ring, seqno),
134 __field(u32, seqno)
140 __entry->seqno = seqno;
144 __entry->dev, __entry->ring, __entry->seqno)
149 TP_PROTO(struct drm_device *dev, int ring, u32 seqno),
151 TP_ARGS(dev, ring, seqno)
156 TP_PROTO(struct drm_device *dev, int ring, u32 seqno),
158 TP_ARGS(dev, ring, seqno)
163 TP_PROTO(struct drm_device *dev, int ring, u32 seqno),
[all …]
/linux/include/trace/events/
rpcgss.h
322 __field(u32, seqno)
329 __entry->seqno = gc->gc_seq;
334 __entry->xid, __entry->seqno)
403 __field(u32, seqno)
412 __entry->seqno = *rqst->rq_seqnos;
417 __entry->xid, __entry->seqno)
434 __field(u32, seqno)
443 __entry->seqno = *task->tk_rqstp->rq_seqnos;
450 __entry->xid, __entry->seqno, __entry->seq_xmit,
492 u32 seqno
[all …]
/linux/drivers/gpu/drm/i915/gt/
selftest_timeline.c
199 u32 seqno; member
211 if (__intel_timeline_sync_is_later(tl, ctx, p->seqno) != p->expected) { in __igt_sync()
213 name, p->name, ctx, p->seqno, str_yes_no(p->expected)); in __igt_sync()
218 ret = __intel_timeline_sync_set(tl, ctx, p->seqno); in __igt_sync()
396 u32 seqno = prandom_u32_state(&prng); in bench_sync() local
398 if (!__intel_timeline_sync_is_later(&tl, id, seqno)) in bench_sync()
399 __intel_timeline_sync_set(&tl, id, seqno); in bench_sync()
496 if (READ_ONCE(*tl->hwsp_seqno) != tl->seqno) { in checked_tl_write()
498 *tl->hwsp_seqno, tl->seqno); in checked_tl_write()
697 u32 seqno[2]; in live_hwsp_wrap() local
[all …]
/linux/drivers/gpu/drm/i915/
i915_syncmap.c
79 DECLARE_FLEX_ARRAY(u32, seqno);
99 return p->seqno; in __sync_seqno()
151 bool i915_syncmap_is_later(struct i915_syncmap **root, u64 id, u32 seqno) in i915_syncmap_is_later() argument
192 return seqno_later(__sync_seqno(p)[idx], seqno); in i915_syncmap_is_later()
200 p = kmalloc(struct_size(p, seqno, KSYNCMAP), GFP_KERNEL); in __sync_alloc_leaf()
211 static inline void __sync_set_seqno(struct i915_syncmap *p, u64 id, u32 seqno) in __sync_set_seqno() argument
216 __sync_seqno(p)[idx] = seqno; in __sync_set_seqno()
227 static noinline int __sync_set(struct i915_syncmap **root, u64 id, u32 seqno) in __sync_set() argument
332 __sync_set_seqno(p, id, seqno); in __sync_set()
350 int i915_syncmap_set(struct i915_syncmap **root, u64 id, u32 seqno) in i915_syncmap_set() argument
[all …]
i915_trace.h
275 __field(u32, seqno)
284 __entry->seqno = rq->fence.seqno;
290 __entry->ctx, __entry->seqno, __entry->flags)
302 __field(u32, seqno)
311 __entry->seqno = rq->fence.seqno;
317 __entry->ctx, __entry->seqno, __entry->tail)
350 __field(u32, seqno)
360 __entry->seqno = rq->fence.seqno;
367 __entry->ctx, __entry->seqno,
380 __field(u32, seqno)
[all …]
i915_syncmap.h
34 int i915_syncmap_set(struct i915_syncmap **root, u64 id, u32 seqno);
35 bool i915_syncmap_is_later(struct i915_syncmap **root, u64 id, u32 seqno);
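
The two declarations above are the whole public surface of the i915 syncmap: record the newest seqno seen for a 64-bit context id, and ask whether an equal-or-later seqno has already been recorded for it. A rough usage sketch, assuming the i915_syncmap_init()/i915_syncmap_free() helpers declared alongside them in the same header:

static bool example_syncmap_usage(u64 context, u32 seqno)
{
	struct i915_syncmap *sync;
	bool later = false;

	i915_syncmap_init(&sync);

	/* Remember that <context> has advanced to <seqno>. */
	if (i915_syncmap_set(&sync, context, seqno) == 0)
		/* True once an equal-or-later seqno is recorded for this id. */
		later = i915_syncmap_is_later(&sync, context, seqno);

	i915_syncmap_free(&sync);
	return later;
}
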
/linux/net/dccp/
ackvec.c
input.c
/linux/net/dccp/ccids/lib/
packet_history.h
/linux/drivers/dma-buf/
st-dma-fence-chain.c
63 u64 seqno) in mock_chain() argument
72 seqno); in mock_chain()
281 fence->seqno); in find_signaled()
326 fence ? fence->seqno : 0); in find_out_of_order()
363 fence->seqno, in find_gap()
401 int seqno; in __find_race() local
403 seqno = get_random_u32_inclusive(1, data->fc.chain_length); in __find_race()
405 err = dma_fence_chain_find_seqno(&fence, seqno); in __find_race()
408 seqno); in __find_race()
419 if (fence->seqno == seqno) { in __find_race()
[all …]
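
The selftest above exercises dma_fence_chain_find_seqno(), which takes a reference to a fence chain through *pfence and, on success, replaces it with a reference to the link covering the requested seqno. A rough sketch of that calling convention, with chain_head and wanted as hypothetical caller-provided values:

struct dma_fence *fence = dma_fence_get(chain_head);	/* hypothetical chain head */
int err = dma_fence_chain_find_seqno(&fence, wanted);

if (err) {
	/* The chain does not (yet) reach "wanted"; we still own our reference. */
	dma_fence_put(fence);
} else if (!fence) {
	/* That part of the chain already signaled and was collapsed away. */
} else {
	/* "fence" now references the chain link covering "wanted". */
	dma_fence_put(fence);
}
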
dma-fence.c
557 fence->context, fence->seqno); in dma_fence_release()
1013 fence->context, fence->seqno, timeline, driver, in dma_fence_describe()
1022 spinlock_t *lock, u64 context, u64 seqno, unsigned long flags) in __dma_fence_init() argument
1032 fence->seqno = seqno; in __dma_fence_init()
1056 spinlock_t *lock, u64 context, u64 seqno) in dma_fence_init() argument
1058 __dma_fence_init(fence, ops, lock, context, seqno, 0UL); in dma_fence_init()
1079 spinlock_t *lock, u64 context, u64 seqno) in dma_fence_init64() argument
1081 __dma_fence_init(fence, ops, lock, context, seqno, in dma_fence_init64()
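
dma_fence_init() above binds a fence to a (context, seqno) pair under a driver-supplied lock. The following is a minimal sketch, not taken from any driver listed here, of creating and signalling one such fence; the ops callbacks and names are illustrative.

#include <linux/dma-fence.h>
#include <linux/spinlock.h>

static const char *example_driver_name(struct dma_fence *f)
{
	return "example";
}

static const char *example_timeline_name(struct dma_fence *f)
{
	return "example-timeline";
}

static const struct dma_fence_ops example_fence_ops = {
	.get_driver_name   = example_driver_name,
	.get_timeline_name = example_timeline_name,
};

static DEFINE_SPINLOCK(example_fence_lock);

static void example_emit_and_signal(struct dma_fence *fence,
				    u64 context, u64 *timeline_seqno)
{
	/* Each fence on a timeline takes the next sequence number. */
	dma_fence_init(fence, &example_fence_ops, &example_fence_lock,
		       context, ++(*timeline_seqno));

	/* ... queue the work, then signal once it completes ... */
	dma_fence_signal(fence);
}

A timeline would normally obtain "context" once via dma_fence_context_alloc(1) and reuse it for every fence it emits, which is what the v3d_fence.c hit above does with queue->fence_context.
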
dma-fence-array.c
200 u64 context, unsigned seqno, in dma_fence_array_init() argument
209 context, seqno); in dma_fence_array_init()
254 u64 context, unsigned seqno, in dma_fence_array_create() argument
264 context, seqno, signal_on_any); in dma_fence_array_create()
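
dma_fence_array_create() above aggregates several fences under a single (context, seqno) pair, with signal_on_any selecting any-of versus all-of semantics. A hedged call sketch, where n and fences are hypothetical caller-owned values:

struct dma_fence_array *array;

array = dma_fence_array_create(n, fences,
			       dma_fence_context_alloc(1),	/* fresh context */
			       1,				/* seqno */
			       false);	/* signal only when all fences signal */
if (!array) {
	/* Allocation failed; the individual fence references were not consumed. */
}
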
/linux/net/batman-adv/
fragmentation.c
107 u16 seqno) in batadv_frag_init_chain() argument
111 if (chain->seqno == seqno) in batadv_frag_init_chain()
118 chain->seqno = seqno; in batadv_frag_init_chain()
145 u16 seqno, hdr_size = sizeof(struct batadv_frag_packet); in batadv_frag_insert_packet() local
156 seqno = ntohs(frag_packet->seqno); in batadv_frag_insert_packet()
157 bucket = seqno % BATADV_FRAG_BUFFER_COUNT; in batadv_frag_insert_packet()
172 if (batadv_frag_init_chain(chain, seqno)) { in batadv_frag_insert_packet()
487 frag_header.seqno = htons(atomic_inc_return(&bat_priv->frag_seqno)); in batadv_frag_send_packet()
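
batadv_frag_insert_packet() above buckets incoming fragments by sequence number (bucket = seqno % BATADV_FRAG_BUFFER_COUNT) and lets batadv_frag_init_chain() take over a bucket whenever a fragment with a different seqno lands in it. A generic, hypothetical sketch of that bucketing idea, deliberately not using the batman-adv structures themselves:

#include <linux/list.h>
#include <linux/types.h>

#define FRAG_BUFFER_COUNT 8	/* hypothetical, plays the role of BATADV_FRAG_BUFFER_COUNT */

struct frag_chain {
	u16 seqno;		/* seqno this bucket currently collects */
	struct list_head head;	/* fragments received so far */
};

/* Assume the buffer's list heads were initialized at startup. */
static struct frag_chain frag_buffer[FRAG_BUFFER_COUNT];

/* Return the bucket for a fragment, taking it over if it held another seqno. */
static struct frag_chain *frag_get_chain(u16 seqno)
{
	struct frag_chain *chain = &frag_buffer[seqno % FRAG_BUFFER_COUNT];

	if (chain->seqno != seqno) {
		/* A different packet owned this slot; real code frees its fragments. */
		INIT_LIST_HEAD(&chain->head);
		chain->seqno = seqno;
	}
	return chain;
}
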
tp_meter.c
579 u32 seqno, size_t len, const u8 *session, in batadv_tp_send_msg() argument
606 icmp->seqno = htonl(seqno); in batadv_tp_send_msg()
654 if (batadv_seq_before(ntohl(icmp->seqno), in batadv_tp_recv_ack()
674 recv_ack = ntohl(icmp->seqno); in batadv_tp_recv_ack()
1194 icmp->seqno = htonl(seq); in batadv_tp_send_ack()
1237 new->seqno = ntohl(icmp->seqno); in batadv_tp_handle_out_of_order()
1257 if (new->seqno == un->seqno) { in batadv_tp_handle_out_of_order()
1266 if (batadv_seq_before(new->seqno, un->seqno)) in batadv_tp_handle_out_of_order()
1307 if (batadv_seq_before(tp_vars->last_recv, un->seqno)) in batadv_tp_ack_unordered()
1310 to_ack = un->seqno + un->len - tp_vars->last_recv; in batadv_tp_ack_unordered()
[all …]
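
Several tp_meter.c hits compare sequence numbers with batadv_seq_before(), which has to stay correct across u32 wraparound. The usual trick is to look at the sign of the difference, shown here as a generic hedged helper rather than the batman-adv macro itself:

#include <linux/types.h>

/* True if a precedes b in sequence space, tolerating u32 wraparound. */
static inline bool seqno_before(u32 a, u32 b)
{
	return (s32)(a - b) < 0;
}

/* e.g. seqno_before(0xfffffffe, 0x00000001) is true despite a being numerically larger. */
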
/linux/net/tipc/
name_distr.c
151 u32 dnode, struct list_head *pls, u16 seqno) in named_distribute() argument
197 msg_set_named_seqno(hdr, seqno); in named_distribute()
211 u16 seqno; in tipc_named_node_up() local
217 seqno = nt->snd_nxt; in tipc_named_node_up()
221 named_distribute(net, &head, dnode, &nt->cluster_scope, seqno); in tipc_named_node_up()
318 u16 seqno; in tipc_named_dequeue() local
328 seqno = msg_named_seqno(hdr); in tipc_named_dequeue()
330 *rcv_nxt = seqno; in tipc_named_dequeue()
340 if (*open && (*rcv_nxt == seqno)) { in tipc_named_dequeue()
347 if (less(seqno, *rcv_nxt)) { in tipc_named_dequeue()
/linux/drivers/net/ppp/
ppp_deflate.c
25 int seqno; member
150 state->seqno = 0; in z_comp_init()
170 state->seqno = 0; in z_comp_reset()
213 put_unaligned_be16(state->seqno, wptr); in z_compress()
218 ++state->seqno; in z_compress()
364 state->seqno = 0; in z_decomp_init()
385 state->seqno = 0; in z_decomp_reset()
427 if (seq != (state->seqno & 0xffff)) { in z_decompress()
430 state->unit, seq, state->seqno & 0xffff); in z_decompress()
433 ++state->seqno; in z_decompress()
[all …]
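
The ppp_deflate hits show the compressor prepending a 16-bit sequence number to every packet (put_unaligned_be16(state->seqno, wptr)) and the decompressor rejecting any packet whose header does not match the low 16 bits of its own counter. A small hedged sketch of that receive-side check, with hypothetical names:

/* "seq" was read from the 2-byte packet header with get_unaligned_be16();
 * "expected" is the receiver's running counter (a full int, as in ppp_deflate). */
static bool seqno_in_sync(unsigned int seq, int *expected)
{
	if (seq != (*expected & 0xffff))
		return false;	/* out of sync: the decompressor must be reset */

	++(*expected);		/* both ends advance their counters in lockstep */
	return true;
}
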
/linux/security/selinux/
netlink.c
59 msg->seqno = *((u32 *)data); in selnl_add_payload()
104 void selnl_notify_policyload(u32 seqno) in selnl_notify_policyload() argument
106 selnl_notify(SELNL_MSG_POLICYLOAD, &seqno); in selnl_notify_policyload()
/linux/drivers/net/ethernet/mellanox/mlx5/core/en_accel/
ipsec_rxtx.c
159 __be64 seqno; in mlx5e_ipsec_set_iv_esn() local
170 seqno = cpu_to_be64(xo->seq.low + ((u64)seq_hi << 32)); in mlx5e_ipsec_set_iv_esn()
172 skb_store_bits(skb, iv_offset, &seqno, 8); in mlx5e_ipsec_set_iv_esn()
179 __be64 seqno; in mlx5e_ipsec_set_iv() local
182 seqno = cpu_to_be64(xo->seq.low + ((u64)xo->seq.hi << 32)); in mlx5e_ipsec_set_iv()
184 skb_store_bits(skb, iv_offset, &seqno, 8); in mlx5e_ipsec_set_iv()
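
mlx5e_ipsec_set_iv_esn() above rebuilds the full 64-bit extended sequence number from the 32-bit low word carried with the packet and the high word tracked in the offload state, then stores it big-endian into the IV. The core arithmetic as a hedged fragment, with seq_lo and seq_hi as hypothetical inputs and skb/iv_offset as in the hits above:

__be64 iv_seqno;

/* Combine the two 32-bit halves into the 64-bit extended sequence number. */
iv_seqno = cpu_to_be64((u64)seq_lo + ((u64)seq_hi << 32));

/* Write the 8-byte value into the packet at the IV offset. */
skb_store_bits(skb, iv_offset, &iv_seqno, 8);
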
/linux/drivers/tty/hvc/
hvsi.c
75 atomic_t seqno; /* HVSI packet sequence number */ member
211 header->seqno); in dump_packet()
287 packet.hdr.seqno = cpu_to_be16(atomic_inc_return(&hp->seqno)); in hvsi_version_respond()
311 hvsi_version_respond(hp, be16_to_cpu(query->hdr.seqno)); in hvsi_recv_query()
547 packet.hdr.seqno = cpu_to_be16(atomic_inc_return(&hp->seqno)); in hvsi_query()
589 packet.hdr.seqno = cpu_to_be16(atomic_inc_return(&hp->seqno)); in hvsi_set_mctrl()
672 packet.hdr.seqno = cpu_to_be16(atomic_inc_return(&hp->seqno)); in hvsi_put_chars()
689 packet.hdr.seqno = cpu_to_be16(atomic_inc_return(&hp->seqno)); in hvsi_close_protocol()
718 atomic_set(&hp->seqno, 0); in hvsi_open()
/linux/drivers/gpu/drm/xe/
xe_preempt_fence.h
15 u64 context, u32 seqno);
23 u64 context, u32 seqno);
/linux/net/smc/
smc_cdc.h
44 __be16 seqno; member
214 peer->seqno = htons(local->seqno); in smc_host_msg_to_cdc()
247 local->seqno = ntohs(peer->seqno); in smcr_cdc_msg_to_host()
