
Searched refs:refs (Results 1 – 25 of 134) sorted by relevance


/linux/tools/bpf/bpftool/
pids.c
34 struct obj_refs *refs; in add_ref() local
40 refs = entry->pvalue; in add_ref()
42 for (i = 0; i < refs->ref_cnt; i++) { in add_ref()
43 if (refs->refs[i].pid == e->pid) in add_ref()
47 tmp = realloc(refs->refs, (refs->ref_cnt + 1) * sizeof(*ref)); in add_ref()
53 refs->refs = tmp; in add_ref()
54 ref = &refs->refs[refs->ref_cnt]; in add_ref()
58 refs->ref_cnt++; in add_ref()
64 refs = calloc(1, sizeof(*refs)); in add_ref()
65 if (!refs) { in add_ref()
[all …]
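
Note: the pids.c hits above show bpftool's grow-on-insert pattern — scan for a duplicate pid, then realloc the refs array by one element. A minimal userspace sketch of the same pattern (struct names simplified from the originals, error handling reduced):

    #include <stdio.h>
    #include <stdlib.h>

    struct obj_ref  { int pid; };                  /* one referencing process */
    struct obj_refs { struct obj_ref *refs; int ref_cnt; };

    /* Record pid once; 0 on success, -1 on allocation failure. */
    static int add_ref(struct obj_refs *r, int pid)
    {
        struct obj_ref *tmp;

        for (int i = 0; i < r->ref_cnt; i++)       /* already tracked? */
            if (r->refs[i].pid == pid)
                return 0;

        tmp = realloc(r->refs, (r->ref_cnt + 1) * sizeof(*tmp));
        if (!tmp)
            return -1;                             /* old array still valid */
        r->refs = tmp;
        r->refs[r->ref_cnt++].pid = pid;
        return 0;
    }

    int main(void)
    {
        struct obj_refs r = { 0 };

        add_ref(&r, 42);
        add_ref(&r, 42);                           /* duplicate, ignored */
        printf("ref_cnt = %d\n", r.ref_cnt);       /* prints 1 */
        free(r.refs);
        return 0;
    }

Assigning realloc()'s result to a temporary first keeps the old array intact on failure — the same reason pids.c stores into tmp before refs->refs.
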
/linux/drivers/media/v4l2-core/
v4l2-h264.c
60 b->refs[i].longterm = true; in v4l2_h264_init_reflist_builder()
68 if (!b->refs[i].longterm && dpb[i].frame_num > cur_frame_num) in v4l2_h264_init_reflist_builder()
69 b->refs[i].frame_num = (int)dpb[i].frame_num - in v4l2_h264_init_reflist_builder()
72 b->refs[i].frame_num = dpb[i].frame_num; in v4l2_h264_init_reflist_builder()
74 b->refs[i].top_field_order_cnt = dpb[i].top_field_order_cnt; in v4l2_h264_init_reflist_builder()
75 b->refs[i].bottom_field_order_cnt = dpb[i].bottom_field_order_cnt; in v4l2_h264_init_reflist_builder()
113 return min(b->refs[ref->index].top_field_order_cnt, in v4l2_h264_get_poc()
114 b->refs[ref->index].bottom_field_order_cnt); in v4l2_h264_get_poc()
116 return b->refs[ref->index].top_field_order_cnt; in v4l2_h264_get_poc()
118 return b->refs[ref->index].bottom_field_order_cnt; in v4l2_h264_get_poc()
[all …]
/linux/io_uring/
refs.h
12 ((unsigned int) atomic_read(&(req->refs)) + 127u <= 127u)
17 return atomic_inc_not_zero(&req->refs); in req_ref_inc_not_zero()
24 return atomic_dec_and_test(&req->refs); in req_ref_put_and_test_atomic()
33 return atomic_dec_and_test(&req->refs); in req_ref_put_and_test()
40 atomic_inc(&req->refs); in req_ref_get()
47 atomic_dec(&req->refs); in req_ref_put()
54 atomic_set(&req->refs, nr); in __io_req_set_refcount()
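
Note: refs.h line 12 is an underflow check — read as unsigned, any refcount in [-127, 0] lands in [0, 127] after adding 127u. A standalone sketch of these helpers using C11 atomics (the kernel versions operate on atomic_t and wrap the check in WARN_ON_ONCE):

    #include <stdatomic.h>
    #include <stdbool.h>
    #include <stdio.h>

    struct req { atomic_uint refs; };

    /* True for refs in [-127, 0] (two's complement): read as unsigned,
     * adding 127 maps exactly that range onto [0, 127]. */
    static bool req_ref_zero_or_close_to_overflow(struct req *r)
    {
        return atomic_load(&r->refs) + 127u <= 127u;
    }

    /* Take a reference only while the object is still live. */
    static bool req_ref_inc_not_zero(struct req *r)
    {
        unsigned int val = atomic_load(&r->refs);

        do {
            if (!val)
                return false;                      /* dead: do not revive */
        } while (!atomic_compare_exchange_weak(&r->refs, &val, val + 1));
        return true;
    }

    /* Drop a reference; true means this was the last one. */
    static bool req_ref_put_and_test(struct req *r)
    {
        return atomic_fetch_sub(&r->refs, 1) == 1;
    }

    int main(void)
    {
        struct req r = { .refs = 1 };

        printf("%d\n", req_ref_inc_not_zero(&r));              /* 1 */
        printf("%d\n", req_ref_put_and_test(&r));              /* 0 */
        printf("%d\n", req_ref_put_and_test(&r));              /* 1 */
        printf("%d\n", req_ref_zero_or_close_to_overflow(&r)); /* 1 */
        return 0;
    }
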
waitid.c
29 atomic_t refs; member
133 WARN_ON_ONCE(!(atomic_read(&iw->refs) & IO_WAITID_REF_MASK)); in io_waitid_complete()
156 atomic_or(IO_WAITID_CANCEL_FLAG, &iw->refs); in __io_waitid_cancel()
159 if (atomic_fetch_inc(&iw->refs) & IO_WAITID_REF_MASK) in __io_waitid_cancel()
183 if (!atomic_sub_return(1, &iw->refs)) in io_waitid_drop_issue_ref()
219 if (!(atomic_read(&iw->refs) & IO_WAITID_CANCEL_FLAG)) { in io_waitid_cb()
252 if (atomic_fetch_inc(&iw->refs) & IO_WAITID_REF_MASK) in io_waitid_wait()
298 atomic_set(&iw->refs, 1); in io_waitid()
eventfd.c
19 refcount_t refs; member
38 if (refcount_dec_and_test(&ev_fd->refs)) in io_eventfd_put()
92 if (!io_eventfd_trigger(ev_fd) || !refcount_inc_not_zero(&ev_fd->refs)) in io_eventfd_grab()
148 refcount_set(&ev_fd->refs, 1); in io_eventfd_flush_signal()
/linux/drivers/xen/
gntdev-dmabuf.c
44 grant_ref_t *refs; member
429 int count, u32 domid, u32 *refs, u32 *fd) in dmabuf_exp_from_refs() argument
441 map->grants[i].ref = refs[i]; in dmabuf_exp_from_refs()
480 dmabuf_imp_grant_foreign_access(unsigned long *gfns, u32 *refs, in dmabuf_imp_grant_foreign_access() argument
504 refs[i] = cur_ref; in dmabuf_imp_grant_foreign_access()
514 static void dmabuf_imp_end_foreign_access(u32 *refs, int count) in dmabuf_imp_end_foreign_access() argument
519 if (refs[i] != INVALID_GRANT_REF) in dmabuf_imp_end_foreign_access()
520 gnttab_end_foreign_access(refs[i], NULL); in dmabuf_imp_end_foreign_access()
525 kfree(gntdev_dmabuf->u.imp.refs); in dmabuf_imp_free_storage()
538 gntdev_dmabuf->u.imp.refs = kcalloc(count, in dmabuf_imp_alloc_storage()
[all …]
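
Note: dmabuf_imp_end_foreign_access() above releases only the slots that were actually granted; after a partial failure the untouched slots still hold INVALID_GRANT_REF and are skipped. A sketch of that sentinel-cleanup pattern (end_access() is a stub standing in for gnttab_end_foreign_access()):

    #include <stdint.h>
    #include <stdio.h>

    #define INVALID_REF ((uint32_t)-1)     /* stand-in for INVALID_GRANT_REF */

    /* Stub standing in for gnttab_end_foreign_access(). */
    static void end_access(uint32_t ref)
    {
        printf("releasing ref %u\n", ref);
    }

    /* Release only the slots that were actually granted; after a partial
     * failure the remaining slots still hold the sentinel. */
    static void end_foreign_access(const uint32_t *refs, int count)
    {
        for (int i = 0; i < count; i++)
            if (refs[i] != INVALID_REF)
                end_access(refs[i]);
    }

    int main(void)
    {
        uint32_t refs[4] = { 7, 9, INVALID_REF, INVALID_REF };

        end_foreign_access(refs, 4);       /* releases 7 and 9 only */
        return 0;
    }
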
/linux/fs/smb/client/
dfs_cache.c
251 static inline void dump_refs(const struct dfs_info3_param *refs, int numrefs) in dump_refs() argument
257 const struct dfs_info3_param *ref = &refs[i]; in dump_refs()
383 static int copy_ref_data(const struct dfs_info3_param *refs, int numrefs, in copy_ref_data() argument
389 ce->ttl = max_t(int, refs[0].ttl, CACHE_MIN_TTL); in copy_ref_data()
391 ce->srvtype = refs[0].server_type; in copy_ref_data()
392 ce->hdr_flags = refs[0].flags; in copy_ref_data()
393 ce->ref_flags = refs[0].ref_flag; in copy_ref_data()
394 ce->path_consumed = refs[0].path_consumed; in copy_ref_data()
399 t = alloc_target(refs[i].node_name, refs[i].path_consumed); in copy_ref_data()
421 static struct cache_entry *alloc_cache_entry(struct dfs_info3_param *refs, int numrefs) in alloc_cache_entry() argument
[all …]
/linux/drivers/gpu/drm/nouveau/nvkm/core/
event.c
34 if (--event->refs[index * event->types_nr + type] == 0) { in nvkm_event_put()
51 if (++event->refs[index * event->types_nr + type] == 1) { in nvkm_event_get()
175 if (!event->refs || WARN_ON(id >= event->index_nr)) in nvkm_event_ntfy()
194 if (event->refs) { in nvkm_event_fini()
195 kfree(event->refs); in nvkm_event_fini()
196 event->refs = NULL; in nvkm_event_fini()
204 event->refs = kzalloc(array3_size(index_nr, types_nr, sizeof(*event->refs)), GFP_KERNEL); in __nvkm_event_init()
205 if (!event->refs) in __nvkm_event_init()
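
Note: nvkm_event keeps one counter per (index, type) pair in a flat array and toggles the event source only on the 0 -> 1 and 1 -> 0 transitions. A simplified, single-threaded sketch (hw_enable()/hw_disable() are stubs; the kernel code runs under a lock):

    #include <stdio.h>
    #include <stdlib.h>

    /* One counter per (index, type) pair, stored flat as in nvkm_event. */
    struct event {
        int index_nr, types_nr;
        int *refs;
    };

    static void hw_enable(int i, int t)  { printf("enable %d/%d\n", i, t); }
    static void hw_disable(int i, int t) { printf("disable %d/%d\n", i, t); }

    /* Arm the source only on the 0 -> 1 transition... */
    static void event_get(struct event *ev, int index, int type)
    {
        if (++ev->refs[index * ev->types_nr + type] == 1)
            hw_enable(index, type);
    }

    /* ...and disarm it only on the 1 -> 0 transition. */
    static void event_put(struct event *ev, int index, int type)
    {
        if (--ev->refs[index * ev->types_nr + type] == 0)
            hw_disable(index, type);
    }

    int main(void)
    {
        struct event ev = { .index_nr = 2, .types_nr = 3 };

        ev.refs = calloc((size_t)ev.index_nr * ev.types_nr, sizeof(*ev.refs));
        if (!ev.refs)
            return 1;
        event_get(&ev, 1, 2);    /* enable 1/2 */
        event_get(&ev, 1, 2);    /* already armed: no hardware access */
        event_put(&ev, 1, 2);    /* one user left: still armed */
        event_put(&ev, 1, 2);    /* disable 1/2 */
        free(ev.refs);
        return 0;
    }
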
/linux/tools/lib/python/abi/
system_symbols.py
175 def check_file(self, refs, found): argument
181 for names in refs:
238 refs = []
241 refs.append(ref)
245 yield refs
247 refs = []
249 yield refs
321 for refs in self.get_fileref(all_refs, chunk_size):
322 if refs:
324 f_list.append(exe.submit(self.check_file, refs, found))
/linux/drivers/gpio/
gpiolib-shared.c
58 struct list_head refs; member
179 INIT_LIST_HEAD(&entry->refs); in gpio_shared_of_traverse()
221 if (!list_empty(&entry->refs)) in gpio_shared_of_traverse()
225 list_add_tail(&no_free_ptr(ref)->list, &entry->refs); in gpio_shared_of_traverse()
380 list_for_each_entry(ref, &entry->refs, list) { in gpio_shared_add_proxy_lookup()
431 list_for_each_entry(ref, &entry->refs, list) { in gpio_device_setup_shared()
452 if (list_count_nodes(&entry->refs) <= 1) in gpio_device_setup_shared()
468 list_for_each_entry(ref, &entry->refs, list) { in gpio_device_setup_shared()
499 list_for_each_entry(ref, &entry->refs, list) { in gpio_device_teardown_shared()
631 list_for_each_entry_safe(ref, rpos, &entry->refs, list) in gpio_shared_teardown()
[all …]
/linux/Documentation/userspace-api/media/dvb/
headers.rst
15 :generate-cross-refs:
24 :generate-cross-refs:
33 :generate-cross-refs:
42 :generate-cross-refs:
/linux/drivers/android/binder/
process.rs
556 let mut refs = self.node_refs.lock(); in debug_print_stats() localVariable
558 for r in refs.by_handle.values_mut() { in debug_print_stats()
618 let mut refs = self.node_refs.lock(); in debug_print() localVariable
619 for r in refs.by_handle.values_mut() { in debug_print()
812 let mut refs = self.node_refs.lock(); in insert_or_update_handle() localVariable
815 if let Some(handle_ref) = refs.by_node.get(&node_ref.node.global_id()) { in insert_or_update_handle()
817 let info = refs.by_handle.get_mut(&handle).unwrap(); in insert_or_update_handle()
829 let mut refs = &mut *refs_lock; in insert_or_update_handle() localVariable
835 if let Some(res) = refs.handle_is_present.find_unused_id(start) { in insert_or_update_handle()
836 match refs.by_handle.entry(res.as_u32()) { in insert_or_update_handle()
[all …]
/linux/net/ieee802154/6lowpan/
reassembly.c
35 int *refs);
49 int refs = 1; in lowpan_frag_expire() local
58 inet_frag_kill(&fq->q, &refs); in lowpan_frag_expire()
61 inet_frag_putn(&fq->q, refs); in lowpan_frag_expire()
88 int *refs) in lowpan_frag_queue() argument
149 res = lowpan_frag_reasm(fq, skb, prev_tail, ldev, refs); in lowpan_frag_queue()
169 int *refs) in lowpan_frag_reasm() argument
173 inet_frag_kill(&fq->q, refs); in lowpan_frag_reasm()
310 int ret, refs = 0; in lowpan_frag_rcv() local
313 ret = lowpan_frag_queue(fq, skb, frag_type, &refs); in lowpan_frag_rcv()
[all …]
/linux/mm/
gup.c
74 static inline struct folio *try_get_folio(struct page *page, int refs) in try_get_folio() argument
82 if (unlikely(!folio_ref_try_add(folio, refs))) in try_get_folio()
95 folio_put_refs(folio, refs); in try_get_folio()
102 static void gup_put_folio(struct folio *folio, int refs, unsigned int flags) in gup_put_folio() argument
107 node_stat_mod_folio(folio, NR_FOLL_PIN_RELEASED, refs); in gup_put_folio()
109 atomic_sub(refs, &folio->_pincount); in gup_put_folio()
111 refs *= GUP_PIN_COUNTING_BIAS; in gup_put_folio()
114 folio_put_refs(folio, refs); in gup_put_folio()
140 int __must_check try_grab_folio(struct folio *folio, int refs, in try_grab_folio() argument
150 folio_ref_add(folio, refs); in try_grab_folio()
[all …]
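
Note: gup.c line 111 multiplies by GUP_PIN_COUNTING_BIAS so that pins and plain gets share one counter yet remain distinguishable. A non-atomic toy model of the bias idea (the real code also tracks _pincount separately for large folios):

    #include <stdbool.h>
    #include <stdio.h>

    #define PIN_BIAS 1024          /* like GUP_PIN_COUNTING_BIAS */

    struct page { int refcount; };

    /* Plain gets add 1; pins add a large bias, so one counter answers
     * both "is it referenced?" and "is it (probably) pinned?". */
    static void page_get(struct page *p)   { p->refcount += 1; }
    static void page_pin(struct page *p)   { p->refcount += PIN_BIAS; }
    static void page_unpin(struct page *p) { p->refcount -= PIN_BIAS; }

    /* Heuristic: >= 1024 plain gets would also return true. */
    static bool page_maybe_pinned(const struct page *p)
    {
        return p->refcount >= PIN_BIAS;
    }

    int main(void)
    {
        struct page p = { .refcount = 1 };

        page_pin(&p);
        printf("maybe pinned: %d\n", page_maybe_pinned(&p));   /* 1 */
        page_unpin(&p);
        printf("maybe pinned: %d\n", page_maybe_pinned(&p));   /* 0 */
        page_get(&p);   /* an ordinary reference does not look like a pin */
        printf("maybe pinned: %d\n", page_maybe_pinned(&p));   /* 0 */
        return 0;
    }
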
workingset.c
241 int refs = folio_lru_refs(folio); in lru_gen_eviction() local
243 int tier = lru_tier_from_refs(refs, workingset); in lru_gen_eviction()
252 token = (min_seq << LRU_REFS_WIDTH) | max(refs - 1, 0); in lru_gen_eviction()
286 int hist, tier, refs; in lru_gen_refault() local
308 refs = (token & (BIT(LRU_REFS_WIDTH) - 1)) + 1; in lru_gen_refault()
309 tier = lru_tier_from_refs(refs, workingset); in lru_gen_refault()
321 set_mask_bits(&folio->flags.f, LRU_REFS_MASK, (refs - 1UL) << LRU_REFS_PGOFF); in lru_gen_refault()
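
Note: workingset.c packs eviction state into a shadow-entry token — sequence number in the high bits, (refs - 1) in the low LRU_REFS_WIDTH bits — and unpacks it on refault. A sketch of the pack/unpack arithmetic (LRU_REFS_WIDTH is config-dependent in the kernel; 2 is an arbitrary choice here, and the clamp is made explicit):

    #include <stdio.h>

    #define LRU_REFS_WIDTH 2                       /* config-dependent in the kernel */
    #define REFS_MASK ((1UL << LRU_REFS_WIDTH) - 1)

    /* Pack: sequence number in the high bits, (refs - 1) clamped to the
     * field width in the low bits. */
    static unsigned long pack_token(unsigned long min_seq, int refs)
    {
        unsigned long r = refs > 0 ? (unsigned long)(refs - 1) : 0;

        if (r > REFS_MASK)
            r = REFS_MASK;
        return (min_seq << LRU_REFS_WIDTH) | r;
    }

    /* Unpack on refault; large counts come back clamped. */
    static int unpack_refs(unsigned long token)
    {
        return (int)(token & REFS_MASK) + 1;
    }

    int main(void)
    {
        unsigned long t = pack_token(5, 3);

        printf("token=%lu refs=%d seq=%lu\n",
               t, unpack_refs(t), t >> LRU_REFS_WIDTH);   /* token=22 refs=3 seq=5 */
        return 0;
    }
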
/linux/net/ipv6/netfilter/
nf_conntrack_reasm.c
127 int *refs);
172 int *refs) in nf_ct_frag6_queue() argument
226 inet_frag_kill(&fq->q, refs); in nf_ct_frag6_queue()
292 err = nf_ct_frag6_reasm(fq, skb, prev, dev, refs); in nf_ct_frag6_queue()
306 inet_frag_kill(&fq->q, refs); in nf_ct_frag6_queue()
321 int *refs) in nf_ct_frag6_reasm() argument
327 inet_frag_kill(&fq->q, refs); in nf_ct_frag6_reasm()
378 inet_frag_kill(&fq->q, refs); in nf_ct_frag6_reasm()
453 int refs = 0; in nf_ct_frag6_gather() local
491 ret = nf_ct_frag6_queue(fq, skb, fhdr, nhoff, &refs); in nf_ct_frag6_gather()
[all …]
/linux/fs/btrfs/
delayed-inode.c
59 refcount_set(&delayed_node->refs, 0); in btrfs_init_delayed_node()
78 refcount_inc(&node->refs); in btrfs_get_delayed_node()
88 refcount_inc(&node->refs); /* can be accessed */ in btrfs_get_delayed_node()
111 if (refcount_inc_not_zero(&node->refs)) { in btrfs_get_delayed_node()
112 refcount_inc(&node->refs); in btrfs_get_delayed_node()
178 refcount_set(&node->refs, 2); in btrfs_get_or_create_delayed_node()
206 refcount_inc(&node->refs); /* inserted into list */ in btrfs_queue_delayed_node()
223 refcount_dec(&node->refs); /* not in the list */ in btrfs_dequeue_delayed_node()
242 refcount_inc(&node->refs); in btrfs_first_delayed_node()
271 refcount_inc(&next->refs); in btrfs_next_delayed_node()
[all …]
/linux/drivers/gpu/drm/nouveau/nvkm/engine/fifo/
cgrp.c
39 if (refcount_dec_and_test(&ectx->refs)) { in nvkm_cgrp_ectx_put()
65 refcount_inc(&ectx->refs); in nvkm_cgrp_ectx_get()
76 refcount_set(&ectx->refs, 1); in nvkm_cgrp_ectx_get()
100 if (refcount_dec_and_test(&vctx->refs)) { in nvkm_cgrp_vctx_put()
130 refcount_inc(&vctx->refs); in nvkm_cgrp_vctx_get()
151 refcount_set(&vctx->refs, 1); in nvkm_cgrp_vctx_get()
/linux/tools/testing/selftests/net/
nl_netdev.py
198 refs = sum([pp["inflight"] for pp in pp_list])
199 ksft_eq(refs, 0)
210 refs = sum([pp["inflight"] for pp in pp_list if pp.get("ifindex") == nsim.ifindex])
211 ksft_ge(refs, 1)
217 refs = sum([pp["inflight"] for pp in pp_list])
218 ksft_eq(refs, 1)
/linux/net/ipv6/
reassembly.c
72 int *refs);
110 u32 *prob_offset, int *refs) in ip6_frag_queue() argument
224 err = ip6_frag_reasm(fq, skb, prev_tail, dev, refs); in ip6_frag_queue()
242 inet_frag_kill(&fq->q, refs); in ip6_frag_queue()
259 int *refs) in ip6_frag_reasm() argument
267 inet_frag_kill(&fq->q, refs); in ip6_frag_reasm()
321 inet_frag_kill(&fq->q, refs); in ip6_frag_reasm()
382 int ret, refs = 0; in ipv6_frag_rcv() local
388 &prob_offset, &refs); in ipv6_frag_rcv()
392 inet_frag_putn(&fq->q, refs); in ipv6_frag_rcv()
/linux/include/net/
inet_frag.h
129 void inet_frag_kill(struct inet_frag_queue *q, int *refs); in fqdir_pre_exit()
136 static inline void inet_frag_putn(struct inet_frag_queue *q, int refs) in fqdir_pre_exit()
138 if (refs && refcount_sub_and_test(refs, &q->refcnt))
148 inet_frag_putn(struct inet_frag_queue *q, int refs) inet_frag_putn() argument
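
Note: the reassembly paths above accumulate dropped references in a local int and settle them with a single inet_frag_putn() at the end, turning many atomic decrements into one refcount_sub_and_test(). A sketch of that batching pattern with C11 atomics (function names shortened from the originals):

    #include <stdatomic.h>
    #include <stdbool.h>
    #include <stdio.h>

    struct frag_queue { atomic_int refcnt; };

    /* Callees record how many references they gave up in *refs instead
     * of touching the shared counter. */
    static void frag_kill(struct frag_queue *q, int *refs)
    {
        (void)q;        /* a real version would unhash q here */
        (*refs)++;      /* the hash table's reference */
    }

    /* One atomic op settles the whole batch; true means the queue died. */
    static bool frag_putn(struct frag_queue *q, int refs)
    {
        return refs && atomic_fetch_sub(&q->refcnt, refs) == refs;
    }

    int main(void)
    {
        struct frag_queue q = { .refcnt = 2 };
        int refs = 1;   /* the caller's own reference */

        frag_kill(&q, &refs);
        printf("freed: %d\n", frag_putn(&q, refs));   /* 1: 2 - 2 == 0 */
        return 0;
    }
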
/linux/lib/
refcount.c
59 return atomic_try_cmpxchg_release(&r->refs, &val, 0); in refcount_dec_if_one()
76 unsigned int new, val = atomic_read(&r->refs); in refcount_dec_not_one()
91 } while (!atomic_try_cmpxchg_release(&r->refs, &val, new)); in refcount_dec_not_one()
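
Note: refcount_dec_not_one() refuses to drop the final reference, so a caller can take a lock before the last put. A sketch of the same try-cmpxchg loop with C11 atomics (the kernel version also special-cases the saturation value REFCOUNT_SATURATED):

    #include <stdatomic.h>
    #include <stdbool.h>
    #include <stdio.h>

    /* Decrement unless the counter is exactly 1, so a caller can take a
     * lock before dropping the final reference. */
    static bool dec_not_one(atomic_uint *refs)
    {
        unsigned int new, val = atomic_load(refs);

        do {
            if (val == 1)
                return false;      /* last ref: caller handles teardown */
            new = val - 1;
        } while (!atomic_compare_exchange_weak(refs, &val, new));
        return true;
    }

    int main(void)
    {
        atomic_uint refs = 2;

        printf("%d\n", dec_not_one(&refs));   /* 1 (refs is now 1) */
        printf("%d\n", dec_not_one(&refs));   /* 0 (refs stays 1)  */
        return 0;
    }
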
/linux/tools/testing/selftests/bpf/progs/
map_kptr.c
318 if (p_st->cnt.refs.counter != ref) { in test_map_kptr_ref_pre()
328 if (p_st->cnt.refs.counter != ref) in test_map_kptr_ref_pre()
336 if (p_st->cnt.refs.counter != ref) in test_map_kptr_ref_pre()
348 if (p_st->cnt.refs.counter != ref) in test_map_kptr_ref_pre()
365 if (!p_st || p_st->cnt.refs.counter != ref) in test_map_kptr_ref_post()
371 if (p_st->cnt.refs.counter != ref) { in test_map_kptr_ref_post()
381 if (p_st->cnt.refs.counter != ref) in test_map_kptr_ref_post()
481 if (p->cnt.refs.counter != ref) { in test_map_kptr_ref3()
/linux/include/uapi/xen/
gntdev.h
70 struct ioctl_gntdev_grant_ref refs[1]; member
256 __u32 refs[1]; member
298 __u32 refs[1]; member
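
Note: the refs[1] members in gntdev.h are the pre-C99 variable-length-tail idiom: the struct is declared with a one-element array and over-allocated for count entries. A userspace sketch of how such a struct is sized (assumes count >= 1; modern kernel code prefers a flexible array member, refs[]):

    #include <stdint.h>
    #include <stdio.h>
    #include <stdlib.h>

    /* Declared with a one-element tail, over-allocated at runtime. */
    struct map_grant_refs {
        uint32_t count;
        uint32_t refs[1];
    };

    /* Assumes count >= 1: refs[0] is already inside sizeof(*op). */
    static struct map_grant_refs *alloc_refs(uint32_t count)
    {
        struct map_grant_refs *op;

        op = malloc(sizeof(*op) + (count - 1) * sizeof(op->refs[0]));
        if (op)
            op->count = count;
        return op;
    }

    int main(void)
    {
        struct map_grant_refs *op = alloc_refs(4);

        if (!op)
            return 1;
        for (uint32_t i = 0; i < op->count; i++)
            op->refs[i] = i;
        printf("last ref = %u\n", op->refs[op->count - 1]);
        free(op);
        return 0;
    }
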
/linux/drivers/net/ethernet/mellanox/mlx4/
port.c
69 table->refs[i] = 0; in mlx4_init_mac_table()
83 table->refs[i] = 0; in mlx4_init_vlan_table()
118 if (table->refs[i] && in find_index()
212 dup_table->refs[index_at_port]) { in __mlx4_register_mac()
222 if (!table->refs[index_at_dup_port] || in __mlx4_register_mac()
231 if (!table->refs[i]) { in __mlx4_register_mac()
235 if (!dup_table->refs[i]) in __mlx4_register_mac()
245 ++table->refs[i]; in __mlx4_register_mac()
288 table->refs[free] = 1; in __mlx4_register_mac()
292 dup_table->refs[free] = 0; in __mlx4_register_mac()
[all …]
