/linux/lib/

objagg.c
      84  objagg_obj->stats.user_count++;  in objagg_obj_stats_inc()
      92  objagg_obj->stats.user_count--;  in objagg_obj_stats_dec()
     587  return stats_info2->stats.user_count - stats_info1->stats.user_count;  in objagg_stats_info_sort_cmp_func()
     656  unsigned int user_count = objagg_obj->stats.user_count;  in objagg_hints_node_create()  [local]
     664  hnode->stats_info.stats.user_count = user_count;  in objagg_hints_node_create()
     665  hnode->stats_info.stats.delta_user_count = user_count;  in objagg_hints_node_create()
     667  parent_hnode->stats_info.stats.delta_user_count += user_count;  in objagg_hints_node_create()
     741  unsigned int weight = node->objagg_obj->stats.user_count;  in objagg_tmp_graph_node_weight()
     754  weight += node->objagg_obj->stats.user_count;  in objagg_tmp_graph_node_weight()

test_objagg.c
     262  if (stats->stats_info[i].stats.user_count != 2) {  in check_stats_nodelta()
     425  #define ROOT(key_id, user_count, delta_user_count) \  [argument]
     426  {{user_count, delta_user_count}, true, key_id}
     428  #define DELTA(key_id, user_count) \  [argument]
     429  {{user_count, user_count}, false, key_id}
     650  if (stats_info->stats.user_count !=  in check_expect_stats_nums()
     651  expect_stats_info->stats.user_count) {  in check_expect_stats_nums()
     867  stats->stats_info[i].stats.user_count,  in __pr_debug_stats()
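
The objagg.c hits show user_count kept per aggregated object and rolled up into delta_user_count when hint trees are built; line 587 also uses it as a sort key. Below is a small stand-alone sketch of that comparator only, with made-up struct and function names: subtracting the counts the way objagg_stats_info_sort_cmp_func() does yields a descending sort, so the most-referenced objects come first.

#include <stdio.h>
#include <stdlib.h>

struct stats_entry {
	unsigned int user_count;
};

/* Like objagg_stats_info_sort_cmp_func() at objagg.c:587: subtracting the
 * counts (assumed to fit in an int) sorts bigger user_count first. */
static int stats_cmp_desc(const void *a, const void *b)
{
	const struct stats_entry *ea = a, *eb = b;

	return eb->user_count - ea->user_count;
}

int main(void)
{
	struct stats_entry e[] = { { 1 }, { 7 }, { 3 } };

	qsort(e, 3, sizeof(e[0]), stats_cmp_desc);
	printf("%u %u %u\n", e[0].user_count, e[1].user_count, e[2].user_count); /* 7 3 1 */
	return 0;
}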

/linux/kernel/dma/

ops_helpers.c
      40  unsigned long user_count = vma_pages(vma);  in dma_common_mmap()  [local]
      51  if (off >= count || user_count > count - off)  in dma_common_mmap()
      56  user_count << PAGE_SHIFT, vma->vm_page_prot);  in dma_common_mmap()

direct.c
     544  unsigned long user_count = vma_pages(vma);  in dma_direct_mmap()  [local]
     558  if (vma->vm_pgoff >= count || user_count > count - vma->vm_pgoff)  in dma_direct_mmap()
     561  user_count << PAGE_SHIFT, vma->vm_page_prot);  in dma_direct_mmap()
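
Both dma_common_mmap() and dma_direct_mmap() above apply the same range check before remapping: the requested page offset and VMA length must fall entirely inside the buffer. The snippet below is a stand-alone, user-space sketch of just that check (the function name and test values are invented for the example); writing the length test as user_count > count - off avoids the overflow that an off + user_count > count comparison could run into.

#include <stdbool.h>
#include <stdio.h>

static bool mmap_range_ok(unsigned long count,      /* buffer size, in pages     */
			  unsigned long off,        /* requested start page      */
			  unsigned long user_count) /* requested length, in pages */
{
	/* Mirrors: if (off >= count || user_count > count - off) */
	if (off >= count || user_count > count - off)
		return false;
	return true;
}

int main(void)
{
	printf("%d\n", mmap_range_ok(16, 4, 8));   /* 1: pages 4..11 fit in 16  */
	printf("%d\n", mmap_range_ok(16, 4, 13));  /* 0: would run past the end */
	printf("%d\n", mmap_range_ok(16, 20, 1));  /* 0: offset beyond buffer   */
	return 0;
}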

/linux/tools/tracing/rtla/src/

timerlat_hist.c
      68  unsigned long long user_count;  [member]
     202  data->hist[cpu].user_count++;  in timerlat_hist_update()
     332  data->hist[cpu].user_count);  in timerlat_print_summary()
     360  data->hist[cpu].user_count,  in timerlat_print_summary()
     390  data->hist[cpu].user_count,  in timerlat_print_summary()
     420  data->hist[cpu].user_count,  in timerlat_print_summary()
     465  sum.user_count += cpu_data->user_count;  in timerlat_print_stats_all()
     498  sum.user_count);  in timerlat_print_stats_all()
     519  sum.user_count,  in timerlat_print_stats_all()
     542  sum.user_count,  in timerlat_print_stats_all()
     [all …]

timerlat_top.c
      60  unsigned long long user_count;  [member]
     151  sum->user_count += cpu_data->user_count;  in timerlat_top_update_sum()
     181  cpu_data->user_count++;  in timerlat_top_update()
     312  if (!cpu_data->user_count) {  in timerlat_top_print()
     318  (cpu_data->sum_user / cpu_data->user_count) / divisor);  in timerlat_top_print()
     383  if (!summary->user_count) {  in timerlat_top_print_sum()
     389  (summary->sum_user / summary->user_count) / divisor);  in timerlat_top_print_sum()
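
The timerlat hits follow one bookkeeping pattern: each user-space sample increments user_count and accumulates into sum_user, per-CPU counts are added into a summary, and reports only divide sum_user / user_count after checking the count is non-zero. A minimal sketch of that pattern, with invented names and values rather than rtla's actual structures:

#include <stdio.h>

struct cpu_lat {
	unsigned long long user_count;	/* samples seen on this CPU */
	unsigned long long sum_user;	/* summed latency           */
};

static void record_sample(struct cpu_lat *d, unsigned long long lat)
{
	d->user_count++;
	d->sum_user += lat;
}

static void print_avg(const struct cpu_lat *d)
{
	if (!d->user_count)
		printf("      -\n");	/* no user-space samples: avoid dividing by zero */
	else
		printf("%7llu\n", d->sum_user / d->user_count);
}

int main(void)
{
	struct cpu_lat cpu0 = { 0 }, cpu1 = { 0 };

	record_sample(&cpu0, 1200);
	record_sample(&cpu0, 1800);
	print_avg(&cpu0);	/* 1500 */
	print_avg(&cpu1);	/* -    */
	return 0;
}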

/linux/drivers/pwm/

pwm-sifive.c
      51  int user_count;  [member]
      65  ddata->user_count++;  in pwm_sifive_request()
      76  ddata->user_count--;  in pwm_sifive_free()
     171  if (ddata->user_count != 1 && ddata->approx_period) {  in pwm_sifive_apply()

pwm-imx-tpm.c
      64  u32 user_count;  [member]
     193  if (tpm->user_count > 1)  in pwm_imx_tpm_apply_hw()
     317  tpm->user_count++;  in pwm_imx_tpm_request()
     328  tpm->user_count--;  in pwm_imx_tpm_free()

pwm-rz-mtu3.c
      77  u32 user_count[RZ_MTU3_MAX_HW_CHANNELS];  [member]
     181  if (!rz_mtu3_pwm->user_count[ch]) {  in rz_mtu3_pwm_request()
     189  rz_mtu3_pwm->user_count[ch]++;  in rz_mtu3_pwm_request()
     205  rz_mtu3_pwm->user_count[ch]--;  in rz_mtu3_pwm_free()
     206  if (!rz_mtu3_pwm->user_count[ch])  in rz_mtu3_pwm_free()
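
All three PWM drivers listed here use user_count the same way: the .request() callback increments it, .free() decrements it, and configuration paths consult it before touching state shared by several channels (pwm-rz-mtu3.c keeps one count per hardware channel). The following is a simplified user-space sketch of that idea under invented names; the real drivers additionally serialize these updates and program hardware, which is omitted here.

#include <stdbool.h>
#include <stdio.h>

struct fake_pwm_chip {
	unsigned int user_count;   /* how many channels are currently requested */
	unsigned long period_ns;   /* a setting shared by every channel          */
};

static void fake_pwm_request(struct fake_pwm_chip *chip)
{
	chip->user_count++;
}

static void fake_pwm_free(struct fake_pwm_chip *chip)
{
	chip->user_count--;
}

static bool fake_pwm_set_period(struct fake_pwm_chip *chip, unsigned long period_ns)
{
	/* In the spirit of the user_count checks in pwm_sifive_apply() and
	 * pwm_imx_tpm_apply_hw(): refuse to retune the shared period while
	 * more than one user holds the chip. */
	if (chip->user_count > 1 && period_ns != chip->period_ns)
		return false;
	chip->period_ns = period_ns;
	return true;
}

int main(void)
{
	struct fake_pwm_chip chip = { 0 };

	fake_pwm_request(&chip);
	printf("one user:  %d\n", fake_pwm_set_period(&chip, 1000000)); /* 1 */
	fake_pwm_request(&chip);
	printf("two users: %d\n", fake_pwm_set_period(&chip, 2000000)); /* 0 */
	fake_pwm_free(&chip);
	return 0;
}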

/linux/drivers/hwtracing/intel_th/

msu.c
     160  atomic_t user_count;  [member]
     738  atomic_inc(&win->msc->user_count);  in msc_win_set_lockout()
     740  atomic_dec(&win->msc->user_count);  in msc_win_set_lockout()
     882  if (!atomic_inc_unless_negative(&msc->user_count))  in intel_th_msc_activate()
     894  atomic_dec(&msc->user_count);  in intel_th_msc_activate()
     906  atomic_dec(&msc->user_count);  in intel_th_msc_deactivate()
    1325  if (atomic_read(&msc->user_count) != -1)  in msc_buffer_alloc()
    1345  if (WARN_ON_ONCE(atomic_cmpxchg(&msc->user_count, -1, 0) != -1))  in msc_buffer_alloc()
    1367  count = atomic_cmpxchg(&msc->user_count, 0, -1);  in msc_buffer_unlocked_free_unless_used()
    1548  if (!atomic_inc_unless_negative(&msc->user_count))  in intel_th_msc_read()
    [all …]
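
In msu.c, user_count doubles as a buffer-state flag and a reference count: -1 means no buffer is allocated, msc_buffer_alloc() flips it from -1 to 0, activation and readers take references with atomic_inc_unless_negative(), and msc_buffer_unlocked_free_unless_used() only tears the buffer down by flipping 0 back to -1, i.e. when nobody holds it. A user-space sketch of that protocol using C11 atomics (names invented, locking and the actual buffer omitted):

#include <stdatomic.h>
#include <stdbool.h>
#include <stdio.h>

static atomic_int user_count = -1;	/* -1: no buffer allocated yet */

static bool buffer_alloc(void)
{
	/* Only succeed if we are the ones moving -1 -> 0. */
	int expected = -1;
	return atomic_compare_exchange_strong(&user_count, &expected, 0);
}

static bool take_user(void)
{
	/* Same idea as atomic_inc_unless_negative(): take a reference
	 * unless the buffer is gone. */
	int v = atomic_load(&user_count);
	while (v >= 0)
		if (atomic_compare_exchange_weak(&user_count, &v, v + 1))
			return true;
	return false;
}

static void drop_user(void)
{
	atomic_fetch_sub(&user_count, 1);
}

static bool buffer_free_unless_used(void)
{
	/* Like msc_buffer_unlocked_free_unless_used(): only tear the
	 * buffer down if the count is exactly 0. */
	int expected = 0;
	return atomic_compare_exchange_strong(&user_count, &expected, -1);
}

int main(void)
{
	printf("alloc: %d\n", buffer_alloc());		  /* 1 */
	printf("take:  %d\n", take_user());		  /* 1 */
	printf("free:  %d\n", buffer_free_unless_used()); /* 0: still in use */
	drop_user();
	printf("free:  %d\n", buffer_free_unless_used()); /* 1 */
	return 0;
}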

/linux/include/linux/

objagg.h
      33  unsigned int user_count;  [member]

/linux/tools/include/uapi/linux/

fscrypt.h
     157  __u32 user_count;  [member]

/linux/include/uapi/linux/

fscrypt.h
     157  __u32 user_count;  [member]

/linux/drivers/gpu/drm/rockchip/

rockchip_drm_gem.c
     220  unsigned long user_count = vma_pages(vma);  in rockchip_drm_gem_object_mmap_iommu()  [local]
     222  if (user_count == 0)  in rockchip_drm_gem_object_mmap_iommu()

/linux/drivers/net/ethernet/mellanox/mlx5/core/esw/

legacy.c
      69  atomic64_set(&esw->user_count, 0);  in esw_destroy_legacy_fdb_table()
     180  atomic64_set(&esw->user_count, 0);  in esw_create_legacy_table()

/linux/drivers/infiniband/hw/qib/

qib_file_ops.c
    2348  static atomic_t user_count = ATOMIC_INIT(0);  [variable]
    2352  if (atomic_dec_return(&user_count) == 0)  in qib_user_remove()
    2363  if (atomic_inc_return(&user_count) == 1) {  in qib_user_add()
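
qib_file_ops.c uses a file-scope counter so that only the first user creates the shared interface and only the last one removes it: atomic_inc_return() == 1 and atomic_dec_return() == 0 detect exactly those two transitions. A stand-alone sketch of the same first-user/last-user pattern with C11 atomics (the printed actions stand in for the real create/remove work):

#include <stdatomic.h>
#include <stdio.h>

static atomic_int user_count;	/* like: static atomic_t user_count = ATOMIC_INIT(0); */

static void user_add(void)
{
	/* atomic_fetch_add() returns the old value, so old + 1 is the new count. */
	if (atomic_fetch_add(&user_count, 1) + 1 == 1)
		printf("first user: create shared resource\n");
}

static void user_remove(void)
{
	if (atomic_fetch_sub(&user_count, 1) - 1 == 0)
		printf("last user: destroy shared resource\n");
}

int main(void)
{
	user_add();	/* creates  */
	user_add();	/* no-op    */
	user_remove();	/* no-op    */
	user_remove();	/* destroys */
	return 0;
}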

/linux/drivers/net/ethernet/mellanox/mlx5/core/

eswitch.c
    2289  atomic64_inc(&esw->user_count);  in mlx5_esw_get()
    2301  atomic64_dec_if_positive(&esw->user_count);  in mlx5_esw_put()
    2321  atomic64_read(&esw->user_count) > 0) {  in mlx5_esw_try_lock()

eswitch.h
     358  atomic64_t user_count;  [member]

eswitch_offloads.c
    1975  atomic64_set(&esw->user_count, 0);  in esw_destroy_offloads_fdb_tables()
    3350  atomic64_set(&esw->user_count, 0);  in esw_offloads_steering_init()

/linux/Documentation/filesystems/

fscrypt.rst
    1123  __u32 user_count;
    1151  - ``user_count`` specifies the number of users who have added the key.
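
For the fscrypt field documented above, user_count is reported through the key-status ioctl. The fragment below is a hedged user-space sketch of querying it, assuming the FS_IOC_GET_ENCRYPTION_KEY_STATUS ioctl and struct fscrypt_get_key_status_arg from the <linux/fscrypt.h> header listed in this result set; the mount point and key identifier are placeholders the caller must provide.

#include <linux/fscrypt.h>
#include <string.h>
#include <stdio.h>
#include <sys/ioctl.h>
#include <fcntl.h>
#include <unistd.h>

int print_key_user_count(const char *mountpoint,
			 const unsigned char id[FSCRYPT_KEY_IDENTIFIER_SIZE])
{
	struct fscrypt_get_key_status_arg arg = { 0 };
	int fd, ret;

	fd = open(mountpoint, O_RDONLY);
	if (fd < 0)
		return -1;

	arg.key_spec.type = FSCRYPT_KEY_SPEC_TYPE_IDENTIFIER;
	memcpy(arg.key_spec.u.identifier, id, FSCRYPT_KEY_IDENTIFIER_SIZE);

	ret = ioctl(fd, FS_IOC_GET_ENCRYPTION_KEY_STATUS, &arg);
	if (ret == 0)
		/* user_count: how many users have added this key */
		printf("user_count = %u\n", arg.user_count);

	close(fd);
	return ret;
}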