Home
last modified time | relevance | path

Searched refs:map_flags (Results 1 – 25 of 135) sorted by relevance

123456

/linux/tools/testing/selftests/bpf/
H A Dtest_lru_map.c28 static int create_map(int map_type, int map_flags, unsigned int size) in create_map() argument
30 LIBBPF_OPTS(bpf_map_create_opts, opts, .map_flags = map_flags); in create_map()
149 static void test_lru_sanity0(int map_type, int map_flags) in test_lru_sanity0() argument
156 map_flags); in test_lru_sanity0()
160 if (map_flags & BPF_F_NO_COMMON_LRU) in test_lru_sanity0()
161 lru_map_fd = create_map(map_type, map_flags, 2 * nr_cpus); in test_lru_sanity0()
163 lru_map_fd = create_map(map_type, map_flags, 2); in test_lru_sanity0()
240 static void test_lru_sanity1(int map_type, int map_flags, unsigned int tgt_free) in test_lru_sanity1() argument
248 if (map_flags & BPF_F_NO_COMMON_LRU) in test_lru_sanity1()
253 map_flags); in test_lru_sanity1()
[all …]
H A Dtest_maps.c273 "err: %s, flags: 0x%x\n", strerror(errno), map_opts.map_flags); in helper_fill_hashmap()
329 old_flags = map_opts.map_flags; in test_hashmap_zero_seed()
330 map_opts.map_flags |= BPF_F_ZERO_SEED; in test_hashmap_zero_seed()
352 map_opts.map_flags = old_flags; in test_hashmap_zero_seed()
551 if (map_opts.map_flags & BPF_F_NO_PREALLOC) { in test_queuemap()
607 if (map_opts.map_flags & BPF_F_NO_PREALLOC) { in test_stackmap()
1535 old_flags = map_opts.map_flags; in test_map_rdonly()
1536 map_opts.map_flags |= BPF_F_RDONLY; in test_map_rdonly()
1539 map_opts.map_flags = old_flags; in test_map_rdonly()
1564 old_flags = map_opts.map_flags; in test_map_wronly_hash()
[all …]
/linux/samples/bpf/
H A Dtest_lru_dist.c202 static int create_map(int map_type, int map_flags, unsigned int size) in create_map() argument
205 .map_flags = map_flags, in create_map()
307 static void test_parallel_lru_dist(int map_type, int map_flags, in test_parallel_lru_dist() argument
314 map_flags); in test_parallel_lru_dist()
316 if (map_flags & BPF_F_NO_COMMON_LRU) in test_parallel_lru_dist()
317 lru_map_fd = create_map(map_type, map_flags, in test_parallel_lru_dist()
320 lru_map_fd = create_map(map_type, map_flags, in test_parallel_lru_dist()
332 static void test_lru_loss0(int map_type, int map_flags) in test_lru_loss0() argument
341 map_flags); in test_lru_loss0()
345 if (map_flags & BPF_F_NO_COMMON_LRU) in test_lru_loss0()
[all …]
H A Dmap_perf_test.bpf.c36 __uint(map_flags, BPF_F_NO_COMMON_LRU);
44 __uint(map_flags, BPF_F_NUMA_NODE);
70 __uint(map_flags, BPF_F_NO_PREALLOC);
78 __uint(map_flags, BPF_F_NO_PREALLOC);
86 __uint(map_flags, BPF_F_NO_PREALLOC);
H A Dtcp_dumpstats_kern.c17 __u32 map_flags; member
22 .map_flags = BPF_F_NO_PREALLOC,
/linux/kernel/bpf/
H A Darraymap.c60 attr->map_flags & ~ARRAY_CREATE_FLAG_MASK || in array_map_alloc_check()
61 !bpf_map_flags_access_ok(attr->map_flags) || in array_map_alloc_check()
66 attr->map_flags & (BPF_F_MMAPABLE | BPF_F_INNER_MAP)) in array_map_alloc_check()
70 attr->map_flags & BPF_F_PRESERVE_ELEMS) in array_map_alloc_check()
122 if (attr->map_flags & BPF_F_MMAPABLE) { in array_map_alloc()
131 if (attr->map_flags & BPF_F_MMAPABLE) { in array_map_alloc()
217 if (map->map_flags & BPF_F_INNER_MAP) in array_map_gen_lookup()
261 if (map->map_flags & BPF_F_INNER_MAP) in percpu_array_map_gen_lookup()
346 u64 map_flags) in array_map_update_elem() argument
352 if (unlikely((map_flags & ~BPF_F_LOCK) > BPF_EXIST)) in array_map_update_elem()
[all …]
H A Dreuseport_array.c192 u32 map_flags) in reuseport_array_update_check() argument
194 if (osk && map_flags == BPF_NOEXIST) in reuseport_array_update_check()
197 if (!osk && map_flags == BPF_EXIST) in reuseport_array_update_check()
233 void *value, u64 map_flags) in bpf_fd_reuseport_array_update_elem() argument
243 if (map_flags > BPF_EXIST) in bpf_fd_reuseport_array_update_elem()
273 map_flags); in bpf_fd_reuseport_array_update_elem()
289 err = reuseport_array_update_check(array, nsk, osk, reuse, map_flags); in bpf_fd_reuseport_array_update_elem()
H A Dhashtab.c134 return !(htab->map.map_flags & BPF_F_NO_PREALLOC); in htab_is_prealloc()
353 htab->map.map_flags & BPF_F_NO_COMMON_LRU, in prealloc_init()
425 bool percpu_lru = (attr->map_flags & BPF_F_NO_COMMON_LRU); in htab_map_alloc_check()
426 bool prealloc = !(attr->map_flags & BPF_F_NO_PREALLOC); in htab_map_alloc_check()
427 bool zero_seed = (attr->map_flags & BPF_F_ZERO_SEED); in htab_map_alloc_check()
437 if (attr->map_flags & ~HTAB_CREATE_FLAG_MASK || in htab_map_alloc_check()
438 !bpf_map_flags_access_ok(attr->map_flags)) in htab_map_alloc_check()
483 bool percpu_lru = (attr->map_flags & BPF_F_NO_COMMON_LRU); in htab_map_alloc()
484 bool prealloc = !(attr->map_flags & BPF_F_NO_PREALLOC); in htab_map_alloc()
548 if (htab->map.map_flags & BPF_F_ZERO_SEED) in htab_map_alloc()
[all …]
H A Dmap_in_map.c39 inner_map_meta->map_flags = inner_map->map_flags; in bpf_map_meta_alloc()
90 meta0->map_flags == meta1->map_flags && in bpf_map_meta_equal()
H A Ddevmap.c120 attr->map_flags & ~DEV_CREATE_FLAG_MASK) in dev_map_alloc_check()
139 attr->map_flags |= BPF_F_RDONLY_PROG; in dev_map_init_map()
908 void *key, void *value, u64 map_flags) in __dev_map_update_elem() argument
915 if (unlikely(map_flags > BPF_EXIST)) in __dev_map_update_elem()
919 if (unlikely(map_flags == BPF_NOEXIST)) in __dev_map_update_elem()
950 u64 map_flags) in dev_map_update_elem() argument
953 map, key, value, map_flags); in dev_map_update_elem()
957 void *key, void *value, u64 map_flags) in __dev_map_hash_update_elem() argument
969 if (unlikely(map_flags > BPF_EXIST || !val.ifindex)) in __dev_map_hash_update_elem()
975 if (old_dev && (map_flags in __dev_map_hash_update_elem()
1010 dev_map_hash_update_elem(struct bpf_map * map,void * key,void * value,u64 map_flags) dev_map_hash_update_elem() argument
[all …]
H A Dbloom_filter.c103 attr->map_flags & ~BLOOM_CREATE_FLAG_MASK || in bloom_map_alloc()
104 !bpf_map_flags_access_ok(attr->map_flags) || in bloom_map_alloc()
156 if (!(attr->map_flags & BPF_F_ZERO_SEED)) in bloom_map_alloc()
/linux/tools/testing/selftests/bpf/progs/
H A Dlocal_storage.c28 __uint(map_flags, BPF_F_NO_PREALLOC);
35 __uint(map_flags, BPF_F_NO_PREALLOC | BPF_F_CLONE);
42 __uint(map_flags, BPF_F_NO_PREALLOC | BPF_F_CLONE);
49 __uint(map_flags, BPF_F_NO_PREALLOC);
56 __uint(map_flags, BPF_F_NO_PREALLOC);
H A Dsockopt_inherit.c20 __uint(map_flags, BPF_F_NO_PREALLOC | BPF_F_CLONE);
27 __uint(map_flags, BPF_F_NO_PREALLOC | BPF_F_CLONE);
34 __uint(map_flags, BPF_F_NO_PREALLOC);
H A Dtest_btf_map_in_map.c46 __uint(map_flags, BPF_F_INNER_MAP);
55 __uint(map_flags, BPF_F_INNER_MAP);
68 __uint(map_flags, BPF_F_INNER_MAP);
H A Dtest_mmap.c12 __uint(map_flags, BPF_F_MMAPABLE | BPF_F_RDONLY_PROG);
19 __uint(map_flags, BPF_F_MMAPABLE);
H A Dtest_queue_stack_map.h14 __uint(map_flags, 0);
22 __uint(map_flags, 0);
H A Dtask_ls_recursion.c18 __uint(map_flags, BPF_F_NO_PREALLOC);
25 __uint(map_flags, BPF_F_NO_PREALLOC);
H A Dnetns_cookie_prog.c11 __uint(map_flags, BPF_F_NO_PREALLOC);
18 __uint(map_flags, BPF_F_NO_PREALLOC);
H A Dmap_kptr.c45 __uint(map_flags, BPF_F_NO_PREALLOC);
53 __uint(map_flags, BPF_F_NO_PREALLOC);
72 __uint(map_flags, BPF_F_NO_PREALLOC);
79 __uint(map_flags, BPF_F_NO_PREALLOC);
86 __uint(map_flags, BPF_F_NO_PREALLOC);
93 __uint(map_flags, BPF_F_NO_PREALLOC);
H A Dbench_local_storage_create.c20 __uint(map_flags, BPF_F_NO_PREALLOC);
27 __uint(map_flags, BPF_F_NO_PREALLOC);
H A Dtest_map_in_map.c11 __uint(map_flags, 0);
19 __uint(map_flags, 0);
H A Dcgrp_ls_recursion.c12 __uint(map_flags, BPF_F_NO_PREALLOC);
19 __uint(map_flags, BPF_F_NO_PREALLOC);
/linux/tools/testing/selftests/bpf/map_tests/
H A Dmap_percpu_stats.c289 n_iter, n_real, map_type_to_s(info->type), info->map_flags); in check_expected_number_elements()
316 opts.retry_for_nomem = is_percpu(opts.map_type) && (info.map_flags & BPF_F_NO_PREALLOC); in __test()
363 LIBBPF_OPTS(bpf_map_create_opts, map_opts, .map_flags = BPF_F_NO_PREALLOC); in create_hash()
370 LIBBPF_OPTS(bpf_map_create_opts, map_opts, .map_flags = BPF_F_NO_PREALLOC); in create_percpu_hash()
385 static int create_lru_hash(__u32 type, __u32 map_flags) in create_lru_hash() argument
387 LIBBPF_OPTS(bpf_map_create_opts, map_opts, .map_flags = map_flags); in create_lru_hash()
395 .map_flags = BPF_F_NO_PREALLOC, in create_hash_of_maps()
/linux/net/xdp/
H A Dxskmap.c72 attr->map_flags & ~(BPF_F_NUMA_NODE | BPF_F_RDONLY | BPF_F_WRONLY)) in xsk_map_alloc()
162 u64 map_flags) in xsk_map_update_elem() argument
172 if (unlikely(map_flags > BPF_EXIST)) in xsk_map_update_elem()
200 } else if (old_xs && map_flags == BPF_NOEXIST) { in xsk_map_update_elem()
203 } else if (!old_xs && map_flags == BPF_EXIST) { in xsk_map_update_elem()
/linux/tools/testing/selftests/bpf/benchs/
H A Dbench_bpf_hashmap_lookup.c22 __u32 map_flags; member
28 .map_flags = 0,
75 args.map_flags = ret; in parse_arg()
169 bpf_map__set_map_flags(ctx.skel->maps.hash_map_bench, args.map_flags); in setup()

123456