Searched refs:cpu_map (Results 1 – 25 of 40) sorted by relevance

/linux/kernel/sched/topology.c
377 static void perf_domain_debug(const struct cpumask *cpu_map, in perf_domain_debug() argument
383 printk(KERN_DEBUG "root_domain %*pbl:", cpumask_pr_args(cpu_map)); in perf_domain_debug()
425 static bool build_perf_domains(const struct cpumask *cpu_map) in build_perf_domains() argument
429 int cpu = cpumask_first(cpu_map); in build_perf_domains()
435 if (!sched_is_eas_possible(cpu_map)) in build_perf_domains()
438 for_each_cpu(i, cpu_map) { in build_perf_domains()
451 perf_domain_debug(cpu_map, pd); in build_perf_domains()
1353 const struct cpumask *cpu_map) in asym_cpu_capacity_classify()
1367 else if (cpumask_intersects(cpu_map, cpu_capacity_span(entry))) in asym_cpu_capacity_classify()
1491 static void __sdt_free(const struct cpumask *cpu_map); in __free_domain_allocs()
1345 asym_cpu_capacity_classify(const struct cpumask * sd_span,const struct cpumask * cpu_map) asym_cpu_capacity_classify() argument
1487 __free_domain_allocs(struct s_data * d,enum s_alloc what,const struct cpumask * cpu_map) __free_domain_allocs() argument
1506 __visit_domain_allocation_hell(struct s_data * d,const struct cpumask * cpu_map) __visit_domain_allocation_hell() argument
1581 sd_init(struct sched_domain_topology_level * tl,const struct cpumask * cpu_map,struct sched_domain * child,int cpu) sd_init() argument
2214 __sdt_alloc(const struct cpumask * cpu_map) __sdt_alloc() argument
2283 __sdt_free(const struct cpumask * cpu_map) __sdt_free() argument
2320 build_sched_domain(struct sched_domain_topology_level * tl,const struct cpumask * cpu_map,struct sched_domain_attr * attr,struct sched_domain * child,int cpu) build_sched_domain() argument
2354 topology_span_sane(struct sched_domain_topology_level * tl,const struct cpumask * cpu_map,int cpu) topology_span_sane() argument
2388 build_sched_domains(const struct cpumask * cpu_map,struct sched_domain_attr * attr) build_sched_domains() argument
2595 sched_init_domains(const struct cpumask * cpu_map) sched_init_domains() argument
2619 detach_destroy_domains(const struct cpumask * cpu_map) detach_destroy_domains() argument
[all...]
/linux/drivers/gpu/drm/xe/tests/xe_bo.c
26 u64 *cpu_map; in ccs_test_migrate() local
83 cpu_map = kmap_local_page(page); in ccs_test_migrate()
86 if (cpu_map[0] != get_val) { in ccs_test_migrate()
90 (unsigned long long)cpu_map[0]); in ccs_test_migrate()
97 if (cpu_map[offset] != get_val) { in ccs_test_migrate()
101 (unsigned long long)cpu_map[offset]); in ccs_test_migrate()
105 cpu_map[0] = assign_val; in ccs_test_migrate()
106 cpu_map[offset] = assign_val; in ccs_test_migrate()
107 kunmap_local(cpu_map); in ccs_test_migrate()
/linux/tools/power/x86/intel-speed-select/isst-config.c
72 struct _cpu_map *cpu_map; variable
332 if (cpu_map && cpu_map[cpu].initialized) in get_physical_package_id()
333 return cpu_map[cpu].pkg_id; in get_physical_package_id()
356 if (cpu_map && cpu_map[cpu].initialized) in get_physical_core_id()
357 return cpu_map[cpu].core_id; in get_physical_core_id()
380 if (cpu_map && cpu_map[cpu].initialized) in get_physical_die_id()
381 return cpu_map[cpu].die_id; in get_physical_die_id()
409 if (cpu_map && cpu_map[cpu].initialized) in get_physical_punit_id()
410 return cpu_map[cpu].punit_id; in get_physical_punit_id()
691 cpu_map[i].punit_cpu_core > max_id) in get_max_punit_core_id()
[all …]
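
All four get_physical_*_id() hits above share the same guard: the cached per-CPU slot is trusted only once it is marked initialized. Below is a minimal standalone restatement of that pattern; the field names are taken from the hits, but the real struct _cpu_map in isst-config.c carries additional members, and the real fallback path reads sysfs rather than returning -1.

    #include <stdlib.h>

    /* Field names as referenced by the hits above; the real struct _cpu_map
       in isst-config.c has more members and different field widths. */
    struct _cpu_map {
        int initialized;
        int pkg_id;
        int core_id;
        int die_id;
        int punit_id;
    };

    static struct _cpu_map *cpu_map;    /* one slot per logical CPU */

    /* Guarded-lookup pattern shared by get_physical_package_id() and
       friends: fall through when the cache is absent or the slot is cold. */
    static int get_physical_package_id(int cpu)
    {
        if (cpu_map && cpu_map[cpu].initialized)
            return cpu_map[cpu].pkg_id;    /* served from the cache */

        return -1;    /* stand-in for the real sysfs fallback */
    }

    int main(void)
    {
        cpu_map = calloc(4, sizeof(*cpu_map));    /* pretend 4 logical CPUs */
        if (!cpu_map)
            return 1;
        cpu_map[0].initialized = 1;
        return get_physical_package_id(0);    /* 0, from the cache */
    }
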
/linux/arch/mips/kernel/cacheinfo.c
58 static void fill_cpumask_siblings(int cpu, cpumask_t *cpu_map) in fill_cpumask_siblings() argument
64 cpumask_set_cpu(cpu1, cpu_map); in fill_cpumask_siblings()
67 static void fill_cpumask_cluster(int cpu, cpumask_t *cpu_map) in fill_cpumask_cluster() argument
74 cpumask_set_cpu(cpu1, cpu_map); in fill_cpumask_cluster()
/linux/Documentation/bpf/map_cpumap.rst
103 ``cpu_map`` and how to redirect packets to a remote CPU using a round robin scheme.
112 } cpu_map SEC(".maps");
153 return bpf_redirect_map(&cpu_map, cpu_dest, 0);
164 int set_max_cpu_entries(struct bpf_map *cpu_map)
166 if (bpf_map__set_max_entries(cpu_map, libbpf_num_possible_cpus()) < 0) {
167 fprintf(stderr, "Failed to set max entries for cpu_map map: %s",
/linux/tools/testing/selftests/bpf/progs/freplace_progmap.c
10 } cpu_map SEC(".maps");
21 return bpf_redirect_map(&cpu_map, 0, XDP_PASS); in xdp_cpumap_prog()
/linux/tools/testing/selftests/bpf/progs/test_xdp_with_cpumap_helpers.c
13 } cpu_map SEC(".maps");
18 return bpf_redirect_map(&cpu_map, 1, 0); in xdp_redir_prog()
/linux/tools/testing/selftests/bpf/progs/test_xdp_with_cpumap_frags_helpers.c
13 } cpu_map SEC(".maps");
/linux/tools/testing/selftests/bpf/progs/xdp_features.c
52 } cpu_map SEC(".maps");
222 return bpf_redirect_map(&cpu_map, 0, 0); in xdp_do_redirect()
/linux/tools/perf/util/mmap.c
249 const struct perf_cpu_map *cpu_map = NULL; in build_node_mask() local
251 cpu_map = cpu_map__online(); in build_node_mask()
252 if (!cpu_map) in build_node_mask()
255 nr_cpus = perf_cpu_map__nr(cpu_map); in build_node_mask()
257 cpu = perf_cpu_map__cpu(cpu_map, idx); /* map c index to online cpu index */ in build_node_mask()
/linux/tools/perf/util/mem-events.c
246 struct perf_cpu_map *cpu_map = NULL; in perf_mem_events__record_args() local
272 cpu_map = perf_cpu_map__merge(cpu_map, pmu->cpus); in perf_mem_events__record_args()
276 if (cpu_map) { in perf_mem_events__record_args()
277 if (!perf_cpu_map__equal(cpu_map, cpu_map__online())) { in perf_mem_events__record_args()
280 cpu_map__snprint(cpu_map, buf, sizeof(buf)); in perf_mem_events__record_args()
283 perf_cpu_map__put(cpu_map); in perf_mem_events__record_args()
/linux/tools/perf/util/tool.h
75 cpu_map, member
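
The tools/perf hits revolve around libperf's reference-counted perf_cpu_map. A hedged sketch of the lifecycle those hits imply, assuming the headers and library from tools/lib/perf are available to link against; cpu_map__online() in the mmap.c hit is a perf-internal wrapper, so this sketch parses an explicit CPU list instead.

    #include <stdio.h>
    #include <perf/cpumap.h>

    int main(void)
    {
        /* Build a map from a CPU list string, as the builtin-stat.c hit
           does with perf_cpu_map__new(). */
        struct perf_cpu_map *cpu_map = perf_cpu_map__new("0-3");
        struct perf_cpu cpu;
        int idx;

        if (!cpu_map)
            return 1;

        printf("%d cpus in map\n", perf_cpu_map__nr(cpu_map));

        /* Same iteration shape as the cs-etm.c hit further down. */
        perf_cpu_map__for_each_cpu(cpu, idx, cpu_map)
            printf("idx %d -> cpu %d\n", idx, cpu.cpu);

        perf_cpu_map__put(cpu_map);    /* drop our reference */
        return 0;
    }
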
/linux/drivers/platform/x86/intel/speed_select_if/isst_if_common.c
483 struct isst_if_cpu_map *cpu_map; in isst_if_proc_phyid_req() local
485 cpu_map = (struct isst_if_cpu_map *)cmd_ptr; in isst_if_proc_phyid_req()
486 if (cpu_map->logical_cpu >= nr_cpu_ids || in isst_if_proc_phyid_req()
487 cpu_map->logical_cpu >= num_possible_cpus()) in isst_if_proc_phyid_req()
491 cpu_map->physical_cpu = isst_cpu_info[cpu_map->logical_cpu].punit_cpu_id; in isst_if_proc_phyid_req()
615 cmd_cb.offset = offsetof(struct isst_if_cpu_maps, cpu_map); in isst_if_def_ioctl()
/linux/kernel/bpf/cpumap.c
79 struct bpf_cpu_map_entry __rcu **cpu_map; member
105 cmap->cpu_map = bpf_map_area_alloc(cmap->map.max_entries * in cpu_map_alloc()
108 if (!cmap->cpu_map) { in cpu_map_alloc()
511 old_rcpu = unrcu_pointer(xchg(&cmap->cpu_map[key_cpu], RCU_INITIALIZER(rcpu))); in __cpu_map_entry_replace()
590 rcpu = rcu_dereference_raw(cmap->cpu_map[i]); in cpu_map_free()
597 bpf_map_area_free(cmap->cpu_map); in cpu_map_free()
613 rcpu = rcu_dereference_check(cmap->cpu_map[key], in __cpu_map_lookup_elem()
/linux/drivers/base/cacheinfo.c
964 cpumask_t *cpu_map) in update_per_cpu_data_slice_size() argument
968 for_each_cpu(icpu, cpu_map) { in update_per_cpu_data_slice_size()
979 cpumask_t *cpu_map; in cacheinfo_cpu_online() local
986 if (cpu_map_shared_cache(true, cpu, &cpu_map)) in cacheinfo_cpu_online()
987 update_per_cpu_data_slice_size(true, cpu, cpu_map); in cacheinfo_cpu_online()
996 cpumask_t *cpu_map; in cacheinfo_cpu_pre_down() local
999 nr_shared = cpu_map_shared_cache(false, cpu, &cpu_map); in cacheinfo_cpu_pre_down()
1005 update_per_cpu_data_slice_size(false, cpu, cpu_map); in cacheinfo_cpu_pre_down()
/linux/mm/percpu.c
2442 __alignof__(ai->groups[0].cpu_map[0])); in pcpu_alloc_alloc_info()
2443 ai_size = base_size + nr_units * sizeof(ai->groups[0].cpu_map[0]); in pcpu_alloc_alloc_info()
2451 ai->groups[0].cpu_map = ptr; in pcpu_alloc_alloc_info()
2454 ai->groups[0].cpu_map[unit] = NR_CPUS; in pcpu_alloc_alloc_info()
2520 if (gi->cpu_map[unit] != NR_CPUS) in pcpu_dump_alloc_info()
2522 cpu_width, gi->cpu_map[unit]); in pcpu_dump_alloc_info()
2664 cpu = gi->cpu_map[i]; in pcpu_setup_first_chunk()
2855 unsigned int *cpu_map; in pcpu_build_alloc_info() local
2947 cpu_map = ai->groups[0].cpu_map; in pcpu_build_alloc_info()
2950 ai->groups[group].cpu_map = cpu_map; in pcpu_build_alloc_info()
[all …]
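
In percpu.c, cpu_map is a per-group unit->cpu array in which the sentinel value NR_CPUS marks an unused unit, as the pcpu_dump_alloc_info() hit shows. A toy userspace illustration of that convention, with the array contents invented for the example:

    #include <stdio.h>

    #define NR_CPUS 8    /* stand-in for the kernel constant */

    int main(void)
    {
        /* Two units mapped to CPUs 0 and 2; the rest unused. */
        unsigned int cpu_map[4] = { 0, 2, NR_CPUS, NR_CPUS };

        for (unsigned int unit = 0; unit < 4; unit++) {
            if (cpu_map[unit] != NR_CPUS)    /* same test as pcpu_dump_alloc_info() */
                printf("unit %u -> cpu %u\n", unit, cpu_map[unit]);
            else
                printf("unit %u unused\n", unit);
        }
        return 0;
    }
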
/linux/tools/testing/selftests/bpf/prog_tests/xdp_cpumap_attach.c
35 map_fd = bpf_map__fd(skel->maps.cpu_map); in test_xdp_with_cpumap_helpers()
87 map_fd = bpf_map__fd(skel->maps.cpu_map); in test_xdp_with_cpumap_frags_helpers()
/linux/tools/perf/tests/cpumap.c
19 struct perf_record_cpu_map *map_event = &event->cpu_map; in process_event_mask()
55 struct perf_record_cpu_map *map_event = &event->cpu_map; in process_event_cpus()
81 struct perf_record_cpu_map *map_event = &event->cpu_map; in process_event_range_cpus()
/linux/tools/perf/tests/tests.h
120 DECLARE_SUITE(cpu_map);
/linux/tools/perf/arch/arm/util/cs-etm.c
782 struct perf_cpu_map *cpu_map; in cs_etm_get_metadata()
796 /* If the cpu_map has the "any" CPU all online CPUs are involved */ in cs_etm_info_fill()
798 cpu_map = online_cpus; in cs_etm_info_fill()
806 cpu_map = event_cpus; in cs_etm_info_fill()
809 nr_cpu = perf_cpu_map__nr(cpu_map); in cs_etm_info_fill()
822 perf_cpu_map__for_each_cpu(cpu, i, cpu_map) { in cs_etm_info_fill()
803 struct perf_cpu_map *cpu_map; cs_etm_info_fill() local
/linux/tools/perf/python/twatch.py
12 cpus = perf.cpu_map()
/linux/tools/perf/python/tracepoint.py
19 cpus = perf.cpu_map()
/linux/drivers/gpu/drm/imagination/pvr_queue.c
1216 void *cpu_map; in pvr_queue_create() local
1275 cpu_map = pvr_fw_object_create_and_map(pvr_dev, sizeof(*queue->timeline_ufo.value), in pvr_queue_create()
1278 if (IS_ERR(cpu_map)) { in pvr_queue_create()
1279 err = PTR_ERR(cpu_map); in pvr_queue_create()
1283 queue->timeline_ufo.value = cpu_map; in pvr_queue_create()
/linux/tools/perf/builtin-stat.c
1171 struct perf_cpu_map *cpu_map = perf_cpu_map__new(map); in cpu__get_cache_id_from_map() local
1178 id = perf_cpu_map__min(cpu_map).cpu; in cpu__get_cache_id_from_map()
1183 perf_cpu_map__put(cpu_map); in cpu__get_cache_id_from_map()
1564 struct perf_cpu_map *cpu_map; in perf_env__get_cache_id_for_cpu() local
1575 cpu_map = perf_cpu_map__new(caches[i].map); in perf_env__get_cache_id_for_cpu()
1576 map_contains_cpu = perf_cpu_map__idx(cpu_map, cpu); in perf_env__get_cache_id_for_cpu()
1577 perf_cpu_map__put(cpu_map); in perf_env__get_cache_id_for_cpu()
2260 cpus = cpu_map__new_data(&event->cpu_map.data); in process_cpu_map_event()
2278 .cpu_map = process_cpu_map_event,
/linux/include/linux/percpu.h
81 unsigned int *cpu_map; /* unit->cpu map, empty member
