/linux/drivers/xen/
  pcpu.c
      60  struct pcpu {
     103  struct pcpu *cpu = container_of(dev, struct pcpu, dev);  in online_show()
     112  struct pcpu *pcpu = container_of(dev, struct pcpu, dev);  in online_store()
     124  ret = xen_pcpu_down(pcpu->cpu_id);  in online_store()
     127  ret = xen_pcpu_up(pcpu->cpu_id);  in online_store()
     172  struct pcpu *pcpu)  in pcpu_online_status()
     175  !xen_pcpu_online(pcpu->flags)) {  in pcpu_online_status()
     177  pcpu->flags |= XEN_PCPU_FLAGS_ONLINE;  in pcpu_online_status()
     178  kobject_uevent(&pcpu->dev.kobj, KOBJ_ONLINE);  in pcpu_online_status()
     180  xen_pcpu_online(pcpu->flags)) {  in pcpu_online_status()
          [all …]
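The xen/pcpu.c hits show the common sysfs pattern of recovering the containing object from an embedded struct device. A minimal sketch of that container_of() idiom follows; my_pcpu and its online flag are illustrative names, not the Xen driver's definitions.

    /* Sketch: a sysfs show() callback steps from the embedded struct device
     * back to the structure it lives in. */
    #include <linux/container_of.h>
    #include <linux/device.h>
    #include <linux/sysfs.h>

    struct my_pcpu {
            u32 cpu_id;
            u32 flags;                      /* bit 0: online */
            struct device dev;              /* embedded, registered device */
    };

    static ssize_t online_show(struct device *dev,
                               struct device_attribute *attr, char *buf)
    {
            /* dev points at the member; container_of() recovers the parent */
            struct my_pcpu *p = container_of(dev, struct my_pcpu, dev);

            return sysfs_emit(buf, "%u\n", p->flags & 1);
    }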
/linux/arch/arm/common/
  mcpm_platsmp.c
      20  unsigned int *pcpu, unsigned int *pcluster)  in cpu_to_pcpu()
      25  *pcpu = MPIDR_AFFINITY_LEVEL(mpidr, 0);  in cpu_to_pcpu()
      31  unsigned int pcpu, pcluster, ret;  in mcpm_boot_secondary()
      34  cpu_to_pcpu(cpu, &pcpu, &pcluster);  in mcpm_boot_secondary()
      37  __func__, cpu, pcpu, pcluster);  in mcpm_boot_secondary()
      39  mcpm_set_entry_vector(pcpu, pcluster, NULL);  in mcpm_boot_secondary()
      40  ret = mcpm_cpu_power_up(pcpu, pcluster);  in mcpm_boot_secondary()
      43  mcpm_set_entry_vector(pcpu, pcluster, secondary_startup);  in mcpm_boot_secondary()
      58  unsigned int pcpu, pcluster;  in mcpm_cpu_kill()
      60  cpu_to_pcpu(cpu, &pcpu, &pcluster);  in mcpm_cpu_kill()
          [all …]
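MCPM translates a logical CPU number into a physical (core, cluster) pair from that CPU's MPIDR before powering it up. A sketch of the mapping done by cpu_to_pcpu() above, assuming the standard 32-bit ARM helpers cpu_logical_map() and MPIDR_AFFINITY_LEVEL(); the function name is illustrative.

    #include <asm/cputype.h>
    #include <asm/smp_plat.h>

    static void logical_to_physical(unsigned int cpu,
                                    unsigned int *pcpu, unsigned int *pcluster)
    {
            unsigned int mpidr = cpu_logical_map(cpu);

            *pcpu = MPIDR_AFFINITY_LEVEL(mpidr, 0);      /* core within cluster */
            *pcluster = MPIDR_AFFINITY_LEVEL(mpidr, 1);  /* cluster number */
    }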
/linux/arch/s390/kernel/
  smp.c
      78  DEFINE_PER_CPU(struct pcpu, pcpu_devices);
      85  static struct pcpu *ipl_pcpu;
     135  static int pcpu_sigp_retry(struct pcpu *pcpu, u8 order, u32 parm)  in pcpu_sigp_retry()
     140  cc = __pcpu_sigp(pcpu->address, order, parm, NULL);  in pcpu_sigp_retry()
     149  static inline int pcpu_stopped(struct pcpu *pcpu)  in pcpu_stopped()
     153  if (__pcpu_sigp(pcpu->address, SIGP_SENSE,  in pcpu_stopped()
     159  static inline int pcpu_running(struct pcpu *pcpu)  in pcpu_running()
     161  if (__pcpu_sigp(pcpu->address, SIGP_SENSE_RUNNING,  in pcpu_running()
     171  static struct pcpu *pcpu_find_address(const struct cpumask *mask, u16 address)  in pcpu_find_address()
     181  static void pcpu_ec_call(struct pcpu *pcpu, int ec_bit)  in pcpu_ec_call()
          [all …]
/linux/arch/x86/events/amd/
  ibs.c
      95  struct cpu_perf_ibs __percpu *pcpu;
     429  struct cpu_perf_ibs *pcpu = this_cpu_ptr(perf_ibs->pcpu);  in perf_ibs_start()
     449  set_bit(IBS_STARTED, pcpu->state);  in perf_ibs_start()
     450  clear_bit(IBS_STOPPING, pcpu->state);  in perf_ibs_start()
     460  struct cpu_perf_ibs *pcpu = this_cpu_ptr(perf_ibs->pcpu);  in perf_ibs_stop()
     464  if (test_and_set_bit(IBS_STOPPING, pcpu->state))  in perf_ibs_stop()
     467  stopping = test_bit(IBS_STARTED, pcpu->state);  in perf_ibs_stop()
     481  set_bit(IBS_STOPPED, pcpu->state);  in perf_ibs_stop()
     492  clear_bit(IBS_STARTED, pcpu->state);  in perf_ibs_stop()
     513  struct cpu_perf_ibs *pcpu = this_cpu_ptr(perf_ibs->pcpu);  in perf_ibs_add()
          [all …]
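perf_ibs keeps a __percpu pointer in its device-wide structure and touches only the running CPU's instance via this_cpu_ptr(), using atomic bitops on a small state bitmap. A hedged sketch of that layout with illustrative names:

    #include <linux/bitops.h>
    #include <linux/errno.h>
    #include <linux/percpu.h>

    enum { MY_STARTED, MY_STOPPING, MY_NR_BITS };

    struct my_cpu_state {
            DECLARE_BITMAP(state, MY_NR_BITS);
    };

    struct my_pmu {
            struct my_cpu_state __percpu *pcpu;     /* one instance per CPU */
    };

    static int my_pmu_init(struct my_pmu *pmu)
    {
            pmu->pcpu = alloc_percpu(struct my_cpu_state);
            return pmu->pcpu ? 0 : -ENOMEM;
    }

    static void my_pmu_start(struct my_pmu *pmu)
    {
            struct my_cpu_state *cs = this_cpu_ptr(pmu->pcpu);

            set_bit(MY_STARTED, cs->state);
            clear_bit(MY_STOPPING, cs->state);
    }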
/linux/kernel/
  padata.c
      54  target_cpu = cpumask_first(pd->cpumask.pcpu);  in padata_index_to_cpu()
      56  target_cpu = cpumask_next(target_cpu, pd->cpumask.pcpu);  in padata_index_to_cpu()
      67  int cpu_index = seq_nr % cpumask_weight(pd->cpumask.pcpu);  in padata_cpu_hash()
     277  pd->cpu = cpumask_next_wrap(cpu, pd->cpumask.pcpu, -1, false);  in padata_find_next()
     435  cpumask_copy(attrs->cpumask, pinst->cpumask.pcpu);  in padata_setup_cpumasks()
     573  for_each_cpu(cpu, pd->cpumask.pcpu) {  in padata_init_reorder_list()
     599  if (!alloc_cpumask_var(&pd->cpumask.pcpu, GFP_KERNEL))  in padata_alloc_pd()
     604  cpumask_and(pd->cpumask.pcpu, pinst->cpumask.pcpu, cpu_online_mask);  in padata_alloc_pd()
     612  pd->cpu = cpumask_first(pd->cpumask.pcpu);  in padata_alloc_pd()
     618  free_cpumask_var(pd->cpumask.pcpu);  in padata_alloc_pd()
          [all …]
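padata maps a sequence number onto the Nth CPU of its parallel cpumask. A sketch of the same index-to-CPU walk, restricted to online CPUs; the allowed mask and function name are illustrative, not padata's helpers:

    #include <linux/cpumask.h>
    #include <linux/errno.h>
    #include <linux/gfp.h>

    static int nth_online_cpu(const struct cpumask *allowed, unsigned int index)
    {
            cpumask_var_t mask;
            int cpu;

            if (!alloc_cpumask_var(&mask, GFP_KERNEL))
                    return -ENOMEM;

            cpumask_and(mask, allowed, cpu_online_mask);
            if (cpumask_empty(mask)) {
                    free_cpumask_var(mask);
                    return -EINVAL;
            }

            index %= cpumask_weight(mask);          /* wrap large indices */
            cpu = cpumask_first(mask);
            while (index--)
                    cpu = cpumask_next(cpu, mask);

            free_cpumask_var(mask);
            return cpu;
    }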
/linux/arch/alpha/kernel/
  err_common.c
     242  cdl_process_console_data_log(int cpu, struct percpu_struct *pcpu)  in cdl_process_console_data_log()
     245  (IDENT_ADDR | pcpu->console_data_log_pa);  in cdl_process_console_data_log()
     256  pcpu->console_data_log_pa = 0;  in cdl_process_console_data_log()
     266  struct percpu_struct *pcpu;  in cdl_check_console_data_log()
     270  pcpu = (struct percpu_struct *)  in cdl_check_console_data_log()
     273  if (pcpu->console_data_log_pa)  in cdl_check_console_data_log()
     274  cdl_process_console_data_log(cpu, pcpu);  in cdl_check_console_data_log()
/linux/lib/
  test_vmalloc.c
     292  void __percpu **pcpu;  in pcpu_alloc_test()
     296  pcpu = vmalloc(sizeof(void __percpu *) * 35000);  in pcpu_alloc_test()
     297  if (!pcpu)  in pcpu_alloc_test()
     308  pcpu[i] = __alloc_percpu(size, align);  in pcpu_alloc_test()
     309  if (!pcpu[i])  in pcpu_alloc_test()
     314  free_percpu(pcpu[i]);  in pcpu_alloc_test()
     316  vfree(pcpu);  in pcpu_alloc_test()
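test_vmalloc.c exercises the raw per-CPU allocator directly. A short sketch of the same API: __alloc_percpu(size, align) returns a __percpu cookie, per_cpu_ptr()/this_cpu_*() resolve it per CPU, and free_percpu() releases it. The counter is only a demonstration, not part of the test module:

    #include <linux/cpumask.h>
    #include <linux/errno.h>
    #include <linux/percpu.h>

    static int percpu_counter_demo(void)
    {
            unsigned long __percpu *cnt;
            unsigned long sum = 0;
            int cpu;

            cnt = __alloc_percpu(sizeof(unsigned long), __alignof__(unsigned long));
            if (!cnt)
                    return -ENOMEM;

            this_cpu_inc(*cnt);                     /* bump this CPU's slot */

            for_each_possible_cpu(cpu)
                    sum += *per_cpu_ptr(cnt, cpu);  /* slots start zeroed */

            free_percpu(cnt);
            return sum == 1 ? 0 : -EINVAL;
    }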
/linux/arch/s390/include/asm/
  processor.h
      44  struct pcpu {
      54  DECLARE_PER_CPU(struct pcpu, pcpu_devices);
      58  static __always_inline struct pcpu *this_pcpu(void)  in this_pcpu()
      60  return (struct pcpu *)(get_lowcore()->pcpu);  in this_pcpu()
  smp.h
      47  u16 pcpu = stap();  in smp_stop_cpu()
      50  __pcpu_sigp(pcpu, SIGP_STOP, 0, NULL);  in smp_stop_cpu()
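The s390 headers show the usual split: the per-CPU variable is defined once in smp.c with DEFINE_PER_CPU() and declared for users with DECLARE_PER_CPU(), then reached through per_cpu_ptr()/this_cpu_ptr(). A generic sketch of that arrangement; my_state is a hypothetical type, not the s390 struct pcpu:

    #include <linux/percpu-defs.h>
    #include <linux/percpu.h>

    struct my_state {
            u16 address;
            unsigned long flags;
    };

    /* in exactly one .c file */
    DEFINE_PER_CPU(struct my_state, my_states);

    /* in the shared header */
    DECLARE_PER_CPU(struct my_state, my_states);

    static inline struct my_state *my_state_of(int cpu)
    {
            return per_cpu_ptr(&my_states, cpu);    /* a specific CPU's copy */
    }

    static inline struct my_state *my_this_state(void)
    {
            return this_cpu_ptr(&my_states);        /* the running CPU's copy */
    }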
/linux/tools/testing/selftests/kvm/
  arch_timer.c
      83  uint32_t pcpu;  in test_get_pcpu()
      92  pcpu = rand() % nproc_conf;  in test_get_pcpu()
      93  } while (!CPU_ISSET(pcpu, &online_cpuset));  in test_get_pcpu()
      95  return pcpu;  in test_get_pcpu()
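The selftest retries random physical CPU numbers until it finds one that is actually usable. A userspace sketch in the same spirit, drawing from the calling thread's affinity mask and then pinning the thread there with sched_setaffinity(); the function name is illustrative:

    #define _GNU_SOURCE
    #include <sched.h>
    #include <stdlib.h>
    #include <unistd.h>

    static int pin_to_random_cpu(void)
    {
            cpu_set_t set;
            long nproc = sysconf(_SC_NPROCESSORS_CONF);
            int cpu;

            if (nproc <= 0 || sched_getaffinity(0, sizeof(set), &set))
                    return -1;

            do {
                    cpu = rand() % nproc;           /* retry until an allowed CPU is hit */
            } while (!CPU_ISSET(cpu, &set));

            CPU_ZERO(&set);
            CPU_SET(cpu, &set);
            return sched_setaffinity(0, sizeof(set), &set) ? -1 : cpu;
    }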
/linux/arch/arm64/kernel/
  acpi.c
     445  int *pcpu)  in acpi_map_cpu()
     448  if (*pcpu < 0) {  in acpi_map_cpu()
     450  return *pcpu;  in acpi_map_cpu()
/linux/arch/powerpc/kvm/
  book3s_hv_builtin.c
     241  int cpu = vc->pcpu;  in kvmhv_interrupt_vcore()
     620  void kvmppc_check_need_tlb_flush(struct kvm *kvm, int pcpu)  in kvmppc_check_need_tlb_flush()
     622  if (cpumask_test_cpu(pcpu, &kvm->arch.need_tlb_flush)) {  in kvmppc_check_need_tlb_flush()
     626  cpumask_clear_cpu(pcpu, &kvm->arch.need_tlb_flush);  in kvmppc_check_need_tlb_flush()
  book3s_hv_rm_xics.c
     718  int pcpu = raw_smp_processor_id();  in ics_rm_eoi()
     720  pcpu = cpu_first_thread_sibling(pcpu);  in ics_rm_eoi()
     722  if (state->intr_cpu != pcpu) {  in ics_rm_eoi()
     724  xics_opal_set_server(state->host_irq, pcpu);  in ics_rm_eoi()
  book3s_hv_p9_entry.c
     453  static void check_need_tlb_flush(struct kvm *kvm, int pcpu,  in check_need_tlb_flush()
     465  if (likely(!cpumask_test_cpu(pcpu, need_tlb_flush)))  in check_need_tlb_flush()
     477  for (i = cpu_first_tlb_thread_sibling(pcpu);  in check_need_tlb_flush()
     478  i <= cpu_last_tlb_thread_sibling(pcpu);  in check_need_tlb_flush()
     491  cpumask_clear_cpu(pcpu, need_tlb_flush);  in check_need_tlb_flush()
     703  check_need_tlb_flush(kvm, vc->pcpu, nested);  in kvmhv_vcpu_entry_p9()
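All three KVM files track pending TLB flushes in a cpumask keyed by physical CPU: the requester sets bits, and each CPU clears its own bit once the local flush is done. A stripped-down sketch of that bookkeeping pattern; the mask, names, and do_local_flush() callback are illustrative, not the KVM code:

    #include <linux/cpumask.h>
    #include <linux/smp.h>

    static struct cpumask need_flush;

    static void request_flush_on_all_cpus(void)
    {
            cpumask_setall(&need_flush);            /* every CPU owes a flush */
    }

    static void maybe_flush_this_cpu(void (*do_local_flush)(void))
    {
            int cpu = raw_smp_processor_id();

            if (!cpumask_test_cpu(cpu, &need_flush))
                    return;                         /* nothing pending here */

            do_local_flush();
            cpumask_clear_cpu(cpu, &need_flush);
    }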
/linux/drivers/crypto/caam/
  qi.c
     443  int *pcpu = &get_cpu_var(last_cpu);  in caam_drv_ctx_init()
     445  *pcpu = cpumask_next(*pcpu, cpus);  in caam_drv_ctx_init()
     446  if (*pcpu >= nr_cpu_ids)  in caam_drv_ctx_init()
     447  *pcpu = cpumask_first(cpus);  in caam_drv_ctx_init()
     448  *cpu = *pcpu;  in caam_drv_ctx_init()
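caam_drv_ctx_init() spreads new contexts across the available CPUs by remembering the last CPU it handed out and wrapping past the end of the mask. A sketch of that round-robin selection; the per-CPU last_pick variable and function name are illustrative:

    #include <linux/cpumask.h>
    #include <linux/percpu.h>

    static DEFINE_PER_CPU(int, last_pick) = -1;

    static int pick_next_cpu(const struct cpumask *cpus)
    {
            /* get_cpu_var() disables preemption while we update our slot */
            int *last = &get_cpu_var(last_pick);
            int cpu;

            cpu = cpumask_next(*last, cpus);
            if (cpu >= nr_cpu_ids)
                    cpu = cpumask_first(cpus);      /* wrap around the mask */

            *last = cpu;
            put_cpu_var(last_pick);
            return cpu;
    }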
/linux/drivers/net/ethernet/stmicro/stmmac/
  stmmac_ethtool.c
     553  struct stmmac_pcpu_stats *pcpu;  in stmmac_get_rx_normal_irq_n()
     557  pcpu = per_cpu_ptr(priv->xstats.pcpu_stats, cpu);  in stmmac_get_rx_normal_irq_n()
     559  start = u64_stats_fetch_begin(&pcpu->syncp);  in stmmac_get_rx_normal_irq_n()
     560  irq_n = u64_stats_read(&pcpu->rx_normal_irq_n[q]);  in stmmac_get_rx_normal_irq_n()
     561  } while (u64_stats_fetch_retry(&pcpu->syncp, start));  in stmmac_get_rx_normal_irq_n()
     574  struct stmmac_pcpu_stats *pcpu;  in stmmac_get_tx_normal_irq_n()
     578  pcpu = per_cpu_ptr(priv->xstats.pcpu_stats, cpu);  in stmmac_get_tx_normal_irq_n()
     580  start = u64_stats_fetch_begin(&pcpu->syncp);  in stmmac_get_tx_normal_irq_n()
     581  irq_n = u64_stats_read(&pcpu->tx_normal_irq_n[q]);  in stmmac_get_tx_normal_irq_n()
     582  } while (u64_stats_fetch_retry(&pcpu->syncp, start));  in stmmac_get_tx_normal_irq_n()
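The stmmac ethtool code reads 64-bit counters out of per-CPU stats blocks under a u64_stats_sync retry loop so readers always see a consistent value. A sketch of that read side with an illustrative stats structure:

    #include <linux/cpumask.h>
    #include <linux/percpu.h>
    #include <linux/u64_stats_sync.h>

    struct my_pcpu_stats {
            u64_stats_t rx_packets;
            struct u64_stats_sync syncp;
    };

    static u64 sum_rx_packets(struct my_pcpu_stats __percpu *stats)
    {
            u64 total = 0;
            int cpu;

            for_each_possible_cpu(cpu) {
                    struct my_pcpu_stats *s = per_cpu_ptr(stats, cpu);
                    unsigned int start;
                    u64 v;

                    do {
                            start = u64_stats_fetch_begin(&s->syncp);
                            v = u64_stats_read(&s->rx_packets);
                    } while (u64_stats_fetch_retry(&s->syncp, start));

                    total += v;
            }
            return total;
    }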
/linux/kernel/sched/
  psi.c
     172  .pcpu = &system_group_pcpu,
     185  seqcount_init(&per_cpu_ptr(group->pcpu, cpu)->seq);  in group_init()
     253  struct psi_group_cpu *groupc = per_cpu_ptr(group->pcpu, cpu);  in get_recent_times()
     781  groupc = per_cpu_ptr(group->pcpu, cpu);  in psi_group_change()
     931  if (per_cpu_ptr(group->pcpu, cpu)->state_mask &  in psi_task_switch()
    1024  groupc = per_cpu_ptr(group->pcpu, cpu);  in psi_account_irqtime()
    1112  cgroup->psi->pcpu = alloc_percpu(struct psi_group_cpu);  in psi_cgroup_alloc()
    1113  if (!cgroup->psi->pcpu) {  in psi_cgroup_alloc()
    1128  free_percpu(cgroup->psi->pcpu);  in psi_cgroup_free()
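PSI allocates one psi_group_cpu per CPU and initializes each instance's seqcount before the group goes live. A sketch of that setup pattern with an illustrative type:

    #include <linux/cpumask.h>
    #include <linux/errno.h>
    #include <linux/percpu.h>
    #include <linux/seqlock.h>

    struct my_group_cpu {
            seqcount_t seq;                 /* guards the fields below */
            u32 state_mask;
    };

    struct my_group {
            struct my_group_cpu __percpu *pcpu;
    };

    static int my_group_init(struct my_group *group)
    {
            int cpu;

            group->pcpu = alloc_percpu(struct my_group_cpu);
            if (!group->pcpu)
                    return -ENOMEM;

            for_each_possible_cpu(cpu)
                    seqcount_init(&per_cpu_ptr(group->pcpu, cpu)->seq);

            return 0;
    }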
/linux/include/linux/
  notifier.h
     107  #define SRCU_NOTIFIER_INIT(name, pcpu) \
     112  .srcu = __SRCU_STRUCT_INIT(name.srcu, name.srcuu, pcpu), \
  cpufreq.h
    1123  static inline int of_perf_domain_get_sharing_cpumask(int pcpu, const char *list_name,  in of_perf_domain_get_sharing_cpumask()
    1130  ret = parse_perf_domain(pcpu, list_name, cell_name, pargs);  in of_perf_domain_get_sharing_cpumask()
    1134  cpumask_set_cpu(pcpu, cpumask);  in of_perf_domain_get_sharing_cpumask()
    1137  if (cpu == pcpu)  in of_perf_domain_get_sharing_cpumask()
    1179  static inline int of_perf_domain_get_sharing_cpumask(int pcpu, const char *list_name,  in of_perf_domain_get_sharing_cpumask()
/linux/drivers/base/
  devres.c
    1200  void __percpu *pcpu;  in __devm_alloc_percpu()
    1202  pcpu = __alloc_percpu(size, align);  in __devm_alloc_percpu()
    1203  if (!pcpu)  in __devm_alloc_percpu()
    1208  free_percpu(pcpu);  in __devm_alloc_percpu()
    1212  *(void __percpu **)p = pcpu;  in __devm_alloc_percpu()
    1216  return pcpu;  in __devm_alloc_percpu()
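__devm_alloc_percpu() backs the devm_alloc_percpu() helper: the per-CPU allocation is tied to the device and freed automatically on unbind. A sketch of the consumer side in a probe routine; the driver and its stats type are hypothetical:

    #include <linux/device.h>
    #include <linux/percpu.h>
    #include <linux/platform_device.h>
    #include <linux/slab.h>

    struct my_stats {
            u64 packets;
    };

    struct my_priv {
            struct my_stats __percpu *stats;
    };

    static int my_probe(struct platform_device *pdev)
    {
            struct my_priv *priv;

            priv = devm_kzalloc(&pdev->dev, sizeof(*priv), GFP_KERNEL);
            if (!priv)
                    return -ENOMEM;

            /* freed for us when the device is unbound */
            priv->stats = devm_alloc_percpu(&pdev->dev, struct my_stats);
            if (!priv->stats)
                    return -ENOMEM;

            platform_set_drvdata(pdev, priv);
            return 0;
    }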
/linux/arch/powerpc/platforms/pseries/
  hotplug-cpu.c
     121  unsigned int pcpu = get_hard_smp_processor_id(cpu);  in pseries_cpu_die()
     125  cpu_status = smp_query_cpu_stopped(pcpu);  in pseries_cpu_die()
     132  cpu, pcpu);  in pseries_cpu_die()
     141  cpu, pcpu);  in pseries_cpu_die()
/linux/net/core/
  page_pool.c
      99  const struct page_pool_recycle_stats *pcpu =  in page_pool_get_stats()
     102  stats->recycle_stats.cached += pcpu->cached;  in page_pool_get_stats()
     103  stats->recycle_stats.cache_full += pcpu->cache_full;  in page_pool_get_stats()
     104  stats->recycle_stats.ring += pcpu->ring;  in page_pool_get_stats()
     105  stats->recycle_stats.ring_full += pcpu->ring_full;  in page_pool_get_stats()
     106  stats->recycle_stats.released_refcnt += pcpu->released_refcnt;  in page_pool_get_stats()
/linux/tools/perf/Documentation/
  perf-lock.txt
     190  rtmutex, rwlock-rt, rwlock-rt:R, rwlock-rt:W, pcpu-sem, pcpu-sem:R, pcpu-sem:W,
/linux/include/xen/interface/
  sched.h
     141  int32_t pcpu;
/linux/arch/loongarch/kernel/
  acpi.c
     346  int acpi_map_cpu(acpi_handle handle, phys_cpuid_t physid, u32 acpi_id, int *pcpu)  in acpi_map_cpu()
     360  *pcpu = cpu;  in acpi_map_cpu()