Home
last modified time | relevance | path

Searched refs:new_cpu (Results 1 – 16 of 16) sorted by relevance

/linux/drivers/irqchip/
H A D  irq-bcm6345-l1.c  194 unsigned int new_cpu; in bcm6345_l1_set_affinity() local
198 new_cpu = cpumask_first_and_and(&intc->cpumask, dest, cpu_online_mask); in bcm6345_l1_set_affinity()
199 if (new_cpu >= nr_cpu_ids) in bcm6345_l1_set_affinity()
202 dest = cpumask_of(new_cpu); in bcm6345_l1_set_affinity()
205 if (old_cpu != new_cpu) { in bcm6345_l1_set_affinity()
217 irq_data_update_effective_affinity(d, cpumask_of(new_cpu)); in bcm6345_l1_set_affinity()
/linux/kernel/sched/
H A D  cpudeadline.c  175 int old_idx, new_cpu; in cpudl_clear() local
190 new_cpu = cp->elements[cp->size - 1].cpu; in cpudl_clear()
192 cp->elements[old_idx].cpu = new_cpu; in cpudl_clear()
194 cp->elements[new_cpu].idx = old_idx; in cpudl_clear()
H A D  fair.c  3603 static void update_scan_period(struct task_struct *p, int new_cpu) in account_entity_dequeue()
3606 int dst_nid = cpu_to_node(new_cpu);
3650 static inline void update_scan_period(struct task_struct *p, int new_cpu)
7493 int new_cpu = cpu; in select_idle_capacity()
7521 new_cpu = sched_balance_find_dst_group_cpu(group, p, cpu);
7522 if (new_cpu == cpu) { in asym_fits_cpu()
7528 /* Now try balancing at a lower domain level of 'new_cpu': */ in asym_fits_cpu()
7529 cpu = new_cpu; in asym_fits_cpu()
7540 return new_cpu; in select_idle_sibling()
8575 int new_cpu in pick_next_task_fair()
3522 update_scan_period(struct task_struct * p,int new_cpu) update_scan_period() argument
3569 update_scan_period(struct task_struct * p,int new_cpu) update_scan_period() argument
7209 int new_cpu = cpu; sched_balance_find_dst_cpu() local
8192 int new_cpu = prev_cpu; select_task_rq_fair() local
8261 migrate_task_rq_fair(struct task_struct * p,int new_cpu) migrate_task_rq_fair() argument
[all...]
H A D  core.c  694 * [S] ->cpu = new_cpu [L] task_rq()
2402 struct task_struct *p, int new_cpu) in migration_cpu_stop()
2407 set_task_cpu(p, new_cpu); in migration_cpu_stop()
2410 rq = cpu_rq(new_cpu); in migration_cpu_stop()
2413 WARN_ON_ONCE(task_cpu(p) != new_cpu); in migration_cpu_stop()
3215 void set_task_cpu(struct task_struct *p, unsigned int new_cpu) in __migrate_swap_task()
3252 WARN_ON_ONCE(!cpu_online(new_cpu)); in migrate_swap_stop()
3257 trace_sched_migrate_task(p, new_cpu); in migrate_swap_stop()
3259 if (task_cpu(p) != new_cpu) { in migrate_swap_stop()
3261 p->sched_class->migrate_task_rq(p, new_cpu); in migrate_swap_stop()
2335 move_queued_task(struct rq * rq,struct rq_flags * rf,struct task_struct * p,int new_cpu) move_queued_task() argument
3148 set_task_cpu(struct task_struct * p,unsigned int new_cpu) set_task_cpu() argument
[all...]
H A D  sched.h  2324 void (*migrate_task_rq)(struct task_struct *p, int new_cpu);
H A D  deadline.c  2262 static void migrate_task_rq_dl(struct task_struct *p, int new_cpu __maybe_unused) in find_later_rq()
/linux/arch/x86/hyperv/
H A D  hv_init.c  235 unsigned int new_cpu; in hv_cpu_die() local
274 new_cpu = cpumask_any_but(cpu_online_mask, cpu); in hv_cpu_die()
276 if (new_cpu < nr_cpu_ids) in hv_cpu_die()
277 re_ctrl.target_vp = hv_vp_index[new_cpu]; in hv_cpu_die()
/linux/tools/perf/scripts/python/
H A D  sched-migration.py  191 def migrate(self, ts_list, new, old_cpu, new_cpu): argument
192 if old_cpu == new_cpu:
199 new_rq = self.prev.rqs[new_cpu]
201 self.rqs[new_cpu] = in_rq
208 self.event_cpus.append(new_cpu)
/linux/drivers/gpu/drm/amd/amdkfd/
H A D  kfd_device.c  1061 int cpu, new_cpu; in kfd_queue_work()
1063 cpu = new_cpu = smp_processor_id(); in kfd_queue_work()
1065 new_cpu = cpumask_next(new_cpu, cpu_online_mask) % nr_cpu_ids; in kfd_queue_work()
1066 if (cpu_to_node(new_cpu) == numa_node_id()) in kfd_queue_work()
1068 } while (cpu != new_cpu); in kfd_queue_work()
1070 queue_work_on(new_cpu, wq, work); in kfd_queue_work()
1060 int cpu, new_cpu; kfd_queue_work() local
/linux/drivers/hv/
H A D  hyperv_vmbus.h  454 unsigned int new_cpu) in hv_update_allocated_cpus() argument
456 hv_set_allocated_cpu(new_cpu); in hv_update_allocated_cpus()
/linux/drivers/perf/
H A D  thunderx2_pmu.c  932 unsigned int new_cpu; in tx2_uncore_pmu_offline_cpu() local
943 new_cpu = cpumask_any_and_but(cpumask_of_node(tx2_pmu->node), in tx2_uncore_pmu_offline_cpu()
946 tx2_pmu->cpu = new_cpu; in tx2_uncore_pmu_offline_cpu()
947 if (new_cpu >= nr_cpu_ids) in tx2_uncore_pmu_offline_cpu()
949 perf_pmu_migrate_context(&tx2_pmu->pmu, cpu, new_cpu); in tx2_uncore_pmu_offline_cpu()
/linux/arch/powerpc/perf/
H A D  imc-pmu.c  344 static void nest_change_cpu_context(int old_cpu, int new_cpu) in nest_change_cpu_context() argument
348 if (old_cpu < 0 || new_cpu < 0) in nest_change_cpu_context()
352 perf_pmu_migrate_context(&(*pn)->pmu, old_cpu, new_cpu); in nest_change_cpu_context()
/linux/arch/arm64/kvm/vgic/
H A D  vgic.c  696 struct vgic_cpu *new_cpu = &target_vcpu->arch.vgic_cpu; in vgic_prune_ap_list() local
700 list_add_tail(&irq->ap_list, &new_cpu->ap_list_head); in vgic_prune_ap_list()
/linux/kernel/
H A D  workqueue.c  2214 int new_cpu; in wq_select_unbound_cpu()
2223 new_cpu = __this_cpu_read(wq_rr_cpu_last); in wq_select_unbound_cpu()
2224 new_cpu = cpumask_next_and(new_cpu, wq_unbound_cpumask, cpu_online_mask); in wq_select_unbound_cpu()
2225 if (unlikely(new_cpu >= nr_cpu_ids)) { in wq_select_unbound_cpu()
2226 new_cpu = cpumask_first_and(wq_unbound_cpumask, cpu_online_mask); in wq_select_unbound_cpu()
2227 if (unlikely(new_cpu >= nr_cpu_ids)) in wq_select_unbound_cpu()
2230 __this_cpu_write(wq_rr_cpu_last, new_cpu); in wq_select_unbound_cpu()
2232 return new_cpu; in wq_select_unbound_cpu()
2216 int new_cpu; wq_select_unbound_cpu() local
/linux/drivers/scsi/lpfc/
H A D  lpfc_init.c  12434 int i, cpu, idx, next_idx, new_cpu, start_cpu, first_cpu; in lpfc_cpu_affinity_check() local
12498 new_cpu = start_cpu; in lpfc_cpu_affinity_check()
12500 new_cpup = &phba->sli4_hba.cpu_map[new_cpu]; in lpfc_cpu_affinity_check()
12505 new_cpu = lpfc_next_present_cpu(new_cpu); in lpfc_cpu_affinity_check()
12517 start_cpu = lpfc_next_present_cpu(new_cpu); in lpfc_cpu_affinity_check()
12523 cpu, cpup->eq, new_cpu, in lpfc_cpu_affinity_check()
12544 new_cpu = start_cpu; in lpfc_cpu_affinity_check()
12546 new_cpup = &phba->sli4_hba.cpu_map[new_cpu]; in lpfc_cpu_affinity_check()
12550 new_cpu in lpfc_cpu_affinity_check()
[all...]
/linux/tools/perf/
H A D  builtin-sched.c  1626 bool new_cpu = false; in map_switch_event() local
1640 new_cpu = true; in map_switch_event()
1739 if (sched->map.comp && new_cpu) in map_switch_event()