/linux/drivers/perf/
arm_pmu_platform.c
    48  struct pmu_hw_events __percpu *hw_events = pmu->hw_events;  in pmu_parse_percpu_irq()  (local)
    55  per_cpu(hw_events->irq, cpu) = irq;  in pmu_parse_percpu_irq()
    99  struct pmu_hw_events __percpu *hw_events = pmu->hw_events;  in pmu_parse_irqs()  (local)
   144  if (per_cpu(hw_events->irq, cpu)) {  in pmu_parse_irqs()
   149  per_cpu(hw_events->irq, cpu) = irq;  in pmu_parse_irqs()
   158  struct pmu_hw_events __percpu *hw_events = armpmu->hw_events;  in armpmu_request_irqs()  (local)
   162  int irq = per_cpu(hw_events->irq, cpu);  in armpmu_request_irqs()
   177  struct pmu_hw_events __percpu *hw_events = armpmu->hw_events;  in armpmu_free_irqs()  (local)
   180  int irq = per_cpu(hw_events->irq, cpu);  in armpmu_free_irqs()
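
Every hit in arm_pmu_platform.c follows the same pattern: the driver keeps the counter IRQ number in a per-CPU field of pmu_hw_events and reads or writes it through per_cpu(). A minimal sketch of that bookkeeping, using invented my_* names rather than the driver's real types, might look like this:

#include <linux/percpu.h>
#include <linux/cpumask.h>
#include <linux/printk.h>

/* Hypothetical stand-ins for struct arm_pmu / struct pmu_hw_events. */
struct my_hw_events {
    int irq;                    /* IRQ number for this CPU's counters */
};

struct my_pmu {
    struct my_hw_events __percpu *hw_events;
};

/* Record one IRQ for a single CPU, in the style of pmu_parse_percpu_irq(). */
static void my_pmu_set_irq(struct my_pmu *pmu, int cpu, int irq)
{
    struct my_hw_events __percpu *hw_events = pmu->hw_events;

    per_cpu(hw_events->irq, cpu) = irq;
}

/* Walk every possible CPU and fetch whatever IRQ was recorded for it. */
static void my_pmu_show_irqs(struct my_pmu *pmu)
{
    struct my_hw_events __percpu *hw_events = pmu->hw_events;
    int cpu;

    for_each_possible_cpu(cpu) {
        int irq = per_cpu(hw_events->irq, cpu);

        if (irq)
            pr_info("cpu%d uses irq %d\n", cpu, irq);
    }
}

The per-CPU indirection lets each CPU have its own PMU interrupt line; the request/free paths above then just read back whatever IRQ was recorded for each CPU.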
starfive_starlink_pmu.c
    71  struct starlink_hw_events __percpu *hw_events;  (member)
   293  struct starlink_hw_events *hw_events =  in starlink_pmu_add()  (local)
   294          this_cpu_ptr(starlink_pmu->hw_events);  in starlink_pmu_add()
   296  unsigned long *used_mask = hw_events->used_mask;  in starlink_pmu_add()
   319  hw_events->events[idx] = event;  in starlink_pmu_add()
   333  struct starlink_hw_events *hw_events =  in starlink_pmu_del()  (local)
   334          this_cpu_ptr(starlink_pmu->hw_events);  in starlink_pmu_del()
   338  hw_events->events[hwc->idx] = NULL;  in starlink_pmu_del()
   339  clear_bit(hwc->idx, hw_events->used_mask);  in starlink_pmu_del()
   400  struct starlink_hw_events *hw_events =  in starlink_pmu_handle_irq()  (local)
   [all …]
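
starlink_pmu_add() and starlink_pmu_del() show the usual counter-slot bookkeeping: a used_mask bitmap claims an index and an events[] array remembers which perf_event owns it, both reached through this_cpu_ptr(). A simplified sketch of that claim/release logic follows; the counter count and the my_* names are made up for illustration:

#include <linux/bitmap.h>
#include <linux/bitops.h>
#include <linux/errno.h>
#include <linux/percpu.h>
#include <linux/perf_event.h>

#define MY_PMU_MAX_COUNTERS 16  /* made-up counter count */

/* Per-CPU bookkeeping, in the style of struct starlink_hw_events. */
struct my_hw_events {
    DECLARE_BITMAP(used_mask, MY_PMU_MAX_COUNTERS);
    struct perf_event *events[MY_PMU_MAX_COUNTERS];
};

struct my_pmu {
    struct my_hw_events __percpu *hw_events;
};

/* Claim a free counter index for @event; return it, or -EAGAIN if all busy. */
static int my_pmu_add(struct my_pmu *pmu, struct perf_event *event)
{
    struct my_hw_events *hw_events = this_cpu_ptr(pmu->hw_events);
    int idx;

    idx = find_first_zero_bit(hw_events->used_mask, MY_PMU_MAX_COUNTERS);
    if (idx >= MY_PMU_MAX_COUNTERS)
        return -EAGAIN;

    set_bit(idx, hw_events->used_mask);
    hw_events->events[idx] = event;
    event->hw.idx = idx;
    return idx;
}

/* Release the counter index that my_pmu_add() handed out. */
static void my_pmu_del(struct my_pmu *pmu, struct perf_event *event)
{
    struct my_hw_events *hw_events = this_cpu_ptr(pmu->hw_events);
    struct hw_perf_event *hwc = &event->hw;

    hw_events->events[hwc->idx] = NULL;
    clear_bit(hwc->idx, hw_events->used_mask);
}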
arm_pmu.c
   316  struct pmu_hw_events *hw_events = this_cpu_ptr(armpmu->hw_events);  in armpmu_del()  (local)
   321  hw_events->events[idx] = NULL;  in armpmu_del()
   322  armpmu->clear_event_idx(hw_events, event);  in armpmu_del()
   332  struct pmu_hw_events *hw_events = this_cpu_ptr(armpmu->hw_events);  in armpmu_add()  (local)
   341  idx = armpmu->get_event_idx(hw_events, event);  in armpmu_add()
   351  hw_events->events[idx] = event;  in armpmu_add()
   364  validate_event(struct pmu *pmu, struct pmu_hw_events *hw_events,  in validate_event()  (argument)
   387  return armpmu->get_event_idx(hw_events, event) >= 0;  in validate_event()
   524  struct pmu_hw_events *hw_events = this_cpu_ptr(armpmu->hw_events);  in armpmu_enable()  (local)
   525  bool enabled = !bitmap_empty(hw_events->used_mask, ARMPMU_MAX_HWEVENTS);  in armpmu_enable()
   [all …]
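
In arm_pmu.c the index allocation itself is delegated to the implementation's get_event_idx()/clear_event_idx() callbacks, and armpmu_enable() only touches the hardware when used_mask is non-empty. A rough sketch of that enable gate, with hypothetical my_* names standing in for the arm_pmu structures:

#include <linux/bitmap.h>
#include <linux/percpu.h>

#define MY_MAX_HWEVENTS 32  /* stand-in for ARMPMU_MAX_HWEVENTS */

struct my_hw_events {
    DECLARE_BITMAP(used_mask, MY_MAX_HWEVENTS);
};

struct my_pmu {
    struct my_hw_events __percpu *hw_events;
    void (*start)(struct my_pmu *pmu);  /* hardware-specific start hook */
};

/*
 * Same idea as armpmu_enable(): look at this CPU's used_mask and skip
 * touching the hardware when no counter is currently claimed.
 */
static void my_pmu_enable(struct my_pmu *pmu)
{
    struct my_hw_events *hw_events = this_cpu_ptr(pmu->hw_events);
    bool enabled = !bitmap_empty(hw_events->used_mask, MY_MAX_HWEVENTS);

    if (enabled)
        pmu->start(pmu);
}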
arm_pmu_acpi.c
   272  struct pmu_hw_events __percpu *hw_events = pmu->hw_events;  in pmu_irq_matches()  (local)
   279  int other_irq = per_cpu(hw_events->irq, cpu);  in pmu_irq_matches()
   303  struct pmu_hw_events __percpu *hw_events;  in arm_pmu_acpi_associate_pmu_cpu()  (local)
   304  hw_events = pmu->hw_events;  in arm_pmu_acpi_associate_pmu_cpu()
   305  per_cpu(hw_events->irq, cpu) = irq;  in arm_pmu_acpi_associate_pmu_cpu()
riscv_pmu_sbi.c
   406  struct cpu_hw_events *cpuc = this_cpu_ptr(rvpmu->hw_events);  in pmu_sbi_ctr_get_idx()
   465  struct cpu_hw_events *cpuc = this_cpu_ptr(rvpmu->hw_events);  in pmu_sbi_ctr_clear_idx()
   571  struct cpu_hw_events *cpu_hw_evt = per_cpu_ptr(pmu->hw_events, cpu);  in pmu_sbi_snapshot_free()
   588  struct cpu_hw_events *cpu_hw_evt = per_cpu_ptr(pmu->hw_events, cpu);  in pmu_sbi_snapshot_alloc()
   621  cpu_hw_evt = per_cpu_ptr(pmu->hw_events, cpu);  in pmu_sbi_snapshot_setup()
   656  struct cpu_hw_events *cpu_hw_evt = this_cpu_ptr(pmu->hw_events);  in pmu_sbi_ctr_read()
   737  struct cpu_hw_events *cpu_hw_evt = this_cpu_ptr(pmu->hw_events);  in pmu_sbi_ctr_stop()
   822  struct cpu_hw_events *cpu_hw_evt = this_cpu_ptr(pmu->hw_events);  in pmu_sbi_stop_hw_ctrs()
   941  struct cpu_hw_events *cpu_hw_evt = this_cpu_ptr(pmu->hw_events);  in pmu_sbi_start_overflow_mask()
  1059  struct cpu_hw_events *cpu_hw_evt = this_cpu_ptr(pmu->hw_events);  in pmu_sbi_starting_cpu()
   [all …]
riscv_pmu.c
   260  struct cpu_hw_events *cpuc = this_cpu_ptr(rvpmu->hw_events);  in riscv_pmu_add()
   284  struct cpu_hw_events *cpuc = this_cpu_ptr(rvpmu->hw_events);  in riscv_pmu_del()
   396  pmu->hw_events = alloc_percpu_gfp(struct cpu_hw_events, GFP_KERNEL);  in riscv_pmu_alloc()
   397  if (!pmu->hw_events) {  in riscv_pmu_alloc()
   403  cpuc = per_cpu_ptr(pmu->hw_events, cpuid);  in riscv_pmu_alloc()
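
riscv_pmu_alloc() is where the per-CPU hw_events storage comes from: alloc_percpu_gfp() creates one cpu_hw_events instance per possible CPU, and per_cpu_ptr() is then used to initialise each one. A sketch of that allocate/initialise/free cycle, again with invented my_* names:

#include <linux/cpumask.h>
#include <linux/errno.h>
#include <linux/gfp.h>
#include <linux/percpu.h>
#include <linux/perf_event.h>

struct my_cpu_hw_events {
    int n_events;
    struct perf_event *events[8];  /* made-up counter count */
};

struct my_pmu {
    struct my_cpu_hw_events __percpu *hw_events;
};

/* Allocate and zero-initialise the per-CPU event state, riscv_pmu_alloc() style. */
static int my_pmu_alloc_hw_events(struct my_pmu *pmu)
{
    struct my_cpu_hw_events *cpuc;
    int cpu;

    pmu->hw_events = alloc_percpu_gfp(struct my_cpu_hw_events, GFP_KERNEL);
    if (!pmu->hw_events)
        return -ENOMEM;

    for_each_possible_cpu(cpu) {
        cpuc = per_cpu_ptr(pmu->hw_events, cpu);
        cpuc->n_events = 0;
    }
    return 0;
}

static void my_pmu_free_hw_events(struct my_pmu *pmu)
{
    free_percpu(pmu->hw_events);
}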
cxl_pmu.c
    98  struct perf_event **hw_events;  (member)
   747  info->hw_events[idx] = event;  in cxl_pmu_event_add()
   763  info->hw_events[hwc->idx] = NULL;  in cxl_pmu_event_del()
   783  struct perf_event *event = info->hw_events[i];  in cxl_pmu_irq()
   837  info->hw_events = devm_kcalloc(dev, sizeof(*info->hw_events),  in cxl_pmu_probe()
   839  if (!info->hw_events)  in cxl_pmu_probe()
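
cxl_pmu.c is different: its hw_events is not per-CPU but a devm-managed flat array of perf_event pointers, one slot per hardware counter, indexed by hwc->idx. A sketch of that arrangement under assumed names (my_dev_pmu and friends are placeholders, not the CXL driver's types):

#include <linux/device.h>
#include <linux/errno.h>
#include <linux/gfp.h>
#include <linux/perf_event.h>

struct my_dev_pmu {
    int num_counters;
    struct perf_event **hw_events;  /* one slot per hardware counter */
};

/* Allocate the counter-to-event table once at probe time. */
static int my_dev_pmu_init(struct device *dev, struct my_dev_pmu *info,
                           int num_counters)
{
    info->num_counters = num_counters;
    info->hw_events = devm_kcalloc(dev, num_counters,
                                   sizeof(*info->hw_events), GFP_KERNEL);
    if (!info->hw_events)
        return -ENOMEM;
    return 0;
}

/* Remember which event owns counter @idx, and forget it again on delete. */
static void my_dev_pmu_event_add(struct my_dev_pmu *info,
                                 struct perf_event *event, int idx)
{
    event->hw.idx = idx;
    info->hw_events[idx] = event;
}

static void my_dev_pmu_event_del(struct my_dev_pmu *info,
                                 struct perf_event *event)
{
    info->hw_events[event->hw.idx] = NULL;
}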
arm_xscale_pmu.c
   149  struct pmu_hw_events *cpuc = this_cpu_ptr(cpu_pmu->hw_events);  in xscale1pmu_handle_irq()
   488  struct pmu_hw_events *cpuc = this_cpu_ptr(cpu_pmu->hw_events);  in xscale2pmu_handle_irq()
arm_v6_pmu.c
   242  struct pmu_hw_events *cpuc = this_cpu_ptr(cpu_pmu->hw_events);  in armv6pmu_handle_irq()
apple_m1_cpu_pmu.c
   416  struct pmu_hw_events *cpuc = this_cpu_ptr(cpu_pmu->hw_events);  in m1_pmu_handle_irq()
arm_pmuv3.c
   771  struct pmu_hw_events *cpuc = this_cpu_ptr(cpu_pmu->hw_events);  in armv8pmu_enable_user_access()
   844  struct pmu_hw_events *cpuc = this_cpu_ptr(cpu_pmu->hw_events);  in armv8pmu_handle_irq()
arm_v7_pmu.c
   920  struct pmu_hw_events *cpuc = this_cpu_ptr(cpu_pmu->hw_events);  in armv7pmu_handle_irq()
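
The xscale, v6, Apple M1, PMUv3 and v7 hits above are all the same one-liner: the overflow interrupt handler looks up this CPU's pmu_hw_events with this_cpu_ptr() before walking its counters. A bare-bones handler skeleton along those lines (names invented, the actual overflow handling elided):

#include <linux/interrupt.h>
#include <linux/percpu.h>
#include <linux/perf_event.h>

#define MY_MAX_COUNTERS 8  /* made-up */

struct my_hw_events {
    struct perf_event *events[MY_MAX_COUNTERS];
};

struct my_pmu {
    struct my_hw_events __percpu *hw_events;
};

/*
 * Overflow interrupt skeleton: the per-CPU counter state is looked up with
 * this_cpu_ptr() because the IRQ fires on the CPU that owns the counters.
 */
static irqreturn_t my_pmu_handle_irq(int irq, void *dev)
{
    struct my_pmu *pmu = dev;
    struct my_hw_events *cpuc = this_cpu_ptr(pmu->hw_events);
    int idx;

    for (idx = 0; idx < MY_MAX_COUNTERS; idx++) {
        struct perf_event *event = cpuc->events[idx];

        if (!event)
            continue;
        /* Real drivers check the overflow status, update the count
         * and call perf_event_overflow() here. */
    }
    return IRQ_HANDLED;
}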
/linux/arch/csky/kernel/
perf_event.c
    41  struct pmu_hw_events __percpu *hw_events;  (member)
  1075  struct pmu_hw_events *hw_events = this_cpu_ptr(csky_pmu.hw_events);  in csky_pmu_del()  (local)
  1080  hw_events->events[hwc->idx] = NULL;  in csky_pmu_del()
  1088  struct pmu_hw_events *hw_events = this_cpu_ptr(csky_pmu.hw_events);  in csky_pmu_add()  (local)
  1091  hw_events->events[hwc->idx] = event;  in csky_pmu_add()
  1106  struct pmu_hw_events *cpuc = this_cpu_ptr(csky_pmu.hw_events);  in csky_pmu_handle_irq()
  1178          this_cpu_ptr(csky_pmu.hw_events));  in csky_pmu_request_irq()
  1195  free_percpu_irq(irq, this_cpu_ptr(csky_pmu.hw_events));  in csky_pmu_free_irq()
  1200  csky_pmu.hw_events = alloc_percpu_gfp(struct pmu_hw_events,  in init_hw_perf_events()
  1202  if (!csky_pmu.hw_events) {  in init_hw_perf_events()
  [all …]
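
The csky hits add one more use: hw_events doubles as the cookie passed when requesting and freeing the per-CPU PMU interrupt (the csky code resolves it with this_cpu_ptr() at the call sites). A simplified sketch of per-CPU IRQ setup around a __percpu hw_events pointer, not csky's exact arrangement:

#include <linux/interrupt.h>
#include <linux/percpu.h>

struct my_hw_events {
    int dummy;  /* real drivers keep the per-CPU counter state here */
};

struct my_pmu {
    int irq;
    struct my_hw_events __percpu *hw_events;
};

/* Hook the PMU overflow handler up as a per-CPU interrupt. */
static int my_pmu_request_irq(struct my_pmu *pmu, irq_handler_t handler)
{
    return request_percpu_irq(pmu->irq, handler, "my-pmu", pmu->hw_events);
}

static void my_pmu_free_irq(struct my_pmu *pmu)
{
    free_percpu_irq(pmu->irq, pmu->hw_events);
}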
/linux/drivers/perf/arm_cspmu/
ampere_cspmu.c
   156  idx = find_first_bit(cspmu->hw_events.used_ctrs,  in ampere_cspmu_set_ev_filter()
   159  event = cspmu->hw_events.events[idx];  in ampere_cspmu_set_ev_filter()
   193  idx = find_first_bit(cspmu->hw_events.used_ctrs,  in ampere_cspmu_validate_event()
   200  curr = cspmu->hw_events.events[idx];  in ampere_cspmu_validate_event()
/linux/include/linux/perf/
arm_pmu.h
    91  int (*get_event_idx)(struct pmu_hw_events *hw_events,
    93  void (*clear_event_idx)(struct pmu_hw_events *hw_events,
   110  struct pmu_hw_events __percpu *hw_events;  (member)
riscv_pmu.h
    71  struct cpu_hw_events __percpu *hw_events;  (member)
/linux/drivers/perf/hisilicon/
hisi_pcie_pmu.c
    66  struct perf_event *hw_events[HISI_PCIE_MAX_COUNTERS];  (member)
   430  sibling = pcie_pmu->hw_events[idx];  in hisi_pcie_pmu_get_event_idx()
   590  if (!pcie_pmu->hw_events[idx]) {  in hisi_pcie_pmu_add()
   592  pcie_pmu->hw_events[idx] = event;  in hisi_pcie_pmu_add()
   607  pcie_pmu->hw_events[hwc->idx] = NULL;  in hisi_pcie_pmu_del()
   617  if (pcie_pmu->hw_events[num])  in hisi_pcie_pmu_enable()
   649  event = pcie_pmu->hw_events[idx];  in hisi_pcie_pmu_irq()
hisi_uncore_pmu.c
   135  event = hisi_pmu->pmu_events.hw_events[idx];  in hisi_uncore_pmu_isr()
   353  hisi_pmu->pmu_events.hw_events[idx] = event;  in hisi_uncore_pmu_add()
   370  hisi_pmu->pmu_events.hw_events[hwc->idx] = NULL;  in hisi_uncore_pmu_del()
/linux/drivers/accel/habanalabs/gaudi2/
gaudi2P.h
   547  u32 hw_events[GAUDI2_EVENT_SIZE];  (member)
gaudi2.c
  3718  gaudi2->hw_events[gaudi2->num_of_valid_hw_events++] = gaudi2_irq_map_table[i].fc_id;  in gaudi2_sw_init()
  7136  irq_arr_size = gaudi2->num_of_valid_hw_events * sizeof(gaudi2->hw_events[0]);  in gaudi2_compute_reset_late_init()
  7137  return hl_fw_unmask_irq_arr(hdev, gaudi2->hw_events, irq_arr_size);  in gaudi2_compute_reset_late_init()
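
gaudi2 uses the name for something else entirely: a fixed u32 table of firmware event IDs, filled in gaudi2_sw_init() and later handed to the firmware with a byte size computed from the number of valid entries. A tiny sketch of that fill-and-size pattern; the array length, the ID source and the consumer are placeholders:

#include <linux/types.h>

#define MY_EVENT_SIZE 64  /* placeholder for GAUDI2_EVENT_SIZE */

struct my_device {
    u32 hw_events[MY_EVENT_SIZE];  /* firmware event IDs to unmask */
    u32 num_of_valid_hw_events;
};

/* Append one firmware event ID while there is still room in the table. */
static void my_collect_event(struct my_device *dev, u32 fw_event_id)
{
    if (dev->num_of_valid_hw_events < MY_EVENT_SIZE)
        dev->hw_events[dev->num_of_valid_hw_events++] = fw_event_id;
}

/* Byte size of the valid portion, as passed on to the firmware interface. */
static u32 my_events_array_size(struct my_device *dev)
{
    return dev->num_of_valid_hw_events * sizeof(dev->hw_events[0]);
}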