Searched refs:eventsel (Results 1 – 9 of 9) sorted by relevance
| /linux/arch/x86/kvm/ |
| H A D | pmu.c |
|   428  static bool filter_contains_match(u64 *events, u64 nevents, u64 eventsel) in filter_contains_match() argument
|   430  u64 event_select = eventsel & kvm_pmu_ops.EVENTSEL_EVENT; in filter_contains_match()
|   431  u64 umask = eventsel & ARCH_PERFMON_EVENTSEL_UMASK; in filter_contains_match()
|   462  u64 eventsel) in is_gp_event_allowed() argument
|   464  if (filter_contains_match(f->includes, f->nr_includes, eventsel) && in is_gp_event_allowed()
|   465  !filter_contains_match(f->excludes, f->nr_excludes, eventsel)) in is_gp_event_allowed()
|   496  return is_gp_event_allowed(filter, pmc->eventsel); in pmc_is_event_allowed()
|   504  u64 eventsel = pmc->eventsel; in reprogram_counter() local
|   505  u64 new_config = eventsel; in reprogram_counter()
|   518  if (eventsel & ARCH_PERFMON_EVENTSEL_PIN_CONTROL) in reprogram_counter()
|   [all …]
|
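The pmu.c hits above are KVM's x86 PMU event filter: a guest-programmed eventsel is reduced to its event-select and unit-mask bits and must hit the filter's include list without hitting its exclude list. Below is a stand-alone sketch of that shape; the mask values and the linear scan are simplifications (the kernel masks the event-select per vendor via kvm_pmu_ops.EVENTSEL_EVENT and binary-searches a sorted filter), so treat it as illustrative rather than the kernel code.

#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

/* Simplified x86 PERFEVTSEL fields: event select in bits 7:0, umask in bits 15:8. */
#define EVENTSEL_EVENT 0x00ffULL
#define EVENTSEL_UMASK 0xff00ULL

/* True if the filter list contains this event-select/umask pair. */
static bool filter_contains_match(const uint64_t *events, size_t nevents,
				  uint64_t eventsel)
{
	uint64_t key = eventsel & (EVENTSEL_EVENT | EVENTSEL_UMASK);

	for (size_t i = 0; i < nevents; i++)
		if (events[i] == key)
			return true;
	return false;
}

/* Allowed only if it hits the include list and misses the exclude list. */
static bool is_gp_event_allowed(const uint64_t *includes, size_t nr_includes,
				const uint64_t *excludes, size_t nr_excludes,
				uint64_t eventsel)
{
	return filter_contains_match(includes, nr_includes, eventsel) &&
	       !filter_contains_match(excludes, nr_excludes, eventsel);
}

int main(void)
{
	const uint64_t includes[] = { 0x003c };	/* unhalted core cycles */
	const uint64_t excludes[] = { 0x00c0 };	/* instructions retired */

	/* Full eventsel values include ENABLE/OS/USR control bits; only event+umask matter here. */
	printf("0x003c allowed: %d\n",
	       is_gp_event_allowed(includes, 1, excludes, 1, 0x43003cULL));
	printf("0x00c0 allowed: %d\n",
	       is_gp_event_allowed(includes, 1, excludes, 1, 0x4300c0ULL));
	return 0;
}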
| H A D | pmu.h | 172 return pmc->eventsel & ARCH_PERFMON_EVENTSEL_ENABLE; in pmc_is_locally_enabled()
|
| /linux/tools/testing/selftests/kvm/include/x86/ |
| H A D | pmu.h |
|   21  #define RAW_EVENT(eventsel, umask) (((eventsel & 0xf00UL) << 24) | \ argument
|   22  ((eventsel) & 0xff) | \
|
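The selftest's RAW_EVENT() macro is truncated in the listing; only the first two lines of the definition are shown. The sketch below reproduces the visible packing, eventsel bits 11:8 relocated to config bits 35:32 and eventsel bits 7:0 kept in place, and assumes the cut-off remainder puts the unit mask in bits 15:8, the standard x86 PERFEVTSEL placement. That last part is an assumption, not taken from the file.

#include <stdint.h>
#include <stdio.h>

/* Visible part of the macro: eventsel[11:8] -> config[35:32], eventsel[7:0] kept in place. */
/* Assumed remainder: umask[7:0] -> config[15:8]. */
static uint64_t raw_event(uint64_t eventsel, uint64_t umask)
{
	return ((eventsel & 0xf00ULL) << 24) |
	       (eventsel & 0xffULL) |
	       ((umask & 0xffULL) << 8);
}

int main(void)
{
	/* Extended event 0x1c2 with umask 0x01: the 0x100 part lands in bits 35:32. */
	printf("raw = 0x%llx\n", (unsigned long long)raw_event(0x1c2, 0x01));
	return 0;
}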
| /linux/arch/arm64/kvm/ |
| H A D | pmu-emul.c |
|   665  static int kvm_map_pmu_event(struct kvm *kvm, unsigned int eventsel) in kvm_map_pmu_event() argument
|   674  return pmu->map_pmuv3_event(eventsel); in kvm_map_pmu_event()
|   676  return eventsel; in kvm_map_pmu_event()
|   689  int eventsel; in kvm_pmu_create_perf_event() local
|   696  eventsel = ARMV8_PMUV3_PERFCTR_CPU_CYCLES; in kvm_pmu_create_perf_event()
|   698  eventsel = evtreg & kvm_pmu_event_mask(vcpu->kvm); in kvm_pmu_create_perf_event()
|   704  if (eventsel == ARMV8_PMUV3_PERFCTR_SW_INCR || in kvm_pmu_create_perf_event()
|   705  eventsel == ARMV8_PMUV3_PERFCTR_CHAIN) in kvm_pmu_create_perf_event()
|   713  !test_bit(eventsel, vcpu->kvm->arch.pmu_filter)) in kvm_pmu_create_perf_event()
|   720  eventsel = kvm_map_pmu_event(vcpu->kvm, eventsel); in kvm_pmu_create_perf_event()
|   [all …]
|
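On the arm64 side, kvm_pmu_create_perf_event() picks an event number, refuses the two software-managed events (SW_INCR and CHAIN), checks the per-VM pmu_filter bitmap, and finally lets kvm_map_pmu_event() remap the number for non-PMUv3 hardware. The user-space sketch below walks the same decision order; the PMUV3_* values and the bitmap helper are simplified stand-ins for the kernel's ARMV8_PMUV3_* defines and test_bit().

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* Simplified stand-ins for the kernel's ARMV8_PMUV3_PERFCTR_* defines. */
#define PMUV3_PERFCTR_SW_INCR    0x00
#define PMUV3_PERFCTR_CHAIN      0x1e
#define PMUV3_PERFCTR_CPU_CYCLES 0x11
#define NR_PMU_EVENTS            0x400

/* Stand-in for test_bit() on the per-VM pmu_filter bitmap. */
static bool filter_test(const uint64_t *bitmap, unsigned int event)
{
	return bitmap[event / 64] & (1ULL << (event % 64));
}

/* Returns the event to back with a perf event, or -1 if none is created. */
static int pick_pmu_event(const uint64_t *filter, unsigned int eventsel,
			  bool cycle_counter)
{
	if (cycle_counter)
		eventsel = PMUV3_PERFCTR_CPU_CYCLES;

	/* SW_INCR and CHAIN are emulated in software, never handed to perf. */
	if (eventsel == PMUV3_PERFCTR_SW_INCR ||
	    eventsel == PMUV3_PERFCTR_CHAIN)
		return -1;

	/* Userspace may restrict which events the guest is allowed to count. */
	if (filter && !filter_test(filter, eventsel))
		return -1;

	/* A real host may still remap this, cf. kvm_map_pmu_event(). */
	return eventsel;
}

int main(void)
{
	uint64_t filter[NR_PMU_EVENTS / 64] = { 0 };

	filter[PMUV3_PERFCTR_CPU_CYCLES / 64] |=
		1ULL << (PMUV3_PERFCTR_CPU_CYCLES % 64);

	printf("cycle counter -> %d\n", pick_pmu_event(filter, 0, true));
	printf("event 0x08    -> %d\n", pick_pmu_event(filter, 0x08, false));
	return 0;
}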
| /linux/arch/x86/kvm/vmx/ |
| H A D | pmu_intel.c |
|   372  msr_info->data = pmc->eventsel; in intel_pmu_get_msr()
|   444  if (data != pmc->eventsel) { in intel_pmu_set_msr()
|   445  pmc->eventsel = data; in intel_pmu_set_msr()
|   479  u64 eventsel; in intel_get_fixed_pmc_eventsel() local
|   488  eventsel = perf_get_hw_event_config(fixed_pmc_perf_ids[index]); in intel_get_fixed_pmc_eventsel()
|   489  WARN_ON_ONCE(!eventsel && index < kvm_pmu_cap.num_counters_fixed); in intel_get_fixed_pmc_eventsel()
|   490  return eventsel; in intel_get_fixed_pmc_eventsel()
|   624  pmu->fixed_counters[i].eventsel = intel_get_fixed_pmc_eventsel(i); in intel_pmu_init()
|
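The intel_get_fixed_pmc_eventsel() hits show why fixed counters carry an eventsel at all: each fixed counter has an equivalent general-purpose event encoding, and KVM caches it in pmc->eventsel so fixed counters flow through the same filter and reprogram paths as GP counters. The table below uses the architectural encodings perf publishes for those events; the kernel obtains them via perf_get_hw_event_config() rather than hard-coding them, so the values here are illustrative.

#include <stdint.h>
#include <stdio.h>

/* Illustrative GP-equivalent encodings (event | umask << 8) for the fixed counters. */
static const struct {
	const char *name;
	uint64_t eventsel;
} fixed_ctr_event[] = {
	{ "INST_RETIRED.ANY",         0x00c0 },
	{ "CPU_CLK_UNHALTED.CORE",    0x003c },
	{ "CPU_CLK_UNHALTED.REF_TSC", 0x0300 },	/* pseudo-encoding used by perf */
};

int main(void)
{
	for (unsigned int i = 0; i < 3; i++)
		printf("fixed counter %u -> eventsel 0x%04llx (%s)\n", i,
		       (unsigned long long)fixed_ctr_event[i].eventsel,
		       fixed_ctr_event[i].name);
	return 0;
}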
| /linux/arch/x86/kvm/svm/ |
| H A D | pmu.c |
|   143  msr_info->data = pmc->eventsel; in amd_pmu_get_msr()
|   167  if (data != pmc->eventsel) { in amd_pmu_set_msr()
|   168  pmc->eventsel = data; in amd_pmu_set_msr()
|
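Both the Intel (pmu_intel.c) and AMD (svm/pmu.c) MSR handlers above follow the same pattern: a read returns the cached pmc->eventsel, and a write only updates the cache and reprograms the counter when the value actually changes. A minimal sketch of that pattern, with an illustrative stand-in for kvm_pmc and the reprogram hook:

#include <stdint.h>
#include <stdio.h>

/* Illustrative stand-in for the relevant part of struct kvm_pmc. */
struct pmc {
	uint64_t eventsel;
};

static void reprogram_counter(struct pmc *pmc)
{
	printf("reprogramming for eventsel 0x%llx\n",
	       (unsigned long long)pmc->eventsel);
}

static void set_eventsel_msr(struct pmc *pmc, uint64_t data)
{
	if (data == pmc->eventsel)
		return;		/* nothing changed, keep the existing perf event */

	pmc->eventsel = data;
	reprogram_counter(pmc);
}

int main(void)
{
	struct pmc pmc = { .eventsel = 0 };

	set_eventsel_msr(&pmc, 0x4300c0);	/* ENABLE | OS | USR | INST_RETIRED */
	set_eventsel_msr(&pmc, 0x4300c0);	/* no-op: same value */
	return 0;
}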
| /linux/tools/testing/selftests/kvm/x86/ |
| H A D | pmu_counters_test.c |
|   292  uint64_t eventsel = ARCH_PERFMON_EVENTSEL_OS | in guest_test_arch_event() local
|   301  MSR_P6_EVNTSEL0 + i, eventsel); in guest_test_arch_event()
|
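The pmu_counters_test.c hit builds its eventsel from the ARCH_PERFMON_EVENTSEL_* control flags plus an architectural event and writes it to MSR_P6_EVNTSEL0 + i. The sketch below spells out that composition using the x86 PERFEVTSELx bit positions; wrmsr() is replaced with a printf stub so it runs as an ordinary user program.

#include <stdint.h>
#include <stdio.h>

/* PERFEVTSELx bit positions. */
#define EVENTSEL_EVENT(e) ((uint64_t)(e) & 0xff)
#define EVENTSEL_UMASK(u) (((uint64_t)(u) & 0xff) << 8)
#define EVENTSEL_USR      (1ULL << 16)	/* count in CPL > 0 */
#define EVENTSEL_OS       (1ULL << 17)	/* count in CPL 0   */
#define EVENTSEL_ENABLE   (1ULL << 22)

/* Stand-in for the guest's wrmsr(). */
static void wrmsr_stub(uint32_t msr, uint64_t val)
{
	printf("wrmsr(0x%x, 0x%llx)\n", msr, (unsigned long long)val);
}

int main(void)
{
	uint32_t msr_evntsel0 = 0x186;		/* MSR_P6_EVNTSEL0 */
	uint64_t eventsel = EVENTSEL_OS | EVENTSEL_USR | EVENTSEL_ENABLE |
			    EVENTSEL_EVENT(0xc0) | EVENTSEL_UMASK(0x00);

	wrmsr_stub(msr_evntsel0 + 0, eventsel);	/* program GP counter 0 */
	return 0;
}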
| /linux/arch/x86/events/intel/ |
| H A D | core.c |
|   6016  .eventsel = MSR_ARCH_PERFMON_EVENTSEL0,
|   6070  .eventsel = MSR_ARCH_PERFMON_EVENTSEL0,
|   7098  static inline int intel_pmu_v6_addr_offset(int index, bool eventsel) in intel_pmu_v6_addr_offset() argument
|   8171  x86_pmu.eventsel = MSR_IA32_PMC_V6_GP0_CFG_A; in intel_pmu_init()
|
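The events/intel/core.c hits use eventsel in a different sense: x86_pmu.eventsel is the base MSR for general-purpose counter configuration, and an addr_offset helper such as intel_pmu_v6_addr_offset() turns a counter index into the distance from that base (the legacy layout strides by one MSR per counter; the PMU v6 layout groups each counter's registers, so the stride is larger). The numeric bases and the stride of 4 in the sketch below are assumptions for illustration, not copied from msr-index.h.

#include <stdint.h>
#include <stdio.h>

struct pmu_layout {
	const char *name;
	uint32_t eventsel_base;	/* first counter's config MSR */
	uint32_t stride;	/* MSRs between consecutive counters */
};

static uint32_t eventsel_msr(const struct pmu_layout *l, unsigned int idx)
{
	return l->eventsel_base + idx * l->stride;
}

int main(void)
{
	const struct pmu_layout legacy = { "legacy", 0x186, 1 };	/* MSR_ARCH_PERFMON_EVENTSEL0 */
	const struct pmu_layout v6     = { "v6", 0x1901, 4 };		/* assumed GP0_CFG_A base and step */

	for (unsigned int i = 0; i < 3; i++)
		printf("%s counter %u -> 0x%x, %s counter %u -> 0x%x\n",
		       legacy.name, i, eventsel_msr(&legacy, i),
		       v6.name, i, eventsel_msr(&v6, i));
	return 0;
}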
| /linux/arch/x86/include/asm/ |
| H A D | kvm_host.h | 531 u64 eventsel; member
|