| /linux/drivers/iio/buffer/ |
| H A D | industrialio-hw-consumer.c |
    in iio_hw_consumer_get_buffer():
       53  struct iio_hw_consumer *hwc, struct iio_dev *indio_dev)   [argument]
       58  list_for_each_entry(buf, &hwc->buffers, head) {
       72  list_add_tail(&buf->head, &hwc->buffers);
    in iio_hw_consumer_alloc():
       86  struct iio_hw_consumer *hwc;   [local]
       90  hwc = kzalloc(sizeof(*hwc), GFP_KERNEL);
       91  if (!hwc)
       94  INIT_LIST_HEAD(&hwc->buffers);
       96  hwc->channels = iio_channel_get_all(dev);
       97  if (IS_ERR(hwc->channels)) {
       98  ret = PTR_ERR(hwc->channels);
    [all …]
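The iio_hw_consumer_alloc() hits above trace a common kernel allocation shape: zero-allocate, initialize the list head, then unwrap an ERR_PTR from the channel lookup into a plain errno. A minimal sketch of that shape; the demo_hw_consumer type and the error label are hypothetical stand-ins for the driver's private struct:

```c
#include <linux/err.h>
#include <linux/iio/consumer.h>
#include <linux/list.h>
#include <linux/slab.h>

/* Hypothetical stand-in for the driver's private struct iio_hw_consumer. */
struct demo_hw_consumer {
	struct list_head buffers;
	struct iio_channel *channels;
};

static struct demo_hw_consumer *demo_hw_consumer_alloc(struct device *dev)
{
	struct demo_hw_consumer *hwc;
	int ret;

	hwc = kzalloc(sizeof(*hwc), GFP_KERNEL);
	if (!hwc)
		return ERR_PTR(-ENOMEM);

	INIT_LIST_HEAD(&hwc->buffers);	/* empty buffer list, filled on demand */

	hwc->channels = iio_channel_get_all(dev);
	if (IS_ERR(hwc->channels)) {
		ret = PTR_ERR(hwc->channels);	/* keep the lookup's errno */
		goto err_free_hwc;
	}

	return hwc;

err_free_hwc:
	kfree(hwc);
	return ERR_PTR(ret);
}
```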
|
| /linux/drivers/perf/ |
| H A D | riscv_pmu.c |
    in riscv_pmu_ctr_get_width_mask():
      149  struct hw_perf_event *hwc = &event->hw;   [local]
      151  if (hwc->idx == -1)
      155  cwidth = rvpmu->ctr_get_width(hwc->idx);
    in riscv_pmu_event_update():
      163  struct hw_perf_event *hwc = &event->hw;   [local]
      168  if (!rvpmu->ctr_read || (hwc->state & PERF_HES_UPTODATE))
      174  prev_raw_count = local64_read(&hwc->prev_count);
      176  oldval = local64_cmpxchg(&hwc->prev_count, prev_raw_count,
      182  local64_sub(delta, &hwc->period_left);
    in riscv_pmu_stop():
      189  struct hw_perf_event *hwc = &event->hw;   [local]
      192  if (!(hwc->state & PERF_HES_STOPPED)) {
    [all …]
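riscv_pmu_event_update() above is one instance of the lock-free counter-update idiom that recurs throughout this listing (smmu_pmu_event_update(), sh_perf_event_update(), x86_perf_event_update(), perfmon_pmu_event_update()). A minimal sketch of the idiom; read_hw_counter() is a hypothetical accessor standing in for each driver's register read, and real drivers additionally mask the delta to the hardware counter width:

```c
#include <linux/perf_event.h>

static u64 read_hw_counter(int idx);	/* assumed hardware accessor */

static void demo_event_update(struct perf_event *event)
{
	struct hw_perf_event *hwc = &event->hw;
	u64 prev_raw_count, new_raw_count;
	s64 delta;

	do {
		prev_raw_count = local64_read(&hwc->prev_count);
		new_raw_count = read_hw_counter(hwc->idx);
		/* Retry if an interrupt/NMI moved prev_count under us. */
	} while (local64_cmpxchg(&hwc->prev_count, prev_raw_count,
				 new_raw_count) != prev_raw_count);

	delta = new_raw_count - prev_raw_count;	/* real drivers mask to counter width */
	local64_add(delta, &event->count);
	local64_sub(delta, &hwc->period_left);
}
```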
|
| H A D | starfive_starlink_pmu.c |
    in starlink_pmu_set_event_period():
      158  struct hw_perf_event *hwc = &event->hw;   [local]
      167  local64_set(&hwc->prev_count, val);
      168  if (hwc->config == STARLINK_CYCLES)
    in starlink_pmu_counter_start():
      178  struct hw_perf_event *hwc = &event->hw;   [local]
      193  if (hwc->config == STARLINK_CYCLES) {
    in starlink_pmu_counter_stop():
      215  struct hw_perf_event *hwc = &event->hw;   [local]
      224  if (hwc->config == STARLINK_CYCLES)
    in starlink_pmu_update():
      235  struct hw_perf_event *hwc = &event->hw;   [local]
      236  int idx = hwc->idx;
      242  prev_raw_count = local64_read(&hwc->prev_count);
    [all …]
|
| H A D | arm_smmuv3_pmu.c |
    in smmu_pmu_event_update():
      256  struct hw_perf_event *hwc = &event->hw;   [local]
      259  u32 idx = hwc->idx;
      262  prev = local64_read(&hwc->prev_count);
      264  } while (local64_cmpxchg(&hwc->prev_count, prev, now) != prev);
    in smmu_pmu_set_period():
      274  struct hw_perf_event *hwc)   [argument]
      276  u32 idx = hwc->idx;
      299  local64_set(&hwc->prev_count, new);
    in smmu_pmu_event_init():
      397  struct hw_perf_event *hwc = &event->hw;   [local]
      407  if (hwc->sample_period) {
      440  hwc->idx = -1;
    [all …]
|
| H A D | cxl_pmu.c |
    in cxl_pmu_event_start():
      615  struct hw_perf_event *hwc = &event->hw;   [local]
      623  if (WARN_ON_ONCE(!(hwc->state & PERF_HES_STOPPED)))
      626  WARN_ON_ONCE(!(hwc->state & PERF_HES_UPTODATE));
      627  hwc->state = 0;
      638  writeq(cfg, base + CXL_PMU_FILTER_CFG_REG(hwc->idx, 0));
      641  cfg = readq(base + CXL_PMU_COUNTER_CFG_REG(hwc->idx));
      651  if (test_bit(hwc->idx, info->conf_counter_bm)) {
      653  hwc->event_base);
      668  writeq(cfg, base + CXL_PMU_COUNTER_CFG_REG(hwc->idx));
      670  local64_set(&hwc->prev_count, 0);
    [all …]
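cxl_pmu_event_start() above (like amdgpu_perf_start() and iommu_pmu_start() later in this listing) follows the perf core's start/stop state handshake on hwc->state. A sketch of that protocol, assuming a hypothetical demo_hw_enable() that programs and starts the counter:

```c
#include <linux/perf_event.h>

static void demo_hw_enable(int idx);	/* hypothetical: program + start counter */

static void demo_pmu_start(struct perf_event *event, int flags)
{
	struct hw_perf_event *hwc = &event->hw;

	/* The core only starts events it previously stopped. */
	if (WARN_ON_ONCE(!(hwc->state & PERF_HES_STOPPED)))
		return;

	/* On PERF_EF_RELOAD the saved count must already be up to date. */
	if (flags & PERF_EF_RELOAD)
		WARN_ON_ONCE(!(hwc->state & PERF_HES_UPTODATE));

	hwc->state = 0;				/* mark running */
	local64_set(&hwc->prev_count, 0);	/* baseline for the next update */
	demo_hw_enable(hwc->idx);
}
```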
|
| /linux/arch/loongarch/kernel/ |
| H A D | perf_event.c |
    in loongarch_pmu_alloc_counter():
      253  static int loongarch_pmu_alloc_counter(struct cpu_hw_events *cpuc, struct hw_perf_event *hwc)   [argument]
    in loongarch_pmu_event_set_period():
      300  struct hw_perf_event *hwc,   [argument]
      304  u64 left = local64_read(&hwc->period_left);
      305  u64 period = hwc->sample_period;
      310  local64_set(&hwc->period_left, left);
      311  hwc->last_period = period;
      316  local64_set(&hwc->period_left, left);
      317  hwc->last_period = period;
      323  local64_set(&hwc->period_left, left);
      326  local64_set(&hwc…
    in loongarch_pmu_event_update():
      336  loongarch_pmu_event_update(struct perf_event *event, struct hw_perf_event *hwc, int idx)   [argument]
    in loongarch_pmu_start():
      358  struct hw_perf_event *hwc = &event->hw;   [local]
    in loongarch_pmu_stop():
      374  struct hw_perf_event *hwc = &event->hw;   [local]
    in loongarch_pmu_add():
      389  struct hw_perf_event *hwc = &event->hw;   [local]
    in loongarch_pmu_del():
      423  struct hw_perf_event *hwc = &event->hw;   [local]
    in loongarch_pmu_read():
      437  struct hw_perf_event *hwc = &event->hw;   [local]
    in handle_associated_event():
      475  struct hw_perf_event *hwc = &event->hw;   [local]
    in __hw_perf_event_init():
      739  struct hw_perf_event *hwc = &event->hw;   [local]
    [all …]
| /linux/drivers/perf/hisilicon/ |
| H A D | hisi_uncore_l3c_pmu.c |
    in hisi_l3c_pmu_event_readl():
      158  static u32 hisi_l3c_pmu_event_readl(struct hw_perf_event *hwc, u32 reg)   [argument]
      160  return readl((void __iomem *)hwc->event_base + reg);
    in hisi_l3c_pmu_event_writel():
      163  static void hisi_l3c_pmu_event_writel(struct hw_perf_event *hwc, u32 reg, u32 val)   [argument]
      165  writel(val, (void __iomem *)hwc->event_base + reg);
    in hisi_l3c_pmu_event_readq():
      168  static u64 hisi_l3c_pmu_event_readq(struct hw_perf_event *hwc, u32 reg)   [argument]
      170  return readq((void __iomem *)hwc->event_base + reg);
    in hisi_l3c_pmu_event_writeq():
      173  static void hisi_l3c_pmu_event_writeq(struct hw_perf_event *hwc, u32 reg, u64 val)   [argument]
      175  writeq(val, (void __iomem *)hwc->event_base + reg);
    in hisi_l3c_pmu_config_req_tracetag():
      180  struct hw_perf_event *hwc = &event->hw;   [local]
      187  val = hisi_l3c_pmu_event_readl(hwc, L3C_TRACETAG_CTRL);
    [all …]
|
| H A D | hisi_uncore_pmu.c |
    in hisi_uncore_pmu_event_init():
      209  struct hw_perf_event *hwc = &event->hw;   [local]
      248  hwc->idx = -1;
      249  hwc->config_base = event->attr.config;
    in hisi_uncore_pmu_enable_event():
      268  struct hw_perf_event *hwc = &event->hw;   [local]
      270  hisi_pmu->ops->write_evtype(hisi_pmu, hwc->idx,
      276  hisi_pmu->ops->enable_counter_int(hisi_pmu, hwc);
      277  hisi_pmu->ops->enable_counter(hisi_pmu, hwc);
    in hisi_uncore_pmu_disable_event():
      286  struct hw_perf_event *hwc = &event->hw;   [local]
      288  hisi_pmu->ops->disable_counter(hisi_pmu, hwc);
      289  hisi_pmu->ops->disable_counter_int(hisi_pmu, hwc);
    [all …]
|
| H A D | hisi_pcie_pmu.c |
    in hisi_pcie_pmu_config_event_ctrl():
      268  struct hw_perf_event *hwc = &event->hw;   [local]
      271  hisi_pcie_pmu_writeq(pcie_pmu, HISI_PCIE_EVENT_CTRL, hwc->idx, reg);
    in hisi_pcie_pmu_clear_event_ctrl():
      277  struct hw_perf_event *hwc = &event->hw;   [local]
      279  hisi_pcie_pmu_writeq(pcie_pmu, HISI_PCIE_EVENT_CTRL, hwc->idx, HISI_PCIE_INIT_SET);
    in hisi_pcie_pmu_event_init():
      384  struct hw_perf_event *hwc = &event->hw;   [local]
      391  hwc->event_base = HISI_PCIE_EXT_CNT;
      393  hwc->event_base = HISI_PCIE_CNT;
    in hisi_pcie_pmu_event_update():
      448  struct hw_perf_event *hwc = &event->hw;   [local]
      452  prev_cnt = local64_read(&hwc->prev_count);
      454  } while (local64_cmpxchg(&hwc->prev_count, prev_cnt,
    [all …]
|
| H A D | hisi_uncore_ddrc_pmu.c |
       68  #define GET_DDRC_EVENTID(hwc) (hwc->config_base & 0x7)   [argument]
    in hisi_ddrc_pmu_read_counter():
       87  struct hw_perf_event *hwc)   [argument]
       92  return readl(ddrc_pmu->base + ddrc_reg_off[hwc->idx]);
       94  return readq(ddrc_pmu->base + DDRC_EVENT_CNTn(regs->event_cnt, hwc->idx));
    in hisi_ddrc_pmu_write_counter():
       98  struct hw_perf_event *hwc, u64 val)   [argument]
      103  writel((u32)val, ddrc_pmu->base + ddrc_reg_off[hwc->idx]);
      105  writeq(val, ddrc_pmu->base + DDRC_EVENT_CNTn(regs->event_cnt, hwc->idx));
    in hisi_ddrc_pmu_v1_get_event_idx():
      128  struct hw_perf_event *hwc = &event->hw;   [local]
      130  int idx = GET_DDRC_EVENTID(hwc);
    in hisi_ddrc_pmu_enable_counter():
      172  struct hw_perf_event *hwc)   [argument]
    [all …]
|
| H A D | hisi_uncore_noc_pmu.c |
    in hisi_noc_pmu_read_counter():
       93  struct hw_perf_event *hwc)   [argument]
       97  return readq(noc_pmu->base + NOC_PMU_EVENT_CNTRn(reg_info->event_cntr0, hwc->idx));
    in hisi_noc_pmu_write_counter():
      101  struct hw_perf_event *hwc, u64 val)   [argument]
      105  writeq(val, noc_pmu->base + NOC_PMU_EVENT_CNTRn(reg_info->event_cntr0, hwc->idx));
    in hisi_noc_pmu_enable_counter():
      109  struct hw_perf_event *hwc)   [argument]
      114  reg = readl(noc_pmu->base + NOC_PMU_EVENT_CTRLn(reg_info->event_ctrl0, hwc->idx));
      116  writel(reg, noc_pmu->base + NOC_PMU_EVENT_CTRLn(reg_info->event_ctrl0, hwc->idx));
    in hisi_noc_pmu_disable_counter():
      120  struct hw_perf_event *hwc)   [argument]
      125  reg = readl(noc_pmu->base + NOC_PMU_EVENT_CTRLn(reg_info->event_ctrl0, hwc->idx));
      127  writel(reg, noc_pmu->base + NOC_PMU_EVENT_CTRLn(reg_info->event_ctrl0, hwc->idx));
    [all …]
|
| H A D | hisi_uncore_mn_pmu.c |
    in hisi_mn_pmu_read_counter():
       74  struct hw_perf_event *hwc)   [argument]
       78  return readq(mn_pmu->base + HISI_MN_CNTR_REGn(reg_info->event_cntr0, hwc->idx));
    in hisi_mn_pmu_write_counter():
       82  struct hw_perf_event *hwc, u64 val)   [argument]
       86  writeq(val, mn_pmu->base + HISI_MN_CNTR_REGn(reg_info->event_cntr0, hwc->idx));
    in hisi_mn_pmu_enable_counter():
      128  struct hw_perf_event *hwc)   [argument]
      134  val |= BIT(hwc->idx);
    in hisi_mn_pmu_disable_counter():
      139  struct hw_perf_event *hwc)   [argument]
      145  val &= ~BIT(hwc->idx);
    in hisi_mn_pmu_enable_counter_int():
      150  struct hw_perf_event *hwc)   [argument]
      156  val &= ~BIT(hwc->idx);
    [all …]
|
| H A D | hisi_uncore_hha_pmu.c |
    in hisi_hha_pmu_read_counter():
      169  struct hw_perf_event *hwc)   [argument]
      172  return readq(hha_pmu->base + hisi_hha_pmu_get_counter_offset(hwc->idx));
    in hisi_hha_pmu_write_counter():
      176  struct hw_perf_event *hwc, u64 val)   [argument]
      179  writeq(val, hha_pmu->base + hisi_hha_pmu_get_counter_offset(hwc->idx));
    in hisi_hha_pmu_enable_counter():
      232  struct hw_perf_event *hwc)   [argument]
      238  val |= (1 << hwc->idx);
    in hisi_hha_pmu_disable_counter():
      243  struct hw_perf_event *hwc)   [argument]
      249  val &= ~(1 << hwc->idx);
    in hisi_hha_pmu_enable_counter_int():
      254  struct hw_perf_event *hwc)   [argument]
      260  val &= ~(1 << hwc->idx);
    [all …]
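The HiSilicon enable/disable hits (here and in hisi_uncore_mn_pmu.c above) all gate a single counter by read-modify-writing a shared control register, one bit per counter index. A sketch of that pattern with a hypothetical DEMO_EVENT_CTRL register offset:

```c
#include <linux/io.h>
#include <linux/perf_event.h>

#define DEMO_EVENT_CTRL	0x0e00	/* hypothetical control-register offset */

static void demo_enable_counter(void __iomem *base, struct hw_perf_event *hwc)
{
	u32 val = readl(base + DEMO_EVENT_CTRL);

	val |= 1 << hwc->idx;		/* set this counter's enable bit */
	writel(val, base + DEMO_EVENT_CTRL);
}

static void demo_disable_counter(void __iomem *base, struct hw_perf_event *hwc)
{
	u32 val = readl(base + DEMO_EVENT_CTRL);

	val &= ~(1 << hwc->idx);	/* clear only this counter's bit */
	writel(val, base + DEMO_EVENT_CTRL);
}
```

The read-modify-write keeps other counters' bits intact; note that in the real drivers the interrupt-enable registers are active-low, which is why hisi_hha_pmu_enable_counter_int() above *clears* the bit.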
|
| /linux/drivers/gpu/drm/amd/amdgpu/ |
| H A D | amdgpu_pmu.c |
    in amdgpu_perf_event_init():
      211  struct hw_perf_event *hwc = &event->hw;   [local]
      218  hwc->config = event->attr.config;
      219  hwc->config_base = AMDGPU_PMU_PERF_TYPE_NONE;
    in amdgpu_perf_start():
      227  struct hw_perf_event *hwc = &event->hw;   [local]
      233  if (WARN_ON_ONCE(!(hwc->state & PERF_HES_STOPPED)))
      240  WARN_ON_ONCE(!(hwc->state & PERF_HES_UPTODATE));
      241  hwc->state = 0;
      243  switch (hwc->config_base) {
      248  hwc->config, 0 /* unused */,
      253  hwc->idx = target_cntr;
    [all …]
|
| /linux/arch/x86/events/amd/ |
| H A D | uncore.c |
    in amd_uncore_read():
      137  struct hw_perf_event *hwc = &event->hw;   [local]
      146  prev = local64_read(&hwc->prev_count);
      152  if (hwc->event_base_rdpmc < 0)
      153  rdmsrq(hwc->event_base, new);
      155  new = rdpmc(hwc->event_base_rdpmc);
      157  local64_set(&hwc->prev_count, new);
    in amd_uncore_start():
      167  struct hw_perf_event *hwc = &event->hw;   [local]
      173  wrmsrq(hwc->event_base, (u64)local64_read(&hwc->prev_count));
      175  hwc->state = 0;
      176  __set_bit(hwc->idx, ctx->active_mask);
    [all …]
|
| H A D | ibs.c |
    in perf_event_set_period():
      102  perf_event_set_period(struct hw_perf_event *hwc, u64 min, u64 max, u64 *hw_period)   [argument]
      104  s64 left = local64_read(&hwc->period_left);
      105  s64 period = hwc->sample_period;
      113  local64_set(&hwc->period_left, left);
      114  hwc->last_period = period;
      120  local64_set(&hwc->period_left, left);
      121  hwc->last_period = period;
    in perf_event_try_update():
      147  struct hw_perf_event *hwc = &event->hw;   [local]
      159  prev_raw_count = local64_read(&hwc->prev_count);
      160  if (!local64_try_cmpxchg(&hwc->prev_count,
    [all …]
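perf_event_set_period() above shares its shape with the loongarch and sparc set_period hits: reconcile period_left with the sample period (the two branches cover a long throttle and a single elapsed period), then clamp to what the hardware can count. A sketch of that logic, assuming a hypothetical demo_max_period hardware limit:

```c
#include <linux/perf_event.h>

static int demo_set_period(struct hw_perf_event *hwc, s64 demo_max_period,
			   u64 *hw_period)
{
	s64 left = local64_read(&hwc->period_left);
	s64 period = hwc->sample_period;
	int overflow = 0;

	if (unlikely(left <= -period)) {	/* counter ran far past zero */
		left = period;
		local64_set(&hwc->period_left, left);
		hwc->last_period = period;
		overflow = 1;
	}

	if (unlikely(left <= 0)) {		/* one period elapsed */
		left += period;
		local64_set(&hwc->period_left, left);
		hwc->last_period = period;
		overflow = 1;
	}

	if (left > demo_max_period)		/* hardware can't count further */
		left = demo_max_period;

	*hw_period = (u64)left;
	return overflow;
}
```

The overflow return tells the caller whether a sample boundary was crossed while the counter was stopped, so the interrupt handler can account for it.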
|
| /linux/arch/sh/kernel/ |
| H A D | perf_event.c |
    in __hw_perf_event_init():
      103  struct hw_perf_event *hwc = &event->hw;   [local]
      153  hwc->config |= config;
    in sh_perf_event_update():
      159  struct hw_perf_event *hwc, int idx)   [argument]
      178  prev_raw_count = local64_read(&hwc->prev_count);
      181  if (local64_cmpxchg(&hwc->prev_count, prev_raw_count,
    in sh_pmu_stop():
      202  struct hw_perf_event *hwc = &event->hw;   [local]
      203  int idx = hwc->idx;
      206  sh_pmu->disable(hwc, idx);
    in sh_pmu_start():
      220  struct hw_perf_event *hwc = &event->hw;   [local]
      221  int idx = hwc->idx;
    [all …]
|
| /linux/arch/x86/events/intel/ |
| H A D | uncore_discovery.c |
    in intel_generic_uncore_msr_enable_event():
      508  struct hw_perf_event *hwc = &event->hw;   [local]
      510  wrmsrq(hwc->config_base, hwc->config);
    in intel_generic_uncore_msr_disable_event():
      516  struct hw_perf_event *hwc = &event->hw;   [local]
      518  wrmsrq(hwc->config_base, 0);
    in intel_generic_uncore_assign_hw_event():
      533  struct hw_perf_event *hwc = &event->hw;   [local]
      540  hwc->config_base = uncore_pci_event_ctl(box, hwc->idx);
      541  hwc->event_base = uncore_pci_perf_ctr(box, hwc->idx);
      551  hwc->config_base = box_ctl + uncore_pci_event_ctl(box, hwc->idx);
      552  hwc->event_base = box_ctl + uncore_pci_perf_ctr(box, hwc->idx);
      556  hwc->config_base = box_ctl + box->pmu->type->event_ctl + hwc->idx;
    [all …]
|
| H A D | uncore.c |
    in uncore_assign_hw_event():
      257  struct hw_perf_event *hwc = &event->hw;   [local]
      259  hwc->idx = idx;
      260  hwc->last_tag = ++box->tags[idx];
      262  if (uncore_pmc_fixed(hwc->idx)) {
      263  hwc->event_base = uncore_fixed_ctr(box);
      264  hwc->config_base = uncore_fixed_ctl(box);
      271  hwc->config_base = uncore_event_ctl(box, hwc->idx);
      272  hwc->event_base = uncore_perf_ctr(box, hwc->idx);
    in uncore_assign_events():
      459  struct hw_perf_event *hwc;   [local]
      472  hwc = &box->event_list[i]->hw;
    [all …]
|
| /linux/arch/x86/events/ |
| H A D | core.c |
    in x86_perf_event_update():
      127  struct hw_perf_event *hwc = &event->hw;   [local]
      132  if (unlikely(!hwc->event_base))
      142  prev_raw_count = local64_read(&hwc->prev_count);
      144  new_raw_count = rdpmc(hwc->event_base_rdpmc);
      145  } while (!local64_try_cmpxchg(&hwc->prev_count,
      160  local64_sub(delta, &hwc->period_left);
    in set_ext_hw_attr():
      375  set_ext_hw_attr(struct hw_perf_event *hwc, struct perf_event *event)   [argument]
      405  hwc->config |= val;
    in x86_setup_perfctr():
      492  struct hw_perf_event *hwc = &event->hw;   [local]
      496  hwc->sample_period = x86_pmu.max_period;
    [all …]
|
| /linux/drivers/clk/ |
| H A D | clk-qoriq.c |
    in mux_set_parent():
      852  struct mux_hwclock *hwc = to_mux_hwclock(hw);   [local]
      855  if (idx >= hwc->num_parents)
      858  clksel = hwc->parent_to_clksel[idx];
      859  cg_out(hwc->cg, (clksel << CLKSEL_SHIFT) & CLKSEL_MASK, hwc->reg);
    in mux_get_parent():
      866  struct mux_hwclock *hwc = to_mux_hwclock(hw);   [local]
      870  clksel = (cg_in(hwc->cg, hwc->reg) & CLKSEL_MASK) >> CLKSEL_SHIFT;
      872  ret = hwc->clksel_to_parent[clksel];
      874  pr_err("%s: mux at %p has bad clksel\n", __func__, hwc->reg);
    in get_pll_div():
      896  struct mux_hwclock *hwc,   [argument]
      901  if (!(hwc->info->clksel[idx].flags & CLKSEL_VALID))
    [all …]
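Note that in clk-qoriq.c `hwc` is a struct mux_hwclock, a clock-mux wrapper, not a perf event. The driver translates parent indices to the hardware CLKSEL field through a pair of lookup tables, as the matches show. A self-contained sketch of the set-parent direction with hypothetical types, shift, and mask; iowrite32be() stands in for the driver's endian-aware cg_out() helper:

```c
#include <linux/errno.h>
#include <linux/io.h>

#define DEMO_CLKSEL_SHIFT	27
#define DEMO_CLKSEL_MASK	(0xf << DEMO_CLKSEL_SHIFT)

struct demo_mux_hwclock {
	u32 __iomem *reg;
	u8 parent_to_clksel[16];	/* parent index -> hardware selector */
	s8 clksel_to_parent[16];	/* selector -> parent index, -1 if unused */
	unsigned int num_parents;
};

static int demo_mux_set_parent(struct demo_mux_hwclock *hwc, unsigned int idx)
{
	u32 clksel;

	if (idx >= hwc->num_parents)	/* reject out-of-range parents */
		return -EINVAL;

	clksel = hwc->parent_to_clksel[idx];
	iowrite32be((clksel << DEMO_CLKSEL_SHIFT) & DEMO_CLKSEL_MASK, hwc->reg);
	return 0;
}
```

The reverse table can have holes, which is why mux_get_parent() above logs "bad clksel" when the hardware reports a selector with no mapped parent.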
|
| /linux/arch/sparc/kernel/ |
| H A D | perf_event.c |
    in sparc_pmu_enable_event():
      827  static inline void sparc_pmu_enable_event(struct cpu_hw_events *cpuc, struct hw_perf_event *hwc, in…   [argument]
    in sparc_pmu_disable_event():
      845  static inline void sparc_pmu_disable_event(struct cpu_hw_events *cpuc, struct hw_perf_event *hwc, i…   [argument]
    in sparc_perf_event_update():
      864  struct hw_perf_event *hwc, int idx)   [argument]
      871  prev_raw_count = local64_read(&hwc->prev_count);
      874  if (local64_cmpxchg(&hwc->prev_count, prev_raw_count,
      882  local64_sub(delta, &hwc->period_left);
    in sparc_perf_event_set_period():
      888  struct hw_perf_event *hwc, int idx)   [argument]
      890  s64 left = local64_read(&hwc->period_left);
      891  s64 period = hwc->sample_period;
      895  if (unlikely(period != hwc->last_period))
    [all …]
|
| /linux/drivers/iommu/intel/ |
| H A D | perfmon.c |
    in iommu_pmu_event_init():
      281  struct hw_perf_event *hwc = &event->hw;   [local]
      296  hwc->config = iommu_event_config(event);
    in iommu_pmu_event_update():
      304  struct hw_perf_event *hwc = &event->hw;   [local]
      309  prev_count = local64_read(&hwc->prev_count);
      310  new_count = dmar_readq(iommu_event_base(iommu_pmu, hwc->idx));
      311  if (local64_xchg(&hwc->prev_count, new_count) != prev_count)
    in iommu_pmu_start():
      328  struct hw_perf_event *hwc = &event->hw;   [local]
      331  if (WARN_ON_ONCE(!(hwc->state & PERF_HES_STOPPED)))
      334  if (WARN_ON_ONCE(hwc->idx < 0 || hwc->idx >= IOMMU_PMU_IDX_MAX))
      340  hwc->state = 0;
    [all …]
|
| /linux/drivers/dma/idxd/ |
| H A D | perfmon.c |
    in perfmon_assign_hw_event():
       97  struct hw_perf_event *hwc = &event->hw;   [local]
       99  hwc->idx = idx;
      100  hwc->config_base = ioread64(CNTRCFG_REG(idxd, idx));
      101  hwc->event_base = ioread64(CNTRCFG_REG(idxd, idx));
    in perfmon_pmu_read_counter():
      201  struct hw_perf_event *hwc = &event->hw;   [local]
      203  int cntr = hwc->idx;
    in perfmon_pmu_event_update():
      215  struct hw_perf_event *hwc = &event->hw;   [local]
      217  prev_raw_count = local64_read(&hwc->prev_count);
      220  } while (!local64_try_cmpxchg(&hwc->prev_count,
    in perfmon_pmu_event_start():
      293  struct hw_perf_event *hwc = &event->hw;   [local]
    [all …]
|
| /linux/arch/powerpc/perf/ |
| H A D | vpa-dtl.c |
    in vpa_dtl_start_hrtimer():
      308  struct hw_perf_event *hwc = &event->hw;   [local]
      310  period = max_t(u64, NSEC_PER_MSEC, hwc->sample_period);
      311  hrtimer_start(&hwc->hrtimer, ns_to_ktime(period), HRTIMER_MODE_REL_PINNED);
    in vpa_dtl_stop_hrtimer():
      316  struct hw_perf_event *hwc = &event->hw;   [local]
      318  hrtimer_cancel(&hwc->hrtimer);
    in vpa_dtl_event_init():
      352  struct hw_perf_event *hwc = &event->hw;   [local]
      412  hrtimer_setup(&hwc->hrtimer, vpa_dtl_hrtimer_handle, CLOCK_MONOTONIC, HRTIMER_MODE_REL);
      422  hwc->sample_period = event->attr.sample_period;
      423  local64_set(&hwc->period_left, hwc->sample_period);
      424  hwc->last_period = hwc->sample_period;
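vpa-dtl has no counter-overflow interrupt, so it drives sampling from an hrtimer embedded in hw_perf_event, as the hits above show. A sketch of the same wiring; demo_hrtimer_handle() is a hypothetical callback and the actual sample emission is elided:

```c
#include <linux/hrtimer.h>
#include <linux/perf_event.h>

static enum hrtimer_restart demo_hrtimer_handle(struct hrtimer *t)
{
	/* ...read the buffer and emit a perf sample here... */
	hrtimer_forward_now(t, ns_to_ktime(NSEC_PER_MSEC));
	return HRTIMER_RESTART;	/* keep firing until ->stop() cancels us */
}

static void demo_event_init_timer(struct perf_event *event)
{
	struct hw_perf_event *hwc = &event->hw;

	hrtimer_setup(&hwc->hrtimer, demo_hrtimer_handle,
		      CLOCK_MONOTONIC, HRTIMER_MODE_REL);
	hwc->sample_period = event->attr.sample_period;
	local64_set(&hwc->period_left, hwc->sample_period);
	hwc->last_period = hwc->sample_period;
}

static void demo_start_timer(struct perf_event *event)
{
	struct hw_perf_event *hwc = &event->hw;
	u64 period = max_t(u64, NSEC_PER_MSEC, hwc->sample_period);

	hrtimer_start(&hwc->hrtimer, ns_to_ktime(period),
		      HRTIMER_MODE_REL_PINNED);
}
```

Clamping the period to at least NSEC_PER_MSEC, as the driver does at line 310, keeps a user-supplied tiny sample_period from turning the timer into a busy loop.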
|