/linux/drivers/net/ethernet/microsoft/mana/

hw_channel.c:
      8  static int mana_hwc_get_msg_index(struct hw_channel_context *hwc, u16 *msg_id)
     10          struct gdma_resource *r = &hwc->inflight_msg_res;
     14          down(&hwc->sema);
     18          index = find_first_zero_bit(hwc->inflight_msg_res.map,
     19                                      hwc->inflight_msg_res.size);
     21          bitmap_set(hwc->inflight_msg_res.map, index, 1);
     30  static void mana_hwc_put_msg_index(struct hw_channel_context *hwc, u16 msg_id)
     32          struct gdma_resource *r = &hwc->inflight_msg_res;
     36          bitmap_clear(hwc->inflight_msg_res.map, msg_id, 1);
     39          up(&hwc->sema);
    [all …]

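The MANA hits above show a bounded-slot allocator: a counting semaphore caps the number of in-flight messages, and a lock-protected bitmap hands out the lowest free index. A minimal userspace sketch of the same pattern, assuming POSIX threads; the type names, the 64-slot cap, and the single-word bitmap are invented for illustration:

    #include <pthread.h>
    #include <semaphore.h>
    #include <stdint.h>

    #define MAX_INFLIGHT 64

    struct msg_slots {
        sem_t sema;                 /* counts free slots, like hwc->sema */
        pthread_spinlock_t lock;    /* protects the bitmap */
        uint64_t map;               /* one bit per slot, like inflight_msg_res.map */
    };

    static void slots_init(struct msg_slots *s)
    {
        sem_init(&s->sema, 0, MAX_INFLIGHT);
        pthread_spin_init(&s->lock, PTHREAD_PROCESS_PRIVATE);
        s->map = 0;
    }

    /* Block until a slot is free, then claim the lowest clear bit. */
    static uint16_t slot_get(struct msg_slots *s)
    {
        sem_wait(&s->sema);         /* like down(&hwc->sema) */
        pthread_spin_lock(&s->lock);
        unsigned idx = __builtin_ctzll(~s->map);  /* find_first_zero_bit analogue */
        s->map |= 1ULL << idx;      /* bitmap_set(map, idx, 1) */
        pthread_spin_unlock(&s->lock);
        return (uint16_t)idx;
    }

    static void slot_put(struct msg_slots *s, uint16_t idx)
    {
        pthread_spin_lock(&s->lock);
        s->map &= ~(1ULL << idx);   /* bitmap_clear(map, idx, 1) */
        pthread_spin_unlock(&s->lock);
        sem_post(&s->sema);         /* like up(&hwc->sema) */
    }

The semaphore guarantees the bitmap scan always finds a clear bit, which is why the driver can take the index unconditionally after down() succeeds.
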
/linux/drivers/net/ethernet/mellanox/mlx5/core/sf/

hw_table.c:
     36          struct mlx5_sf_hwc_table hwc[MLX5_SF_HWC_MAX];
     44          return &dev->priv.sf_hw_table->hwc[idx];
     49          struct mlx5_sf_hwc_table *hwc;
     51          hwc = mlx5_sf_controller_to_hwc(dev, controller);
     52          return hwc->start_fn_id + sw_id;
     55  static u16 mlx5_sf_hw_to_sw_id(struct mlx5_sf_hwc_table *hwc, u16 hw_id)
     57          return hw_id - hwc->start_fn_id;
     65          for (i = 0; i < ARRAY_SIZE(table->hwc); i++) {
     66                  if (table->hwc[i].max_fn &&
     67                      fn_id >= table->hwc[i].start_fn_id &&
    [all …]

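These mlx5 helpers translate between a controller-local software id and a global hardware function id by offsetting against the table's start_fn_id, and locate the owning table with a linear scan over the populated ranges. A hedged sketch of the arithmetic; the struct and function names here are illustrative, not the driver's:

    #include <stddef.h>
    #include <stdint.h>

    struct hwc_range {
        uint16_t start_fn_id;   /* first hw id owned by this controller */
        uint16_t max_fn;        /* number of ids; 0 means the slot is unused */
    };

    static uint16_t sw_to_hw_id(const struct hwc_range *r, uint16_t sw_id)
    {
        return r->start_fn_id + sw_id;      /* hwc->start_fn_id + sw_id */
    }

    static uint16_t hw_to_sw_id(const struct hwc_range *r, uint16_t hw_id)
    {
        return hw_id - r->start_fn_id;      /* hw_id - hwc->start_fn_id */
    }

    /* Linear scan, mirroring mlx5_sf_table_fn_to_hwc(): a range matches when
     * it is populated (max_fn != 0) and fn_id lies in [start, start + max). */
    static const struct hwc_range *fn_to_range(const struct hwc_range *tbl,
                                               size_t n, uint16_t fn_id)
    {
        for (size_t i = 0; i < n; i++) {
            if (tbl[i].max_fn &&
                fn_id >= tbl[i].start_fn_id &&
                fn_id < tbl[i].start_fn_id + tbl[i].max_fn)
                return &tbl[i];
        }
        return NULL;
    }
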
/linux/drivers/iio/buffer/

industrialio-hw-consumer.c:
     53                  struct iio_hw_consumer *hwc, struct iio_dev *indio_dev)
     58          list_for_each_entry(buf, &hwc->buffers, head) {
     72          list_add_tail(&buf->head, &hwc->buffers);
     86          struct iio_hw_consumer *hwc;
     90          hwc = kzalloc(sizeof(*hwc), GFP_KERNEL);
     91          if (!hwc)
     94          INIT_LIST_HEAD(&hwc->buffers);
     96          hwc->channels = iio_channel_get_all(dev);
     97          if (IS_ERR(hwc->channels)) {
     98                  ret = PTR_ERR(hwc->channels);
    [all …]

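iio_hw_consumer_get_buffer() is a find-or-create walk over the consumer's buffer list, and iio_hw_consumer_alloc() zero-allocates the consumer and initializes that list before fetching its channels. A rough model of the find-or-create step, using a plain singly linked list in place of the kernel's list_head (all names invented):

    #include <stdlib.h>

    struct hw_buf {
        const void *dev;        /* stands in for the indio_dev key */
        struct hw_buf *next;
    };

    struct hw_consumer {
        struct hw_buf *buffers; /* INIT_LIST_HEAD(&hwc->buffers) analogue */
    };

    /* Return the buffer bound to dev, creating it on first use. */
    static struct hw_buf *consumer_get_buffer(struct hw_consumer *c, const void *dev)
    {
        for (struct hw_buf *b = c->buffers; b; b = b->next)
            if (b->dev == dev)
                return b;

        struct hw_buf *b = calloc(1, sizeof(*b));   /* kzalloc analogue */
        if (!b)
            return NULL;
        b->dev = dev;
        b->next = c->buffers;   /* the kernel adds to the tail; order-insensitive here */
        c->buffers = b;
        return b;
    }
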
/linux/arch/alpha/kernel/

perf_event.c:
    253                                     struct hw_perf_event *hwc, int idx)
    255          long left = local64_read(&hwc->period_left);
    256          long period = hwc->sample_period;
    261                  local64_set(&hwc->period_left, left);
    262                  hwc->last_period = period;
    268                  local64_set(&hwc->period_left, left);
    269                  hwc->last_period = period;
    283          local64_set(&hwc->prev_count, (unsigned long)(-left));
    308                                     struct hw_perf_event *hwc, int idx, long ovf)
    314          prev_raw_count = local64_read(&hwc->prev_count);
    [all …]

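alpha_perf_event_set_period() implements the usual perf sampling arithmetic: period_left records how much of the period remains, overflow tops it up by whole periods, the result is clamped to what the counter can hold, and the counter is programmed with the negated remainder so it overflows after exactly that many events. A compact sketch of just the arithmetic, assuming a max_period parameter:

    #include <stdint.h>

    /* Returns the raw value to program into the counter; *left_io carries
     * the remaining period across calls, the way hwc->period_left does. */
    static uint64_t set_period(int64_t *left_io, int64_t period, int64_t max_period)
    {
        int64_t left = *left_io;

        if (left <= -period)        /* fell far behind: resynchronize */
            left = period;
        if (left <= 0)              /* normal overflow: add one full period */
            left += period;
        if (left > max_period)      /* never exceed what the counter holds */
            left = max_period;

        *left_io = left;
        /* Program -left so the counter overflows after 'left' events, as in
         * local64_set(&hwc->prev_count, (unsigned long)(-left)). */
        return (uint64_t)(-left);
    }
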
/linux/drivers/perf/

riscv_pmu.c:
    149          struct hw_perf_event *hwc = &event->hw;
    151          if (hwc->idx == -1)
    155          cwidth = rvpmu->ctr_get_width(hwc->idx);
    163          struct hw_perf_event *hwc = &event->hw;
    168          if (!rvpmu->ctr_read || (hwc->state & PERF_HES_UPTODATE))
    174          prev_raw_count = local64_read(&hwc->prev_count);
    176          oldval = local64_cmpxchg(&hwc->prev_count, prev_raw_count,
    182          local64_sub(delta, &hwc->period_left);
    189          struct hw_perf_event *hwc = &event->hw;
    192          if (!(hwc->state & PERF_HES_STOPPED)) {
    [all …]

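riscv_pmu_event_update() is the canonical lock-free counter update: re-read the hardware counter until a compare-and-swap of prev_count succeeds, then fold the width-masked difference into the event count and period_left. The same loop expressed with C11 atomics; hw_counter stands in for the real ctr_read() callback:

    #include <stdatomic.h>
    #include <stdint.h>

    static volatile uint64_t hw_counter;    /* stand-in for the PMU counter */
    static uint64_t ctr_read(void) { return hw_counter; }

    struct ev {
        _Atomic uint64_t prev_count;        /* hwc->prev_count */
        _Atomic uint64_t count;             /* event->count */
        _Atomic int64_t  period_left;       /* hwc->period_left */
        uint64_t mask;                      /* (1 << counter_width) - 1 */
    };

    static void event_update(struct ev *e)
    {
        uint64_t prev, now;

        do {
            prev = atomic_load(&e->prev_count);
            now  = ctr_read();
            /* Retry if an interrupt updated prev_count under us, like the
             * local64_cmpxchg() loop in riscv_pmu_event_update(). */
        } while (!atomic_compare_exchange_strong(&e->prev_count, &prev, now));

        uint64_t delta = (now - prev) & e->mask;  /* counter may be < 64 bits wide */
        atomic_fetch_add(&e->count, delta);
        atomic_fetch_sub(&e->period_left, delta); /* local64_sub(delta, &period_left) */
    }
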
arm_xscale_pmu.c:
    175          struct hw_perf_event *hwc;
    183                  hwc = &event->hw;
    185                  perf_sample_data_init(&data, 0, hwc->last_period);
    207          struct hw_perf_event *hwc = &event->hw;
    208          int idx = hwc->idx;
    217                  evt = (hwc->config_base << XSCALE1_COUNT0_EVT_SHFT) |
    222                  evt = (hwc->config_base << XSCALE1_COUNT1_EVT_SHFT) |
    239          struct hw_perf_event *hwc = &event->hw;
    240          int idx = hwc->idx;
    270          struct hw_perf_event *hwc = &event->hw;
    [all …]

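xscale1pmu_enable_event() programs a counter by shifting the event number (hwc->config_base) into that counter's event-select field of the control register, as the XSCALE1_COUNTn_EVT_SHFT lines show. A generic sketch of that field insertion; the 8-bit field width and per-counter offsets below are assumptions, not the XScale layout:

    #include <stdint.h>

    #define EVT_SHIFT(idx)  (8u * (unsigned)(idx))   /* hypothetical field offset */
    #define EVT_MASK(idx)   (0xffu << EVT_SHIFT(idx))

    /* Insert event number 'evt' into counter idx's select field of ctrl,
     * the way hwc->config_base << XSCALE1_COUNTn_EVT_SHFT is merged in. */
    static uint32_t set_event_select(uint32_t ctrl, int idx, uint8_t evt)
    {
        ctrl &= ~EVT_MASK(idx);                      /* clear the old selector */
        ctrl |= (uint32_t)evt << EVT_SHIFT(idx);
        return ctrl;
    }
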
arm_pmu.c:
    203          struct hw_perf_event *hwc = &event->hw;
    204          s64 left = local64_read(&hwc->period_left);
    205          s64 period = hwc->sample_period;
    212                  local64_set(&hwc->period_left, left);
    213                  hwc->last_period = period;
    219                  local64_set(&hwc->period_left, left);
    220                  hwc->last_period = period;
    233          local64_set(&hwc->prev_count, (u64)-left);
    245          struct hw_perf_event *hwc = &event->hw;
    250          prev_raw_count = local64_read(&hwc->prev_count);
    [all …]

starfive_starlink_pmu.c:
    158          struct hw_perf_event *hwc = &event->hw;
    167          local64_set(&hwc->prev_count, val);
    168          if (hwc->config == STARLINK_CYCLES)
    178          struct hw_perf_event *hwc = &event->hw;
    193          if (hwc->config == STARLINK_CYCLES) {
    215          struct hw_perf_event *hwc = &event->hw;
    224          if (hwc->config == STARLINK_CYCLES)
    235          struct hw_perf_event *hwc = &event->hw;
    236          int idx = hwc->idx;
    242          prev_raw_count = local64_read(&hwc->prev_count);
    [all …]

arm_v6_pmu.c:
    174          struct hw_perf_event *hwc = &event->hw;
    175          int counter = hwc->idx;
    192          struct hw_perf_event *hwc = &event->hw;
    193          int counter = hwc->idx;
    208          struct hw_perf_event *hwc = &event->hw;
    209          int idx = hwc->idx;
    216                  evt = (hwc->config_base << ARMV6_PMCR_EVT_COUNT0_SHIFT) |
    220                  evt = (hwc->config_base << ARMV6_PMCR_EVT_COUNT1_SHIFT) |
    260          struct hw_perf_event *hwc;
    273                  hwc = &event->hw;
    [all …]

arm_v7_pmu.c:
    733          struct hw_perf_event *hwc = &event->hw;
    734          int idx = hwc->idx;
    753          struct hw_perf_event *hwc = &event->hw;
    754          int idx = hwc->idx;
    850          struct hw_perf_event *hwc = &event->hw;
    852          int idx = hwc->idx;
    876          armv7_pmnc_write_evtsel(idx, hwc->config_base);
    891          struct hw_perf_event *hwc = &event->hw;
    893          int idx = hwc->idx;
    942          struct hw_perf_event *hwc;
    [all …]

/linux/arch/loongarch/kernel/

perf_event.c:
    253  static int loongarch_pmu_alloc_counter(struct cpu_hw_events *cpuc, struct hw_perf_event *hwc)
    300                                         struct hw_perf_event *hwc,
    304          u64 left = local64_read(&hwc->period_left);
    305          u64 period = hwc->sample_period;
    310                  local64_set(&hwc->period_left, left);
    311                  hwc->last_period = period;
    316                  local64_set(&hwc->period_left, left);
    317                  hwc->last_period = period;
    323                  local64_set(&hwc->period_left, left);
    326          local64_set(&hwc->prev_count, loongarch_pmu.overflow - left);
    [all …]

/linux/arch/x86/events/amd/

iommu.c:
    208          struct hw_perf_event *hwc = &event->hw;
    226          hwc->conf = event->attr.config;
    227          hwc->conf1 = event->attr.config1;
    240          struct hw_perf_event *hwc = &ev->hw;
    241          u8 bank = hwc->iommu_bank;
    242          u8 cntr = hwc->iommu_cntr;
    245          reg = GET_CSOURCE(hwc);
    248          reg = GET_DEVID_MASK(hwc);
    249          reg = GET_DEVID(hwc) | (reg << 32);
    254          reg = GET_PASID_MASK(hwc);
    [all …]

ibs.c:
    101  perf_event_set_period(struct hw_perf_event *hwc, u64 min, u64 max, u64 *hw_period)
    103          s64 left = local64_read(&hwc->period_left);
    104          s64 period = hwc->sample_period;
    112                  local64_set(&hwc->period_left, left);
    113                  hwc->last_period = period;
    119                  local64_set(&hwc->period_left, left);
    120                  hwc->last_period = period;
    146          struct hw_perf_event *hwc = &event->hw;
    158          prev_raw_count = local64_read(&hwc->prev_count);
    159          if (!local64_try_cmpxchg(&hwc->prev_count,
    [all …]

power.c:
     45          struct hw_perf_event *hwc = &event->hw;
     49          prev_pwr_acc = hwc->pwr_acc;
     50          prev_ptsc = hwc->ptsc;
     89          struct hw_perf_event *hwc = &event->hw;
     92          if (!(hwc->state & PERF_HES_STOPPED))
     93                  hwc->state |= PERF_HES_STOPPED;
     96          if ((mode & PERF_EF_UPDATE) && !(hwc->state & PERF_HES_UPTODATE)) {
    102                  hwc->state |= PERF_HES_UPTODATE;
    108          struct hw_perf_event *hwc = &event->hw;
    110          hwc->state = PERF_HES_UPTODATE | PERF_HES_STOPPED;

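The AMD power PMU has no event counter to sample; event_update() instead reads an accumulated-power register and a timestamp, and derives power from the two deltas since the values cached in hwc->pwr_acc and hwc->ptsc. A sketch of that two-delta shape; the 'ratio' scaling is a stand-in, the driver's exact formula differs:

    #include <stdint.h>

    struct power_ev {
        uint64_t prev_pwr_acc;  /* hwc->pwr_acc: last accumulated-power sample */
        uint64_t prev_ptsc;     /* hwc->ptsc:   last timestamp sample */
    };

    /* Return average power over the interval, given fresh register samples.
     * 'ratio' stands in for the hardware's sample-ratio scaling; this only
     * shows the two-delta structure, not the driver's exact arithmetic. */
    static uint64_t power_update(struct power_ev *ev, uint64_t pwr_acc,
                                 uint64_t ptsc, uint64_t ratio)
    {
        uint64_t dpwr  = pwr_acc - ev->prev_pwr_acc;
        uint64_t dptsc = ptsc - ev->prev_ptsc;

        ev->prev_pwr_acc = pwr_acc;
        ev->prev_ptsc = ptsc;

        return dptsc ? (dpwr * ratio) / dptsc : 0;  /* avoid divide-by-zero */
    }
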
/linux/drivers/perf/hisilicon/

hisi_uncore_pmu.c:
    172          struct hw_perf_event *hwc = &event->hw;
    211          hwc->idx = -1;
    212          hwc->config_base = event->attr.config;
    231          struct hw_perf_event *hwc = &event->hw;
    233          hisi_pmu->ops->write_evtype(hisi_pmu, hwc->idx,
    239          hisi_pmu->ops->enable_counter_int(hisi_pmu, hwc);
    240          hisi_pmu->ops->enable_counter(hisi_pmu, hwc);
    249          struct hw_perf_event *hwc = &event->hw;
    251          hisi_pmu->ops->disable_counter(hisi_pmu, hwc);
    252          hisi_pmu->ops->disable_counter_int(hisi_pmu, hwc);
    [all …]

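The HiSilicon uncore code keeps device-specific register access behind hisi_pmu->ops, so the generic enable path above is just a fixed sequence of indirect calls. A toy version of that dispatch, with an invented two-method ops table and a printf backend standing in for real register writes:

    #include <stdio.h>

    struct pmu;

    struct pmu_ops {                    /* analogue of the hisi uncore ops table */
        void (*write_evtype)(struct pmu *, int idx, unsigned type);
        void (*enable_counter)(struct pmu *, int idx);
    };

    struct pmu {
        const struct pmu_ops *ops;
    };

    /* Generic path: device differences live entirely behind ops. */
    static void pmu_enable_event(struct pmu *p, int idx, unsigned type)
    {
        p->ops->write_evtype(p, idx, type);
        p->ops->enable_counter(p, idx);
    }

    /* One concrete backend, standing in for a specific uncore block. */
    static void fake_write_evtype(struct pmu *p, int idx, unsigned type)
    { (void)p; printf("evtype[%d] = %#x\n", idx, type); }
    static void fake_enable(struct pmu *p, int idx)
    { (void)p; printf("enable counter %d\n", idx); }

    static const struct pmu_ops fake_ops = { fake_write_evtype, fake_enable };

    int main(void)
    {
        struct pmu p = { &fake_ops };
        pmu_enable_event(&p, 0, 0x11);
        return 0;
    }
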
hisi_pcie_pmu.c:
    268          struct hw_perf_event *hwc = &event->hw;
    271          hisi_pcie_pmu_writeq(pcie_pmu, HISI_PCIE_EVENT_CTRL, hwc->idx, reg);
    277          struct hw_perf_event *hwc = &event->hw;
    279          hisi_pcie_pmu_writeq(pcie_pmu, HISI_PCIE_EVENT_CTRL, hwc->idx, HISI_PCIE_INIT_SET);
    384          struct hw_perf_event *hwc = &event->hw;
    391                  hwc->event_base = HISI_PCIE_EXT_CNT;
    393                  hwc->event_base = HISI_PCIE_CNT;
    448          struct hw_perf_event *hwc = &event->hw;
    452                  prev_cnt = local64_read(&hwc->prev_count);
    454          } while (local64_cmpxchg(&hwc->prev_count, prev_cnt,
    [all …]

/linux/drivers/gpu/drm/amd/amdgpu/

amdgpu_pmu.c:
    211          struct hw_perf_event *hwc = &event->hw;
    218          hwc->config = event->attr.config;
    219          hwc->config_base = AMDGPU_PMU_PERF_TYPE_NONE;
    227          struct hw_perf_event *hwc = &event->hw;
    233          if (WARN_ON_ONCE(!(hwc->state & PERF_HES_STOPPED)))
    240          WARN_ON_ONCE(!(hwc->state & PERF_HES_UPTODATE));
    241          hwc->state = 0;
    243          switch (hwc->config_base) {
    248                                          hwc->config, 0 /* unused */,
    253                          hwc->idx = target_cntr;
    [all …]

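amdgpu_perf_start() illustrates the perf start/stop handshake: an event is added in the stopped-and-up-to-date state, and start() asserts both flags before clearing them and touching hardware. A bare model of the flag protocol, with stand-in flag values:

    #include <assert.h>

    enum { HES_STOPPED = 1 << 0, HES_UPTODATE = 1 << 1 };  /* PERF_HES_* stand-ins */

    struct sw_event { unsigned state; };

    static void ev_add(struct sw_event *e)
    {
        e->state = HES_STOPPED | HES_UPTODATE;  /* added but not yet counting */
    }

    static void ev_start(struct sw_event *e)
    {
        assert(e->state & HES_STOPPED);     /* like WARN_ON_ONCE(!(state & STOPPED)) */
        assert(e->state & HES_UPTODATE);    /* count must be current on restart */
        e->state = 0;                       /* now running */
        /* ...program and unhalt the hardware counter here... */
    }
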
/linux/arch/xtensa/kernel/

perf_event.c:
    146                                      struct hw_perf_event *hwc, int idx)
    152                  prev_raw_count = local64_read(&hwc->prev_count);
    154          } while (local64_cmpxchg(&hwc->prev_count, prev_raw_count,
    160          local64_sub(delta, &hwc->period_left);
    164                                          struct hw_perf_event *hwc, int idx)
    172                  s64 period = hwc->sample_period;
    174                  left = local64_read(&hwc->period_left);
    177                          local64_set(&hwc->period_left, left);
    178                          hwc->last_period = period;
    182                          local64_set(&hwc->period_left, left);
    [all …]

/linux/arch/arc/kernel/

perf_event.c:
    281                                    struct hw_perf_event *hwc, int idx)
    283          u64 prev_raw_count = local64_read(&hwc->prev_count);
    291          local64_set(&hwc->prev_count, new_raw_count);
    293          local64_sub(delta, &hwc->period_left);
    331          struct hw_perf_event *hwc = &event->hw;
    335                  hwc->sample_period = arc_pmu->max_period;
    336                  hwc->last_period = hwc->sample_period;
    337                  local64_set(&hwc->period_left, hwc->sample_period);
    340          hwc->config = 0;
    345                  hwc->config |= ARC_REG_PCT_CONFIG_KERN;
    [all …]

/linux/arch/x86/events/intel/

uncore_nhmex.c:
    248          struct hw_perf_event *hwc = &event->hw;
    250          if (hwc->idx == UNCORE_PMC_IDX_FIXED)
    251                  wrmsrl(hwc->config_base, NHMEX_PMON_CTL_EN_BIT0);
    253                  wrmsrl(hwc->config_base, hwc->config | NHMEX_PMON_CTL_EN_BIT22);
    255                  wrmsrl(hwc->config_base, hwc->config | NHMEX_PMON_CTL_EN_BIT0);
    353          struct hw_perf_event *hwc = &event->hw;
    354          struct hw_perf_event_extra *reg1 = &hwc->extra_reg;
    355          struct hw_perf_event_extra *reg2 = &hwc->branch_reg;
    358          ctr = (hwc->config & NHMEX_B_PMON_CTR_MASK) >>
    360          ev_sel = (hwc->config & NHMEX_B_PMON_CTL_EV_SEL_MASK) >>
    [all …]

uncore_discovery.c:
    460          struct hw_perf_event *hwc = &event->hw;
    462          wrmsrl(hwc->config_base, hwc->config);
    468          struct hw_perf_event *hwc = &event->hw;
    470          wrmsrl(hwc->config_base, 0);
    485          struct hw_perf_event *hwc = &event->hw;
    492          hwc->config_base = uncore_pci_event_ctl(box, hwc->idx);
    493          hwc->event_base = uncore_pci_perf_ctr(box, hwc->idx);
    503          hwc->config_base = box_ctl + uncore_pci_event_ctl(box, hwc->idx);
    504          hwc->event_base = box_ctl + uncore_pci_perf_ctr(box, hwc->idx);
    508          hwc->config_base = box_ctl + box->pmu->type->event_ctl + hwc->idx;
    [all …]

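intel_generic_uncore_assign_hw_event() precomputes each counter's control and value register addresses from a box-level base plus per-type offsets, caching them in hwc->config_base and hwc->event_base so later enables are a single write. Schematically, with an explicit stride that the MSR case above collapses to 1:

    #include <stdint.h>

    struct box_layout {
        uint64_t box_ctl;   /* base of this uncore box's register block */
        uint64_t event_ctl; /* offset of counter 0's control register */
        uint64_t perf_ctr;  /* offset of counter 0's value register */
        uint64_t stride;    /* distance between consecutive counters */
    };

    struct assigned { uint64_t config_base, event_base; };

    /* Mirror of the "box_ctl + offset + idx" pattern in assign_hw_event(). */
    static struct assigned assign_counter(const struct box_layout *l, int idx)
    {
        struct assigned a = {
            .config_base = l->box_ctl + l->event_ctl + idx * l->stride,
            .event_base  = l->box_ctl + l->perf_ctr + idx * l->stride,
        };
        return a;
    }
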
p4.c:
    856  static inline int p4_pmu_clear_cccr_ovf(struct hw_perf_event *hwc)
    861          rdmsrl(hwc->config_base, v);
    863                  wrmsrl(hwc->config_base, v & ~P4_CCCR_OVF);
    874          rdmsrl(hwc->event_base, v);
    906          struct hw_perf_event *hwc = &event->hw;
    913          (void)wrmsrl_safe(hwc->config_base,
    914                  p4_config_unpack_cccr(hwc->config) & ~P4_CCCR_ENABLE & ~P4_CCCR_OVF & ~P4_CCCR_RESERVED);
    952          struct hw_perf_event *hwc = &event->hw;
    953          int thread = p4_ht_config_thread(hwc->config);
    954          u64 escr_conf = p4_config_unpack_escr(p4_clear_ht_bit(hwc->config));
    [all …]

/linux/arch/sh/kernel/

perf_event.c:
    103          struct hw_perf_event *hwc = &event->hw;
    153          hwc->config |= config;
    159                                     struct hw_perf_event *hwc, int idx)
    178          prev_raw_count = local64_read(&hwc->prev_count);
    181          if (local64_cmpxchg(&hwc->prev_count, prev_raw_count,
    202          struct hw_perf_event *hwc = &event->hw;
    203          int idx = hwc->idx;
    206          sh_pmu->disable(hwc, idx);
    220          struct hw_perf_event *hwc = &event->hw;
    221          int idx = hwc->idx;
    [all …]

/linux/arch/mips/kernel/

perf_event_mipsxx.c:
    315                                     struct hw_perf_event *hwc)
    325                  cntr_mask = (hwc->event_base >> 10) & 0xffff;
    327                  cntr_mask = (hwc->event_base >> 8) & 0xffff;
    410                                     struct hw_perf_event *hwc,
    413          u64 left = local64_read(&hwc->period_left);
    414          u64 period = hwc->sample_period;
    420                  local64_set(&hwc->period_left, left);
    421                  hwc->last_period = period;
    426                  local64_set(&hwc->period_left, left);
    427                  hwc->last_period = period;
    [all …]

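On MIPS, the event encoding itself carries a mask of the hardware counters the event may use: mipsxx_pmu_alloc_counter() extracts it from hwc->event_base (shifted by 10 or 8 depending on the CPU, per the lines above) and then picks a counter that is both allowed and free. A sketch of that selection:

    #include <stdint.h>

    /* Extract the allowed-counter mask, as in mipsxx_pmu_alloc_counter();
     * 'wide_field' selects between the >> 10 and >> 8 layouts shown above. */
    static uint16_t allowed_mask(uint64_t event_base, int wide_field)
    {
        return (event_base >> (wide_field ? 10 : 8)) & 0xffff;
    }

    /* Pick the lowest counter the event may use that is not already busy.
     * Returns -1 when nothing suitable is free. */
    static int alloc_counter(uint16_t allowed, uint16_t used)
    {
        uint16_t candidates = allowed & (uint16_t)~used;
        if (!candidates)
            return -1;
        return __builtin_ctz(candidates);   /* index of the lowest set bit */
    }
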
/linux/arch/x86/events/

core.c:
    119          struct hw_perf_event *hwc = &event->hw;
    124          if (unlikely(!hwc->event_base))
    134          prev_raw_count = local64_read(&hwc->prev_count);
    136          rdpmcl(hwc->event_base_rdpmc, new_raw_count);
    137          } while (!local64_try_cmpxchg(&hwc->prev_count,
    152          local64_sub(delta, &hwc->period_left);
    367  set_ext_hw_attr(struct hw_perf_event *hwc, struct perf_event *event)
    397          hwc->config |= val;
    484          struct hw_perf_event *hwc = &event->hw;
    488          hwc->sample_period = x86_pmu.max_period;
    [all …]