
Searched refs:pmu (Results 1 – 25 of 459) sorted by relevance

/linux/drivers/gpu/drm/nouveau/nvkm/subdev/pmu/
base.c
32 struct nvkm_pmu *pmu = device->pmu; in nvkm_pmu_fan_controlled() local
37 if (pmu && pmu->func->code.size) in nvkm_pmu_fan_controlled()
48 nvkm_pmu_pgob(struct nvkm_pmu *pmu, bool enable) in nvkm_pmu_pgob() argument
50 if (pmu && pmu->func->pgob) in nvkm_pmu_pgob()
51 pmu->func->pgob(pmu, enable); in nvkm_pmu_pgob()
57 struct nvkm_pmu *pmu = container_of(work, typeof(*pmu), recv.work); in nvkm_pmu_recv() local
58 return pmu->func->recv(pmu); in nvkm_pmu_recv()
62 nvkm_pmu_send(struct nvkm_pmu *pmu, u32 reply[2], in nvkm_pmu_send() argument
65 if (!pmu || !pmu->func->send) in nvkm_pmu_send()
67 return pmu->func->send(pmu, reply, process, message, data0, data1); in nvkm_pmu_send()
[all …]
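
The base.c wrappers above follow nouveau's usual ops-table convention: every entry point checks both the subdev pointer and the optional per-chipset hook before dispatching, so chipsets that lack a feature simply leave the hook NULL. A minimal, compilable userspace sketch of that convention (all names hypothetical):

    struct pmu;

    struct pmu_func {
        /* optional hook: NULL means "not implemented on this chipset" */
        void (*pgob)(struct pmu *pmu, int enable);
    };

    struct pmu {
        const struct pmu_func *func;
    };

    static void pmu_pgob(struct pmu *pmu, int enable)
    {
        /* guard both the object and the hook, as nvkm_pmu_pgob() does */
        if (pmu && pmu->func->pgob)
            pmu->func->pgob(pmu, enable);
    }
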
gt215.c
30 gt215_pmu_send(struct nvkm_pmu *pmu, u32 reply[2], in gt215_pmu_send() argument
33 struct nvkm_subdev *subdev = &pmu->subdev; in gt215_pmu_send()
37 mutex_lock(&pmu->send.mutex); in gt215_pmu_send()
45 mutex_unlock(&pmu->send.mutex); in gt215_pmu_send()
54 pmu->recv.message = message; in gt215_pmu_send()
55 pmu->recv.process = process; in gt215_pmu_send()
65 pmu->send.base)); in gt215_pmu_send()
77 wait_event(pmu->recv.wait, (pmu->recv.process == 0)); in gt215_pmu_send()
78 reply[0] = pmu->recv.data[0]; in gt215_pmu_send()
79 reply[1] = pmu->recv.data[1]; in gt215_pmu_send()
[all …]
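
gt215_pmu_send() serializes senders on send.mutex, records which process it expects a reply from, and sleeps on recv.wait until the interrupt path clears recv.process and fills recv.data[]. A rough userspace analogue of that handshake using pthreads (hypothetical names; the kernel side uses wait_event() rather than a condition variable):

    #include <pthread.h>
    #include <stdint.h>

    struct pmu_recv {
        pthread_mutex_t lock;
        pthread_cond_t  wait;
        uint32_t        process;   /* nonzero while a reply is outstanding */
        uint32_t        data[2];
    };

    /* Sender side: block until the receive handler posts the reply. */
    static void recv_wait_reply(struct pmu_recv *recv, uint32_t reply[2])
    {
        pthread_mutex_lock(&recv->lock);
        while (recv->process != 0)
            pthread_cond_wait(&recv->wait, &recv->lock);
        reply[0] = recv->data[0];
        reply[1] = recv->data[1];
        pthread_mutex_unlock(&recv->lock);
    }

    /* Receiver side: fill in the data, clear process, wake the sender. */
    static void recv_post_reply(struct pmu_recv *recv, uint32_t d0, uint32_t d1)
    {
        pthread_mutex_lock(&recv->lock);
        recv->data[0] = d0;
        recv->data[1] = d1;
        recv->process = 0;
        pthread_cond_signal(&recv->wait);
        pthread_mutex_unlock(&recv->lock);
    }
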
gk20a.c
51 gk20a_pmu_dvfs_target(struct gk20a_pmu *pmu, int *state) in gk20a_pmu_dvfs_target() argument
53 struct nvkm_clk *clk = pmu->base.subdev.device->clk; in gk20a_pmu_dvfs_target()
59 gk20a_pmu_dvfs_get_cur_state(struct gk20a_pmu *pmu, int *state) in gk20a_pmu_dvfs_get_cur_state() argument
61 struct nvkm_clk *clk = pmu->base.subdev.device->clk; in gk20a_pmu_dvfs_get_cur_state()
67 gk20a_pmu_dvfs_get_target_state(struct gk20a_pmu *pmu, in gk20a_pmu_dvfs_get_target_state() argument
70 struct gk20a_pmu_dvfs_data *data = pmu->data; in gk20a_pmu_dvfs_get_target_state()
71 struct nvkm_clk *clk = pmu->base.subdev.device->clk; in gk20a_pmu_dvfs_get_target_state()
86 nvkm_trace(&pmu->base.subdev, "cur level = %d, new level = %d\n", in gk20a_pmu_dvfs_get_target_state()
95 gk20a_pmu_dvfs_get_dev_status(struct gk20a_pmu *pmu, in gk20a_pmu_dvfs_get_dev_status() argument
98 struct nvkm_falcon *falcon = &pmu->base.falcon; in gk20a_pmu_dvfs_get_dev_status()
[all …]
gm20b.c
42 struct nvkm_pmu *pmu = container_of(falcon, typeof(*pmu), falcon); in gm20b_pmu_acr_bootstrap_falcon() local
52 ret = nvkm_falcon_cmdq_send(pmu->hpq, &cmd.cmd.hdr, in gm20b_pmu_acr_bootstrap_falcon()
54 &pmu->subdev, msecs_to_jiffies(1000)); in gm20b_pmu_acr_bootstrap_falcon()
129 struct nvkm_pmu *pmu = priv; in gm20b_pmu_acr_init_wpr_callback() local
130 struct nvkm_subdev *subdev = &pmu->subdev; in gm20b_pmu_acr_init_wpr_callback()
139 complete_all(&pmu->wpr_ready); in gm20b_pmu_acr_init_wpr_callback()
144 gm20b_pmu_acr_init_wpr(struct nvkm_pmu *pmu) in gm20b_pmu_acr_init_wpr() argument
154 return nvkm_falcon_cmdq_send(pmu->hpq, &cmd.cmd.hdr, in gm20b_pmu_acr_init_wpr()
155 gm20b_pmu_acr_init_wpr_callback, pmu, 0); in gm20b_pmu_acr_init_wpr()
159 gm20b_pmu_initmsg(struct nvkm_pmu *pmu) in gm20b_pmu_initmsg() argument
[all …]
Kbuild
2 nvkm-y += nvkm/subdev/pmu/base.o
3 nvkm-y += nvkm/subdev/pmu/memx.o
4 nvkm-y += nvkm/subdev/pmu/gt215.o
5 nvkm-y += nvkm/subdev/pmu/gf100.o
6 nvkm-y += nvkm/subdev/pmu/gf119.o
7 nvkm-y += nvkm/subdev/pmu/gk104.o
8 nvkm-y += nvkm/subdev/pmu/gk110.o
9 nvkm-y += nvkm/subdev/pmu/gk208.o
10 nvkm-y += nvkm/subdev/pmu/gk20a.o
11 nvkm-y += nvkm/subdev/pmu/gm107.o
[all …]
/linux/drivers/gpu/drm/i915/
i915_pmu.c
36 return container_of(event->pmu, struct i915_pmu, base); in event_to_pmu()
39 static struct drm_i915_private *pmu_to_i915(struct i915_pmu *pmu) in pmu_to_i915() argument
41 return container_of(pmu, struct drm_i915_private, pmu); in pmu_to_i915()
152 static bool pmu_needs_timer(struct i915_pmu *pmu) in pmu_needs_timer() argument
154 struct drm_i915_private *i915 = pmu_to_i915(pmu); in pmu_needs_timer()
162 enable = pmu->enable; in pmu_needs_timer()
204 static u64 read_sample(struct i915_pmu *pmu, unsigned int gt_id, int sample) in read_sample() argument
206 return pmu->sample[gt_id][sample].cur; in read_sample()
210 store_sample(struct i915_pmu *pmu, unsigned int gt_id, int sample, u64 val) in store_sample() argument
212 pmu->sample[gt_id][sample].cur = val; in store_sample()
[all …]
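
event_to_pmu() and pmu_to_i915() above are both instances of the kernel's container_of() idiom: given a pointer to an embedded member, recover the enclosing structure by subtracting the member's offset. A compilable sketch with hypothetical structure names:

    #include <stddef.h>

    #define container_of(ptr, type, member) \
        ((type *)((char *)(ptr) - offsetof(type, member)))

    struct base_pmu { int type; };

    struct wrapped_pmu {
        struct base_pmu base;     /* embedded generic object */
        unsigned long   enable;
    };

    static struct wrapped_pmu *to_wrapped(struct base_pmu *b)
    {
        return container_of(b, struct wrapped_pmu, base);
    }
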
/linux/drivers/soc/dove/
pmu.c
50 struct pmu_data *pmu = rcdev_to_pmu(rc); in pmu_reset_reset() local
54 spin_lock_irqsave(&pmu->lock, flags); in pmu_reset_reset()
55 val = readl_relaxed(pmu->pmc_base + PMC_SW_RST); in pmu_reset_reset()
56 writel_relaxed(val & ~BIT(id), pmu->pmc_base + PMC_SW_RST); in pmu_reset_reset()
57 writel_relaxed(val | BIT(id), pmu->pmc_base + PMC_SW_RST); in pmu_reset_reset()
58 spin_unlock_irqrestore(&pmu->lock, flags); in pmu_reset_reset()
65 struct pmu_data *pmu = rcdev_to_pmu(rc); in pmu_reset_assert() local
69 spin_lock_irqsave(&pmu->lock, flags); in pmu_reset_assert()
70 val &= readl_relaxed(pmu->pmc_base + PMC_SW_RST); in pmu_reset_assert()
71 writel_relaxed(val, pmu->pmc_base + PMC_SW_RST); in pmu_reset_assert()
[all …]
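
pmu_reset_reset() pulses a reset line by clearing and then re-setting one bit of PMC_SW_RST, holding pmu->lock so two read-modify-write sequences cannot interleave. A sketch of the pulse itself (the MMIO accessors are hypothetical stand-ins for readl_relaxed()/writel_relaxed(); locking is left to the caller here):

    #include <stdint.h>

    static inline uint32_t mmio_read32(volatile uint32_t *reg)
    {
        return *reg;
    }

    static inline void mmio_write32(volatile uint32_t *reg, uint32_t val)
    {
        *reg = val;
    }

    /* Pulse bit `id` low then high; caller must hold the controller lock. */
    static void reset_pulse(volatile uint32_t *sw_rst, unsigned int id)
    {
        uint32_t val = mmio_read32(sw_rst);

        mmio_write32(sw_rst, val & ~(1u << id));  /* put the block in reset */
        mmio_write32(sw_rst, val |  (1u << id));  /* release it again */
    }
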
/linux/drivers/pmdomain/starfive/
jh71xx-pmu.c
64 struct jh71xx_pmu *pmu);
81 struct jh71xx_pmu *pmu; member
87 struct jh71xx_pmu *pmu = pmd->pmu; in jh71xx_pmu_get_state() local
92 *is_on = readl(pmu->base + pmu->match_data->pmu_status) & mask; in jh71xx_pmu_get_state()
99 struct jh71xx_pmu *pmu = pmd->pmu; in jh7110_pmu_set_state() local
107 spin_lock_irqsave(&pmu->lock, flags); in jh7110_pmu_set_state()
129 writel(mask, pmu->base + mode); in jh7110_pmu_set_state()
139 writel(JH71XX_PMU_SW_ENCOURAGE_ON, pmu->base + JH71XX_PMU_SW_ENCOURAGE); in jh7110_pmu_set_state()
140 writel(encourage_lo, pmu->base + JH71XX_PMU_SW_ENCOURAGE); in jh7110_pmu_set_state()
141 writel(encourage_hi, pmu->base + JH71XX_PMU_SW_ENCOURAGE); in jh7110_pmu_set_state()
[all …]
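
jh7110_pmu_set_state() drives the power-domain handshake under a spinlock: select the turn-on/turn-off mode register, write the domain mask, then kick the SW_ENCOURAGE sequence. The excerpt does not show how completion is detected, but jh71xx_pmu_get_state() reads a status register, so a bounded poll along these lines is a plausible sketch (the helper name and retry scheme are assumptions, not the driver's actual code):

    #include <stdbool.h>
    #include <stdint.h>

    /* Poll a status register until the domain bit matches the requested
     * state or the retry budget runs out. */
    static bool domain_wait(volatile uint32_t *status, uint32_t mask,
                            bool want_on, unsigned int retries)
    {
        while (retries--) {
            bool is_on = (*status & mask) != 0;
            if (is_on == want_on)
                return true;
        }
        return false;   /* caller reports a timeout */
    }
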
/linux/drivers/perf/amlogic/
meson_ddr_pmu_core.c
21 struct pmu pmu; member
35 #define to_ddr_pmu(p) container_of(p, struct ddr_pmu, pmu)
38 static void dmc_pmu_enable(struct ddr_pmu *pmu) in dmc_pmu_enable() argument
40 if (!pmu->pmu_enabled) in dmc_pmu_enable()
41 pmu->info.hw_info->enable(&pmu->info); in dmc_pmu_enable()
43 pmu->pmu_enabled = true; in dmc_pmu_enable()
46 static void dmc_pmu_disable(struct ddr_pmu *pmu) in dmc_pmu_disable() argument
48 if (pmu->pmu_enabled) in dmc_pmu_disable()
49 pmu->info.hw_info->disable(&pmu->info); in dmc_pmu_disable()
51 pmu->pmu_enabled = false; in dmc_pmu_disable()
[all …]
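
dmc_pmu_enable()/dmc_pmu_disable() make hardware toggling idempotent: the pmu_enabled flag ensures the hw_info enable/disable hooks fire only on actual state edges, so repeated calls are harmless. A minimal sketch of the same guard (hypothetical types):

    #include <stdbool.h>

    struct hw_ops {
        void (*enable)(void *info);
        void (*disable)(void *info);
    };

    struct ddr_pmu_like {
        bool enabled;
        const struct hw_ops *ops;
        void *info;
    };

    static void pmu_enable(struct ddr_pmu_like *p)
    {
        if (!p->enabled)               /* touch hardware only on the 0 -> 1 edge */
            p->ops->enable(p->info);
        p->enabled = true;
    }
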
/linux/tools/perf/arch/arm/util/
pmu.c
19 void perf_pmu__arch_init(struct perf_pmu *pmu) in perf_pmu__arch_init() argument
24 if (!strcmp(pmu->name, CORESIGHT_ETM_PMU_NAME)) { in perf_pmu__arch_init()
26 pmu->auxtrace = true; in perf_pmu__arch_init()
27 pmu->selectable = true; in perf_pmu__arch_init()
28 pmu->perf_event_attr_init_default = cs_etm_get_default_config; in perf_pmu__arch_init()
30 } else if (strstarts(pmu->name, ARM_SPE_PMU_NAME)) { in perf_pmu__arch_init()
31 pmu->auxtrace = true; in perf_pmu__arch_init()
32 pmu->selectable = true; in perf_pmu__arch_init()
33 pmu->is_uncore = false; in perf_pmu__arch_init()
34 pmu->perf_event_attr_init_default = arm_spe_pmu_default_config; in perf_pmu__arch_init()
[all …]
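
perf_pmu__arch_init() above dispatches per-PMU quirks by name: an exact strcmp() for the CoreSight ETM PMU, and a prefix match via strstarts() for the numbered arm_spe_N instances. strstarts() is a kernel/perf string helper; a userspace equivalent is one line:

    #include <stdbool.h>
    #include <string.h>

    /* Prefix match, e.g. starts_with("arm_spe_0", "arm_spe_") -> true. */
    static bool starts_with(const char *str, const char *prefix)
    {
        return strncmp(str, prefix, strlen(prefix)) == 0;
    }
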
/linux/drivers/perf/
arm_pmu_platform.c
25 static int probe_current_pmu(struct arm_pmu *pmu, in probe_current_pmu() argument
37 ret = info->init(pmu); in probe_current_pmu()
45 static int pmu_parse_percpu_irq(struct arm_pmu *pmu, int irq) in pmu_parse_percpu_irq() argument
48 struct pmu_hw_events __percpu *hw_events = pmu->hw_events; in pmu_parse_percpu_irq()
50 ret = irq_get_percpu_devid_partition(irq, &pmu->supported_cpus); in pmu_parse_percpu_irq()
54 for_each_cpu(cpu, &pmu->supported_cpus) in pmu_parse_percpu_irq()
95 static int pmu_parse_irqs(struct arm_pmu *pmu) in pmu_parse_irqs() argument
98 struct platform_device *pdev = pmu->plat_device; in pmu_parse_irqs()
99 struct pmu_hw_events __percpu *hw_events = pmu->hw_events; in pmu_parse_irqs()
112 pmu->pmu.capabilities |= PERF_PMU_CAP_NO_INTERRUPT; in pmu_parse_irqs()
[all …]
arm_pmu.c
185 if (type == event->pmu->type) in armpmu_map_event()
202 struct arm_pmu *armpmu = to_arm_pmu(event->pmu); in armpmu_event_set_period()
244 struct arm_pmu *armpmu = to_arm_pmu(event->pmu); in armpmu_event_update()
274 struct arm_pmu *armpmu = to_arm_pmu(event->pmu); in armpmu_stop()
290 struct arm_pmu *armpmu = to_arm_pmu(event->pmu); in armpmu_start()
315 struct arm_pmu *armpmu = to_arm_pmu(event->pmu); in armpmu_del()
331 struct arm_pmu *armpmu = to_arm_pmu(event->pmu); in armpmu_add()
364 validate_event(struct pmu *pmu, struct pmu_hw_events *hw_events, in validate_event() argument
377 if (event->pmu != pmu) in validate_event()
386 armpmu = to_arm_pmu(event->pmu); in validate_event()
[all …]
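
armpmu_event_update() and armpmu_event_set_period() implement the standard perf delta accounting: hardware counters are narrower than 64 bits, so each update computes (new - prev) modulo the counter width and accumulates it into the event's 64-bit count. A sketch of that arithmetic (the kernel versions also use cmpxchg-style atomics, omitted here):

    #include <stdint.h>

    /* Accumulate a possibly-wrapped hardware counter into a 64-bit total. */
    static uint64_t counter_update(uint64_t *prev, uint64_t now,
                                   unsigned int width, uint64_t *total)
    {
        uint64_t mask  = (width >= 64) ? ~(uint64_t)0
                                       : ((uint64_t)1 << width) - 1;
        uint64_t delta = (now - *prev) & mask;   /* modular subtraction handles wrap */

        *prev   = now;
        *total += delta;
        return *total;
    }
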
arm_pmu_acpi.c
252 struct arm_pmu *pmu; in arm_pmu_acpi_find_pmu() local
256 pmu = per_cpu(probed_pmus, cpu); in arm_pmu_acpi_find_pmu()
257 if (!pmu || pmu->acpi_cpuid != cpuid) in arm_pmu_acpi_find_pmu()
260 return pmu; in arm_pmu_acpi_find_pmu()
270 static bool pmu_irq_matches(struct arm_pmu *pmu, int irq) in pmu_irq_matches() argument
272 struct pmu_hw_events __percpu *hw_events = pmu->hw_events; in pmu_irq_matches()
278 for_each_cpu(cpu, &pmu->supported_cpus) { in pmu_irq_matches()
295 static void arm_pmu_acpi_associate_pmu_cpu(struct arm_pmu *pmu, in arm_pmu_acpi_associate_pmu_cpu() argument
300 per_cpu(probed_pmus, cpu) = pmu; in arm_pmu_acpi_associate_pmu_cpu()
302 if (pmu_irq_matches(pmu, irq)) { in arm_pmu_acpi_associate_pmu_cpu()
[all …]
/linux/arch/x86/kvm/
pmu.h
9 #define vcpu_to_pmu(vcpu) (&(vcpu)->arch.pmu)
10 #define pmu_to_vcpu(pmu) (container_of((pmu), struct kvm_vcpu, arch.pmu)) argument
11 #define pmc_to_pmu(pmc) (&(pmc)->vcpu->arch.pmu)
52 static inline bool kvm_pmu_has_perf_global_ctrl(struct kvm_pmu *pmu) in kvm_pmu_has_perf_global_ctrl() argument
63 return pmu->version > 1; in kvm_pmu_has_perf_global_ctrl()
80 static inline struct kvm_pmc *kvm_pmc_idx_to_pmc(struct kvm_pmu *pmu, int idx) in kvm_pmc_idx_to_pmc() argument
82 if (idx < pmu->nr_arch_gp_counters) in kvm_pmc_idx_to_pmc()
83 return &pmu->gp_counters[idx]; in kvm_pmc_idx_to_pmc()
86 if (idx >= 0 && idx < pmu->nr_arch_fixed_counters) in kvm_pmc_idx_to_pmc()
87 return &pmu->fixed_counters[idx]; in kvm_pmc_idx_to_pmc()
[all …]
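
kvm_pmc_idx_to_pmc() maps a flat counter index onto two arrays: indexes below nr_arch_gp_counters select a general-purpose counter, and the remainder (after a rebase the excerpt elides) select a fixed counter, with NULL for anything out of range. A sketch of that shape, assuming the fixed range simply follows the GP range:

    #include <stddef.h>

    struct pmc_like { unsigned long long counter; };

    struct pmu_like {
        int nr_gp, nr_fixed;
        struct pmc_like gp[8];
        struct pmc_like fixed[4];
    };

    static struct pmc_like *idx_to_pmc(struct pmu_like *pmu, int idx)
    {
        if (idx >= 0 && idx < pmu->nr_gp)
            return &pmu->gp[idx];
        idx -= pmu->nr_gp;            /* assumption: fixed counters follow GP */
        if (idx >= 0 && idx < pmu->nr_fixed)
            return &pmu->fixed[idx];
        return NULL;                  /* out of range */
    }
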
/linux/tools/perf/tests/
pmu-events.c
41 struct perf_pmu pmu; member
47 .pmu = "default_core",
59 .pmu = "default_core",
71 .pmu = "default_core",
83 .pmu = "default_core",
95 .pmu = "default_core",
107 .pmu = "default_core",
135 .pmu = "hisi_sccl,ddrc",
149 .pmu = "uncore_cbox",
163 .pmu = "uncore_cbox",
[all …]
/linux/arch/powerpc/perf/
imc-pmu.c
61 return container_of(event->pmu, struct imc_pmu, pmu); in imc_event_to_pmu()
105 struct pmu *pmu = dev_get_drvdata(dev); in imc_pmu_cpumask_get_attr() local
106 struct imc_pmu *imc_pmu = container_of(pmu, struct imc_pmu, pmu); in imc_pmu_cpumask_get_attr()
221 static int update_events_in_group(struct device_node *node, struct imc_pmu *pmu) in update_events_in_group() argument
260 pmu->events = kcalloc(ct, sizeof(struct imc_events), GFP_KERNEL); in update_events_in_group()
261 if (!pmu->events) { in update_events_in_group()
269 ret = imc_parse_event(np, g_scale, g_unit, prefix, base_reg, &pmu->events[ct]); in update_events_in_group()
279 imc_free_events(pmu->events, ct); in update_events_in_group()
294 imc_free_events(pmu->events, ct); in update_events_in_group()
301 ev_val_str = kasprintf(GFP_KERNEL, "event=0x%x", pmu->events[i].value); in update_events_in_group()
[all …]
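
update_events_in_group() shows the usual partial-failure cleanup for a parsed table: kcalloc() the array up front, fill it entry by entry, and on error free only the ct entries built so far via imc_free_events(). A userspace sketch of the same unwind discipline (hypothetical event type):

    #include <stdlib.h>

    struct event_like { char *name; };

    /* Free exactly the first `ct` built entries, then the table itself. */
    static void free_events(struct event_like *ev, int ct)
    {
        for (int i = 0; i < ct; i++)
            free(ev[i].name);
        free(ev);
    }
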
/linux/tools/perf/pmu-events/
Build
1 pmu-events-y += pmu-events.o
2 JDIR = pmu-events/arch/$(SRCARCH)
5 JDIR_TEST = pmu-events/arch/test
8 JEVENTS_PY = pmu-events/jevents.py
9 METRIC_PY = pmu-events/metric.py
10 METRIC_TEST_PY = pmu-events/metric_test.py
11 EMPTY_PMU_EVENTS_C = pmu-events/empty-pmu-events.c
12 PMU_EVENTS_C = $(OUTPUT)pmu-events/pmu-events.c
13 METRIC_TEST_LOG = $(OUTPUT)pmu-events/metric_test.log
14 TEST_EMPTY_PMU_EVENTS_C = $(OUTPUT)pmu-events/test-empty-pmu-events.c
[all …]
/linux/Documentation/devicetree/bindings/pinctrl/
marvell,dove-pinctrl.txt
14 Note: pmu* also allows for Power Management functions listed below
18 mpp0 0 gpio, pmu, uart2(rts), sdio0(cd), lcd0(pwm), pmu*
19 mpp1 1 gpio, pmu, uart2(cts), sdio0(wp), lcd1(pwm), pmu*
20 mpp2 2 gpio, pmu, uart2(txd), sdio0(buspwr), sata(prsnt),
21 uart1(rts), pmu*
22 mpp3 3 gpio, pmu, uart2(rxd), sdio0(ledctrl), sata(act),
23 uart1(cts), lcd-spi(cs1), pmu*
24 mpp4 4 gpio, pmu, uart3(rts), sdio1(cd), spi1(miso), pmu*
25 mpp5 5 gpio, pmu, uart3(cts), sdio1(wp), spi1(cs), pmu*
26 mpp6 6 gpio, pmu, uart3(txd), sdio1(buspwr), spi1(mosi), pmu*
[all …]
/linux/arch/x86/events/intel/
uncore.c
138 struct intel_uncore_box *uncore_pmu_to_box(struct intel_uncore_pmu *pmu, int cpu) in uncore_pmu_to_box() argument
146 return dieid < uncore_max_dies() ? pmu->boxes[dieid] : NULL; in uncore_pmu_to_box()
389 return &box->pmu->pmu == event->pmu; in is_box_event()
399 max_count = box->pmu->type->num_counters; in uncore_collect_events()
400 if (box->pmu->type->fixed_ctl) in uncore_collect_events()
433 struct intel_uncore_type *type = box->pmu->type; in uncore_get_event_constraint()
458 if (box->pmu->type->ops->put_constraint) in uncore_put_event_constraint()
459 box->pmu->type->ops->put_constraint(box, event); in uncore_put_event_constraint()
697 static int uncore_validate_group(struct intel_uncore_pmu *pmu, in uncore_validate_group() argument
708 fake_box = uncore_alloc_box(pmu->type, NUMA_NO_NODE); in uncore_validate_group()
[all …]
/linux/drivers/pmdomain/rockchip/
pm-domains.c
89 struct rockchip_pmu *pmu; member
221 struct rockchip_pmu *pmu; in rockchip_pmu_block() local
231 pmu = dmc_pmu; in rockchip_pmu_block()
238 mutex_lock(&pmu->mutex); in rockchip_pmu_block()
250 for (i = 0; i < pmu->genpd_data.num_domains; i++) { in rockchip_pmu_block()
251 genpd = pmu->genpd_data.domains[i]; in rockchip_pmu_block()
256 dev_err(pmu->dev, in rockchip_pmu_block()
268 genpd = pmu->genpd_data.domains[i]; in rockchip_pmu_block()
274 mutex_unlock(&pmu->mutex); in rockchip_pmu_block()
284 struct rockchip_pmu *pmu; in rockchip_pmu_unblock() local
[all …]
/linux/drivers/gpu/drm/nouveau/nvkm/subdev/devinit/
gm200.c
33 pmu_code(struct nv50_devinit *init, u32 pmu, u32 img, u32 len, bool sec) in pmu_code() argument
39 nvkm_wr32(device, 0x10a180, 0x01000000 | (sec ? 0x10000000 : 0) | pmu); in pmu_code()
42 nvkm_wr32(device, 0x10a188, (pmu + i) >> 8); in pmu_code()
53 pmu_data(struct nv50_devinit *init, u32 pmu, u32 img, u32 len) in pmu_data() argument
59 nvkm_wr32(device, 0x10a1c0, 0x01000000 | pmu); in pmu_data()
88 struct nvbios_pmuR pmu; in pmu_load() local
91 if (!nvbios_pmuRm(bios, type, &pmu)) in pmu_load()
94 if (!post || !subdev->device->pmu) in pmu_load()
97 ret = nvkm_falcon_reset(&subdev->device->pmu->falcon); in pmu_load()
101 pmu_code(init, pmu.boot_addr_pmu, pmu.boot_addr, pmu.boot_size, false); in pmu_load()
[all …]
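
pmu_code() and pmu_data() upload a firmware image through an indexed MMIO window: write a control word selecting the destination (with a secure-mode flag), stream 32-bit words into a data port, and update a page register as the destination crosses block boundaries. A generic sketch of that style of upload, with illustrative register pointers and a 256-byte page granularity guessed from the (pmu + i) >> 8 in the excerpt (the real register semantics differ):

    #include <stdint.h>

    /* `len` is in bytes and assumed word-aligned. */
    static void window_upload(volatile uint32_t *ctrl, volatile uint32_t *page,
                              volatile uint32_t *data, uint32_t dst,
                              const uint32_t *img, uint32_t len, int secure)
    {
        *ctrl = 0x01000000 | (secure ? 0x10000000 : 0) | dst;

        for (uint32_t i = 0; i < len; i += 4) {
            if ((i & 0xff) == 0)
                *page = (dst + i) >> 8;   /* select the next 256-byte page */
            *data = img[i / 4];
        }
    }
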
/linux/tools/perf/util/
hwmon_pmu.c
105 struct perf_pmu pmu; member
141 bool perf_pmu__is_hwmon(const struct perf_pmu *pmu) in perf_pmu__is_hwmon() argument
143 return pmu && pmu->type >= PERF_PMU_TYPE_HWMON_START && in perf_pmu__is_hwmon()
144 pmu->type <= PERF_PMU_TYPE_HWMON_END; in perf_pmu__is_hwmon()
149 return perf_pmu__is_hwmon(evsel->pmu); in evsel__is_hwmon()
250 static int hwmon_pmu__read_events(struct hwmon_pmu *pmu) in hwmon_pmu__read_events() argument
258 if (pmu->pmu.sysfs_aliases_loaded) in hwmon_pmu__read_events()
265 dup_fd = openat(pmu->hwmon_dir_fd, ".", O_DIRECTORY); in hwmon_pmu__read_events()
293 if (!hashmap__find(&pmu->events, key.type_and_num, &value)) { in hwmon_pmu__read_events()
299 err = hashmap__add(&pmu->events, key.type_and_num, value); in hwmon_pmu__read_events()
[all …]
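
hwmon_pmu__read_events() combines two common patterns: a load-once guard (bail out early if sysfs_aliases_loaded is already set) and a find-or-add lookup keyed by the hwmon type/number pair. A compact sketch of the find-or-add shape using a fixed array as a stand-in for tools/perf's hashmap (all names hypothetical):

    #include <stdbool.h>
    #include <stddef.h>

    struct event_map {
        long   keys[64];
        void  *vals[64];
        size_t n;
        bool   loaded;    /* load-once guard, like sysfs_aliases_loaded */
    };

    /* Return the existing value for `key`, or install `fresh` and return it. */
    static void *find_or_add(struct event_map *m, long key, void *fresh)
    {
        for (size_t i = 0; i < m->n; i++)
            if (m->keys[i] == key)
                return m->vals[i];
        if (m->n == 64)
            return NULL;  /* map full; the real code grows the hashmap */
        m->keys[m->n] = key;
        m->vals[m->n] = fresh;
        return m->vals[m->n++];
    }
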
/linux/drivers/regulator/
bcm590xx-regulator.c
278 struct bcm590xx_reg *pmu; in bcm590xx_probe() local
284 pmu = devm_kzalloc(&pdev->dev, sizeof(*pmu), GFP_KERNEL); in bcm590xx_probe()
285 if (!pmu) in bcm590xx_probe()
288 pmu->mfd = bcm590xx; in bcm590xx_probe()
290 platform_set_drvdata(pdev, pmu); in bcm590xx_probe()
292 pmu->desc = devm_kcalloc(&pdev->dev, in bcm590xx_probe()
296 if (!pmu->desc) in bcm590xx_probe()
303 pmu->desc[i].name = info->name; in bcm590xx_probe()
304 pmu->desc[i].of_match = of_match_ptr(info->name); in bcm590xx_probe()
305 pmu->desc[i].regulators_node = of_match_ptr("regulators"); in bcm590xx_probe()
[all …]
/linux/drivers/cxl/core/
pmu.c
14 struct cxl_pmu *pmu = to_cxl_pmu(dev); in cxl_pmu_release() local
16 kfree(pmu); in cxl_pmu_release()
32 struct cxl_pmu *pmu; in devm_cxl_pmu_add() local
36 pmu = kzalloc(sizeof(*pmu), GFP_KERNEL); in devm_cxl_pmu_add()
37 if (!pmu) in devm_cxl_pmu_add()
40 pmu->assoc_id = assoc_id; in devm_cxl_pmu_add()
41 pmu->index = index; in devm_cxl_pmu_add()
42 pmu->type = type; in devm_cxl_pmu_add()
43 pmu->base = regs->pmu; in devm_cxl_pmu_add()
44 dev = &pmu->dev; in devm_cxl_pmu_add()
[all …]
/linux/drivers/dma/idxd/
perfmon.h
22 struct pmu *pmu; in event_to_pmu() local
24 pmu = event->pmu; in event_to_pmu()
25 idxd_pmu = container_of(pmu, struct idxd_pmu, pmu); in event_to_pmu()
33 struct pmu *pmu; in event_to_idxd() local
35 pmu = event->pmu; in event_to_idxd()
36 idxd_pmu = container_of(pmu, struct idxd_pmu, pmu); in event_to_idxd()
41 static inline struct idxd_device *pmu_to_idxd(struct pmu *pmu) in pmu_to_idxd() argument
45 idxd_pmu = container_of(pmu, struct idxd_pmu, pmu); in pmu_to_idxd()
