/linux/include/linux/
  math64.h
    163  #ifndef mul_u64_u32_shr
    164  static __always_inline u64 mul_u64_u32_shr(u64 a, u32 mul, unsigned int shift)   in mul_u64_u32_shr()
    179  #ifndef mul_u64_u32_shr
    180  static __always_inline u64 mul_u64_u32_shr(u64 a, u32 mul, unsigned int shift)   in mul_u64_u32_shr()
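math64.h supplies the generic fallbacks that every caller below relies on: compute (a * mul) >> shift without the intermediate product overflowing 64 bits. A user-space sketch of the two shapes the header offers (a widening 128-bit multiply and a split-halves fallback); function names and test values here are illustrative, not the verbatim kernel source:

    #include <stdint.h>
    #include <stdio.h>

    /* 128-bit variant: widen, multiply, shift (assumes GCC/Clang __int128 support). */
    static inline uint64_t mul_u64_u32_shr_128(uint64_t a, uint32_t mul, unsigned int shift)
    {
        return (uint64_t)(((unsigned __int128)a * mul) >> shift);
    }

    /* Split-halves variant: no partial product can overflow 64 bits.
     * Exact for shift <= 32, which is the helper's intended range. */
    static inline uint64_t mul_u64_u32_shr_portable(uint64_t a, uint32_t mul, unsigned int shift)
    {
        uint32_t ah = (uint32_t)(a >> 32), al = (uint32_t)a;
        uint64_t ret = ((uint64_t)al * mul) >> shift;

        if (ah)
            ret += ((uint64_t)ah * mul) << (32 - shift);
        return ret;
    }

    int main(void)
    {
        uint64_t cycles = 123456789012ULL;

        /* Scale by 838860800 / 2^32 (about 0.195); both variants agree. */
        printf("%llu %llu\n",
               (unsigned long long)mul_u64_u32_shr_128(cycles, 838860800u, 32),
               (unsigned long long)mul_u64_u32_shr_portable(cycles, 838860800u, 32));
        return 0;
    }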
/linux/drivers/gpu/drm/i915/
  i915_hwmon.c
    112  return mul_u64_u32_shr(reg_value, scale_factor, nshift);   in hwm_field_read_and_scale()
    161  *energy = mul_u64_u32_shr(ei->accum_energy, SF_ENERGY,   in hwm_energy()
    192  out = mul_u64_u32_shr(tau4, SF_TIME, hwmon->scl_shift_time + x_w);   in hwm_power1_max_interval_show()
    227  max_win = mul_u64_u32_shr(tau4, SF_TIME, hwmon->scl_shift_time + x_w);   in hwm_power1_max_interval_store()
    431  min = mul_u64_u32_shr(min, SF_POWER, hwmon->scl_shift_power);   in hwm_power_max_read()
    433  max = mul_u64_u32_shr(max, SF_POWER, hwmon->scl_shift_power);   in hwm_power_max_read()
    520  *val = mul_u64_u32_shr(REG_FIELD_GET(POWER_SETUP_I1_DATA_MASK, uval),   in hwm_power_read()
    637  *val = mul_u64_u32_shr(REG_FIELD_GET(POWER_SETUP_I1_DATA_MASK, uval),   in hwm_curr_read()
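The i915 hwmon driver uses the helper for fixed-point unit conversion: a register value expressed in hardware units of 1/2^N watts is scaled to the microwatt values the hwmon ABI expects. A standalone sketch of that shape; the scl_shift_power of 10 and the register value are made up for illustration, and SF_POWER here simply mirrors the microwatt scale the driver's name suggests:

    #include <stdint.h>
    #include <stdio.h>

    #define SF_POWER    1000000u    /* hwmon reports power in microwatts */

    /* Same math as the kernel helper, assuming __int128 support. */
    static inline uint64_t mul_u64_u32_shr(uint64_t a, uint32_t mul, unsigned int shift)
    {
        return (uint64_t)(((unsigned __int128)a * mul) >> shift);
    }

    int main(void)
    {
        /* Hypothetical register value: power in units of 1/2^10 W (LSB ~0.977 mW). */
        uint64_t reg_value = 23552;           /* 23552 / 1024 = 23 W */
        unsigned int scl_shift_power = 10;

        /* microwatts = reg_value * 1000000 / 2^10, with no intermediate overflow */
        uint64_t uw = mul_u64_u32_shr(reg_value, SF_POWER, scl_shift_power);
        printf("%llu uW\n", (unsigned long long)uw);    /* 23000000 */
        return 0;
    }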
/linux/drivers/gpu/drm/xe/
  xe_hwmon.c
    313  *value = mul_u64_u32_shr(reg_val, SF_POWER, hwmon->scl_shift_power);   in xe_hwmon_power_max_read()
    320  min = mul_u64_u32_shr(min, SF_POWER, hwmon->scl_shift_power);   in xe_hwmon_power_max_read()
    321  max = mul_u64_u32_shr(max, SF_POWER, hwmon->scl_shift_power);   in xe_hwmon_power_max_read()
    425  *value = mul_u64_u32_shr(reg_val, SF_POWER, hwmon->scl_shift_power);   in xe_hwmon_power_rated_max_read()
    486  *energy = mul_u64_u32_shr(ei->accum_energy, SF_ENERGY,   in xe_hwmon_energy_get()
    539  out = mul_u64_u32_shr(tau4, SF_TIME, hwmon->scl_shift_time + x_w);   in xe_hwmon_power_max_interval_show()
    580  max_win = mul_u64_u32_shr(tau4, SF_TIME, hwmon->scl_shift_time + x_w);   in xe_hwmon_power_max_interval_store()
    745  *value = mul_u64_u32_shr(REG_FIELD_GET(POWER_SETUP_I1_DATA_MASK, uval),   in xe_hwmon_power_curr_crit_read()
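The xe driver follows the same power-scaling pattern as i915; its energy path additionally accumulates a wrapping hardware counter before converting. A sketch of that accumulate-then-scale idea; the struct, the 1/2^14 J tick size and the sample values are assumptions for the example, with SF_ENERGY standing in for the microjoule scale factor:

    #include <stdint.h>
    #include <stdio.h>

    #define SF_ENERGY   1000000u    /* hwmon energy is reported in microjoules */

    static inline uint64_t mul_u64_u32_shr(uint64_t a, uint32_t mul, unsigned int shift)
    {
        return (uint64_t)(((unsigned __int128)a * mul) >> shift);
    }

    /* Hypothetical accumulator for a 32-bit wrapping energy counter. */
    struct energy_info {
        uint64_t accum_energy;   /* total ticks accumulated so far */
        uint32_t reg_val_prev;   /* last raw register sample */
    };

    static void accumulate(struct energy_info *ei, uint32_t reg_val)
    {
        /* Unsigned subtraction handles a single 32-bit wrap correctly. */
        ei->accum_energy += (uint32_t)(reg_val - ei->reg_val_prev);
        ei->reg_val_prev = reg_val;
    }

    int main(void)
    {
        struct energy_info ei = { 0, 0 };
        unsigned int scl_shift_energy = 14;   /* assumed: 1 tick = 1/2^14 J */

        accumulate(&ei, 4096);    /* 4096 ticks so far */
        accumulate(&ei, 40960);   /* 40960 ticks so far */

        /* microjoules = ticks * 1000000 / 2^14 */
        printf("%llu uJ\n",
               (unsigned long long)mul_u64_u32_shr(ei.accum_energy, SF_ENERGY,
                                                   scl_shift_energy));   /* 2500000 */
        return 0;
    }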
/linux/arch/x86/kernel/cpu/
  vmware.c
    168  ns = mul_u64_u32_shr(rdtsc(), vmware_cyc2ns.cyc2ns_mul,   in vmware_sched_clock()
    181  d->cyc2ns_offset = mul_u64_u32_shr(tsc_now, d->cyc2ns_mul,   in vmware_cyc2ns_setup()
    250  return mul_u64_u32_shr(clock, vmware_cyc2ns.cyc2ns_mul,   in vmware_steal_clock()
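vmware.c drives its paravirt sched_clock and steal_clock off the TSC with a precomputed mult/shift pair, so every clock read is one multiply and one shift. A standalone sketch of that cycles-to-nanoseconds scheme; the 2.4 GHz frequency and the multiplier derivation are illustrative, not the driver's exact rounding:

    #include <stdint.h>
    #include <stdio.h>

    #define NSEC_PER_SEC    1000000000ULL

    static inline uint64_t mul_u64_u32_shr(uint64_t a, uint32_t mul, unsigned int shift)
    {
        return (uint64_t)(((unsigned __int128)a * mul) >> shift);
    }

    int main(void)
    {
        /* Assumed 2.4 GHz TSC; pick a shift so the multiplier still fits in 32 bits. */
        uint64_t tsc_hz = 2400000000ULL;
        unsigned int shift = 32;
        uint32_t mult;

        while (((NSEC_PER_SEC << shift) / tsc_hz) > UINT32_MAX)
            shift--;
        mult = (uint32_t)((NSEC_PER_SEC << shift) / tsc_hz);

        /* ns = cycles * mult >> shift, the same shape as vmware_sched_clock() */
        uint64_t cycles = 4800000000ULL;    /* two seconds' worth of cycles */
        printf("mult=%u shift=%u -> %llu ns\n", mult, shift,
               (unsigned long long)mul_u64_u32_shr(cycles, mult, shift));
        return 0;
    }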
/linux/kernel/sched/
  pelt.c
     54  val = mul_u64_u32_shr(val, runnable_avg_yN_inv[local_n], 32);   in decay_load()
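PELT decays a load sum by y^n, where y is chosen so that y^32 == 0.5, and the per-period factors are held as 32.32 fixed-point inverses. A sketch of that decay; the kernel keeps runnable_avg_yN_inv as a precomputed table, whereas this illustration derives an equivalent one at run time (link with -lm):

    #include <stdint.h>
    #include <stdio.h>
    #include <math.h>

    static inline uint64_t mul_u64_u32_shr(uint64_t a, uint32_t mul, unsigned int shift)
    {
        return (uint64_t)(((unsigned __int128)a * mul) >> shift);
    }

    /* y^n scaled by 2^32 for n in [0, 31], with y = 0.5^(1/32). */
    static uint32_t yN_inv[32];

    static void init_yN_inv(void)
    {
        for (int n = 0; n < 32; n++) {
            double v = pow(0.5, n / 32.0) * 4294967296.0;
            yN_inv[n] = v >= 4294967296.0 ? 0xffffffffu : (uint32_t)(v + 0.5);
        }
    }

    /* Sketch of the decay_load() idea: halve once per 32 periods, then y^(n%32). */
    static uint64_t decay_load(uint64_t val, uint64_t n)
    {
        val >>= n / 32;
        return mul_u64_u32_shr(val, yN_inv[n % 32], 32);
    }

    int main(void)
    {
        init_yN_inv();
        printf("n=1:  %llu\n", (unsigned long long)decay_load(1024, 1));   /* ~1002 */
        printf("n=32: %llu\n", (unsigned long long)decay_load(1024, 32));  /* ~511  */
        return 0;
    }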
/linux/kernel/sched/
  fair.c
    284  return mul_u64_u32_shr(delta_exec, fact, shift);   in __calc_delta()
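__calc_delta() weights an execution delta by weight / lw.weight, replacing the division with a multiply by a precomputed 2^32/weight inverse and normalising the combined factor back into 32 bits. A sketch of that idea; the weight values and the inverse derivation below are made up for illustration:

    #include <stdint.h>
    #include <stdio.h>

    #define WMULT_SHIFT 32

    static inline uint64_t mul_u64_u32_shr(uint64_t a, uint32_t mul, unsigned int shift)
    {
        return (uint64_t)(((unsigned __int128)a * mul) >> shift);
    }

    /* delta_exec * weight / lw_weight, with the division folded into mult/shift. */
    static uint64_t calc_delta(uint64_t delta_exec, uint32_t weight, uint32_t lw_weight)
    {
        uint32_t lw_inv = (uint32_t)(0xffffffffULL / lw_weight);   /* ~2^32 / lw_weight */
        uint64_t fact = (uint64_t)weight * lw_inv;
        unsigned int shift = WMULT_SHIFT;

        /* Keep the combined factor within 32 bits, trading precision for range. */
        while (fact >> 32) {
            fact >>= 1;
            shift--;
        }

        return mul_u64_u32_shr(delta_exec, (uint32_t)fact, shift);
    }

    int main(void)
    {
        /* 10 ms slice, task weight 1024 against a queue weight of 3072 -> ~3.33 ms */
        printf("%llu ns\n", (unsigned long long)calc_delta(10000000ULL, 1024, 3072));
        return 0;
    }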
/linux/drivers/iio/accel/
  adxl355_core.c
    281  odr = mul_u64_u32_shr(adxl355_odr_table[data->odr][0], MEGA, 0) +   in adxl355_fill_3db_frequency_table()
    286  div = div64_u64_rem(mul_u64_u32_shr(odr, multiplier, 0),   in adxl355_fill_3db_frequency_table()
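The adxl355 driver calls the helper with a shift of 0, where it degenerates to a plain overflow-safe 64x32 multiply used to push an output data rate into fixed-point micro units before a 64-bit division. A minimal illustration of that degenerate case; the ODR value is made up:

    #include <stdint.h>
    #include <stdio.h>

    #define MEGA    1000000u

    static inline uint64_t mul_u64_u32_shr(uint64_t a, uint32_t mul, unsigned int shift)
    {
        return (uint64_t)(((unsigned __int128)a * mul) >> shift);
    }

    int main(void)
    {
        /* Illustrative: a 4.1 Hz ODR already held as micro-Hz fixed point. */
        uint64_t odr_uhz = 4100000;

        /* shift == 0: just a 64x32 multiply, scaling the value up by another
         * factor of MEGA ahead of a later 64-bit division. */
        printf("%llu\n", (unsigned long long)mul_u64_u32_shr(odr_uhz, MEGA, 0));
        return 0;
    }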
/linux/drivers/perf/
  riscv_pmu.c
     64  ns = mul_u64_u32_shr(rd->epoch_cyc, rd->mult, rd->shift);   in arch_perf_update_userpage()
  arm_pmuv3.c
   1592  ns = mul_u64_u32_shr(rd->epoch_cyc, rd->mult, rd->shift);   in arch_perf_update_userpage()
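Both PMU drivers use the helper when exporting a self-service time conversion to userspace through the perf mmap page: the sched clock's epoch is folded into an offset so that later reads need only mult, shift and offset. A rough sketch of that bookkeeping; the struct is a simplified stand-in for perf_event_mmap_page and the counter numbers (100 MHz, 10 ns per cycle) are assumptions:

    #include <stdint.h>
    #include <stdio.h>

    static inline uint64_t mul_u64_u32_shr(uint64_t a, uint32_t mul, unsigned int shift)
    {
        return (uint64_t)(((unsigned __int128)a * mul) >> shift);
    }

    /* Hypothetical mirror of the userpage time fields the PMU drivers fill in. */
    struct userpage_time {
        uint32_t time_mult;
        uint16_t time_shift;
        int64_t  time_offset;
    };

    /* now_ns = time_offset + (cycles * time_mult >> time_shift) for userspace. */
    static void update_userpage(struct userpage_time *up, uint64_t epoch_ns,
                                uint64_t epoch_cyc, uint32_t mult, uint32_t shift)
    {
        uint64_t ns = mul_u64_u32_shr(epoch_cyc, mult, shift);

        up->time_mult = mult;
        up->time_shift = (uint16_t)shift;
        up->time_offset = (int64_t)(epoch_ns - ns);
    }

    int main(void)
    {
        struct userpage_time up;

        /* Assumed 100 MHz counter: mult/2^shift == 10 ns per cycle. */
        update_userpage(&up, 500000000ULL, 48000000ULL, 2684354560u, 28);
        printf("mult=%u shift=%u offset=%lld\n",
               up.time_mult, (unsigned)up.time_shift, (long long)up.time_offset);
        return 0;
    }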
/linux/tools/lib/perf/
  mmap.c
    529  delta = time_offset + mul_u64_u32_shr(cyc, time_mult, time_shift);   in perf_mmap__read_self()
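perf_mmap__read_self() is the userspace consumer of that same mult/shift contract: it reads the published conversion fields and turns a raw cycle counter read into nanoseconds. A sketch using the same assumed 100 MHz numbers as the PMU sketch above:

    #include <stdint.h>
    #include <stdio.h>

    static inline uint64_t mul_u64_u32_shr(uint64_t a, uint32_t mul, unsigned int shift)
    {
        return (uint64_t)(((unsigned __int128)a * mul) >> shift);
    }

    int main(void)
    {
        /* Values as published by the kernel-side sketch: 10 ns/cycle with 28
         * fractional bits, plus the epoch offset. */
        uint32_t time_mult = 2684354560u;
        uint16_t time_shift = 28;
        uint64_t time_offset = 20000000ULL;

        uint64_t cyc = 50000000ULL;    /* raw counter read, e.g. rdtsc/cntvct */
        uint64_t now = time_offset + mul_u64_u32_shr(cyc, time_mult, time_shift);

        printf("%llu ns\n", (unsigned long long)now);    /* 520000000 */
        return 0;
    }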
/linux/drivers/regulator/
  max5970-regulator.c
     93  *val = mul_u64_u32_shr(*val, ddata->mon_rng, 10);   in max5970_read()
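Here the fixed shift of 10 scales a 10-bit ADC code against the configured monitoring range, i.e. code/1024 of full scale. A small illustration; the 16 V range and mid-scale code are assumptions, not values from the driver:

    #include <stdint.h>
    #include <stdio.h>

    static inline uint64_t mul_u64_u32_shr(uint64_t a, uint32_t mul, unsigned int shift)
    {
        return (uint64_t)(((unsigned __int128)a * mul) >> shift);
    }

    int main(void)
    {
        uint64_t adc_code = 512;          /* mid-scale reading from a 10-bit ADC */
        uint32_t mon_rng_uv = 16000000;   /* assumed 16 V full-scale range, in uV */

        /* value = code * range >> 10  ->  8 V at mid scale */
        printf("%llu uV\n",
               (unsigned long long)mul_u64_u32_shr(adc_code, mon_rng_uv, 10));
        return 0;
    }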
/linux/kernel/time/
  clocksource.c
     32  return mul_u64_u32_shr(delta, cs->mult, cs->shift);   in cycles_to_nsec_safe()
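cycles_to_nsec_safe() is the clearest motivation for the widening helper: for large cycle deltas, the naive 64-bit product delta * mult wraps before the shift, while the 96/128-bit path does not. A sketch contrasting the two; the mult/shift pair (roughly 0.4 ns per cycle) and the delta are chosen only to make the overflow visible:

    #include <stdint.h>
    #include <stdio.h>

    static inline uint64_t mul_u64_u32_shr(uint64_t a, uint32_t mul, unsigned int shift)
    {
        return (uint64_t)(((unsigned __int128)a * mul) >> shift);
    }

    int main(void)
    {
        /* Assumed clocksource: ~0.4 ns per cycle (2.5 GHz). */
        uint32_t mult = 1717986918u;    /* ~0.4 * 2^32 */
        uint32_t shift = 32;

        /* Hours' worth of cycles: the plain 64-bit product wraps ... */
        uint64_t delta = 30000000000000ULL;
        uint64_t naive = (delta * mult) >> shift;          /* wraps modulo 2^64 */
        uint64_t safe  = mul_u64_u32_shr(delta, mult, shift);

        /* ... which is exactly what the "safe" variant avoids. */
        printf("naive: %llu ns\nsafe:  %llu ns\n",
               (unsigned long long)naive, (unsigned long long)safe);
        return 0;
    }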