| /linux/drivers/clk/mmp/ |
| clk-frac.c |
    37   do_div(rate, d->numerator * factor->masks->factor);  in clk_factor_determine_rate()
    56   struct mmp_clk_factor_masks *masks = factor->masks;  in clk_factor_recalc_rate()  local
    64   d.numerator = (val >> masks->num_shift) & masks->num_mask;  in clk_factor_recalc_rate()
    67   d.denominator = (val >> masks->den_shift) & masks->den_mask;  in clk_factor_recalc_rate()
    72   do_div(rate, d.numerator * factor->masks->factor);  in clk_factor_recalc_rate()
    82   struct mmp_clk_factor_masks *masks = factor->masks;  in clk_factor_set_rate()  local
    93   do_div(rate, d->numerator * factor->masks->factor);  in clk_factor_set_rate()
    104  val &= ~(masks->num_mask << masks->num_shift);  in clk_factor_set_rate()
    105  val |= (d->numerator & masks->num_mask) << masks->num_shift;  in clk_factor_set_rate()
    107  val &= ~(masks->den_mask << masks->den_shift);  in clk_factor_set_rate()
    [all …]
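The clk-frac.c matches show the usual pack/unpack handling of a numerator/denominator register. A minimal user-space sketch of that pattern (the struct, field layout, and example rate math below are illustrative assumptions, not the kernel's mmp_clk_factor definitions):

```c
#include <stdint.h>
#include <stdio.h>

struct frac_masks {
	uint32_t num_mask, num_shift;   /* numerator bit-field */
	uint32_t den_mask, den_shift;   /* denominator bit-field */
	uint32_t factor;                /* fixed scaling factor */
};

/* Mirror of lines 64/67: pull both fields out of one register value. */
static void frac_unpack(const struct frac_masks *m, uint32_t val,
			uint32_t *num, uint32_t *den)
{
	*num = (val >> m->num_shift) & m->num_mask;
	*den = (val >> m->den_shift) & m->den_mask;
}

/* Mirror of lines 104-107: clear both fields, then write new values. */
static uint32_t frac_pack(const struct frac_masks *m, uint32_t val,
			  uint32_t num, uint32_t den)
{
	val &= ~(m->num_mask << m->num_shift);
	val |= (num & m->num_mask) << m->num_shift;
	val &= ~(m->den_mask << m->den_shift);
	val |= (den & m->den_mask) << m->den_shift;
	return val;
}

int main(void)
{
	struct frac_masks m = { .num_mask = 0x1fff, .num_shift = 16,
				.den_mask = 0x1fff, .den_shift = 0,
				.factor = 2 };
	uint32_t reg = frac_pack(&m, 0, 3, 200);
	uint32_t num, den;
	uint64_t parent = 26000000ULL;

	frac_unpack(&m, reg, &num, &den);
	/* Rate math as on lines 37/72: scale by den, divide by num * factor. */
	printf("reg=0x%08x num=%u den=%u rate=%llu\n", reg, num, den,
	       (unsigned long long)(parent * den / (num * m.factor)));
	return 0;
}
```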
|
| /linux/lib/ |
| group_cpus.c |
    47   cpumask_var_t *masks;  in alloc_node_to_cpumask()  local
    50   masks = kcalloc(nr_node_ids, sizeof(cpumask_var_t), GFP_KERNEL);  in alloc_node_to_cpumask()
    51   if (!masks)  in alloc_node_to_cpumask()
    55   if (!zalloc_cpumask_var(&masks[node], GFP_KERNEL))  in alloc_node_to_cpumask()
    59   return masks;  in alloc_node_to_cpumask()
    63   free_cpumask_var(masks[node]);  in alloc_node_to_cpumask()
    64   kfree(masks);  in alloc_node_to_cpumask()
    68   static void free_node_to_cpumask(cpumask_var_t *masks)  in free_node_to_cpumask()  argument
    73   free_cpumask_var(masks[node]);  in free_node_to_cpumask()
    74   kfree(masks);  in free_node_to_cpumask()
    [all …]
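group_cpus.c's helpers allocate one cpumask per NUMA node and unwind on failure. A self-contained sketch of that allocate-then-unwind shape, with plain calloc'd bitmaps standing in for cpumask_var_t and a made-up node count:

```c
#include <stdlib.h>

#define NR_NODES       4
#define CPU_MASK_LONGS 2   /* enough bits for this toy's CPUs */

/* Allocate one zeroed CPU bitmap per node, unwinding on failure. */
static unsigned long **alloc_node_to_cpumask(void)
{
	unsigned long **masks;
	int node;

	masks = calloc(NR_NODES, sizeof(*masks));
	if (!masks)
		return NULL;

	for (node = 0; node < NR_NODES; node++) {
		masks[node] = calloc(CPU_MASK_LONGS, sizeof(unsigned long));
		if (!masks[node])
			goto out_unwind;
	}
	return masks;

out_unwind:
	/* Free only the entries that were successfully allocated. */
	while (--node >= 0)
		free(masks[node]);
	free(masks);
	return NULL;
}

static void free_node_to_cpumask(unsigned long **masks)
{
	for (int node = 0; node < NR_NODES; node++)
		free(masks[node]);
	free(masks);
}

int main(void)
{
	unsigned long **masks = alloc_node_to_cpumask();

	if (masks)
		free_node_to_cpumask(masks);
	return 0;
}
```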
|
| /linux/drivers/clk/spear/ |
| clk-aux-synth.c |
    79   eqn = (val >> aux->masks->eq_sel_shift) & aux->masks->eq_sel_mask;  in clk_aux_recalc_rate()
    80   if (eqn == aux->masks->eq1_mask)  in clk_aux_recalc_rate()
    84   num = (val >> aux->masks->xscale_sel_shift) &  in clk_aux_recalc_rate()
    85   aux->masks->xscale_sel_mask;  in clk_aux_recalc_rate()
    88   den *= (val >> aux->masks->yscale_sel_shift) &  in clk_aux_recalc_rate()
    89   aux->masks->yscale_sel_mask;  in clk_aux_recalc_rate()
    113  ~(aux->masks->eq_sel_mask << aux->masks->eq_sel_shift);  in clk_aux_set_rate()
    114  val |= (rtbl[i].eq & aux->masks->eq_sel_mask) <<  in clk_aux_set_rate()
    115  aux->masks->eq_sel_shift;  in clk_aux_set_rate()
    116  val &= ~(aux->masks->xscale_sel_mask << aux->masks->xscale_sel_shift);  in clk_aux_set_rate()
    [all …]
|
| /linux/kernel/irq/ |
| affinity.c |
    29   struct irq_affinity_desc *masks = NULL;  in irq_create_affinity_masks()  local
    59   masks = kcalloc(nvecs, sizeof(*masks), GFP_KERNEL);  in irq_create_affinity_masks()
    60   if (!masks)  in irq_create_affinity_masks()
    65   cpumask_copy(&masks[curvec].mask, irq_default_affinity);  in irq_create_affinity_masks()
    76   kfree(masks);  in irq_create_affinity_masks()
    81   cpumask_copy(&masks[curvec + j].mask, &result[j]);  in irq_create_affinity_masks()
    94   cpumask_copy(&masks[curvec].mask, irq_default_affinity);  in irq_create_affinity_masks()
    98   masks[i].is_managed = 1;  in irq_create_affinity_masks()
    100  return masks;  in irq_create_affinity_masks()
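irq_create_affinity_masks() hands back one descriptor per vector: the pre/post vectors get the default affinity, while the middle block gets spread masks and is flagged as managed (line 98). A toy model using 64-bit integers instead of cpumask_t; the even spreading is faked with a round-robin, whereas the real code groups CPUs per NUMA node:

```c
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

struct affinity_desc {
	uint64_t mask;        /* one bit per CPU, 64 CPUs max in this toy */
	int is_managed;
};

static struct affinity_desc *create_masks(int pre, int managed, int post,
					   uint64_t default_affinity,
					   int nr_cpus)
{
	int nvecs = pre + managed + post;
	struct affinity_desc *masks = calloc(nvecs, sizeof(*masks));
	int i, curvec = 0;

	if (!masks)
		return NULL;

	for (i = 0; i < pre; i++)
		masks[curvec++].mask = default_affinity;

	/* Fake "even spread": round-robin CPUs over the managed vectors. */
	for (int cpu = 0; cpu < nr_cpus; cpu++)
		masks[pre + cpu % managed].mask |= 1ULL << cpu;
	curvec += managed;

	for (i = 0; i < post; i++)
		masks[curvec++].mask = default_affinity;

	/* Only the spread vectors are managed, as on line 98. */
	for (i = pre; i < pre + managed; i++)
		masks[i].is_managed = 1;

	return masks;
}

int main(void)
{
	struct affinity_desc *m = create_masks(1, 4, 1, ~0ULL, 8);

	for (int i = 0; i < 6 && m; i++)
		printf("vec %d: mask=%#llx managed=%d\n",
		       i, (unsigned long long)m[i].mask, m[i].is_managed);
	free(m);
	return 0;
}
```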
|
| /linux/drivers/net/dsa/microchip/ |
| ksz8.c |
    287  const u32 *masks;  in ksz8_r_mib_cnt()  local
    294  masks = dev->info->masks;  in ksz8_r_mib_cnt()
    309  if (check & masks[MIB_COUNTER_VALID]) {  in ksz8_r_mib_cnt()
    311  if (check & masks[MIB_COUNTER_OVERFLOW])  in ksz8_r_mib_cnt()
    323  const u32 *masks;  in ksz8795_r_mib_pkt()  local
    330  masks = dev->info->masks;  in ksz8795_r_mib_pkt()
    347  if (check & masks[MIB_COUNTER_VALID]) {  in ksz8795_r_mib_pkt()
    356  if (check & masks[MIB_COUNTER_OVERFLOW]) {  in ksz8795_r_mib_pkt()
    362  if (check & masks[MIB_COUNTER_OVERFLOW])  in ksz8795_r_mib_pkt()
    512  const u32 *masks;  in ksz8_valid_dyn_entry()  local
    [all …]
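The KSZ8 MIB readout keys off per-chip mask indices: the status word must have the VALID bit set before the counter is trusted, and the OVERFLOW bit asks for a wrap correction. A sketch of that check; the bit positions, counter width, and wrap handling are invented for illustration, not the chip's actual layout:

```c
#include <stdint.h>
#include <stdio.h>

enum { MIB_COUNTER_VALID, MIB_COUNTER_OVERFLOW, MIB_MASK_COUNT };

/* Per-chip table, analogous to dev->info->masks in the driver. */
static const uint32_t masks[MIB_MASK_COUNT] = {
	[MIB_COUNTER_VALID]    = 1u << 30,
	[MIB_COUNTER_OVERFLOW] = 1u << 31,
};

static int read_counter(uint32_t check, uint64_t *cnt)
{
	if (!(check & masks[MIB_COUNTER_VALID]))
		return -1;                      /* not ready, caller retries */

	*cnt = check & 0x3fffffff;              /* counter lives in the low bits */
	if (check & masks[MIB_COUNTER_OVERFLOW])
		*cnt += 1ULL << 30;             /* account for the wrap */
	return 0;
}

int main(void)
{
	uint64_t cnt;

	if (!read_counter((1u << 30) | 1234, &cnt))
		printf("counter = %llu\n", (unsigned long long)cnt);
	return 0;
}
```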
|
| /linux/drivers/gpu/drm/amd/display/dc/dpp/dcn10/ |
| dcn10_dpp_cm.c |
    119  gam_regs.masks.csc_c11 = dpp->tf_mask->CM_GAMUT_REMAP_C11;  in program_gamut_remap()
    121  gam_regs.masks.csc_c12 = dpp->tf_mask->CM_GAMUT_REMAP_C12;  in program_gamut_remap()
    197  gam_regs.masks.csc_c11 = dpp->tf_mask->CM_GAMUT_REMAP_C11;  in read_gamut_remap()
    199  gam_regs.masks.csc_c12 = dpp->tf_mask->CM_GAMUT_REMAP_C12;  in read_gamut_remap()
    282  gam_regs.masks.csc_c11 = dpp->tf_mask->CM_OCSC_C11;  in dpp1_cm_program_color_matrix()
    284  gam_regs.masks.csc_c12 = dpp->tf_mask->CM_OCSC_C12;  in dpp1_cm_program_color_matrix()
    329  reg->masks.exp_region0_lut_offset = dpp->tf_mask->CM_RGAM_RAMA_EXP_REGION0_LUT_OFFSET;  in dpp1_cm_get_reg_field()
    331  reg->masks.exp_region0_num_segments = dpp->tf_mask->CM_RGAM_RAMA_EXP_REGION0_NUM_SEGMENTS;  in dpp1_cm_get_reg_field()
    333  reg->masks.exp_region1_lut_offset = dpp->tf_mask->CM_RGAM_RAMA_EXP_REGION1_LUT_OFFSET;  in dpp1_cm_get_reg_field()
    335  reg->masks.exp_region1_num_segments = dpp->tf_mask->CM_RGAM_RAMA_EXP_REGION1_NUM_SEGMENTS;  in dpp1_cm_get_reg_field()
    [all …]
|
| /linux/drivers/gpu/drm/amd/display/dc/dpp/dcn30/ |
| dcn30_dpp_cm.c |
    174  reg->masks.field_region_start_base = dpp->tf_mask->CM_GAMCOR_RAMA_EXP_REGION_START_BASE_B;  in dpp3_gamcor_reg_field()
    176  reg->masks.field_offset = dpp->tf_mask->CM_GAMCOR_RAMA_OFFSET_B;  in dpp3_gamcor_reg_field()
    179  reg->masks.exp_region0_lut_offset = dpp->tf_mask->CM_GAMCOR_RAMA_EXP_REGION0_LUT_OFFSET;  in dpp3_gamcor_reg_field()
    181  reg->masks.exp_region0_num_segments = dpp->tf_mask->CM_GAMCOR_RAMA_EXP_REGION0_NUM_SEGMENTS;  in dpp3_gamcor_reg_field()
    183  reg->masks.exp_region1_lut_offset = dpp->tf_mask->CM_GAMCOR_RAMA_EXP_REGION1_LUT_OFFSET;  in dpp3_gamcor_reg_field()
    185  reg->masks.exp_region1_num_segments = dpp->tf_mask->CM_GAMCOR_RAMA_EXP_REGION1_NUM_SEGMENTS;  in dpp3_gamcor_reg_field()
    188  reg->masks.field_region_end = dpp->tf_mask->CM_GAMCOR_RAMA_EXP_REGION_END_B;  in dpp3_gamcor_reg_field()
    190  reg->masks.field_region_end_slope = dpp->tf_mask->CM_GAMCOR_RAMA_EXP_REGION_END_SLOPE_B;  in dpp3_gamcor_reg_field()
    192  reg->masks.field_region_end_base = dpp->tf_mask->CM_GAMCOR_RAMA_EXP_REGION_END_BASE_B;  in dpp3_gamcor_reg_field()
    194  reg->masks.field_region_linear_slope = dpp->tf_mask->CM_GAMCOR_RAMA_EXP_REGION_START_SLOPE_B;  in dpp3_gamcor_reg_field()
    [all …]
|
| /linux/drivers/gpu/drm/amd/display/dc/dwb/dcn30/ |
| dcn30_dwb_cm.c |
    53   reg->masks.field_region_start_base = dwbc30->dwbc_mask->DWB_OGAM_RAMA_EXP_REGION_START_BASE_B;  in dwb3_get_reg_field_ogam()
    55   reg->masks.field_offset = dwbc30->dwbc_mask->DWB_OGAM_RAMA_OFFSET_B;  in dwb3_get_reg_field_ogam()
    58   reg->masks.exp_region0_lut_offset = dwbc30->dwbc_mask->DWB_OGAM_RAMA_EXP_REGION0_LUT_OFFSET;  in dwb3_get_reg_field_ogam()
    60   reg->masks.exp_region0_num_segments = dwbc30->dwbc_mask->DWB_OGAM_RAMA_EXP_REGION0_NUM_SEGMENTS;  in dwb3_get_reg_field_ogam()
    62   reg->masks.exp_region1_lut_offset = dwbc30->dwbc_mask->DWB_OGAM_RAMA_EXP_REGION1_LUT_OFFSET;  in dwb3_get_reg_field_ogam()
    64   reg->masks.exp_region1_num_segments = dwbc30->dwbc_mask->DWB_OGAM_RAMA_EXP_REGION1_NUM_SEGMENTS;  in dwb3_get_reg_field_ogam()
    67   reg->masks.field_region_end = dwbc30->dwbc_mask->DWB_OGAM_RAMA_EXP_REGION_END_B;  in dwb3_get_reg_field_ogam()
    69   reg->masks.field_region_end_slope = dwbc30->dwbc_mask->DWB_OGAM_RAMA_EXP_REGION_END_SLOPE_B;  in dwb3_get_reg_field_ogam()
    71   reg->masks.field_region_end_base = dwbc30->dwbc_mask->DWB_OGAM_RAMA_EXP_REGION_END_BASE_B;  in dwb3_get_reg_field_ogam()
    73   reg->masks.field_region_linear_slope = dwbc30->dwbc_mask->DWB_OGAM_RAMA_EXP_REGION_START_SLOPE_B;  in dwb3_get_reg_field_ogam()
    [all …]
|
| /linux/drivers/virtio/ |
| virtio_vdpa.c |
    277  struct cpumask *masks = NULL;  in create_affinity_masks()  local
    290  masks = kcalloc(nvecs, sizeof(*masks), GFP_KERNEL);  in create_affinity_masks()
    291  if (!masks)  in create_affinity_masks()
    296  cpumask_setall(&masks[curvec]);  in create_affinity_masks()
    305  kfree(masks);  in create_affinity_masks()
    310  cpumask_copy(&masks[curvec + j], &result[j]);  in create_affinity_masks()
    323  cpumask_setall(&masks[curvec]);  in create_affinity_masks()
    325  return masks;  in create_affinity_masks()
    336  struct cpumask *masks;  in virtio_vdpa_find_vqs()  local
    342  masks = create_affinity_masks(nvqs, desc);  in virtio_vdpa_find_vqs()
    [all …]
|
| /linux/block/ |
| blk-mq-cpumap.c |
    61   const struct cpumask *masks;  in blk_mq_map_queues()  local
    64   masks = group_cpus_evenly(qmap->nr_queues, &nr_masks);  in blk_mq_map_queues()
    65   if (!masks) {  in blk_mq_map_queues()
    72   for_each_cpu(cpu, &masks[queue % nr_masks])  in blk_mq_map_queues()
    75   kfree(masks);  in blk_mq_map_queues()
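blk_mq_map_queues() asks group_cpus_evenly() for one CPU group per queue, then maps every CPU in group `queue % nr_masks` to that queue. A stand-alone rendering of that loop with fixed bitmasks standing in for struct cpumask (the group contents are made up):

```c
#include <stdint.h>
#include <stdio.h>

#define NR_CPUS 8

int main(void)
{
	/* Pretend group_cpus_evenly() returned two CPU groups. */
	uint32_t masks[] = { 0x0f, 0xf0 };        /* CPUs 0-3 and CPUs 4-7 */
	unsigned int nr_masks = 2, nr_queues = 2;
	int mq_map[NR_CPUS];

	/* As on line 72: every CPU in group (queue % nr_masks) maps to that
	 * queue; the modulo covers the case where fewer groups than queues
	 * came back. */
	for (unsigned int queue = 0; queue < nr_queues; queue++)
		for (int cpu = 0; cpu < NR_CPUS; cpu++)
			if (masks[queue % nr_masks] & (1u << cpu))
				mq_map[cpu] = queue;

	for (int cpu = 0; cpu < NR_CPUS; cpu++)
		printf("cpu %d -> hw queue %d\n", cpu, mq_map[cpu]);
	return 0;
}
```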
|
| /linux/drivers/clk/uniphier/ |
| clk-uniphier-mux.c |
    17   const unsigned int *masks;  member
    27   return regmap_write_bits(mux->regmap, mux->reg, mux->masks[index],  in uniphier_clk_mux_set_parent()
    44   if ((mux->masks[i] & val) == mux->vals[i])  in uniphier_clk_mux_get_parent()
    77   mux->masks = data->masks;  in uniphier_clk_register_mux()
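The uniphier mux keeps parallel masks[]/vals[] tables: set_parent read-modify-writes the selector field through regmap_write_bits(), and get_parent scans for the (mask, value) pair that matches the register. A compact model of that lookup; the table contents are invented, and in the real driver each parent may use a different mask:

```c
#include <stdint.h>
#include <stdio.h>

#define NUM_PARENTS 3

static const unsigned int masks[NUM_PARENTS] = { 0x3, 0x3, 0x3 };
static const unsigned int vals[NUM_PARENTS]  = { 0x0, 0x1, 0x2 };

static uint32_t reg;                 /* stands in for the regmap register */

static void mux_set_parent(unsigned int index)
{
	/* regmap_write_bits()-style update: clear the field, set the value. */
	reg = (reg & ~masks[index]) | (vals[index] & masks[index]);
}

static int mux_get_parent(void)
{
	for (int i = 0; i < NUM_PARENTS; i++)
		if ((masks[i] & reg) == vals[i])
			return i;    /* first matching (mask, value) pair wins */
	return -1;
}

int main(void)
{
	mux_set_parent(2);
	printf("parent = %d (reg = 0x%x)\n", mux_get_parent(), reg);
	return 0;
}
```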
|
| /linux/kernel/bpf/ |
| liveness.c |
    303  struct per_frame_masks *masks;  in mark_stack_read()  local
    306  masks = alloc_frame_masks(env, instance, frame, insn_idx);  in mark_stack_read()
    307  if (IS_ERR(masks))  in mark_stack_read()
    308  return PTR_ERR(masks);  in mark_stack_read()
    309  new_may_read = masks->may_read | mask;  in mark_stack_read()
    310  if (new_may_read != masks->may_read &&  in mark_stack_read()
    311  ((new_may_read | masks->live_before) != masks->live_before))  in mark_stack_read()
    313  masks->may_read |= mask;  in mark_stack_read()
    360  struct per_frame_masks *masks;  in commit_stack_write_marks()  local
    373  masks = alloc_frame_masks(env, instance, frame, liveness->write_insn_idx);  in commit_stack_write_marks()
    [all …]
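In the BPF liveness pass, mark_stack_read() ORs a slot mask into may_read, but only treats the read as new information when it adds bits not already covered by live_before (lines 309-313). A reduced model with 64-bit masks and none of the verifier plumbing; the struct fields and return convention here are simplified assumptions:

```c
#include <stdint.h>
#include <stdio.h>

struct per_frame_masks {
	uint64_t may_read;     /* stack slots read since the last write */
	uint64_t live_before;  /* slots already known live on entry     */
};

static int mark_stack_read(struct per_frame_masks *m, uint64_t mask)
{
	uint64_t new_may_read = m->may_read | mask;
	int changed = 0;

	if (new_may_read != m->may_read &&
	    (new_may_read | m->live_before) != m->live_before)
		changed = 1;           /* a genuinely new liveness fact */

	m->may_read |= mask;
	return changed;
}

int main(void)
{
	struct per_frame_masks m = { .live_before = 0x1 };

	printf("%d\n", mark_stack_read(&m, 0x1));  /* 0: already live     */
	printf("%d\n", mark_stack_read(&m, 0x2));  /* 1: new information  */
	printf("%d\n", mark_stack_read(&m, 0x2));  /* 0: already recorded */
	return 0;
}
```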
|
| /linux/drivers/net/ethernet/intel/ice/ |
| ice_flex_pipe.c |
    1169  for (i = hw->blk[blk].masks.first; i < hw->blk[blk].masks.first +  in ice_prof_has_mask_idx()
    1170  hw->blk[blk].masks.count; i++)  in ice_prof_has_mask_idx()
    1172  if (hw->blk[blk].masks.masks[i].in_use &&  in ice_prof_has_mask_idx()
    1173  hw->blk[blk].masks.masks[i].idx == idx) {  in ice_prof_has_mask_idx()
    1175  if (hw->blk[blk].masks.masks[i].mask == mask)  in ice_prof_has_mask_idx()
    1199  ice_prof_has_mask(struct ice_hw *hw, enum ice_block blk, u8 prof, u16 *masks)  in ice_prof_has_mask()  argument
    1205  if (!ice_prof_has_mask_idx(hw, blk, prof, i, masks[i]))  in ice_prof_has_mask()
    1222  struct ice_fv_word *fv, u16 *masks, bool symm,  in ice_find_prof_id_with_mask()  argument
    1244  if (masks && !ice_prof_has_mask(hw, blk, i, masks))  in ice_find_prof_id_with_mask()
    1473  mutex_init(&hw->blk[blk].masks.lock);  in ice_init_prof_masks()
    [all …]
|
| /linux/drivers/gpu/drm/amd/display/dc/mpc/dcn20/ |
| dcn20_mpc.c |
    165  ocsc_regs.masks.csc_c11 = mpc20->mpc_mask->MPC_OCSC_C11_A;  in mpc2_set_output_csc()
    167  ocsc_regs.masks.csc_c12 = mpc20->mpc_mask->MPC_OCSC_C12_A;  in mpc2_set_output_csc()
    223  ocsc_regs.masks.csc_c11 = mpc20->mpc_mask->MPC_OCSC_C11_A;  in mpc2_set_ocsc_default()
    225  ocsc_regs.masks.csc_c12 = mpc20->mpc_mask->MPC_OCSC_C12_A;  in mpc2_set_ocsc_default()
    251  reg->masks.exp_region0_lut_offset = mpc20->mpc_mask->MPCC_OGAM_RAMA_EXP_REGION0_LUT_OFFSET;  in mpc2_ogam_get_reg_field()
    253  reg->masks.exp_region0_num_segments = mpc20->mpc_mask->MPCC_OGAM_RAMA_EXP_REGION0_NUM_SEGMENTS;  in mpc2_ogam_get_reg_field()
    255  reg->masks.exp_region1_lut_offset = mpc20->mpc_mask->MPCC_OGAM_RAMA_EXP_REGION1_LUT_OFFSET;  in mpc2_ogam_get_reg_field()
    257  reg->masks.exp_region1_num_segments = mpc20->mpc_mask->MPCC_OGAM_RAMA_EXP_REGION1_NUM_SEGMENTS;  in mpc2_ogam_get_reg_field()
    259  reg->masks.field_region_end = mpc20->mpc_mask->MPCC_OGAM_RAMA_EXP_REGION_END_B;  in mpc2_ogam_get_reg_field()
    261  reg->masks.field_region_end_slope = mpc20->mpc_mask->MPCC_OGAM_RAMA_EXP_REGION_END_SLOPE_B;  in mpc2_ogam_get_reg_field()
    [all …]
|
| /linux/Documentation/devicetree/bindings/sound/ |
| tdm-slot.txt |
    20   tx and rx masks.
    22   For snd_soc_of_xlate_tdm_slot_mask(), the tx and rx masks will use a 1 bit
    24   the masks.
    26   The explicit masks are given as array of integers, where the first
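A small illustration of the 1-bit-per-slot rule the binding text describes: for N slots the generated mask simply has the N lowest bits set. This only models the documented behaviour; it is not the kernel helper itself.

```c
#include <stdio.h>

static unsigned int tdm_slot_mask(unsigned int slots)
{
	unsigned int mask = 0;

	for (unsigned int i = 0; i < slots; i++)
		mask |= 1u << i;       /* one bit per active slot */
	return mask;
}

int main(void)
{
	/* e.g. a 4-slot TDM link -> tx/rx mask 0b1111 */
	printf("slots=4 mask=0x%x\n", tdm_slot_mask(4));
	return 0;
}
```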
|
| /linux/kernel/sched/ |
| topology.c |
    2016  struct cpumask ***masks;  in sched_init_numa()  local
    2058  masks = kzalloc(sizeof(void *) * nr_levels, GFP_KERNEL);  in sched_init_numa()
    2059  if (!masks)  in sched_init_numa()
    2067  masks[i] = kzalloc(nr_node_ids * sizeof(void *), GFP_KERNEL);  in sched_init_numa()
    2068  if (!masks[i])  in sched_init_numa()
    2078  masks[i][j] = mask;  in sched_init_numa()
    2094  rcu_assign_pointer(sched_domains_numa_masks, masks);  in sched_init_numa()
    2135  struct cpumask ***masks;  in sched_reset_numa()  local
    2147  masks = sched_domains_numa_masks;  in sched_reset_numa()
    2149  if (distances || masks) {  in sched_reset_numa()
    [all …]
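sched_init_numa() builds a two-level table, masks[level][node], describing which CPUs are reachable from a node within each distance level, and publishes it with rcu_assign_pointer(). A toy of just the allocation shape, using 64-bit node bitmaps instead of cpumasks, a made-up distance rule, and no real error handling:

```c
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

#define NR_LEVELS 2
#define NR_NODES  4

int main(void)
{
	uint64_t ***masks;

	masks = calloc(NR_LEVELS, sizeof(*masks));
	if (!masks)
		return 1;

	for (int i = 0; i < NR_LEVELS; i++) {
		masks[i] = calloc(NR_NODES, sizeof(*masks[i]));
		if (!masks[i])
			return 1;

		for (int j = 0; j < NR_NODES; j++) {
			uint64_t *mask = calloc(1, sizeof(*mask));

			if (!mask)
				return 1;
			/* Fake distance rule: level 0 covers only node j,
			 * level 1 also pulls in the next node. */
			*mask = (1ULL << j) |
				(i ? 1ULL << ((j + 1) % NR_NODES) : 0);
			masks[i][j] = mask;
		}
	}

	for (int i = 0; i < NR_LEVELS; i++)
		for (int j = 0; j < NR_NODES; j++)
			printf("level %d node %d: mask 0x%llx\n",
			       i, j, (unsigned long long)*masks[i][j]);
	return 0;
}
```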
|
| /linux/scripts/ |
| gfp-translate |
    81   static const char *masks[] = {
    104  (i < ___GFP_LAST_BIT && masks[i]) ?
    105  masks[i] : "*** INVALID ***",
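gfp-translate emits a throwaway C program whose masks[] table maps each GFP bit number to a flag name, printing *** INVALID *** for undefined bits. A hand-written equivalent of that generated code; the flag names and bit count are placeholders, not the real GFP layout:

```c
#include <stdio.h>

#define LAST_BIT 5

static const char *masks[] = {
	"__FLAG_DMA",
	"__FLAG_HIGHMEM",
	NULL,                 /* hole: no flag defined for this bit */
	"__FLAG_RECLAIM",
	"__FLAG_IO",
};

static void translate(unsigned long gfp)
{
	for (int i = 0; i < LAST_BIT; i++) {
		if (!(gfp & (1UL << i)))
			continue;
		printf("bit %-2d %s\n", i,
		       masks[i] ? masks[i] : "*** INVALID ***");
	}
}

int main(void)
{
	translate(0x0b);      /* bits 0, 1 and 3 */
	return 0;
}
```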
|
| /linux/drivers/gpu/drm/amd/display/dc/hubbub/dcn301/ |
| dcn301_hubbub.c |
    38   hubbub1->shifts->field_name, hubbub1->masks->field_name
    48   hubbub1->shifts->field_name, hubbub1->masks->field_name
    80   hubbub3->masks = hubbub_mask;  in hubbub301_construct()
|
| /linux/drivers/clk/starfive/ |
| clk-starfive-jh7110-pll.c |
    103  } masks;  member
    122  .masks = { \
    285  ret->dacpd = (val & info->masks.dacpd) >> info->shifts.dacpd;  in jh7110_pll_regvals_get()
    286  ret->dsmpd = (val & info->masks.dsmpd) >> info->shifts.dsmpd;  in jh7110_pll_regvals_get()
    289  ret->fbdiv = (val & info->masks.fbdiv) >> info->shifts.fbdiv;  in jh7110_pll_regvals_get()
    379  regmap_update_bits(priv->regmap, info->offsets.pd, info->masks.dacpd,  in jh7110_pll_set_rate()
    381  regmap_update_bits(priv->regmap, info->offsets.pd, info->masks.dsmpd,  in jh7110_pll_set_rate()
    385  regmap_update_bits(priv->regmap, info->offsets.fbdiv, info->masks.fbdiv,  in jh7110_pll_set_rate()
|
| /linux/drivers/pci/msi/ |
| msi.c |
    286  struct irq_affinity_desc *masks)  in msi_setup_msi_desc()  argument
    307  desc.affinity = masks;  in msi_setup_msi_desc()
    338  static int __msi_capability_init(struct pci_dev *dev, int nvec, struct irq_affinity_desc *masks)  in __msi_capability_init()  argument
    340  int ret = msi_setup_msi_desc(dev, nvec, masks);  in __msi_capability_init()
    404  struct irq_affinity_desc *masks __free(kfree) =  in msi_capability_init()
    408  return __msi_capability_init(dev, nvec, masks);  in msi_capability_init()
    623  int nvec, struct irq_affinity_desc *masks)  in msix_setup_msi_descs()  argument
    631  for (i = 0, curmsk = masks; i < nvec; i++, curmsk++) {  in msix_setup_msi_descs()
    633  desc.affinity = masks ? curmsk : NULL;  in msix_setup_msi_descs()
    669  int nvec, struct irq_affinity_desc *masks)  in __msix_setup_interrupts()  argument
    [all …]
|
| /linux/drivers/gpu/drm/amd/display/dc/hubbub/dcn201/ |
| dcn201_hubbub.c |
    41   hubbub1->shifts->field_name, hubbub1->masks->field_name
    51   hubbub1->shifts->field_name, hubbub1->masks->field_name
    103  hubbub->masks = hubbub_mask;  in hubbub201_construct()
|
| /linux/tools/perf/trace/beauty/ |
| prctl.c |
    65   const u8 masks[] = {  in syscall_arg__scnprintf_prctl_option()  local
    78   if (option < ARRAY_SIZE(masks))  in syscall_arg__scnprintf_prctl_option()
    79   arg->mask |= masks[option];  in syscall_arg__scnprintf_prctl_option()
|
| /linux/drivers/gpu/drm/amd/display/dc/dce/ |
| dce_i2c_hw.h |
    302  const struct dce_i2c_mask *masks;  member
    311  const struct dce_i2c_mask *masks);
    319  const struct dce_i2c_mask *masks);
    327  const struct dce_i2c_mask *masks);
    335  const struct dce_i2c_mask *masks);
    343  const struct dce_i2c_mask *masks);
|
| /linux/drivers/gpu/drm/amd/display/dc/dpp/dcn20/ |
| dcn20_dpp_cm.c |
    190  gam_regs.masks.csc_c11 = dpp->tf_mask->CM_GAMUT_REMAP_C11;  in program_gamut_remap()
    192  gam_regs.masks.csc_c12 = dpp->tf_mask->CM_GAMUT_REMAP_C12;  in program_gamut_remap()
    251  gam_regs.masks.csc_c11 = dpp->tf_mask->CM_GAMUT_REMAP_C11;  in read_gamut_remap()
    253  gam_regs.masks.csc_c12 = dpp->tf_mask->CM_GAMUT_REMAP_C12;  in read_gamut_remap()
    340  icsc_regs.masks.csc_c11 = dpp->tf_mask->CM_ICSC_C11;  in dpp2_program_input_csc()
    342  icsc_regs.masks.csc_c12 = dpp->tf_mask->CM_ICSC_C12;  in dpp2_program_input_csc()
    418  reg->masks.exp_region0_lut_offset = dpp->tf_mask->CM_BLNDGAM_RAMA_EXP_REGION0_LUT_OFFSET;  in dcn20_dpp_cm_get_reg_field()
    420  reg->masks.exp_region0_num_segments = dpp->tf_mask->CM_BLNDGAM_RAMA_EXP_REGION0_NUM_SEGMENTS;  in dcn20_dpp_cm_get_reg_field()
    422  reg->masks.exp_region1_lut_offset = dpp->tf_mask->CM_BLNDGAM_RAMA_EXP_REGION1_LUT_OFFSET;  in dcn20_dpp_cm_get_reg_field()
    424  reg->masks.exp_region1_num_segments = dpp->tf_mask->CM_BLNDGAM_RAMA_EXP_REGION1_NUM_SEGMENTS;  in dcn20_dpp_cm_get_reg_field()
    [all …]
|
| /linux/drivers/edac/ |
| dmc520_edac.c |
    173  int masks[NUMBER_OF_IRQS];  member
    436  mask = pvt->masks[idx];  in dmc520_isr()
    477  int masks[NUMBER_OF_IRQS] = { 0 };  in dmc520_edac_probe()  local
    493  masks[idx] = dmc520_irq_configs[idx].mask;  in dmc520_edac_probe()
    531  memcpy(pvt->masks, masks, sizeof(masks));  in dmc520_edac_probe()
    620  irq_mask_all |= pvt->masks[idx];  in dmc520_edac_remove()
|