/linux/drivers/net/ethernet/huawei/hinic/hinic_hw_csr.h
   21: #define HINIC_CSR_DMA_ATTR_ADDR(idx) \
   22:         (HINIC_DMA_ATTR_BASE + (idx) * HINIC_DMA_ATTR_STRIDE)
   26: #define HINIC_CSR_PPF_ELECTION_ADDR(idx) \
   27:         (HINIC_ELECTION_BASE + (idx) * HINIC_PPF_ELECTION_STRIDE)
   34: #define HINIC_CSR_API_CMD_CHAIN_HEAD_HI_ADDR(idx) \
   35:         (HINIC_CSR_API_CMD_BASE + 0x0 + (idx) * HINIC_CSR_API_CMD_STRIDE)
   37: #define HINIC_CSR_API_CMD_CHAIN_HEAD_LO_ADDR(idx) \
   38:         (HINIC_CSR_API_CMD_BASE + 0x4 + (idx) * HINIC_CSR_API_CMD_STRIDE)
   40: #define HINIC_CSR_API_CMD_STATUS_HI_ADDR(idx) \
   41:         (HINIC_CSR_API_CMD_BASE + 0x8 + (idx) * HINIC_CSR_API_CMD_STRIDE)
   [all …]
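The HINIC macros above all follow the same base + index * stride layout for per-instance registers. A minimal userspace sketch of that addressing pattern, using made-up DEMO_* base and stride values rather than the driver's real constants:

#include <stdio.h>
#include <stdint.h>

#define DEMO_DMA_ATTR_BASE   0x0C00u   /* illustrative base offset */
#define DEMO_DMA_ATTR_STRIDE 0x4u      /* illustrative per-index stride */

/* One register instance per index, laid out at a fixed stride. */
#define DEMO_DMA_ATTR_ADDR(idx) \
        (DEMO_DMA_ATTR_BASE + (uint32_t)(idx) * DEMO_DMA_ATTR_STRIDE)

int main(void)
{
        unsigned int idx;

        for (idx = 0; idx < 4; idx++)
                printf("attr[%u] -> 0x%04x\n", idx, DEMO_DMA_ATTR_ADDR(idx));
        return 0;
}

Parenthesizing idx in the macro body matters once callers pass expressions such as i + 1, which is why the kernel macros above do the same.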
/linux/kernel/events/hw_breakpoint_test.c
   31: static struct perf_event *register_test_bp(int cpu, struct task_struct *tsk, int idx)
   35:         if (WARN_ON(idx < 0 || idx >= MAX_TEST_BREAKPOINTS))
   39:         attr.bp_addr = (unsigned long)&break_vars[idx];
   in test_one_cpu():
  124:         int idx = 0;
  126:         fill_bp_slots(test, &idx, get_test_cpu(0), NULL, 0);
  127:         TEST_EXPECT_NOSPC(register_test_bp(-1, current, idx));
  128:         TEST_EXPECT_NOSPC(register_test_bp(get_test_cpu(0), NULL, idx));
   in test_many_cpus():
  133:         int idx = 0;
  138:         bool do_continue = fill_bp_slots(test, &idx, cpu, NULL, 0);
  140:         TEST_EXPECT_NOSPC(register_test_bp(cpu, NULL, idx));
   [all …]
/linux/drivers/net/ethernet/microchip/lan966x/lan966x_ethtool.c
   in lan966x_stats_update():
  300:         uint idx = i * lan966x->num_stats;
  308:                 lan966x_add_cnt(&lan966x->stats[idx++],
   in lan966x_get_eth_mac_stats():
  362:         u32 idx;
  366:         idx = port->chip_port * lan966x->num_stats;
  371:                 lan966x->stats[idx + SYS_COUNT_TX_UC] +
  372:                 lan966x->stats[idx + SYS_COUNT_TX_MC] +
  373:                 lan966x->stats[idx + SYS_COUNT_TX_BC] +
  374:                 lan966x->stats[idx + SYS_COUNT_TX_PMAC_UC] +
  375:                 lan966x->stats[idx + SYS_COUNT_TX_PMAC_MC] +
  376:                 lan966x->stats[idx + SYS_COUNT_TX_PMAC_BC];
   [all …]
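The lan966x code indexes one flat counter array as port * num_stats + counter. A small sketch of that layout, with invented counter names and sizes rather than the driver's SYS_COUNT_* definitions:

#include <stdio.h>
#include <stdint.h>

enum { CNT_TX_UC, CNT_TX_MC, CNT_TX_BC, NUM_STATS };   /* illustrative counters */
#define NUM_PORTS 4

static uint64_t stats[NUM_PORTS * NUM_STATS];   /* one block of counters per port */

static uint64_t port_tx_frames(unsigned int port)
{
        unsigned int idx = port * NUM_STATS;    /* start of this port's block */

        return stats[idx + CNT_TX_UC] +
               stats[idx + CNT_TX_MC] +
               stats[idx + CNT_TX_BC];
}

int main(void)
{
        stats[2 * NUM_STATS + CNT_TX_UC] = 10;
        stats[2 * NUM_STATS + CNT_TX_BC] = 5;
        printf("port 2 TX frames: %llu\n",
               (unsigned long long)port_tx_frames(2));
        return 0;
}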
/linux/drivers/net/ethernet/microchip/vcap/vcap_api_debugfs_kunit.c
   in test_val_keyset():
   41:         int idx;
   46:         for (idx = 0; idx < kslist->cnt; idx++) {
   47:                 if (kslist->keysets[idx] == VCAP_KFS_ETAG)
   48:                         return kslist->keysets[idx];
   49:                 if (kslist->keysets[idx] ==
   51:                         return kslist->keysets[idx];
   52:                 if (kslist->keysets[idx] ==
   54:                         return kslist->keysets[idx];
   55:                 if (kslist->keysets[idx] ==
   57:                         return kslist->keysets[idx];
   [all …]
/linux/tools/testing/selftests/bpf/uprobe_multi.c
   27: #define NAME(name, idx) PASTE(name, idx)
   29: #define DEF(name, idx) int __attribute__((weak)) NAME(name, idx)(void) { return 0; }
   30: #define CALL(name, idx) NAME(name, idx)();
   32: #define F(body, name, idx) body(name, idx)
   34: #define F10(body, name, idx) \
   35:         F(body, PASTE(name, idx), 0) F(body, PASTE(name, idx), 1) F(body, PASTE(name, idx), 2) \
   36:         F(body, PASTE(name, idx), 3) F(body, PASTE(name, idx), 4) F(body, PASTE(name, idx), 5) \
   37:         F(body, PASTE(name, idx), 6) F(body, PASTE(name, idx), 7) F(body, PASTE(name, idx), 8) \
   38:         F(body, PASTE(name, idx), 9)
   40: #define F100(body, name, idx) \
   [all …]
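uprobe_multi.c uses token pasting (F10/F100) to stamp out hundreds of numbered probe functions and the calls to them. A standalone sketch of the same mechanism at a smaller scale; PASTE, DEF, CALL and F4 here are our own reduced versions, not the selftest's exact macros:

#include <stdio.h>

#define PASTE2(a, b) a##b
#define PASTE(a, b) PASTE2(a, b)

#define DEF(name, idx)  int PASTE(name, idx)(void) { return idx; }
#define CALL(name, idx) sum += PASTE(name, idx)();

#define F4(body, name) body(name, 0) body(name, 1) body(name, 2) body(name, 3)

/* Defines probe_0() .. probe_3(). */
F4(DEF, probe_)

int main(void)
{
        int sum = 0;

        F4(CALL, probe_)        /* calls probe_0() .. probe_3() */
        printf("sum = %d\n", sum);      /* 0 + 1 + 2 + 3 = 6 */
        return 0;
}

Nesting repeat macros (for example, building a hundred-fold expansion out of ten ten-fold ones) scales the same trick to larger counts without writing any body by hand.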
/linux/drivers/net/can/sja1000/sja1000_isa.c
   in sja1000_isa_probe():
  122:         int idx = pdev->id;
  126:                 idx, port[idx], mem[idx], irq[idx]);
  128:         if (mem[idx]) {
  129:                 if (!request_mem_region(mem[idx], iosize, DRV_NAME)) {
  133:                 base = ioremap(mem[idx], iosize);
  139:                 if (indirect[idx] > 0 ||
  140:                     (indirect[idx] == -1 && indirect[0] > 0))
  142:                 if (!request_region(port[idx], iosize, DRV_NAME)) {
  155:         dev->irq = irq[idx];
  157:         if (mem[idx]) {
   [all …]
/linux/drivers/net/ethernet/microchip/sparx5/sparx5_calendar.c
   in sparx5_config_auto_calendar():
  157:         u32 cal[7], value, idx, portno;
  212:         for (idx = 0; idx < consts->n_auto_cals; idx++)
  213:                 spx5_wr(cal[idx], sparx5, QSYS_CAL_AUTO(idx));
  224:         for (idx = 2; idx < 5; idx++)
  227:                         HSCH_OUTB_SHARE_ENA(idx));
   in sparx5_dsm_cal_len():
  252:         u32 idx = 0, len = 0;
  254:         while (idx < SPX5_DSM_CAL_LEN) {
  255:                 if (cal[idx] != SPX5_DSM_CAL_EMPTY)
  257:                 idx++;
   in sparx5_dsm_cp_cal():
  264:         u32 idx = 0, tmp;
   [all …]
/linux/drivers/net/can/cc770/cc770_isa.c
   in cc770_isa_probe():
  167:         int idx = pdev->id;
  172:                 idx, port[idx], mem[idx], irq[idx]);
  173:         if (mem[idx]) {
  174:                 if (!request_mem_region(mem[idx], iosize, KBUILD_MODNAME)) {
  178:                 base = ioremap(mem[idx], iosize);
  184:                 if (indirect[idx] > 0 ||
  185:                     (indirect[idx] == -1 && indirect[0] > 0))
  187:                 if (!request_region(port[idx], iosize, KBUILD_MODNAME)) {
  200:         dev->irq = irq[idx];
  202:         if (mem[idx]) {
   [all …]
/linux/tools/testing/selftests/kvm/lib/sparsebit.c
  172:         sparsebit_idx_t idx;    /* index of least-significant bit in mask */
   in node_copy_subtree():
  287:         root->idx = subtree->idx;
  310: static struct node *node_find(const struct sparsebit *s, sparsebit_idx_t idx)
  316:              nodep = nodep->idx > idx ? nodep->left : nodep->right) {
  317:                 if (idx >= nodep->idx &&
  318:                     idx <= nodep->idx + MASK_BITS + nodep->num_after - 1)
  333: static struct node *node_add(struct sparsebit *s, sparsebit_idx_t idx)
  344:         nodep->idx = idx & -MASK_BITS;
  358:                 if (idx < parentp->idx) {
  366:                         assert(idx > parentp->idx + MASK_BITS + parentp->num_after - 1);
   [all …]
/linux/lib/find_bit.c
   30:         unsigned long idx, val, sz = (size); \
   32:         for (idx = 0; idx * BITS_PER_LONG < sz; idx++) { \
   35:                 sz = min(idx * BITS_PER_LONG + __ffs(MUNGE(val)), sz); \
   52:         unsigned long mask, idx, tmp, sz = (size), __start = (start); \
   58:         idx = __start / BITS_PER_LONG; \
   61:         if ((idx + 1) * BITS_PER_LONG >= sz) \
   63:         idx++; \
   66:         sz = min(idx * BITS_PER_LONG …
   in _find_last_bit():
  220:         unsigned long idx = (size-1) / BITS_PER_LONG;
   [all …]
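FIND_FIRST_BIT() above scans the bitmap one unsigned long at a time and only does bit-level work on the first non-zero word. A userspace sketch of that word-at-a-time scan; the helper name is ours, and __builtin_ctzl (GCC/Clang) stands in for the kernel's __ffs():

#include <stdio.h>

#define BITS_PER_LONG (8 * (unsigned long)sizeof(unsigned long))

static unsigned long demo_find_first_bit(const unsigned long *map,
                                          unsigned long size)
{
        unsigned long idx;

        for (idx = 0; idx * BITS_PER_LONG < size; idx++) {
                if (map[idx]) {
                        /* First set bit within the first non-zero word. */
                        unsigned long bit = idx * BITS_PER_LONG +
                                            (unsigned long)__builtin_ctzl(map[idx]);
                        return bit < size ? bit : size;
                }
        }
        return size;    /* no bit set */
}

int main(void)
{
        unsigned long map[2] = { 0, 1UL << 5 };         /* bit 69 on a 64-bit host */

        printf("first set bit: %lu\n",
               demo_find_first_bit(map, 2 * BITS_PER_LONG));
        return 0;
}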
/linux/kernel/sched/cpudeadline.c
   25: static void cpudl_heapify_down(struct cpudl *cp, int idx)
   29:         int orig_cpu = cp->elements[idx].cpu;
   30:         u64 orig_dl = cp->elements[idx].dl;
   32:         if (left_child(idx) >= cp->size)
   39:                 l = left_child(idx);
   40:                 r = right_child(idx);
   41:                 largest = idx;
   53:                 if (largest == idx)
   57:                 cp->elements[idx].cpu = cp->elements[largest].cpu;
   58:                 cp->elements[idx].dl = cp->elements[largest].dl;
   [all …]
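cpudl_heapify_down() restores the max-heap property of the deadline heap by repeatedly promoting the larger child. A minimal sift-down sketch over a plain array of deadlines; it swaps on every step instead of carrying the original element the way the scheduler code does, and the element layout is invented:

#include <stdio.h>
#include <stdint.h>

#define left_child(i)  (2 * (i) + 1)
#define right_child(i) (2 * (i) + 2)

static void heapify_down(uint64_t *dl, int size, int idx)
{
        for (;;) {
                int l = left_child(idx), r = right_child(idx), largest = idx;

                if (l < size && dl[l] > dl[largest])
                        largest = l;
                if (r < size && dl[r] > dl[largest])
                        largest = r;
                if (largest == idx)
                        break;

                /* Swap the current slot with the larger child and continue. */
                uint64_t tmp = dl[idx];
                dl[idx] = dl[largest];
                dl[largest] = tmp;
                idx = largest;
        }
}

int main(void)
{
        uint64_t dl[] = { 10, 90, 40, 20, 80 };
        int i, n = 5;

        heapify_down(dl, n, 0); /* restore the heap property at the root */
        for (i = 0; i < n; i++)
                printf("%llu ", (unsigned long long)dl[i]);
        printf("\n");
        return 0;
}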
/linux/arch/x86/um/tls_32.c
   in get_free_idx():
   51:         int idx;
   53:         for (idx = 0; idx < GDT_ENTRY_TLS_ENTRIES; idx++)
   54:                 if (!t->arch.tls_array[idx].present)
   55:                         return idx + GDT_ENTRY_TLS_MIN;
   in load_TLS():
   77:         int idx;
   79:         for (idx = GDT_ENTRY_TLS_MIN; idx < GDT_ENTRY_TLS_MAX; idx++) {
   81:                         &to->thread.arch.tls_array[idx - GDT_ENTRY_TLS_MIN];
   90:                 curr->tls.entry_number = idx;
   in set_tls_entry():
  186:                          int idx, int flushed)
  190:         if (idx < GDT_ENTRY_TLS_MIN || idx > GDT_ENTRY_TLS_MAX)
   [all …]
/linux/tools/perf/scripts/perl/Perf-Trace-Util/lib/Perf/Trace/Core.pm
   39:     foreach my $idx (sort {$a <=> $b} keys %trace_flags) {
   40:         if (!$value && !$idx) {
   45:         if ($idx && ($value & $idx) == $idx) {
   49:             $string .= "$trace_flags{$idx}";
   51:             $value &= ~$idx;
   69:     foreach my $idx (sort {$a <=> $b} keys %{$flag_fields{$event_name}{$field_name}{"values"}}) {
   70:         if (!$value && !$idx) {
   71:             $string .= "$flag_fields{$event_name}{$field_name}{'values'}{$idx}";
   74:         if ($idx && ($value & $idx) == $idx) {
   78:             $string .= "$flag_fields{$event_name}{$field_name}{'values'}{$idx}";
   [all …]
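Both Core.pm and the Core.py entry at the end of this listing turn a flag word into a name string by walking a value-to-name table, appending the name of each bit that is set and clearing it from the value. A C sketch of the same loop with an invented flag table:

#include <stdio.h>
#include <string.h>

struct flag_name {
        unsigned long bit;
        const char *name;
};

static const struct flag_name demo_flags[] = {  /* illustrative values */
        { 0x1, "IRQS_OFF" },
        { 0x2, "NEED_RESCHED" },
        { 0x4, "HARDIRQ" },
        { 0x8, "SOFTIRQ" },
};

static void flags_to_str(unsigned long value, char *buf, size_t len)
{
        size_t i;

        buf[0] = '\0';
        for (i = 0; i < sizeof(demo_flags) / sizeof(demo_flags[0]); i++) {
                if ((value & demo_flags[i].bit) == demo_flags[i].bit) {
                        if (buf[0])
                                strncat(buf, "|", len - strlen(buf) - 1);
                        strncat(buf, demo_flags[i].name, len - strlen(buf) - 1);
                        value &= ~demo_flags[i].bit;    /* consume the bit */
                }
        }
}

int main(void)
{
        char buf[64];

        flags_to_str(0x5, buf, sizeof(buf));
        printf("%s\n", buf);    /* IRQS_OFF|HARDIRQ */
        return 0;
}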
/linux/drivers/clk/uniphier/clk-uniphier-sys.c
   27: #define UNIPHIER_LD4_SYS_CLK_NAND(idx) \
   29:         UNIPHIER_CLK_GATE("nand", (idx), "nand-50m", 0x2104, 2)
   31: #define UNIPHIER_PRO5_SYS_CLK_NAND(idx) \
   33:         UNIPHIER_CLK_GATE("nand", (idx), "nand-50m", 0x2104, 2)
   35: #define UNIPHIER_LD11_SYS_CLK_NAND(idx) \
   37:         UNIPHIER_CLK_GATE("nand", (idx), "nand-50m", 0x210c, 0)
   39: #define UNIPHIER_SYS_CLK_NAND_4X(idx) \
   40:         UNIPHIER_CLK_FACTOR("nand-4x", (idx), "nand", 4, 1)
   42: #define UNIPHIER_LD11_SYS_CLK_EMMC(idx) \
   43:         UNIPHIER_CLK_GATE("emmc", (idx), NULL, 0x210c, 2)
   [all …]
/linux/arch/x86/events/intel/uncore_nhmex.c
   in nhmex_uncore_msr_enable_event():
  250:         if (hwc->idx == UNCORE_PMC_IDX_FIXED)
   in nhmex_bbox_hw_config():
  372:         reg1->idx = 0;
   in nhmex_bbox_msr_enable_event():
  384:         if (reg1->idx != EXTRA_REG_NONE) {
   in nhmex_sbox_hw_config():
  457:         reg1->idx = 0;
   in nhmex_sbox_msr_enable_event():
  469:         if (reg1->idx != EXTRA_REG_NONE) {
  554: static bool nhmex_mbox_get_shared_reg(struct intel_uncore_box *box, int idx, u64 config)
  561:         if (idx < EXTRA_REG_NHMEX_M_ZDP_CTL_FVC) {
  562:                 er = &box->shared_regs[idx];
  578:         idx -= EXTRA_REG_NHMEX_M_ZDP_CTL_FVC;
  579:         if (WARN_ON_ONCE(idx >= 4))
   [all …]
/linux/drivers/gpu/drm/radeon/evergreen_cs.c
   in evergreen_cs_track_validate_texture():
  757:                                                 unsigned idx)
  765:         texdw[0] = radeon_get_ib_value(p, idx + 0);
  766:         texdw[1] = radeon_get_ib_value(p, idx + 1);
  767:         texdw[2] = radeon_get_ib_value(p, idx + 2);
  768:         texdw[3] = radeon_get_ib_value(p, idx + 3);
  769:         texdw[4] = radeon_get_ib_value(p, idx + 4);
  770:         texdw[5] = radeon_get_ib_value(p, idx + 5);
  771:         texdw[6] = radeon_get_ib_value(p, idx + 6);
  772:         texdw[7] = radeon_get_ib_value(p, idx + 7);
   in evergreen_packet0_check():
 1051:                                  unsigned idx, unsigned reg)
   [all …]
/linux/drivers/gpu/drm/radeon/r600_cs.c
   in r600_cs_common_vline_parse():
  837:         r = radeon_cs_packet_parse(p, &wait_reg_mem, p->idx);
  848:         wait_reg_mem_info = radeon_get_ib_value(p, wait_reg_mem.idx + 1);
  864:         if ((radeon_get_ib_value(p, wait_reg_mem.idx + 2) << 2) != vline_status[0]) {
  869:         if (radeon_get_ib_value(p, wait_reg_mem.idx + 5) != RADEON_VLINE_STAT) {
  875:         r = radeon_cs_packet_parse(p, &p3reloc, p->idx + wait_reg_mem.count + 2);
  879:         h_idx = p->idx - 2;
  880:         p->idx += wait_reg_mem.count + 2;
  881:         p->idx += p3reloc.count + 2;
   in r600_packet0_check():
  918:                              unsigned idx, unsigned reg)
  927:                           idx, reg);
   [all …]
/linux/mm/hugetlb_cgroup.c
   43: __hugetlb_cgroup_counter_from_cgroup(struct hugetlb_cgroup *h_cg, int idx,
   47:                 return &h_cg->rsvd_hugepage[idx];
   48:         return &h_cg->hugepage[idx];
   52: hugetlb_cgroup_counter_from_cgroup(struct hugetlb_cgroup *h_cg, int idx)
   54:         return __hugetlb_cgroup_counter_from_cgroup(h_cg, idx, false);
   58: hugetlb_cgroup_counter_from_cgroup_rsvd(struct hugetlb_cgroup *h_cg, int idx)
   60:         return __hugetlb_cgroup_counter_from_cgroup(h_cg, idx, true);
   in hugetlb_cgroup_init():
  101:         int idx;
  103:         for (idx = 0; idx < HUGE_MAX_HSTATE; idx++) {
  111:                                    parent_h_cgroup, idx);
   [all …]
/linux/drivers/net/ethernet/chelsio/cxgb/fpga_defs.h
  215: #define MAC_REG_ADDR(idx, reg) (MAC_REG_BASE + (idx) * 128 + (reg))
  217: #define MAC_REG_IDLO(idx)              MAC_REG_ADDR(idx, A_GMAC_MACID_LO)
  218: #define MAC_REG_IDHI(idx)              MAC_REG_ADDR(idx, A_GMAC_MACID_HI)
  219: #define MAC_REG_CSR(idx)               MAC_REG_ADDR(idx, A_GMAC_CSR)
  220: #define MAC_REG_IFS(idx)               MAC_REG_ADDR(idx, A_GMAC_IFS)
  221: #define MAC_REG_LARGEFRAMELENGTH(idx)  MAC_REG_ADDR(idx, A_GMAC_JUMBO_FRAME_LEN)
  222: #define MAC_REG_LINKDLY(idx)           MAC_REG_ADDR(idx, A_GMAC_LNK_DLY)
  223: #define MAC_REG_PAUSETIME(idx)         MAC_REG_ADDR(idx, A_GMAC_PAUSETIME)
  224: #define MAC_REG_CASTLO(idx)            MAC_REG_ADDR(idx, A_GMAC_MCAST_LO)
  225: #define MAC_REG_MCASTHI(idx)           MAC_REG_ADDR(idx, A_GMAC_MCAST_HI)
   [all …]
/linux/tools/testing/selftests/kvm/x86_64/hyperv_features.c
   25:         uint32_t idx;
   in guest_msr():
   47:         GUEST_ASSERT(msr->idx);
   50:                 vector = wrmsr_safe(msr->idx, msr->write_val);
   52:         if (!vector && (!msr->write || !is_write_only_msr(msr->idx)))
   53:                 vector = rdmsr_safe(msr->idx, &msr_val);
   58:                             msr->write ? "WR" : "RD", msr->idx, vector);
   62:                             msr->write ? "WR" : "RD", msr->idx, vector);
   64:         if (vector || is_write_only_msr(msr->idx))
   70:                             msr->idx, msr->write_val, msr_val);
   73:         if (msr->idx == HV_X64_MSR_TSC_INVARIANT_CONTROL) {
   [all …]
/linux/include/asm-generic/fixmap.h
   30: static __always_inline unsigned long fix_to_virt(const unsigned int idx)
   32:         BUILD_BUG_ON(idx >= __end_of_fixed_addresses);
   33:         return __fix_to_virt(idx);
   64: #define set_fixmap(idx, phys) \
   65:         __set_fixmap(idx, phys, FIXMAP_PAGE_NORMAL)
   69: #define clear_fixmap(idx) \
   70:         __set_fixmap(idx, 0, FIXMAP_PAGE_CLEAR)
   74: #define __set_fixmap_offset(idx, phys, flags) \
   77:         __set_fixmap(idx, phys, flags); \
   78:         ________addr = fix_to_virt(idx) + ((phys) & (PAGE_SIZE - 1)); \
   [all …]
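fix_to_virt() maps a small enum of fixed slots to virtual addresses at compile time and rejects out-of-range indices with BUILD_BUG_ON(). A userspace sketch of that idea, with an invented top address and page size, and _Static_assert (C11) playing the role of BUILD_BUG_ON:

#include <stdio.h>

#define DEMO_PAGE_SIZE   4096UL
#define DEMO_FIXADDR_TOP 0xfffff000UL   /* invented top of the fixed-map area */

enum demo_fixed_addresses {
        FIX_SLOT_A,
        FIX_SLOT_B,
        FIX_SLOT_C,
        demo_end_of_fixed_addresses     /* stands in for __end_of_fixed_addresses */
};

/* Slot 0 sits at the top; each further slot is one page lower. */
#define demo_fix_to_virt(idx) (DEMO_FIXADDR_TOP - (idx) * DEMO_PAGE_SIZE)

/* Compile-time range check, standing in for BUILD_BUG_ON(). */
_Static_assert(FIX_SLOT_C < demo_end_of_fixed_addresses, "bad fixmap slot");

int main(void)
{
        printf("FIX_SLOT_A -> %#lx\n", demo_fix_to_virt(FIX_SLOT_A));
        printf("FIX_SLOT_C -> %#lx\n", demo_fix_to_virt(FIX_SLOT_C));
        return 0;
}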
/linux/drivers/net/ethernet/ti/cpsw_ale.c
   in cpsw_ale_get_field():
  130:         int idx, idx2;
  133:         idx = start / 32;
  136:         if (idx != idx2) {
  140:         start -= idx * 32;
  141:         idx = 2 - idx; /* flip */
  142:         return (hi_val + (ale_entry[idx] >> start)) & BITMASK(bits);
   in cpsw_ale_set_field():
  148:         int idx, idx2;
  151:         idx = start / 32;
  154:         if (idx != idx2) {
  159:         start -= idx * 32;
   [all …]
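cpsw_ale_get_field() extracts a bit-field that can straddle two 32-bit words of an ALE table entry (the real code also flips the word order within the three-word entry, which this sketch skips). A standalone version of the straddling read, assuming fields narrower than 32 bits:

#include <stdio.h>
#include <stdint.h>

#define BITMASK(bits) ((1u << (bits)) - 1u)

/* Extract 'bits' bits starting at absolute bit 'start' (bits < 32). */
static uint32_t get_field(const uint32_t *words, unsigned int start,
                          unsigned int bits)
{
        unsigned int idx = start / 32;
        unsigned int idx2 = (start + bits - 1) / 32;
        uint32_t hi = 0;

        if (idx != idx2) {
                /* Field crosses into the next word: take its upper part. */
                unsigned int lo_bits = 32 - start % 32;

                hi = (words[idx2] & BITMASK(bits - lo_bits)) << lo_bits;
        }
        start -= idx * 32;
        return (hi | (words[idx] >> start)) & BITMASK(bits);
}

int main(void)
{
        /* Bits 28..35 hold 0xAB: low nibble 0xB in word 0, high 0xA in word 1. */
        uint32_t words[2] = { 0xB0000000u, 0x0000000Au };

        printf("field = 0x%X\n", get_field(words, 28, 8));      /* 0xAB */
        return 0;
}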
/linux/arch/x86/kernel/tls.c
   in get_free_idx():
   25:         int idx;
   27:         for (idx = 0; idx < GDT_ENTRY_TLS_ENTRIES; idx++)
   28:                 if (desc_empty(&t->tls_array[idx]))
   29:                         return idx + GDT_ENTRY_TLS_MIN;
   85: static void set_tls_desc(struct task_struct *p, int idx,
   89:         struct desc_struct *desc = &t->tls_array[idx - GDT_ENTRY_TLS_MIN];
  115: int do_set_thread_area(struct task_struct *p, int idx,
  128:         if (idx == -1)
  129:                 idx = info.entry_number;
  135:         if (idx == -1 && can_allocate) {
   [all …]
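get_free_idx() here and in the UML tls_32.c entry above scans a small fixed table for the first unused slot and returns its index biased by the table's base GDT entry. A standalone sketch with an invented table size and base:

#include <stdio.h>
#include <stdbool.h>

#define DEMO_TLS_ENTRIES 3
#define DEMO_TLS_MIN     12     /* first GDT-style entry number of the table */

struct demo_entry {
        bool present;
};

static int get_free_idx(const struct demo_entry *table)
{
        int idx;

        for (idx = 0; idx < DEMO_TLS_ENTRIES; idx++)
                if (!table[idx].present)
                        return idx + DEMO_TLS_MIN;
        return -1;      /* ESRCH-like: no free slot */
}

int main(void)
{
        struct demo_entry table[DEMO_TLS_ENTRIES] = { { true }, { false }, { true } };

        printf("free entry: %d\n", get_free_idx(table));        /* 13 */
        return 0;
}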
/linux/drivers/net/wireless/mediatek/mt7601u/main.c
   in mt7601u_add_interface():
   49:         unsigned int idx = 0;
   50:         unsigned int wcid = GROUP_WCID(idx);
   57:         mvif->idx = idx;
   65:         mvif->group_wcid.idx = wcid;
   in mt7601u_remove_interface():
   76:         unsigned int wcid = mvif->group_wcid.idx;
   in mt76_wcid_alloc():
  189:         int i, idx = 0;
  192:                 idx = ffs(~dev->wcid_mask[i]);
  193:                 if (!idx)
  196:                 idx--;
  197:                 dev->wcid_mask[i] |= BIT(idx);
   [all …]
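mt76_wcid_alloc() treats an array of 32-bit words as an allocation bitmap: ffs(~word) finds the first zero bit, the bit is marked used, and the global index is returned. A userspace sketch of the same allocator with an invented table size:

#include <stdio.h>
#include <stdint.h>
#include <strings.h>    /* ffs() */

#define DEMO_N_WCIDS 64
#define BIT(n) (1u << (n))

static uint32_t wcid_mask[DEMO_N_WCIDS / 32];

static int wcid_alloc(void)
{
        int i;

        for (i = 0; i < DEMO_N_WCIDS / 32; i++) {
                int idx = ffs(~wcid_mask[i]);   /* 1-based, 0 if the word is full */

                if (!idx)
                        continue;

                idx--;
                wcid_mask[i] |= BIT(idx);
                return i * 32 + idx;
        }
        return -1;      /* all IDs in use */
}

int main(void)
{
        wcid_mask[0] = 0xFFFFFFFFu;     /* pretend the first word is exhausted */
        printf("allocated wcid %d\n", wcid_alloc());    /* 32 */
        printf("allocated wcid %d\n", wcid_alloc());    /* 33 */
        return 0;
}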
/linux/tools/perf/scripts/python/Perf-Trace-Util/lib/Perf/Trace/Core.py
   35:     for idx in sorted(flag_fields[event_name][field_name]['values']):
   36:         if not value and not idx:
   37:             string += flag_fields[event_name][field_name]['values'][idx]
   39:         if idx and (value & idx) == idx:
   42:             string += flag_fields[event_name][field_name]['values'][idx]
   44:             value &= ~idx
   52:     for idx in sorted(symbolic_fields[event_name][field_name]['values']):
   53:         if not value and not idx:
   54:             string = symbolic_fields[event_name][field_name]['values'][idx]
   56:         if (value == idx):
   [all …]