/linux/drivers/regulator/
anatop-regulator.c
    162  struct anatop_regulator *sreg;   in anatop_regulator_probe()  [local]
    175  sreg = devm_kzalloc(dev, sizeof(*sreg), GFP_KERNEL);   in anatop_regulator_probe()
    176  if (!sreg)   in anatop_regulator_probe()
    179  rdesc = &sreg->rdesc;   in anatop_regulator_probe()
    236  &sreg->delay_reg);   in anatop_regulator_probe()
    238  &sreg->delay_bit_width);   in anatop_regulator_probe()
    240  &sreg->delay_bit_shift);   in anatop_regulator_probe()
    253  config.driver_data = sreg;   in anatop_regulator_probe()
    258  if (control_reg && sreg->delay_bit_width) {   in anatop_regulator_probe()
    267  sreg->sel = (val & rdesc->vsel_mask) >> vol_bit_shift;   in anatop_regulator_probe()
    [all …]
|
hi655x-regulator.c
    108  sreg, cmask, vtable) { \   [argument]
    125  .status_reg = HI655X_BUS_ADDR(sreg), \
    129  sreg, cmask, minv, nvolt, vstep) { \   [argument]
    147  .status_reg = HI655X_BUS_ADDR(sreg), \
|
/linux/tools/perf/arch/x86/annotate/
instructions.c
    361  int sreg = src->reg1;   in update_insn_state_x86()  [local]
    364  if (!has_reg_type(state, sreg) ||   in update_insn_state_x86()
    369  src_tsr = state->regs[sreg];   in update_insn_state_x86()
    376  if (sreg == fbreg || sreg == state->stack_reg) {   in update_insn_state_x86()
    389  if (sreg == fbreg) {   in update_insn_state_x86()
    394  insn_offset, src->offset, sreg, dst->reg1);   in update_insn_state_x86()
    405  __die_get_real_type(&state->regs[sreg].type, &type_die) == NULL)   in update_insn_state_x86()
    409  type_die = state->regs[sreg].type;   in update_insn_state_x86()
    423  abs(src->offset), sreg, dst->reg1);   in update_insn_state_x86()
    552  int sreg = src->reg1;   in update_insn_state_x86()  [local]
    [all …]
|
/linux/net/netfilter/
nft_cmp.c
     21  u8 sreg;   [member]
     33  d = memcmp(&regs->data[priv->sreg], &priv->data, priv->len);   in nft_cmp_eval()
     86  err = nft_parse_register_load(ctx, tb[NFTA_CMP_SREG], &priv->sreg, desc.len);   in nft_cmp_init()
    100  if (nft_dump_register(skb, NFTA_CMP_SREG, priv->sreg))   in nft_cmp_dump()
    143  struct nft_offload_reg *reg = &ctx->regs[priv->sreg];   in __nft_cmp_offload()
    225  err = nft_parse_register_load(ctx, tb[NFTA_CMP_SREG], &priv->sreg, desc.len);   in nft_cmp_fast_init()
    249  .sreg = priv->sreg,   in nft_cmp_fast_offload()
    264  if (nft_dump_register(skb, NFTA_CMP_SREG, priv->sreg))   in nft_cmp_fast_dump()
    326  err = nft_parse_register_load(ctx, tb[NFTA_CMP_SREG], &priv->sreg, desc.len);   in nft_cmp16_fast_init()
    344  .sreg = priv->sreg,   in nft_cmp16_fast_offload()
    [all …]
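
Every nft_* entry above follows the same pattern: the expression's private struct carries a u8 sreg member, the init hook validates the netlink-chosen register with nft_parse_register_load(), the eval hook reads the value at &regs->data[priv->sreg], and the dump hook echoes the register back with nft_dump_register(). The sketch below is a standalone userspace model of just the nft_cmp.c eval step at line 33; the struct layout, register-file size, and values are invented for illustration and are not the kernel's.

/*
 * Userspace model of the cmp eval step: the rule's constant is memcmp()'d
 * against `len` bytes starting at the chosen source register. The register
 * file of u32 slots mirrors the kernel's regs->data[], but all sizes and
 * values here are illustrative only.
 */
#include <stdio.h>
#include <string.h>
#include <stdint.h>

#define REG32_COUNT 16              /* assumption: small register file */

struct cmp_expr {
    uint8_t sreg;                   /* index of the first 32-bit register */
    uint8_t len;                    /* number of bytes to compare */
    uint8_t data[16];               /* constant taken from the rule */
};

/* Return 1 if the loaded value equals the expression's constant. */
static int cmp_eval(const uint32_t *regs, const struct cmp_expr *e)
{
    return memcmp(&regs[e->sreg], e->data, e->len) == 0;
}

int main(void)
{
    uint32_t regs[REG32_COUNT] = { 0 };
    struct cmp_expr e = { .sreg = 4, .len = 2, .data = { 0x01, 0xbb } };

    /* pretend an earlier payload expression stored TCP dport 443 here */
    memcpy(&regs[4], "\x01\xbb", 2);

    printf("match: %d\n", cmp_eval(regs, &e));
    return 0;
}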
|
nft_bitwise.c
     19  u8 sreg;   [member]
     95  const u32 *src = &regs->data[priv->sreg], *src2;   in nft_bitwise_eval()
    256  err = nft_parse_register_load(ctx, tb[NFTA_BITWISE_SREG], &priv->sreg,   in nft_bitwise_init()
    346  if (nft_dump_register(skb, NFTA_BITWISE_SREG, priv->sreg))   in nft_bitwise_dump()
    386  priv->sreg != priv->dreg || priv->len != reg->len)   in nft_bitwise_offload()
    403  if (!track->regs[priv->sreg].selector)   in nft_bitwise_reduce()
    407  if (track->regs[priv->sreg].selector == track->regs[priv->dreg].selector &&   in nft_bitwise_reduce()
    408  track->regs[priv->sreg].num_reg == 0 &&   in nft_bitwise_reduce()
    411  priv->sreg == bitwise->sreg &&   in nft_bitwise_reduce()
    423  if (track->regs[priv->sreg].bitwise ||   in nft_bitwise_reduce()
    [all …]
|
nft_range.c
     18  u8 sreg;   [member]
     29  d1 = memcmp(&regs->data[priv->sreg], &priv->data_from, priv->len);   in nft_range_eval()
     30  d2 = memcmp(&regs->data[priv->sreg], &priv->data_to, priv->len);   in nft_range_eval()
     86  err = nft_parse_register_load(ctx, tb[NFTA_RANGE_SREG], &priv->sreg,   in nft_range_init()
    119  if (nft_dump_register(skb, NFTA_RANGE_SREG, priv->sreg))   in nft_range_dump()
|
nft_byteorder.c
     19  u8 sreg;   [member]
     31  u32 *src = &regs->data[priv->sreg];   in nft_byteorder_eval()
    142  err = nft_parse_register_load(ctx, tb[NFTA_BYTEORDER_SREG], &priv->sreg,   in nft_byteorder_init()
    157  if (nft_dump_register(skb, NFTA_BYTEORDER_SREG, priv->sreg))   in nft_byteorder_dump()
|
nft_objref.c
    131  u8 sreg;   [member]
    145  ext = nft_set_do_lookup(net, set, &regs->data[priv->sreg]);   in nft_objref_map_eval()
    175  err = nft_parse_register_load(ctx, tb[NFTA_OBJREF_SET_SREG], &priv->sreg,   in nft_objref_map_init()
    195  if (nft_dump_register(skb, NFTA_OBJREF_SET_SREG, priv->sreg) ||   in nft_objref_map_dump()
|
nft_hash.c
     17  u8 sreg;   [member]
     31  const void *data = &regs->data[priv->sreg];   in nft_jhash_eval()
     95  err = nft_parse_register_load(ctx, tb[NFTA_HASH_SREG], &priv->sreg, len);   in nft_jhash_init()
    147  if (nft_dump_register(skb, NFTA_HASH_SREG, priv->sreg))   in nft_jhash_dump()
|
nft_lookup.c
     20  u8 sreg;   [member]
    105  ext = nft_set_do_lookup(net, set, &regs->data[priv->sreg]);   in nft_lookup_eval()
    153  err = nft_parse_register_load(ctx, tb[NFTA_LOOKUP_SREG], &priv->sreg,   in nft_lookup_init()
    232  if (nft_dump_register(skb, NFTA_LOOKUP_SREG, priv->sreg))   in nft_lookup_dump()
|
nft_meta.c
    424  u32 *sreg = &regs->data[meta->sreg];   in nft_meta_set_eval()  [local]
    425  u32 value = *sreg;   in nft_meta_set_eval()
    436  value8 = nft_reg_load8(sreg);   in nft_meta_set_eval()
    444  value8 = nft_reg_load8(sreg);   in nft_meta_set_eval()
    658  err = nft_parse_register_load(ctx, tb[NFTA_META_SREG], &priv->sreg, len);   in nft_meta_set_init()
    692  if (nft_dump_register(skb, NFTA_META_SREG, priv->sreg))   in nft_meta_set_dump()
|
nf_tables_core.c
     78  u32 *src = &regs->data[priv->sreg];   in nft_bitwise_fast_eval()
     89  if (((regs->data[priv->sreg] & priv->mask) == priv->data) ^ priv->inv)   in nft_cmp_fast_eval()
     98  const u64 *reg_data = (const u64 *)&regs->data[priv->sreg];   in nft_cmp16_fast_eval()
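
The fast-path variants in nf_tables_core.c avoid the generic memcmp() path for small operands; line 89 shows the single-word form, a masked compare folded with an inversion flag. Below is a minimal standalone model of that expression, assuming only what the snippet shows; the harness, field comments, and values are illustrative.

/*
 * Standalone model of the single-word fast compare at line 89:
 * ((value & mask) == data) ^ inv. The mask lets one 32-bit register
 * hold comparisons narrower than four bytes.
 */
#include <stdbool.h>
#include <stdio.h>
#include <stdint.h>

struct cmp_fast {
    uint32_t data;   /* constant to match, already masked */
    uint32_t mask;   /* covers only the significant low bytes */
    bool     inv;    /* true for a "not equal" comparison */
};

static bool cmp_fast_eval(uint32_t reg, const struct cmp_fast *p)
{
    return ((reg & p->mask) == p->data) ^ p->inv;
}

int main(void)
{
    /* match protocol == 6 (TCP) stored in the low byte of a register */
    struct cmp_fast p = { .data = 6, .mask = 0xff, .inv = false };

    printf("%d %d\n", cmp_fast_eval(0x00000006, &p),
                      cmp_fast_eval(0x00000011, &p));
    return 0;
}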
|
nft_ct.c
    232  u16 value = nft_reg_load16(&regs->data[priv->sreg]);   in nft_ct_set_zone_eval()
    282  u32 value = regs->data[priv->sreg];   in nft_ct_set_eval()
    311  &regs->data[priv->sreg],   in nft_ct_set_eval()
    312  &regs->data[priv->sreg],   in nft_ct_set_eval()
    319  u32 ctmask = regs->data[priv->sreg];   in nft_ct_set_eval()
    628  err = nft_parse_register_load(ctx, tb[NFTA_CT_SREG], &priv->sreg, len);   in nft_ct_set_init()
    729  if (nft_dump_register(skb, NFTA_CT_SREG, priv->sreg))   in nft_ct_set_dump()
|
nft_exthdr.c
     25  u8 sreg;   [member]
    272  &regs->data[priv->sreg]);   in nft_exthdr_tcp_set_eval()
    290  new.v32 = nft_reg_load_be32(&regs->data[priv->sreg]);   in nft_exthdr_tcp_set_eval()
    591  return nft_parse_register_load(ctx, tb[NFTA_EXTHDR_SREG], &priv->sreg,   in nft_exthdr_tcp_set_init()
    691  if (nft_dump_register(skb, NFTA_EXTHDR_SREG, priv->sreg))   in nft_exthdr_dump_set()
|
/linux/drivers/tty/serial/
sunsab.c
    123  if (stat->sreg.isr0 & SAB82532_ISR0_RPF) {   in receive_chars()
    128  if (stat->sreg.isr0 & SAB82532_ISR0_TCD) {   in receive_chars()
    134  if (stat->sreg.isr0 & SAB82532_ISR0_TIME) {   in receive_chars()
    140  if (stat->sreg.isr0 & SAB82532_ISR0_RFO)   in receive_chars()
    154  if ((stat->sreg.isr1 & SAB82532_ISR1_BRK) &&   in receive_chars()
    159  if (unlikely(stat->sreg.isr1 & SAB82532_ISR1_BRK)) {   in receive_chars()
    160  stat->sreg.isr0 &= ~(SAB82532_ISR0_PERR |   in receive_chars()
    173  if (unlikely(stat->sreg.isr0 & (SAB82532_ISR0_PERR |   in receive_chars()
    176  unlikely(stat->sreg.isr1 & SAB82532_ISR1_BRK)) {   in receive_chars()
    180  if (stat->sreg.isr1 & SAB82532_ISR1_BRK) {   in receive_chars()
    [all …]
|
/linux/arch/powerpc/include/asm/
kvm_asm.h
     14  #define PPC_STD(sreg, offset, areg) std sreg, (offset)(areg)
     17  #define PPC_STD(sreg, offset, areg) stw sreg, (offset+4)(areg)
|
/linux/drivers/media/pci/cx88/
cx88.h
    597  #define cx_sread(sreg) (core->shadow[sreg])   [argument]
    598  #define cx_swrite(sreg, reg, value) \   [argument]
    599  (core->shadow[sreg] = value, \
    600  writel(core->shadow[sreg], core->lmmio + ((reg) >> 2)))
    601  #define cx_sandor(sreg, reg, mask, value) \   [argument]
    602  (core->shadow[sreg] = (core->shadow[sreg] & ~(mask)) | \
    604  writel(core->shadow[sreg], \
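
The cx88 macros above implement the shadow-register idiom: the driver keeps a cached copy of selected registers so cx_sandor() can perform a read-modify-write without having to read the value back from the hardware. The userspace sketch below models that idiom with writel() stubbed out as a printf; register indices and addresses are made up for illustration.

/*
 * Userspace model of cx_swrite()/cx_sandor(): cache the last value written
 * to each register, do read-modify-write against the cache, then push the
 * result to the device. writel() is stubbed; sizes are arbitrary.
 */
#include <stdio.h>
#include <stdint.h>

#define SHADOW_REGS 8

static uint32_t shadow[SHADOW_REGS];

static void writel_stub(uint32_t val, unsigned int reg)
{
    printf("writel(0x%08x) -> reg 0x%x\n", val, reg);
}

static void swrite(unsigned int sreg, unsigned int reg, uint32_t value)
{
    shadow[sreg] = value;
    writel_stub(shadow[sreg], reg);
}

/* Clear the bits in `mask`, then OR in `value`, as cx_sandor() does. */
static void sandor(unsigned int sreg, unsigned int reg,
                   uint32_t mask, uint32_t value)
{
    shadow[sreg] = (shadow[sreg] & ~mask) | value;
    writel_stub(shadow[sreg], reg);
}

int main(void)
{
    swrite(1, 0x32c, 0x0000ffff);       /* initial write, cached in shadow[1] */
    sandor(1, 0x32c, 0x000000f0, 0x50); /* update one field without reading HW */
    return 0;
}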
|
/linux/include/net/netfilter/
nf_tables_core.h
     35  u8 sreg;   [member]
     42  u8 sreg;   [member]
     50  u8 sreg;   [member]
     70  u8 sreg;   [member]
|
nf_tables.h
    150  static inline u8 nft_reg_load8(const u32 *sreg)   in nft_reg_load8()  [argument]
    152  return *(u8 *)sreg;   in nft_reg_load8()
    166  static inline u16 nft_reg_load16(const u32 *sreg)   in nft_reg_load16()  [argument]
    168  return *(u16 *)sreg;   in nft_reg_load16()
    171  static inline __be16 nft_reg_load_be16(const u32 *sreg)   in nft_reg_load_be16()  [argument]
    173  return (__force __be16)nft_reg_load16(sreg);   in nft_reg_load_be16()
    176  static inline __be32 nft_reg_load_be32(const u32 *sreg)   in nft_reg_load_be32()  [argument]
    178  return *(__force __be32 *)sreg;   in nft_reg_load_be32()
    186  static inline u64 nft_reg_load64(const u32 *sreg)   in nft_reg_load64()  [argument]
    188  return get_unaligned((u64 *)sreg);   in nft_reg_load64()
    [all …]
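
The nft_reg_load*() helpers read narrower or wider values out of the 32-bit register slots that make up regs->data[]; the 64-bit variant goes through get_unaligned() because two adjacent u32 slots are not guaranteed to be 8-byte aligned. Below is a userspace sketch of the same idea with memcpy() standing in for get_unaligned(); the register contents are arbitrary and, as in the kernel helpers, the results depend on host endianness.

/*
 * Userspace sketch of the nft_reg_load{8,16,64}() idea: data lives in an
 * array of 32-bit register slots, and narrower or wider values are read
 * from the start of a slot. memcpy() replaces the kernel's get_unaligned().
 */
#include <stdio.h>
#include <string.h>
#include <stdint.h>

static uint8_t reg_load8(const uint32_t *sreg)
{
    return *(const uint8_t *)sreg;
}

static uint16_t reg_load16(const uint32_t *sreg)
{
    uint16_t v;
    memcpy(&v, sreg, sizeof(v));
    return v;
}

static uint64_t reg_load64(const uint32_t *sreg)
{
    uint64_t v;
    memcpy(&v, sreg, sizeof(v));   /* spans two 32-bit slots */
    return v;
}

int main(void)
{
    uint32_t regs[4] = { 0x11223344, 0x55667788, 0, 0 };

    printf("8:  0x%02x\n",    reg_load8(&regs[0]));
    printf("16: 0x%04x\n",    reg_load16(&regs[0]));
    printf("64: 0x%016llx\n", (unsigned long long)reg_load64(&regs[0]));
    return 0;
}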
nft_meta.h
     12  u8 sreg;   [member]
|
/linux/drivers/hwmon/
ds1621.c
    144  u8 conf, new_conf, sreg, resol;   in ds1621_init_client()  [local]
    163  sreg = DS1621_COM_START;   in ds1621_init_client()
    172  sreg = DS1721_COM_START;   in ds1621_init_client()
    177  sreg = DS1621_COM_START;   in ds1621_init_client()
    182  i2c_smbus_write_byte(client, sreg);   in ds1621_init_client()
|
/linux/drivers/scsi/
esp_scsi.c
    122  p->sreg = esp->sreg;   in esp_log_fill_regs()
    194  p->val, p->sreg, p->seqreg,   in esp_dump_cmd_log()
    982  if (esp->sreg & ESP_STAT_SPAM) {   in DEF_SCSI_QCMD()
    990  "Gross error sreg[%02x]\n", esp->sreg);   in DEF_SCSI_QCMD()
   1005  esp->sreg &= ~ESP_STAT_INTR;   in esp_check_spur_intr()
   1009  if (!(esp->sreg & ESP_STAT_INTR)) {   in esp_check_spur_intr()
   1019  esp->sreg);   in esp_check_spur_intr()
   1070  esp->sreg = esp_read8(ESP_STATUS);   in esp_reconnect_with_tag()
   1074  i, esp->ireg, esp->sreg);   in esp_reconnect_with_tag()
   1082  if ((esp->sreg & ESP_STAT_PMASK) != ESP_MIP) {   in esp_reconnect_with_tag()
    [all …]
|
zorro_esp.c
    263  u8 phase = esp->sreg & ESP_STAT_PMASK;   in zorro_esp_send_blz1230_dma_cmd()
    314  u8 phase = esp->sreg & ESP_STAT_PMASK;   in zorro_esp_send_blz1230II_dma_cmd()
    359  u8 phase = esp->sreg & ESP_STAT_PMASK;   in zorro_esp_send_blz2060_dma_cmd()
    405  u8 phase = esp->sreg & ESP_STAT_PMASK;   in zorro_esp_send_cyber_dma_cmd()
    456  u8 phase = esp->sreg & ESP_STAT_PMASK;   in zorro_esp_send_cyberII_dma_cmd()
    498  u8 phase = esp->sreg & ESP_STAT_PMASK;   in zorro_esp_send_fastlane_dma_cmd()
|
/linux/drivers/net/ethernet/netronome/nfp/bpf/
verifier.c
    529  const struct bpf_reg_state *sreg = cur_regs(env) + meta->insn.src_reg;   in nfp_bpf_check_atomic()  [local]
    542  if (sreg->type != SCALAR_VALUE) {   in nfp_bpf_check_atomic()
    543  pr_vlog(env, "atomic add not of a scalar: %d\n", sreg->type);   in nfp_bpf_check_atomic()
    548  sreg->var_off.value > 0xffff || sreg->var_off.mask > 0xffff;   in nfp_bpf_check_atomic()
    550  (sreg->var_off.value & ~sreg->var_off.mask) <= 0xffff;   in nfp_bpf_check_atomic()
    559  const struct bpf_reg_state *sreg =   in nfp_bpf_check_alu()  [local]
    564  meta->umin_src = min(meta->umin_src, sreg->umin_value);   in nfp_bpf_check_alu()
    565  meta->umax_src = max(meta->umax_src, sreg->umax_value);   in nfp_bpf_check_alu()
|
/linux/arch/x86/net/
bpf_jit_comp32.c
    239  u8 sreg = sstk ? IA32_EAX : src;   in emit_ia32_mov_r()  [local]
    246  EMIT3(0x89, add_2reg(0x40, IA32_EBP, sreg), STACK_VAR(dst));   in emit_ia32_mov_r()
    249  EMIT2(0x89, add_2reg(0xC0, dst, sreg));   in emit_ia32_mov_r()
    290  u8 sreg = sstk ? IA32_ECX : src;   in emit_ia32_mul_r()  [local]
    304  EMIT2(0xF7, add_1reg(0xE0, sreg));   in emit_ia32_mul_r()
    529  u8 sreg = sstk ? IA32_EAX : src;   in emit_ia32_alu_r()  [local]
    544  EMIT2(0x11, add_2reg(0xC0, dreg, sreg));   in emit_ia32_alu_r()
    546  EMIT2(0x01, add_2reg(0xC0, dreg, sreg));   in emit_ia32_alu_r()
    551  EMIT2(0x19, add_2reg(0xC0, dreg, sreg));   in emit_ia32_alu_r()
    553  EMIT2(0x29, add_2reg(0xC0, dreg, sreg));   in emit_ia32_alu_r()
    [all …]
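
The EMIT2(0x89, add_2reg(0xC0, dst, sreg)) lines above hand-assemble x86 MOV instructions: opcode 0x89 is MOV r/m32, r32, the 0xC0 base selects register-direct addressing, the destination goes in the ModRM r/m field and the source in the reg field. The sketch below assumes add_2reg() is simply byte + dst + (src << 3), which matches how the snippets use it but is written from memory rather than copied from the kernel.

/*
 * Sketch of the ModRM arithmetic behind EMIT2(0x89, add_2reg(0xC0, dst, sreg)).
 * The helper folds the destination register into the r/m field (low 3 bits)
 * and the source register into the reg field (bits 3..5).
 */
#include <stdio.h>
#include <stdint.h>

enum { EAX = 0, ECX = 1, EDX = 2, EBX = 3 };   /* x86 register encodings */

static uint8_t add_2reg(uint8_t byte, uint8_t dst, uint8_t src)
{
    return byte + dst + (src << 3);
}

int main(void)
{
    /* mov %eax, %edx (AT&T), i.e. mov edx, eax (Intel): bytes 89 c2 */
    printf("%02x %02x\n", 0x89, add_2reg(0xC0, EDX, EAX));
    return 0;
}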
|