/linux/include/asm-generic/ |
percpu.h |
    #define raw_cpu_generic_read(pcp) \
        *raw_cpu_ptr(&(pcp)); \
    #define raw_cpu_generic_to_op(pcp, val, op) \
        *raw_cpu_ptr(&(pcp)) op val; \
    #define raw_cpu_generic_add_return(pcp, val) \
        typeof(pcp) *__p = raw_cpu_ptr(&(pcp)); \
    #define raw_cpu_generic_xchg(pcp, nval) \
        typeof(pcp) *__p = raw_cpu_ptr(&(pcp)); \
        typeof(pcp) __ret; \
    #define __cpu_fallback_try_cmpxchg(pcp, ovalp, nval, _cmpxchg) \
    [all …]
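These are the generic fallbacks that asm-generic/percpu.h wires into the per-size operations when an architecture does not supply an optimised version. A minimal sketch of that wiring pattern (illustrative only; the header itself covers all sizes and operations):

    /* Sketch: map the size-suffixed ops dispatched by <linux/percpu-defs.h>
     * onto the generic fallbacks unless the architecture already defined them.
     */
    #ifndef raw_cpu_read_4
    #define raw_cpu_read_4(pcp)		raw_cpu_generic_read(pcp)
    #endif
    #ifndef raw_cpu_add_4
    #define raw_cpu_add_4(pcp, val)		raw_cpu_generic_to_op(pcp, val, +=)
    #endif
    #ifndef raw_cpu_add_return_4
    #define raw_cpu_add_return_4(pcp, val)	raw_cpu_generic_add_return(pcp, val)
    #endif
    #ifndef raw_cpu_xchg_4
    #define raw_cpu_xchg_4(pcp, nval)	raw_cpu_generic_xchg(pcp, nval)
    #endif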
|
/linux/arch/s390/include/asm/ |
percpu.h |
    #define arch_this_cpu_to_op_simple(pcp, val, op) \
        typedef typeof(pcp) pcp_op_T__; \
        ptr__ = raw_cpu_ptr(&(pcp)); \
    #define this_cpu_add_1(pcp, val)		arch_this_cpu_to_op_simple(pcp, val, +)
    #define this_cpu_add_2(pcp, val)		arch_this_cpu_to_op_simple(pcp, val, +)
    #define this_cpu_add_return_1(pcp, val)	arch_this_cpu_to_op_simple(pcp, val, +)
    #define this_cpu_add_return_2(pcp, val)	arch_this_cpu_to_op_simple(pcp, val, +)
    #define this_cpu_and_1(pcp, val)		arch_this_cpu_to_op_simple(pcp, val, &)
    #define this_cpu_and_2(pcp, val)		arch_this_cpu_to_op_simple(pcp, val, &)
    #define this_cpu_or_1(pcp, val)		arch_this_cpu_to_op_simple(pcp, val, |)
    [all …]
|
/linux/include/linux/ |
percpu-defs.h |
    #define raw_cpu_read(pcp)			__pcpu_size_call_return(raw_cpu_read_, pcp)
    #define raw_cpu_write(pcp, val)		__pcpu_size_call(raw_cpu_write_, pcp, val)
    #define raw_cpu_add(pcp, val)		__pcpu_size_call(raw_cpu_add_, pcp, val)
    #define raw_cpu_and(pcp, val)		__pcpu_size_call(raw_cpu_and_, pcp, val)
    #define raw_cpu_or(pcp, val)		__pcpu_size_call(raw_cpu_or_, pcp, val)
    #define raw_cpu_add_return(pcp, val)	__pcpu_size_call_return2(raw_cpu_add_return_, pcp, val)
    #define raw_cpu_xchg(pcp, nval)		__pcpu_size_call_return2(raw_cpu_xchg_, pcp, nval)
    #define raw_cpu_cmpxchg(pcp, oval, nval) \
        __pcpu_size_call_return2(raw_cpu_cmpxchg_, pcp, oval, nval)
    #define raw_cpu_try_cmpxchg(pcp, ovalp, nval) \
    [all …]
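These are the size-dispatching wrappers callers actually use. A minimal usage sketch on a hypothetical per-CPU counter (demo_hits and both functions are illustrative, not kernel code); raw_cpu_*() leaves preemption handling to the caller, so the accesses are bracketed explicitly here:

    #include <linux/percpu.h>
    #include <linux/preempt.h>

    static DEFINE_PER_CPU(unsigned long, demo_hits);	/* hypothetical counter */

    static void demo_count_hit(void)
    {
    	/* caller provides the protection; raw_cpu_add() itself does not */
    	preempt_disable();
    	raw_cpu_add(demo_hits, 1);
    	preempt_enable();
    }

    static unsigned long demo_read_hits(void)
    {
    	unsigned long val;

    	preempt_disable();
    	val = raw_cpu_read(demo_hits);
    	preempt_enable();
    	return val;
    }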
|
/linux/arch/arm64/include/asm/ |
percpu.h |
    #define _pcp_protect(op, pcp, ...) \
        op(raw_cpu_ptr(&(pcp)), __VA_ARGS__); \
    #define _pcp_protect_return(op, pcp, args...) \
        typeof(pcp) __retval; \
        __retval = (typeof(pcp))op(raw_cpu_ptr(&(pcp)), ##args); \
    #define this_cpu_read_1(pcp) \
        _pcp_protect_return(__percpu_read_8, pcp)
    #define this_cpu_read_2(pcp) \
        _pcp_protect_return(__percpu_read_16, pcp)
    #define this_cpu_read_4(pcp) \
    [all …]
|
/linux/arch/loongarch/include/asm/ |
percpu.h |
    #define _protect_cmpxchg_local(pcp, o, n) \
        typeof(*raw_cpu_ptr(&(pcp))) __ret; \
        __ret = cmpxchg_local(raw_cpu_ptr(&(pcp)), o, n); \
    #define _pcp_protect(operation, pcp, val) \
        typeof(pcp) __retval; \
        __retval = (typeof(pcp))operation(raw_cpu_ptr(&(pcp)), \
                                          (val), sizeof(pcp)); \
    #define _percpu_add(pcp, val) \
        _pcp_protect(__percpu_add, pcp, val)
    #define _percpu_add_return(pcp, val)	_percpu_add(pcp, val)
    [all …]
|
/linux/Documentation/translations/zh_CN/core-api/ |
this_cpu_ops.rst |
    this_cpu_read(pcp)
    this_cpu_write(pcp, val)
    this_cpu_add(pcp, val)
    this_cpu_and(pcp, val)
    this_cpu_or(pcp, val)
    this_cpu_add_return(pcp, val)
    this_cpu_xchg(pcp, nval)
    this_cpu_cmpxchg(pcp, oval, nval)
    this_cpu_sub(pcp, val)
    this_cpu_inc(pcp)
    [all …]
|
/linux/mm/ |
page_alloc.c |
    /* free_pcppages_bulk() */
    struct per_cpu_pages *pcp,
    count = min(pcp->count, count);
    list = &pcp->lists[pindex];
    pcp->count -= nr_pages;
    /* decay_pcp_high() */
    int decay_pcp_high(struct zone *zone, struct per_cpu_pages *pcp)
    high_min = READ_ONCE(pcp->high_min);
    batch = READ_ONCE(pcp->batch);
    if (pcp->high > high_min) {
        pcp->high = max3(pcp->count - (batch << CONFIG_PCP_BATCH_SCALE_MAX),
                         pcp->high - (pcp->high >> 3), high_min);
    [all …]
|
vmstat.c |
    /* __mod_zone_page_state() */
    struct per_cpu_zonestat __percpu *pcp = zone->per_cpu_zonestats;
    s8 __percpu *p = pcp->vm_stat_diff + item;
    t = __this_cpu_read(pcp->stat_threshold);
    /* __mod_node_page_state() */
    struct per_cpu_nodestat __percpu *pcp = pgdat->per_cpu_nodestats;
    s8 __percpu *p = pcp->vm_node_stat_diff + item;
    t = __this_cpu_read(pcp->stat_threshold);
    /* __inc_zone_state() */
    struct per_cpu_zonestat __percpu *pcp = zone->per_cpu_zonestats;
    s8 __percpu *p = pcp->vm_stat_diff + item;
    t = __this_cpu_read(pcp->stat_threshold);
    /* __inc_node_state() */
    struct per_cpu_nodestat __percpu *pcp = pgdat->per_cpu_nodestats;
    [all …]
|
/linux/Documentation/core-api/ |
this_cpu_ops.rst |
    this_cpu_read(pcp)
    this_cpu_write(pcp, val)
    this_cpu_add(pcp, val)
    this_cpu_and(pcp, val)
    this_cpu_or(pcp, val)
    this_cpu_add_return(pcp, val)
    this_cpu_xchg(pcp, nval)
    this_cpu_cmpxchg(pcp, oval, nval)
    this_cpu_sub(pcp, val)
    this_cpu_inc(pcp)
    [all …]
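A minimal usage sketch of the operations listed in this document, on a hypothetical per-CPU statistics counter (demo_stat and the functions are illustrative). Unlike the raw_cpu_*() variants, these can be called from preemptible context; the implementation ensures each operation is applied to the CPU the caller happens to run on:

    #include <linux/percpu.h>

    static DEFINE_PER_CPU(long, demo_stat);	/* hypothetical counter */

    static void demo_account(long delta)
    {
    	this_cpu_add(demo_stat, delta);	/* preemption handled internally */
    	this_cpu_inc(demo_stat);
    }

    static long demo_take_snapshot(void)
    {
    	/* read and reset the current CPU's counter in one operation */
    	return this_cpu_xchg(demo_stat, 0);
    }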
|
/linux/drivers/net/dsa/sja1105/ |
sja1105_flower.c |
    /* sja1105_setup_tc_policer() */
    rule->key.tc.pcp = tc;
    /* sja1105_flower_policer() */
    key->tc.pcp, rate_bytes_per_sec,
    /* sja1105_flower_parse_key() */
    u16 pcp = U16_MAX;
    pcp = match.key->vlan_priority;
    if (is_bcast_dmac && vid == U16_MAX && pcp == U16_MAX) {
    if (dmac == U64_MAX && vid == U16_MAX && pcp != U16_MAX) {
        key->tc.pcp = pcp;
    if (dmac != U64_MAX && vid != U16_MAX && pcp != U16_MAX) {
        key->vl.pcp = pcp;
    /* sja1105_cls_flower_del() */
    int index = (port * SJA1105_NUM_TC) + rule->key.tc.pcp;
|
sja1105_vl.c |
    /* sja1105_init_virtual_links() */
    vl_lookup[k].vlanprior = rule->key.vl.pcp;
    /* sja1105_vl_gate() */
    ipv = key->vl.pcp;
    /* sja1105_find_vlid() */
    vl_lookup[i].vlanprior == key->vl.pcp)
|
/linux/lib/ |
debugobjects.c |
    /* pcpu_alloc() */
    struct obj_pool *pcp = this_cpu_ptr(&pool_pcpu);
    struct debug_obj *obj = __alloc_object(&pcp->objects);
    pcp->cnt--;
    if (unlikely(pcp->cnt < (ODEBUG_POOL_PERCPU_SIZE - ODEBUG_BATCH_SIZE) &&
                 !(pcp->cnt % ODEBUG_BATCH_SIZE))) {
    pool_move_batch(pcp, &pool_to_free);
    if (!pool_move_batch(pcp, &pool_to_free)) {
        if (!pool_move_batch(pcp, &pool_global))
    /* pcpu_free() */
    struct obj_pool *pcp = this_cpu_ptr(&pool_pcpu);
    if (!(pcp->cnt % ODEBUG_BATCH_SIZE)) {
    [all …]
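The pattern here is this_cpu_ptr() on a per-CPU pool structure rather than the scalar this_cpu_*() ops. A minimal sketch of that pattern on a hypothetical pool type (demo_pool/demo_pools are illustrative; debugobjects itself relies on its own locking to stay on one CPU while the pointer is in use):

    #include <linux/percpu.h>
    #include <linux/preempt.h>

    struct demo_pool {
    	int cnt;
    };
    static DEFINE_PER_CPU(struct demo_pool, demo_pools);	/* hypothetical */

    static void demo_take(void)
    {
    	struct demo_pool *pool;

    	preempt_disable();		/* keep the pointer valid for this CPU */
    	pool = this_cpu_ptr(&demo_pools);
    	pool->cnt--;
    	preempt_enable();
    }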
|
/linux/net/dsa/ |
tag_vsc73xx_8021q.c |
    /* vsc73xx_xmit() */
    u8 pcp;
    pcp = netdev_txq_to_tc(netdev, queue_mapping);
    ((pcp << VLAN_PRIO_SHIFT) | tx_vid));
|
tag_sja1105.c |
    /* sja1105_pvid_tag_control_pkt() */
    struct sk_buff *skb, u8 pcp)
    return vlan_insert_tag(skb, xmit_tpid, (pcp << VLAN_PRIO_SHIFT) |
    /* sja1105_xmit() */
    u8 pcp = netdev_txq_to_tc(netdev, queue_mapping);
    skb = sja1105_pvid_tag_control_pkt(dp, skb, pcp);
    ((pcp << VLAN_PRIO_SHIFT) | tx_vid));
    /* sja1110_xmit() */
    u8 pcp = netdev_txq_to_tc(netdev, queue_mapping);
    ((pcp << VLAN_PRIO_SHIFT) | tx_vid));
    skb = sja1105_pvid_tag_control_pkt(dp, skb, pcp);
    *tx_trailer = cpu_to_be32(SJA1110_TX_TRAILER_PRIO(pcp) |
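In these DSA taggers "pcp" is the 802.1Q Priority Code Point, not a per-CPU variable: the traffic class returned by netdev_txq_to_tc() is packed into bits 15..13 of the VLAN TCI. A minimal sketch of that packing (demo_build_tci() is a hypothetical helper, not part of the tagger code):

    #include <linux/if_vlan.h>

    static u16 demo_build_tci(u8 pcp, u16 vid)
    {
    	/* TCI layout: PCP in bits 15..13, DEI in bit 12, VID in bits 11..0 */
    	return (pcp << VLAN_PRIO_SHIFT) | (vid & VLAN_VID_MASK);
    }

    /* e.g. vlan_insert_tag(skb, htons(ETH_P_8021Q), demo_build_tci(pcp, tx_vid)) */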
|
tag_ocelot_8021q.c |
    /* ocelot_xmit() */
    u8 pcp = netdev_txq_to_tc(netdev, queue_mapping);
    ((pcp << VLAN_PRIO_SHIFT) | tx_vid));
|
/linux/tools/testing/selftests/drivers/net/microchip/ |
ksz9477_qos.sh |
    local pcp
    pcp=0
    "${apptrust_order}" ${port_prio} ${dscp_prio} ${dscp} ${pcp_prio} ${pcp}
    pcp=7
    "${apptrust_order}" ${port_prio} ${dscp_prio} ${dscp} ${pcp_prio} ${pcp}
    "${apptrust_order}" ${port_prio} ${dscp_prio} ${dscp} ${pcp_prio} ${pcp}
    pcp=0
    "${apptrust_order}" ${port_prio} ${dscp_prio} ${dscp} ${pcp_prio} ${pcp}
|
/linux/drivers/infiniband/ulp/opa_vnic/ |
opa_vnic_encap.c |
    /* opa_vnic_get_sc() */
    u8 pcp = OPA_VNIC_VLAN_PCP(vlan_tci);
    sc = info->vport.pcp_to_sc_mc[pcp];
    sc = info->vport.pcp_to_sc_uc[pcp];
    /* opa_vnic_get_vl() */
    u8 pcp = skb_vlan_tag_get(skb) >> VLAN_PRIO_SHIFT;
    vl = info->vport.pcp_to_vl_mc[pcp];
    vl = info->vport.pcp_to_vl_uc[pcp];
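Here the PCP is read back out of an existing VLAN tag and used as an index into the vport's pcp_to_* mapping tables. A minimal sketch of the extraction step on its own (demo_skb_pcp() is a hypothetical helper):

    #include <linux/if_vlan.h>
    #include <linux/skbuff.h>

    static u8 demo_skb_pcp(const struct sk_buff *skb)
    {
    	if (!skb_vlan_tag_present(skb))
    		return 0;	/* untagged: treat as priority 0 */
    	return (skb_vlan_tag_get(skb) & VLAN_PRIO_MASK) >> VLAN_PRIO_SHIFT;
    }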
|
/linux/arch/sparc/kernel/ |
pcic.c |
    /* pcibios_fixup_bus() */
    struct pcidev_cookie *pcp;
    pcp = pci_devcookie_alloc();
    pcp->pbm = &pcic->pbm;
    pcp->prom_node = of_find_node_by_phandle(node);
    dev->sysdata = pcp;
|
/linux/drivers/net/ethernet/microchip/lan966x/ |
lan966x_dcb.c |
    /* lan966x_dcb_app_update() */
    for (int i = 0; i < ARRAY_SIZE(qos.pcp.map); i++) {
        qos.pcp.map[i] = dcb_getapp(dev, &app_itr);
    qos.pcp.enable = true;
|
/linux/drivers/net/ethernet/microchip/sparx5/ |
sparx5_port.c |
    /* sparx5_port_qos_set() */
    sparx5_port_qos_pcp_set(port, &qos->pcp);
    /* sparx5_port_qos_pcp_rewr_set() */
    u8 pcp, dei;
    pcp = qos->map.map[i];
    if (pcp > SPARX5_PORT_QOS_PCP_COUNT)
    spx5_rmw(REW_PCP_MAP_DE1_PCP_DE1_SET(pcp),
    spx5_rmw(REW_PCP_MAP_DE0_PCP_DE0_SET(pcp),
    /* sparx5_port_qos_pcp_set() */
    u8 pcp, dp;
    pcp = *(pcp_itr + i);
    spx5_rmw(ANA_CL_PCP_DEI_MAP_CFG_PCP_DEI_QOS_VAL_SET(pcp) |
|
/linux/drivers/net/ethernet/mscc/ |
ocelot_vcap.c |
    /* is2_entry_set() */
    tag->pcp.value[0], tag->pcp.mask[0]);
    /* is1_action_set() */
    vcap_action_set(vcap, data, VCAP_IS1_ACT_PCP_VAL, a->pcp);
    /* is1_entry_set() */
    tag->pcp.value[0], tag->pcp.mask[0]);
    /* es0_entry_set() */
    tag->pcp.value[0], tag->pcp.mask[0]);
|
ocelot.c |
    /* ocelot_update_vlan_reclassify_rule() */
    int err, val, pcp, dei;
    pcp = ANA_PORT_QOS_CFG_QOS_DEFAULT_VAL_X(val);
    if (filter->action.pcp != pcp) {
        filter->action.pcp = pcp;
    filter->action.pcp = pcp;
|
/linux/include/soc/mscc/ |
ocelot_vcap.h |
    struct ocelot_vcap_u8 pcp;	/* PCP (3 bit) */
    u8 pcp;
|
/linux/drivers/net/ethernet/mellanox/mlxsw/ |
core_acl_flex_actions.h |
    u16 vid, u8 pcp, u8 et,
|
core_acl_flex_actions.c |
    MLXSW_ITEM32(afa, vlan, pcp, 0x08, 8, 3);
    /* mlxsw_afa_vlan_pack() */
    enum mlxsw_afa_vlan_cmd pcp_cmd, u8 pcp,
    mlxsw_afa_vlan_pcp_set(payload, pcp);
    /* mlxsw_afa_block_append_vlan_modify() */
    u16 vid, u8 pcp, u8 et,
    MLXSW_AFA_VLAN_CMD_SET_OUTER, pcp,
|