Searched refs:pools (Results 1 – 25 of 51) sorted by relevance

/linux/tools/testing/selftests/drivers/net/mlxsw/
sharedbuffer_configuration.py
28 def __init__(self, pools): argument
30 for pool in pools:
187 pools = PoolList()
190 pools.append(Pool(pooldict))
191 return pools
194 def do_check_pools(dlname, pools, vp): argument
195 for pool in pools:
219 def check_pools(dlname, pools): argument
221 record_vp = RecordValuePicker(pools)
224 do_check_pools(dlname, pools, RandomValuePicker(pools))
[all …]
/linux/drivers/net/ethernet/mellanox/mlx5/core/en/xsk/
pool.c
25 if (!xsk->pools) { in mlx5e_xsk_get_pools()
26 xsk->pools = kcalloc(MLX5E_MAX_NUM_CHANNELS, in mlx5e_xsk_get_pools()
27 sizeof(*xsk->pools), GFP_KERNEL); in mlx5e_xsk_get_pools()
28 if (unlikely(!xsk->pools)) in mlx5e_xsk_get_pools()
41 kfree(xsk->pools); in mlx5e_xsk_put_pools()
42 xsk->pools = NULL; in mlx5e_xsk_put_pools()
54 xsk->pools[ix] = pool; in mlx5e_xsk_add_pool()
60 xsk->pools[ix] = NULL; in mlx5e_xsk_remove_pool()
pool.h
12 if (!xsk || !xsk->pools) in mlx5e_xsk_get_pool()
18 return xsk->pools[ix]; in mlx5e_xsk_get_pool()
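The pool.c/pool.h hits above show the mlx5 XSK pool-table pattern: an array of pool pointers is allocated on first use, indexed by channel, and freed again once the last pool is removed. Below is a minimal userspace sketch of that pattern, with calloc/free standing in for kcalloc/kfree; struct xsk_state, MAX_NUM_CHANNELS and the helper names are illustrative, not the driver's API.

#include <stdlib.h>

#define MAX_NUM_CHANNELS 64             /* illustrative stand-in for MLX5E_MAX_NUM_CHANNELS */

struct pool;                            /* opaque placeholder for the real pool type */

struct xsk_state {
        struct pool **pools;            /* NULL until the first pool is added */
        int refcnt;                     /* number of slots currently in use */
};

static int xsk_get_pools(struct xsk_state *xsk)
{
        if (!xsk->pools) {
                xsk->pools = calloc(MAX_NUM_CHANNELS, sizeof(*xsk->pools));
                if (!xsk->pools)
                        return -1;
        }
        return 0;
}

static void xsk_put_pools(struct xsk_state *xsk)
{
        if (--xsk->refcnt == 0) {       /* last pool removed: drop the array */
                free(xsk->pools);
                xsk->pools = NULL;
        }
}

static int xsk_add_pool(struct xsk_state *xsk, struct pool *pool, unsigned int ix)
{
        if (xsk_get_pools(xsk))
                return -1;
        xsk->refcnt++;
        xsk->pools[ix] = pool;
        return 0;
}

static void xsk_remove_pool(struct xsk_state *xsk, unsigned int ix)
{
        xsk->pools[ix] = NULL;
        xsk_put_pools(xsk);
}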
/linux/arch/sparc/kernel/
iommu-common.c
82 spin_lock_init(&(iommu->pools[i].lock)); in iommu_tbl_pool_init()
83 iommu->pools[i].start = start; in iommu_tbl_pool_init()
84 iommu->pools[i].hint = start; in iommu_tbl_pool_init()
86 iommu->pools[i].end = start - 1; in iommu_tbl_pool_init()
131 pool = &(iommu->pools[pool_nr]); in iommu_tbl_range_alloc()
161 pool = &(iommu->pools[0]); in iommu_tbl_range_alloc()
193 pool = &(iommu->pools[pool_nr]); in iommu_tbl_range_alloc()
237 p = &tbl->pools[pool_nr]; in get_pool()
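The sparc iommu-common.c hits above carve one IOMMU table into a fixed number of pools, each with its own start, end and search hint, and iommu_tbl_range_alloc() then picks a pool by number. A rough sketch of that layout follows, with illustrative names and without the locking and wrap-around handling of the real code.

#define NR_POOLS 4                      /* illustrative stand-in for IOMMU_NR_POOLS */

struct range_pool {
        unsigned long start;            /* first entry owned by this pool */
        unsigned long end;              /* last entry owned by this pool */
        unsigned long hint;             /* where the next search starts */
};

struct range_table {
        struct range_pool pools[NR_POOLS];
};

static void range_table_init(struct range_table *tbl, unsigned long num_entries)
{
        unsigned long per_pool = num_entries / NR_POOLS;
        unsigned long start = 0;

        for (int i = 0; i < NR_POOLS; i++) {
                tbl->pools[i].start = start;
                tbl->pools[i].hint  = start;
                start += per_pool;              /* next pool begins where this one ends */
                tbl->pools[i].end   = start - 1;
        }
}

static struct range_pool *pick_pool(struct range_table *tbl, unsigned int pool_nr)
{
        return &tbl->pools[pool_nr % NR_POOLS]; /* allocation grabs one pool by number */
}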
/linux/drivers/net/ethernet/chelsio/libcxgb/
libcxgb_ppm.c
348 struct cxgbi_ppm_pool __percpu *pools; in ppm_alloc_cpu_pool() local
350 unsigned int max = (PCPU_MIN_UNIT_SIZE - sizeof(*pools)) << 3; in ppm_alloc_cpu_pool()
367 alloc_sz = sizeof(*pools) + sizeof(unsigned long) * bmap; in ppm_alloc_cpu_pool()
368 pools = __alloc_percpu(alloc_sz, __alignof__(struct cxgbi_ppm_pool)); in ppm_alloc_cpu_pool()
370 if (!pools) in ppm_alloc_cpu_pool()
374 struct cxgbi_ppm_pool *ppool = per_cpu_ptr(pools, cpu); in ppm_alloc_cpu_pool()
384 return pools; in ppm_alloc_cpu_pool()
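ppm_alloc_cpu_pool() above sizes each per-CPU pool as a fixed header followed by a run-time-sized bitmap (sizeof(*pools) plus the bitmap words) before calling __alloc_percpu(). A hypothetical single-CPU sketch of just that sizing step, with malloc in place of __alloc_percpu and illustrative type names:

#include <stdlib.h>
#include <string.h>

#define BITS_PER_LONG (8 * sizeof(unsigned long))

struct cpu_pool {
        unsigned int next;              /* next bit to try on allocation */
        unsigned int nr_bits;           /* valid bits in the bitmap below */
        unsigned long bmap[];           /* flexible-array bitmap follows the header */
};

static struct cpu_pool *cpu_pool_alloc(unsigned int nr_bits)
{
        size_t words = (nr_bits + BITS_PER_LONG - 1) / BITS_PER_LONG;
        size_t alloc_sz = sizeof(struct cpu_pool) + sizeof(unsigned long) * words;
        struct cpu_pool *pool = malloc(alloc_sz);

        if (!pool)
                return NULL;
        memset(pool, 0, alloc_sz);      /* start with an empty bitmap */
        pool->nr_bits = nr_bits;
        return pool;
}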
/linux/drivers/net/ethernet/freescale/dpaa2/
dpaa2-xsk.c
162 pools_params->pools[curr_bp].priority_mask |= (1 << j); in dpaa2_xsk_set_bp_per_qdbin()
163 if (!pools_params->pools[curr_bp].priority_mask) in dpaa2_xsk_set_bp_per_qdbin()
166 pools_params->pools[curr_bp].dpbp_id = priv->bp[i]->bpid; in dpaa2_xsk_set_bp_per_qdbin()
167 pools_params->pools[curr_bp].buffer_size = priv->rx_buf_size; in dpaa2_xsk_set_bp_per_qdbin()
168 pools_params->pools[curr_bp++].backup_pool = 0; in dpaa2_xsk_set_bp_per_qdbin()
/linux/drivers/soc/ti/
knav_qmss.h
203 struct list_head pools; member
304 struct list_head pools; member
363 list_for_each_entry(pool, &kdev->pools, list)
knav_qmss_queue.c
817 node = &region->pools; in knav_pool_create()
818 list_for_each_entry(iter, &region->pools, region_inst) { in knav_pool_create()
832 list_add_tail(&pool->list, &kdev->pools); in knav_pool_create()
1034 list_add(&pool->region_inst, &region->pools); in knav_queue_setup_region()
1119 INIT_LIST_HEAD(&region->pools); in knav_queue_setup_regions()
1361 list_for_each_entry_safe(pool, tmp, &region->pools, region_inst) in knav_queue_free_regions()
1808 INIT_LIST_HEAD(&kdev->pools); in knav_queue_probe()
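The knav_qmss hits above keep pools on intrusive linked lists: the device and each region own a list_head named pools, pools are added with list_add()/list_add_tail(), and the lists are walked with list_for_each_entry(). A bare-bones sketch of the same idea using a hand-rolled singly linked list and container_of; the types and helpers are illustrative, not the knav API.

#include <stddef.h>

#define container_of(ptr, type, member) \
        ((type *)((char *)(ptr) - offsetof(type, member)))

struct list_node {
        struct list_node *next;
};

struct toy_pool {
        const char *name;
        struct list_node list;          /* linkage on the device's pool list */
};

struct toy_device {
        struct list_node pools;         /* list head; .next is the first pool */
};

static void pool_list_add(struct toy_device *kdev, struct toy_pool *pool)
{
        pool->list.next = kdev->pools.next;     /* push onto the front of the list */
        kdev->pools.next = &pool->list;
}

static void pool_list_walk(struct toy_device *kdev, void (*fn)(struct toy_pool *))
{
        for (struct list_node *n = kdev->pools.next; n; n = n->next)
                fn(container_of(n, struct toy_pool, list));     /* node back to its pool */
}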
/linux/Documentation/devicetree/bindings/soc/ti/
keystone-navigator-qmss.txt
6 processors(PDSP), linking RAM, descriptor pools and infrastructure
49 - queue-pools : child node classifying the queue ranges into pools.
50 Queue ranges are grouped into 3 type of pools:
151 queue-pools {
/linux/Documentation/networking/
page_pool.rst
46 Information about page pools on the system can be accessed via the netdev
51 The number of pools created **must** match the number of hardware queues
106 with fragmented page pools.
/linux/arch/sparc/include/asm/
iommu-common.h
26 struct iommu_pool pools[IOMMU_NR_POOLS]; member
/linux/Documentation/networking/device_drivers/ethernet/freescale/dpaa2/
ethernet-driver.rst
26 - buffer pools
69 DPBPs represent hardware buffer pools. Packet I/O is performed in the context
124 The role of hardware buffer pools is storage of ingress frame data. Each network
overview.rst
25 The MC uses DPAA2 hardware resources such as queues, buffer pools, and
56 | -buffer pools -DPMCP |
360 - DPBPs for network buffer pools
/linux/Documentation/arch/arm/keystone/
knav-qmss.rst
12 processors(PDSP), linking RAM, descriptor pools and infrastructure
25 allocate descriptor pools, map the descriptors, push/pop to queues etc. For
/linux/kernel/dma/
swiotlb.c
89 .pools = LIST_HEAD_INIT(io_tlb_default_mem.pools),
308 list_add_rcu(&pool->node, &mem->pools); in add_mem_pool()
781 list_for_each_entry_rcu(pool, &mem->pools, node) { in __swiotlb_find_pool()
1149 list_for_each_entry_rcu(pool, &mem->pools, node) { in swiotlb_search_area()
1330 list_for_each_entry_rcu(pool, &mem->pools, node) in mem_used()
1839 INIT_LIST_HEAD_RCU(&mem->pools); in rmem_swiotlb_device_init()
Kconfig
99 pools as needed. To reduce run-time kernel memory requirements, you
118 This enables support for restricted DMA pools which provide a level of
/linux/drivers/net/ethernet/wangxun/libwx/
wx_hw.c
756 static int wx_set_rar(struct wx *wx, u32 index, u8 *addr, u64 pools, in wx_set_rar() argument
772 wr32(wx, WX_PSR_MAC_SWC_VM_L, pools & 0xFFFFFFFF); in wx_set_rar()
775 wr32(wx, WX_PSR_MAC_SWC_VM_H, pools >> 32); in wx_set_rar()
950 wx->mac_table[i].pools, in wx_sync_mac_table()
968 wx->mac_table[i].pools, in wx_full_sync_mac_table()
981 wx->mac_table[0].pools = BIT(VMDQ_P(0)); in wx_mac_set_default_filter()
984 wx->mac_table[0].pools, in wx_mac_set_default_filter()
1000 wx->mac_table[i].pools = 0; in wx_flush_sw_mac_table()
1016 if (wx->mac_table[i].pools != (1ULL << pool)) { in wx_add_mac_filter()
1018 wx->mac_table[i].pools |= (1ULL << pool); in wx_add_mac_filter()
[all …]
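wx_set_rar() above programs the 64-bit pools bitmap as two 32-bit register writes, the low word to WX_PSR_MAC_SWC_VM_L and the high word to WX_PSR_MAC_SWC_VM_H. A minimal sketch of that split; reg_write() and the register offsets are placeholders, not the libwx API.

#include <inttypes.h>
#include <stdint.h>
#include <stdio.h>

#define REG_VM_L 0x0                    /* placeholder for WX_PSR_MAC_SWC_VM_L */
#define REG_VM_H 0x4                    /* placeholder for WX_PSR_MAC_SWC_VM_H */

static void reg_write(uint32_t reg, uint32_t val)
{
        /* stand-in for an MMIO register write */
        printf("reg %#" PRIx32 " <- %#010" PRIx32 "\n", reg, val);
}

static void set_rar_pools(uint64_t pools)
{
        reg_write(REG_VM_L, (uint32_t)(pools & 0xFFFFFFFF));   /* low 32 bits of the bitmap */
        reg_write(REG_VM_H, (uint32_t)(pools >> 32));          /* high 32 bits of the bitmap */
}

int main(void)
{
        set_rar_pools(1ULL << 35 | 1ULL << 3);  /* pools 3 and 35 enabled */
        return 0;
}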
/linux/drivers/nvme/host/
pci.c
448 struct nvme_descriptor_pools *pools = &dev->descriptor_pools[numa_node]; in nvme_setup_descriptor_pools() local
451 if (pools->small) in nvme_setup_descriptor_pools()
452 return pools; /* already initialized */ in nvme_setup_descriptor_pools()
454 pools->large = dma_pool_create_node("nvme descriptor page", dev->dev, in nvme_setup_descriptor_pools()
456 if (!pools->large) in nvme_setup_descriptor_pools()
462 pools->small = dma_pool_create_node("nvme descriptor small", dev->dev, in nvme_setup_descriptor_pools()
464 if (!pools->small) { in nvme_setup_descriptor_pools()
465 dma_pool_destroy(pools->large); in nvme_setup_descriptor_pools()
466 pools->large = NULL; in nvme_setup_descriptor_pools()
470 return pools; in nvme_setup_descriptor_pools()
[all …]
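nvme_setup_descriptor_pools() above creates the per-node pool pair lazily and, if the small pool cannot be created, destroys the large one so the node is left uninitialized rather than half set up. A sketch of that create-or-roll-back shape, with malloc/free standing in for dma_pool_create_node()/dma_pool_destroy() and illustrative names:

#include <stdlib.h>

struct descriptor_pools {
        void *large;                    /* pool for full-size descriptor lists */
        void *small;                    /* pool for small descriptor lists */
};

static struct descriptor_pools *setup_descriptor_pools(struct descriptor_pools *pools)
{
        if (pools->small)
                return pools;                   /* already initialized */

        pools->large = malloc(4096);            /* stand-in for dma_pool_create_node() */
        if (!pools->large)
                return NULL;

        pools->small = malloc(256);
        if (!pools->small) {
                free(pools->large);             /* roll back so the node stays clean */
                pools->large = NULL;
                return NULL;
        }
        return pools;
}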
/linux/drivers/md/
dm-table.c
1058 struct dm_md_mempools *pools; in dm_table_alloc_md_mempools() local
1066 pools = kzalloc_node(sizeof(*pools), GFP_KERNEL, md->numa_node_id); in dm_table_alloc_md_mempools()
1067 if (!pools) in dm_table_alloc_md_mempools()
1091 if (bioset_init(&pools->io_bs, pool_size, io_front_pad, bioset_flags)) in dm_table_alloc_md_mempools()
1094 if (bioset_init(&pools->bs, pool_size, front_pad, 0)) in dm_table_alloc_md_mempools()
1097 t->mempools = pools; in dm_table_alloc_md_mempools()
1101 dm_free_md_mempools(pools); in dm_table_alloc_md_mempools()
/linux/arch/powerpc/kernel/
iommu.c
254 pool = &(tbl->pools[pool_nr]); in iommu_range_alloc()
282 pool = &(tbl->pools[0]); in iommu_range_alloc()
304 pool = &tbl->pools[pool_nr]; in iommu_range_alloc()
431 p = &tbl->pools[pool_nr]; in get_pool()
757 p = &tbl->pools[i]; in iommu_init_table()
/linux/arch/arm/boot/dts/ti/keystone/
keystone-k2g-netcp.dtsi
36 queue-pools {
/linux/drivers/soc/fsl/qbman/
qman_priv.h
177 u32 pools; member
qman_portal.c
249 pcfg->pools = qm_get_pools_sdqcr(); in qman_portal_probe()
/linux/include/linux/
swiotlb.h
118 struct list_head pools; member
/linux/Documentation/core-api/
mm-api.rst
88 Memory pools
