
Searched refs:nnodes (Results 1 – 14 of 14) sorted by relevance

/linux/fs/btrfs/
ulist.c
52 ulist->nnodes = 0; in ulist_init()
154 BUG_ON(ulist->nnodes == 0); in ulist_rbtree_erase()
155 ulist->nnodes--; in ulist_rbtree_erase()
234 ulist->nnodes++; in ulist_add_merge()
ulist.h
40 unsigned long nnodes; member
qgroup.c
2922 if (!roots || roots->nnodes == 0) in maybe_fs_roots()
2960 nr_new_roots = new_roots->nnodes; in btrfs_qgroup_account_extent()
2965 nr_old_roots = old_roots->nnodes; in btrfs_qgroup_account_extent()
backref.c
1919 const unsigned long prev_ref_count = ctx->refs.nnodes; in btrfs_is_data_extent_shared()
1966 if ((ctx->refs.nnodes - prev_ref_count) > 1) in btrfs_is_data_extent_shared()
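
Across the btrfs hits, nnodes is a simple element counter on the ulist: ulist_init() zeroes it, ulist_add_merge() bumps it only when a genuinely new node is inserted, ulist_rbtree_erase() drops it, and consumers read it back, either directly (qgroup's nr_new_roots/nr_old_roots) or differentially (backref compares it against a saved prev_ref_count to see how many references a lookup added). A minimal userspace sketch of that counting pattern, assuming a plain linked list in place of the kernel's rbtree-backed ulist (my_ulist and my_ulist_add are illustrative names, not the kernel API):

#include <stdio.h>
#include <stdlib.h>

/* Illustrative stand-in for the kernel's ulist: a singly linked list
 * of u64 values plus an nnodes counter kept in sync on every add. */
struct my_node { unsigned long long val; struct my_node *next; };
struct my_ulist { struct my_node *head; unsigned long nnodes; };

static void my_ulist_init(struct my_ulist *ul)
{
    ul->head = NULL;
    ul->nnodes = 0;            /* mirrors ulist_init(): count starts at 0 */
}

/* Returns 1 if a new entry was added, 0 if val was already present
 * (the "merge" case, where the count must not change), -1 on OOM. */
static int my_ulist_add(struct my_ulist *ul, unsigned long long val)
{
    struct my_node *n;

    for (n = ul->head; n; n = n->next)
        if (n->val == val)
            return 0;
    n = malloc(sizeof(*n));
    if (!n)
        return -1;
    n->val = val;
    n->next = ul->head;
    ul->head = n;
    ul->nnodes++;              /* mirrors ulist_add_merge() */
    return 1;
}

int main(void)
{
    struct my_ulist roots;

    my_ulist_init(&roots);
    my_ulist_add(&roots, 5);
    my_ulist_add(&roots, 7);
    my_ulist_add(&roots, 5);   /* duplicate: nnodes stays at 2 */
    /* qgroup.c-style consumer: nr_new_roots = roots.nnodes */
    printf("nnodes = %lu\n", roots.nnodes);
    return 0;
}
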
/linux/drivers/clk/zynqmp/
clkc.c
412 u32 *nnodes) in __zynqmp_clock_get_topology() argument
421 topology[*nnodes].type = type; in __zynqmp_clock_get_topology()
422 topology[*nnodes].flag = FIELD_GET(CLK_TOPOLOGY_FLAGS, in __zynqmp_clock_get_topology()
424 topology[*nnodes].type_flag = in __zynqmp_clock_get_topology()
427 topology[*nnodes].custom_type_flag = in __zynqmp_clock_get_topology()
430 (*nnodes)++; in __zynqmp_clock_get_topology()
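
In the ZynqMP clock driver, nnodes is a caller-owned output index: __zynqmp_clock_get_topology() writes each decoded entry at topology[*nnodes] and then increments the counter through the pointer, so the caller ends up with both the filled array and the number of entries in it. A small standalone sketch of that calling convention (the struct fields and bit layout below are illustrative, not the real CLK_TOPOLOGY_* encoding):

#include <stdio.h>

struct topo_node { unsigned int type; unsigned int flag; };

/* Append one decoded node at topology[*nnodes] and bump the shared counter,
 * so the caller can invoke this repeatedly for successive responses. */
static void get_topology(struct topo_node *topology, unsigned int raw,
                         unsigned int *nnodes)
{
    topology[*nnodes].type = raw & 0xf;         /* illustrative field decode */
    topology[*nnodes].flag = (raw >> 4) & 0xff;
    (*nnodes)++;
}

int main(void)
{
    struct topo_node topo[8];
    unsigned int nnodes = 0;

    get_topology(topo, 0x23, &nnodes);
    get_topology(topo, 0x41, &nnodes);
    printf("decoded %u nodes, first type %u\n", nnodes, topo[0].type);
    return 0;
}
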
/linux/drivers/vfio/pci/pds/
dirty.h
38 struct rb_root_cached *ranges, u32 nnodes,
/linux/drivers/vfio/
vfio_main.c
1059 u32 nnodes; in vfio_ioctl_device_feature_logging_start() local
1074 nnodes = control.num_ranges; in vfio_ioctl_device_feature_logging_start()
1075 if (!nnodes) in vfio_ioctl_device_feature_logging_start()
1078 if (nnodes > LOG_MAX_RANGES) in vfio_ioctl_device_feature_logging_start()
1082 nodes = kmalloc_array(nnodes, sizeof(struct interval_tree_node), in vfio_ioctl_device_feature_logging_start()
1087 for (i = 0; i < nnodes; i++) { in vfio_ioctl_device_feature_logging_start()
1115 ret = device->log_ops->log_start(device, &root, nnodes, in vfio_ioctl_device_feature_logging_start()
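
In vfio_ioctl_device_feature_logging_start(), nnodes is an untrusted count copied from userspace (control.num_ranges): it is rejected when zero, bounded by LOG_MAX_RANGES, used to size a kmalloc_array() of interval-tree nodes, and finally passed to the driver's log_start op together with the populated tree. A hedged userspace sketch of that validate-then-allocate shape (MAX_RANGES and struct range are stand-ins, not the vfio uAPI):

#include <errno.h>
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

#define MAX_RANGES 256          /* stand-in for LOG_MAX_RANGES */

struct range { uint64_t iova, length; };

/* Validate a user-supplied count before using it as an allocation size,
 * mirroring the !nnodes and nnodes > LOG_MAX_RANGES checks in vfio_main.c. */
static int logging_start(const struct range *user_ranges, uint32_t nnodes)
{
    struct range *nodes;
    uint32_t i;

    if (!nnodes)
        return -EINVAL;
    if (nnodes > MAX_RANGES)
        return -E2BIG;

    nodes = calloc(nnodes, sizeof(*nodes));   /* kmalloc_array() analogue */
    if (!nodes)
        return -ENOMEM;

    for (i = 0; i < nnodes; i++)
        nodes[i] = user_ranges[i];            /* build the tracking set */

    printf("tracking %u ranges\n", nnodes);   /* driver log_start() would go here */
    free(nodes);
    return 0;
}

int main(void)
{
    struct range r[2] = { { 0x1000, 0x2000 }, { 0x8000, 0x1000 } };

    return logging_start(r, 2) ? 1 : 0;
}
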
/linux/kernel/time/
timer_migration.c
1794 unsigned int nnodes = num_possible_nodes(); in tmigr_init() local
1814 cpus_per_node = DIV_ROUND_UP(ncpus, nnodes); in tmigr_init()
1821 nodelvl = DIV_ROUND_UP(order_base_2(nnodes), in tmigr_init()
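
tmigr_init() sizes the timer-migration hierarchy from the NUMA node count: cpus_per_node is the rounded-up ratio ncpus / nnodes, and the node grouping level is derived from order_base_2(nnodes) via another rounded-up division (the divisor is cut off in the listing; the sketch below assumes it is the log of a per-level fan-out, with 8 as an illustrative value, not necessarily what tmigr uses):

#include <stdio.h>

/* Userspace stand-ins for the kernel helpers used in tmigr_init(). */
#define DIV_ROUND_UP(n, d)  (((n) + (d) - 1) / (d))

static unsigned int order_base_2(unsigned int n)   /* ceil(log2(n)) */
{
    unsigned int order = 0;

    while ((1u << order) < n)
        order++;
    return order;
}

int main(void)
{
    unsigned int ncpus = 96, nnodes = 4;           /* example topology */
    unsigned int children_per_group = 8;           /* assumed fan-out */

    unsigned int cpus_per_node = DIV_ROUND_UP(ncpus, nnodes);
    unsigned int nodelvl = DIV_ROUND_UP(order_base_2(nnodes),
                                        order_base_2(children_per_group));

    printf("cpus_per_node=%u nodelvl=%u\n", cpus_per_node, nodelvl);
    return 0;
}
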
/linux/drivers/vfio/pci/mlx5/
cmd.h
236 struct rb_root_cached *ranges, u32 nnodes, u64 *page_size);
cmd.c
855 struct rb_root_cached *ranges, u32 nnodes) in mlx5vf_create_tracker() argument
864 u32 num_ranges = nnodes; in mlx5vf_create_tracker()
875 vfio_combine_iova_ranges(ranges, nnodes, max_num_range); in mlx5vf_create_tracker()
1510 struct rb_root_cached *ranges, u32 nnodes, in mlx5vf_start_page_tracker() argument
1599 err = mlx5vf_create_tracker(mdev, mvdev, ranges, nnodes); in mlx5vf_start_page_tracker()
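
For the mlx5 page tracker, nnodes is the number of IOVA ranges in the caller's interval tree; when it exceeds what the device can track, vfio_combine_iova_ranges() is called to reduce the set to at most max_num_range entries before the tracker is created. The sketch below shows one plausible way to do such a reduction on a sorted array, merging the pair of ranges with the smallest gap until the limit is met; it is not the actual vfio helper, just the shape of the idea:

#include <stdint.h>
#include <stdio.h>

struct iova_range { uint64_t start, end; };

/* Repeatedly merge the pair of adjacent ranges with the smallest gap
 * until at most max_ranges remain (sorted, non-overlapping input). */
static uint32_t combine_ranges(struct iova_range *r, uint32_t nnodes,
                               uint32_t max_ranges)
{
    while (nnodes > max_ranges) {
        uint32_t i, best = 0;
        uint64_t best_gap = UINT64_MAX;

        for (i = 0; i + 1 < nnodes; i++) {
            uint64_t gap = r[i + 1].start - r[i].end;

            if (gap < best_gap) {
                best_gap = gap;
                best = i;
            }
        }
        r[best].end = r[best + 1].end;            /* swallow the neighbour */
        for (i = best + 1; i + 1 < nnodes; i++)
            r[i] = r[i + 1];
        nnodes--;
    }
    return nnodes;
}

int main(void)
{
    struct iova_range r[4] = {
        { 0x0000, 0x0fff }, { 0x2000, 0x2fff },
        { 0x3000, 0x3fff }, { 0x9000, 0x9fff },
    };
    uint32_t n = combine_ranges(r, 4, 2);

    printf("combined down to %u ranges\n", n);
    return 0;
}
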
/linux/mm/
mempolicy.c
2031 unsigned int target, nnodes; in interleave_nid() local
2035 nnodes = read_once_policy_nodemask(pol, &nodemask); in interleave_nid()
2036 if (!nnodes) in interleave_nid()
2038 target = ilx % nnodes; in interleave_nid()
2424 int nnodes, node; in alloc_pages_bulk_weighted_interleave() local
2436 nnodes = read_once_policy_nodemask(pol, &nodes); in alloc_pages_bulk_weighted_interleave()
2440 if (!nnodes) in alloc_pages_bulk_weighted_interleave()
2493 for (i = 0; i < nnodes; i++) { in alloc_pages_bulk_weighted_interleave()
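
In mempolicy, nnodes is the weight of the policy's nodemask taken in one snapshot (read_once_policy_nodemask()); interleave_nid() then selects the (ilx % nnodes)-th set node so successive interleave indices cycle over the allowed NUMA nodes, and the bulk weighted-interleave path iterates over all nnodes nodes. A self-contained sketch of that modulo selection over a bitmask (the 64-bit mask and helper names are simplifications of the kernel's nodemask_t API):

#include <stdio.h>

/* Number of set bits = number of allowed nodes (the "nnodes" snapshot). */
static unsigned int mask_weight(unsigned long long mask)
{
    unsigned int w = 0;

    while (mask) {
        w += mask & 1;
        mask >>= 1;
    }
    return w;
}

/* Return the id of the target-th set bit, i.e. the node chosen for this
 * interleave index; mirrors target = ilx % nnodes in interleave_nid(). */
static int interleave_node(unsigned long long nodemask, unsigned long ilx)
{
    unsigned int nnodes = mask_weight(nodemask);
    unsigned int target, seen = 0;
    int node;

    if (!nnodes)
        return -1;                  /* kernel falls back to a default node */
    target = ilx % nnodes;

    for (node = 0; node < 64; node++) {
        if (!(nodemask & (1ull << node)))
            continue;
        if (seen++ == target)
            return node;
    }
    return -1;
}

int main(void)
{
    unsigned long long nodemask = 0xb;      /* nodes 0, 1 and 3 allowed */

    for (unsigned long ilx = 0; ilx < 6; ilx++)
        printf("ilx=%lu -> node %d\n", ilx, interleave_node(nodemask, ilx));
    return 0;
}
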
/linux/include/linux/
vfio.h
229 struct rb_root_cached *ranges, u32 nnodes, u64 *page_size);
/linux/Documentation/bpf/
map_lru_hash_update.dot
44 label="Able to free\nLOCAL_FREE_TARGET\nnodes?"
/linux/samples/vfio-mdev/
mtty.c
1289 u32 nnodes, u64 *page_size) in mtty_log_start() argument