| /linux/fs/tests/ |
| exec_kunit.c |
      12  { { .p = ULONG_MAX, .rlim_stack.rlim_cur = ULONG_MAX,
      14  { { .p = ULONG_MAX, .rlim_stack.rlim_cur = ULONG_MAX,
      16  { { .p = ULONG_MAX, .rlim_stack.rlim_cur = ULONG_MAX,
      19  { { .p = ULONG_MAX, .rlim_stack.rlim_cur = ULONG_MAX,
      21  { { .p = ULONG_MAX, .rlim_stack.rlim_cur = ULONG_MAX,
      23  { { .p = ULONG_MAX, .rlim_stack.rlim_cur = ULONG_MAX,
      25  { { .p = ULONG_MAX, .rlim_stack.rlim_cur = ULONG_MAX,
      32  { { .p = ULONG_MAX, .rlim_stack.rlim_cur = ULONG_MAX,
      36  { { .p = sizeof(void *), .rlim_stack.rlim_cur = ULONG_MAX,
      43  { { .p = ULONG_MAX, .rlim_stack.rlim_cur = 0,
      [all …]
|
| /linux/lib/ |
| test_maple_tree.c |
      404  top = ULONG_MAX;  in check_find()
      442  mas_for_each(&mas, entry, ULONG_MAX) {  in check_find()
      458  mas_for_each(&mas, entry, ULONG_MAX) {  in check_find()
      489  MT_BUG_ON(mt, mtree_insert_index(mt, ULONG_MAX, GFP_KERNEL));  in check_find()
      490  mt_for_each(mt, entry, index, ULONG_MAX) {  in check_find()
      498  val = ULONG_MAX;  in check_find()
      510  mtree_erase_index(mt, ULONG_MAX);  in check_find()
      525  MT_BUG_ON(mt, mtree_insert_index(mt, ULONG_MAX, GFP_KERNEL));  in check_find()
      530  mas_for_each(&mas, entry, ULONG_MAX) {  in check_find()
      540  val = ULONG_MAX;  in check_find()
      [all …]
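The lib/test_maple_tree.c hits above exercise the maple tree's full-range iteration, where ULONG_MAX is passed as the upper bound so the walk covers the whole index space. A minimal sketch of that idiom follows; the names example_mt and walk_all_entries are illustrative, not taken from the test:

#include <linux/maple_tree.h>
#include <linux/printk.h>
#include <linux/rcupdate.h>

static DEFINE_MTREE(example_mt);

static void walk_all_entries(void)
{
	MA_STATE(mas, &example_mt, 0, ULONG_MAX);
	void *entry;

	rcu_read_lock();
	mas_for_each(&mas, entry, ULONG_MAX) {
		/* each entry spans the range mas.index..mas.last */
		pr_info("entry %px at [%lx, %lx]\n", entry, mas.index, mas.last);
	}
	rcu_read_unlock();
}

mt_for_each(), seen at line 490 of the test, is the wrapper form of the same walk that does not require setting up an explicit ma_state.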
|
| test_xarray.c |
      111  XA_BUG_ON(xa, xas_find(&xas, ULONG_MAX) != xa_mk_value(0));  in check_xas_retry()
      118  XA_BUG_ON(xa, xas_next_entry(&xas, ULONG_MAX) != xa_mk_value(0));  in check_xas_retry()
      127  XA_BUG_ON(xa, xas_next_entry(&xas, ULONG_MAX) != xa_mk_value(0));  in check_xas_retry()
      138  xas_for_each(&xas, entry, ULONG_MAX) {  in check_xas_retry()
      230  xas_for_each(&xas, entry, ULONG_MAX)  in check_xa_mark_1()
      239  xas_for_each_marked(&xas, entry, ULONG_MAX, XA_MARK_0)  in check_xa_mark_1()
      276  xas_for_each_marked(&xas, entry, ULONG_MAX, XA_MARK_0)  in check_xa_mark_2()
      282  xas_for_each(&xas, entry, ULONG_MAX) {  in check_xa_mark_2()
      303  xas_for_each_marked(&xas, entry, ULONG_MAX, XA_MARK_0) {  in check_xa_mark_3()
      361  XA_BUG_ON(xa, xa_store_index(xa, ULONG_MAX, GFP_KERNEL) !=  in check_xa_shrink()
      [all …]
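The test_xarray.c hits follow the same pattern for the XArray: xas_for_each() and xas_for_each_marked() take ULONG_MAX as their maximum index to traverse every present entry. A rough sketch of the untagged walk, with illustrative names (example_xa, walk_xarray):

#include <linux/printk.h>
#include <linux/rcupdate.h>
#include <linux/xarray.h>

static DEFINE_XARRAY(example_xa);

static void walk_xarray(void)
{
	XA_STATE(xas, &example_xa, 0);
	void *entry;

	rcu_read_lock();
	xas_for_each(&xas, entry, ULONG_MAX) {
		/* a retry entry means the walk must restart from this index */
		if (xas_retry(&xas, entry))
			continue;
		pr_info("index %lu -> %px\n", xas.xa_index, entry);
	}
	rcu_read_unlock();
}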
|
| /linux/drivers/iommu/iommufd/ |
| ioas.c |
       88  ULONG_MAX) {  in iommufd_ioas_iova_ranges()
      178  while ((node = interval_tree_iter_first(&allowed_iova, 0, ULONG_MAX))) {  in iommufd_ioas_allow_iovas()
      217  if (cmd->iova >= ULONG_MAX || cmd->length >= ULONG_MAX)  in iommufd_ioas_map_file()
      257  if (cmd->iova >= ULONG_MAX || cmd->length >= ULONG_MAX)  in iommufd_ioas_map()
      300  if (cmd->length >= ULONG_MAX || cmd->src_iova >= ULONG_MAX ||  in iommufd_ioas_copy()
      301  cmd->dst_iova >= ULONG_MAX)  in iommufd_ioas_copy()
      356  if (cmd->iova >= ULONG_MAX || cmd->length >= ULONG_MAX) {  in iommufd_ioas_unmap()
      513  for (_area = iopt_area_iter_first(&_ioas->iopt, 0, ULONG_MAX); \
      515  _area = iopt_area_iter_next(_area, 0, ULONG_MAX))
|
| io_pagetable.c |
      132  if (length == 0 || length >= ULONG_MAX / 2)  in iopt_alloc_iova()
      157  PAGE_SIZE, ULONG_MAX - PAGE_SIZE) {  in iopt_alloc_iova()
      160  allowed_span.last_used = ULONG_MAX - PAGE_SIZE;  in iopt_alloc_iova()
      611  if (bitmap->iova > ULONG_MAX || last_iova > ULONG_MAX)  in iommufd_check_iova_range()
      659  for (area = iopt_area_iter_first(iopt, 0, ULONG_MAX); area;  in iopt_clear_dirty_data()
      660  area = iopt_area_iter_next(area, 0, ULONG_MAX)) {  in iopt_clear_dirty_data()
      855  return iopt_unmap_iova_range(iopt, 0, ULONG_MAX, unmapped);  in iopt_unmap_all()
      867  for (allowed = iopt_allowed_iter_first(iopt, 0, ULONG_MAX); allowed;  in iopt_set_allow_iova()
      868  allowed = iopt_allowed_iter_next(allowed, 0, ULONG_MAX)) {  in iopt_set_allow_iova()
      907  for (reserved = iopt_reserved_iter_first(iopt, 0, ULONG_MAX); reserved;  in __iopt_remove_reserved_iova()
      [all …]
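In the ioas.c and io_pagetable.c hits, ULONG_MAX also serves as a validity limit: 64-bit IOVA and length values coming from userspace are rejected if they do not fit, or would wrap, the kernel's unsigned long index space. A hedged sketch of that kind of check, not the actual iommufd helper (validate_iova_range and its exact error handling are illustrative):

#include <linux/errno.h>
#include <linux/limits.h>
#include <linux/overflow.h>
#include <linux/types.h>

static int validate_iova_range(u64 iova, u64 length, unsigned long *last)
{
	unsigned long end;

	/* values that cannot be represented as unsigned long are rejected */
	if (iova >= ULONG_MAX || length >= ULONG_MAX || length == 0)
		return -EOVERFLOW;

	/* last byte of the range is iova + length - 1; refuse wraparound */
	if (check_add_overflow((unsigned long)iova,
			       (unsigned long)(length - 1), &end))
		return -EOVERFLOW;

	*last = end;
	return 0;
}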
|
| /linux/arch/riscv/kernel/ |
| kexec_elf.c |
       71  unsigned long lowest_paddr = ULONG_MAX;  in elf_find_pbase()
       72  unsigned long lowest_vaddr = ULONG_MAX;  in elf_find_pbase()
       88  kbuf.buf_max = ULONG_MAX;  in elf_find_pbase()
      115  unsigned long old_kernel_pbase = ULONG_MAX;  in elf_kexec_load()
|
| /linux/mm/ |
| debug_vm_pgtable.c |
      130  page = (args->pte_pfn != ULONG_MAX) ? pfn_to_page(args->pte_pfn) : NULL;  in pte_advanced_tests()
      228  page = (args->pmd_pfn != ULONG_MAX) ? pfn_to_page(args->pmd_pfn) : NULL;  in pmd_advanced_tests()
      350  page = (args->pud_pfn != ULONG_MAX) ? pfn_to_page(args->pud_pfn) : NULL;  in pud_advanced_tests()
      611  page = (args->pte_pfn != ULONG_MAX) ? pfn_to_page(args->pte_pfn) : NULL;  in pte_clear_tests()
      859  page = (args->pte_pfn != ULONG_MAX) ? pfn_to_page(args->pte_pfn) : NULL;  in swap_migration_tests()
      981  args->pud_pfn != ULONG_MAX) {  in destroy_args()
      990  args->pud_pfn = ULONG_MAX;  in destroy_args()
      991  args->pmd_pfn = ULONG_MAX;  in destroy_args()
      992  args->pte_pfn = ULONG_MAX;  in destroy_args()
      997  args->pmd_pfn != ULONG_MAX) {  in destroy_args()
      [all …]
|
| execmem.c |
      162  MA_STATE(mas, free_areas, 0, ULONG_MAX);  in execmem_cache_clean()
      166  mas_for_each(&mas, area, ULONG_MAX) {  in execmem_cache_clean()
      198  area = mas_next(&mas, ULONG_MAX);  in execmem_cache_add_locked()
      232  MA_STATE(mas_free, free_areas, 0, ULONG_MAX);  in __execmem_cache_alloc()
      233  MA_STATE(mas_busy, busy_areas, 0, ULONG_MAX);  in __execmem_cache_alloc()
      240  mas_for_each(&mas_free, area, ULONG_MAX) {  in __execmem_cache_alloc()
      380  MA_STATE(mas, busy_areas, 0, ULONG_MAX);  in execmem_cache_free_slow()
      388  mas_for_each(&mas, area, ULONG_MAX) {  in execmem_cache_free_slow()
|
| /linux/drivers/clocksource/ |
| timer-riscv.c |
       38  csr_write(CSR_STIMECMP, ULONG_MAX);  in riscv_clock_event_stop()
       40  csr_write(CSR_STIMECMPH, ULONG_MAX);  in riscv_clock_event_stop()
       53  csr_write(CSR_STIMECMP, ULONG_MAX);  in riscv_clock_next_event()
      121  clockevents_config_and_register(ce, riscv_timebase, 100, ULONG_MAX);  in riscv_timer_starting_cpu()
|
| timer-gx6605s.c |
       61  writel_relaxed(ULONG_MAX - delta, base + TIMER_INI);  in gx6605s_timer_set_next_event()
      109  ULONG_MAX);  in gx6605s_clkevt_init()
|
| /linux/Documentation/translations/zh_CN/admin-guide/ |
| cpu-load.rst |
       78  hog (ULONG_MAX);
       79  for (i = 0; i < HIST; ++i) v[i] = ULONG_MAX - hog (ULONG_MAX);
|
| /linux/include/vdso/ |
| limits.h |
       13  #define ULONG_MAX (~0UL)  macro
       17  #define UINTPTR_MAX ULONG_MAX
|
| /linux/Documentation/translations/zh_TW/admin-guide/ |
| cpu-load.rst |
       84  hog (ULONG_MAX);
       85  for (i = 0; i < HIST; ++i) v[i] = ULONG_MAX - hog (ULONG_MAX);
|
| /linux/security/integrity/ima/ |
| ima_kexec.c |
      139  .buf_min = 0, .buf_max = ULONG_MAX,  in ima_add_kexec_buffer()
      162  if (binary_runtime_size >= ULONG_MAX - PAGE_SIZE)  in ima_add_kexec_buffer()
      163  kexec_segment_size = ULONG_MAX;  in ima_add_kexec_buffer()
      167  if ((kexec_segment_size == ULONG_MAX) ||  in ima_add_kexec_buffer()
|
| /linux/include/uapi/linux/ |
| shm.h |
       23  #define SHMMAX (ULONG_MAX - (1UL << 24)) /* max shared seg size (bytes) */
       24  #define SHMALL (ULONG_MAX - (1UL << 24)) /* max shm system wide (pages) */
|
| /linux/arch/arm/mach-omap2/ |
| pm.c |
       53  .startup_time = ULONG_MAX,
       54  .shutdown_time = ULONG_MAX,
|
| /linux/tools/include/nolibc/ |
| stdint.h |
      117  #ifndef ULONG_MAX
      118  #define ULONG_MAX ((unsigned long)(__LONG_MAX__) * 2 + 1)  macro
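The two definition sites above spell the same value differently: vdso/limits.h uses ~0UL, while nolibc derives it from the compiler's __LONG_MAX__. Because unsigned arithmetic is defined modulo 2^N, 2 * LONG_MAX + 1 cannot overflow and is exactly the all-ones unsigned long. A small userspace check of that equivalence, assuming a GCC/Clang-style compiler that predefines __LONG_MAX__:

#include <limits.h>

_Static_assert((unsigned long)__LONG_MAX__ * 2 + 1 == ~0UL,
	       "2 * LONG_MAX + 1 is the all-ones unsigned long");
_Static_assert(ULONG_MAX == ~0UL, "ULONG_MAX is all ones");

int main(void)
{
	return 0;
}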
|
| /linux/tools/testing/selftests/arm64/fp/ |
| vlset.c |
       74  vl = ULONG_MAX;  in parse_options()
       77  if (vl == ULONG_MAX && errno) {  in parse_options()
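The vlset.c hits are the classic strtoul() error-handling idiom: strtoul() returns ULONG_MAX and sets errno to ERANGE on overflow, and since ULONG_MAX is also a legal result, errno must be cleared before the call. A self-contained userspace sketch of that idiom (parse_ulong is an illustrative helper, not the selftest's code):

#include <errno.h>
#include <limits.h>
#include <stdio.h>
#include <stdlib.h>

static int parse_ulong(const char *s, unsigned long *out)
{
	char *end;

	errno = 0;
	*out = strtoul(s, &end, 0);
	/* overflow, no digits, or trailing junk are all rejected */
	if ((*out == ULONG_MAX && errno == ERANGE) || end == s || *end != '\0')
		return -1;
	return 0;
}

int main(int argc, char **argv)
{
	unsigned long vl;

	if (argc < 2 || parse_ulong(argv[1], &vl)) {
		fprintf(stderr, "usage: %s <vector-length>\n", argv[0]);
		return 1;
	}
	printf("parsed %lu\n", vl);
	return 0;
}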
|
| /linux/arch/x86/mm/pat/ |
| memtype_interval.c |
      135  entry_match = interval_iter_first(&memtype_rbroot, 0, ULONG_MAX);  in memtype_copy_nth_element()
      138  entry_match = interval_iter_next(entry_match, 0, ULONG_MAX);  in memtype_copy_nth_element()
|
| /linux/Documentation/admin-guide/ |
| cpu-load.rst |
       90  hog (ULONG_MAX);
       91  for (i = 0; i < HIST; ++i) v[i] = ULONG_MAX - hog(ULONG_MAX);
|
| /linux/tools/testing/radix-tree/ |
| iteration_check.c |
       77  xas_for_each_marked(&xas, entry, ULONG_MAX, TAG) {  in tagged_iteration_fn()
      110  xas_for_each(&xas, entry, ULONG_MAX) {  in untagged_iteration_fn()
|
| /linux/drivers/vfio/ |
| vfio_main.c |
       998  comb_start = interval_tree_iter_first(root, 0, ULONG_MAX);  in vfio_combine_iova_ranges()
      1008  curr = interval_tree_iter_next(curr, 0, ULONG_MAX);  in vfio_combine_iova_ranges()
      1019  min_gap = ULONG_MAX;  in vfio_combine_iova_ranges()
      1020  curr = interval_tree_iter_first(root, 0, ULONG_MAX);  in vfio_combine_iova_ranges()
      1031  curr = interval_tree_iter_next(curr, 0, ULONG_MAX);  in vfio_combine_iova_ranges()
      1035  if (WARN_ON_ONCE(min_gap == ULONG_MAX))  in vfio_combine_iova_ranges()
      1103  iova_end > ULONG_MAX) {  in vfio_ioctl_device_feature_logging_start()
      1190  iova_end > ULONG_MAX)  in vfio_ioctl_device_feature_logging_report()
      1675  if (iova > ULONG_MAX)  in vfio_pin_pages()
      1714  if (WARN_ON(iova > ULONG_MAX))  in vfio_unpin_pages()
      [all …]
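vfio_combine_iova_ranges(), like the iommufd and x86 PAT hits earlier, walks an interval tree over the full [0, ULONG_MAX] range to visit every node. A minimal sketch of that traversal pattern (walk_all_ranges is an illustrative name):

#include <linux/interval_tree.h>
#include <linux/limits.h>
#include <linux/printk.h>

static void walk_all_ranges(struct rb_root_cached *root)
{
	struct interval_tree_node *node;

	/* querying [0, ULONG_MAX] matches every interval in the tree */
	for (node = interval_tree_iter_first(root, 0, ULONG_MAX); node;
	     node = interval_tree_iter_next(node, 0, ULONG_MAX))
		pr_info("range [%lx, %lx]\n", node->start, node->last);
}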
|
| /linux/arch/powerpc/kernel/ |
| setup_64.c |
      700  return ULONG_MAX;  in ppc64_bolted_size()
      748  sp = alloc_stack(ULONG_MAX, i);  in exc_lvl_early_init()
      752  sp = alloc_stack(ULONG_MAX, i);  in exc_lvl_early_init()
      756  sp = alloc_stack(ULONG_MAX, i);  in exc_lvl_early_init()
|
| /linux/arch/loongarch/include/asm/ |
| efi.h |
       25  return ULONG_MAX;  in efi_get_max_initrd_addr()
|
| /linux/arch/arm64/kernel/ |
| machine_kexec_file.c |
      123  kbuf.buf_max = ULONG_MAX;  in load_other_segments()
      179  kbuf.buf_max = ULONG_MAX;  in load_other_segments()
|