/linux/net/netfilter/

nft_last.c
    17  struct nft_last *last;    member
    29  struct nft_last *last;    in nft_last_init() local
    33  last = kzalloc(sizeof(*last), GFP_KERNEL_ACCOUNT);    in nft_last_init()
    34  if (!last)    in nft_last_init()
    38  last->set = ntohl(nla_get_be32(tb[NFTA_LAST_SET]));    in nft_last_init()
    40  if (last->set && tb[NFTA_LAST_MSECS]) {    in nft_last_init()
    45  last->jiffies = jiffies - (unsigned long)last_jiffies;    in nft_last_init()
    47  priv->last = last;    in nft_last_init()
    51  kfree(last);    in nft_last_init()
    60  struct nft_last *last = priv->last;    in nft_last_eval() local
    [all …]

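The nft_last_init() hits suggest the expression keeps a "last seen" value in jiffies, and line 45 rebuilds it by subtracting a converted "milliseconds ago" offset (from NFTA_LAST_MSECS, line 40) from the current jiffies counter. A minimal userspace sketch of the same idea, assuming CLOCK_MONOTONIC and invented names; this is not the kernel implementation:

    /*
     * Reconstruct an absolute "last seen" monotonic timestamp from a
     * "this many milliseconds ago" value, as a userspace analogue of the
     * jiffies arithmetic at line 45.  All names here are illustrative.
     */
    #include <stdint.h>
    #include <stdio.h>
    #include <time.h>

    static uint64_t now_ms(void)
    {
        struct timespec ts;

        clock_gettime(CLOCK_MONOTONIC, &ts);
        return (uint64_t)ts.tv_sec * 1000 + ts.tv_nsec / 1000000;
    }

    int main(void)
    {
        uint64_t msecs_ago = 2500;                  /* e.g. decoded from an attribute */
        uint64_t last_seen = now_ms() - msecs_ago;  /* same shape as jiffies - last_jiffies */

        printf("last seen at %llu ms (monotonic)\n", (unsigned long long)last_seen);
        return 0;
    }
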
nft_set_pipapo_avx2.c
    142  * @last: Return index of first set bit, if this is the last field
    150  * Return: first set bit index if @last, index of first filled word otherwise.
    154  union nft_pipapo_map_bucket *mt, bool last)    in nft_pipapo_avx2_refill() argument
    164  if (last) \    in nft_pipapo_avx2_refill()
    193  * @last: Last field: stop at the first match and return bit index
    211  * Return: -1 on no match, rule index of match if @last, otherwise first long
    217  bool first, bool last)    in nft_pipapo_avx2_lookup_4b_2()    argument
    243  b = nft_pipapo_avx2_refill(i_ul, &map[i_ul], fill, f->mt, last);    in nft_pipapo_avx2_lookup_4b_2()
    244  if (last)    in nft_pipapo_avx2_lookup_4b_2()
    268  * @last: Last field: stop at the first match and return bit index
    [all …]

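The doc comments (lines 150 and 211) describe the @last convention: on the last field the scan stops at the first matching rule and returns its bit index; on earlier fields it returns the first non-empty word of the result bitmap so the next field can continue. A hedged sketch of just that decision over a plain bitmap, with names of my own; the real pipapo code works on per-field match buckets, not a flat bitmap:

    /* Illustrative sketch of the @last return convention described above. */
    #include <limits.h>
    #include <stddef.h>

    #define LONG_BITS (sizeof(unsigned long) * CHAR_BIT)

    static long scan_result(const unsigned long *map, size_t nwords, int last)
    {
        for (size_t i = 0; i < nwords; i++) {
            if (!map[i])
                continue;
            if (last)       /* last field: bit index of the first match */
                return (long)(i * LONG_BITS + __builtin_ctzl(map[i]));
            return (long)i; /* intermediate field: index of first filled word */
        }
        return -1;          /* no match */
    }
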
/linux/arch/xtensa/kernel/

vmlinux.lds.S
    233  #undef LAST
    234  #define LAST .dummy    macro
    243  LAST)
    244  #undef LAST
    245  #define LAST .WindowVectors.text    macro
    250  LAST)
    251  #undef LAST
    252  #define LAST .DebugInterruptVector.text    macro
    257  LAST)
    258  # undef LAST
    [all …]

/linux/drivers/vhost/

iotlb.c
    17  #define LAST(map) ((map)->last)    macro
    21  START, LAST, static inline, vhost_iotlb_itree);
    42  * @last: last of IOVA range
    47  * Returns an error last is smaller than start or memory allocation
    51  u64 start, u64 last,    in vhost_iotlb_add_range_ctx() argument
    57  if (last < start)    in vhost_iotlb_add_range_ctx()
    63  if (start == 0 && last == ULONG_MAX) {    in vhost_iotlb_add_range_ctx()
    64  u64 mid = last / 2;    in vhost_iotlb_add_range_ctx()
    87  map->size = last - start + 1;    in vhost_iotlb_add_range_ctx()
    88  map->last = last;    in vhost_iotlb_add_range_ctx()
    [all …]

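The vhost_iotlb_add_range_ctx() hits show why an inclusive last address needs care: last must not be smaller than start (line 57), the size is last - start + 1 (line 87), and that sum wraps to 0 for the full [0, ULONG_MAX] range, which appears to be why that one case is split at the midpoint (lines 63-64). A hedged sketch of the same bookkeeping; struct iotlb_range and add_range() are invented names, only the checks and the size formula come from the excerpt:

    #include <stdint.h>
    #include <errno.h>

    struct iotlb_range {
        uint64_t start;
        uint64_t last;    /* inclusive */
        uint64_t size;
    };

    static int add_range(struct iotlb_range *r, uint64_t start, uint64_t last)
    {
        if (last < start)
            return -EFAULT;

        /*
         * [0, UINT64_MAX] would make last - start + 1 wrap to 0; the excerpt
         * shows the kernel handling that single case by splitting it around
         * mid = last / 2 instead.
         */
        if (start == 0 && last == UINT64_MAX)
            return -EINVAL;   /* caller should add [0, mid] and [mid + 1, last] */

        r->start = start;
        r->last = last;
        r->size = last - start + 1;
        return 0;
    }
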
/linux/drivers/infiniband/hw/usnic/

usnic_uiom_interval_tree.c
    43   #define LAST(node) ((node)->last)    macro
    70   usnic_uiom_interval_node_alloc(long int start, long int last, int ref_cnt,    in usnic_uiom_interval_node_alloc() argument
    79   interval->last = last;    in usnic_uiom_interval_node_alloc()
    105  unsigned long start, unsigned long last,    in find_intervals_intersection_sorted() argument
    112  for (node = usnic_uiom_interval_tree_iter_first(root, start, last);    in find_intervals_intersection_sorted()
    114  node = usnic_uiom_interval_tree_iter_next(node, start, last))    in find_intervals_intersection_sorted()
    120  int usnic_uiom_get_intervals_diff(unsigned long start, unsigned long last,    in usnic_uiom_get_intervals_diff()
    132  find_intervals_intersection_sorted(root, start, last,    in usnic_uiom_get_intervals_diff()
    148  if (pivot > interval->last) {    in usnic_uiom_get_intervals_diff()
    150  } else if (pivot <= interval->last &&    in usnic_uiom_get_intervals_diff()
    [all …]

usnic_uiom_interval_tree.h
    43  unsigned long last;    member
    58  unsigned long last);
    61  unsigned long start, unsigned long last);
    63  * Inserts {start...last} into {root}. If there are overlaps,
    67  unsigned long start, unsigned long last,
    70  * Removed {start...last} from {root}. The nodes removed are returned in
    75  unsigned long start, unsigned long last,
    78  * Returns {start...last} - {root} (relative complement of {start...last} in
    82  unsigned long last, int flags,

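The comment at line 78 describes usnic_uiom_get_intervals_diff() as the relative complement {start...last} - {root}: the parts of the requested range not already covered by stored intervals, advanced with a pivot as in the .c hits above. A hedged sketch of that set difference over a sorted array of disjoint inclusive intervals; the struct and function names are mine, and the real driver walks an rbtree-based interval tree instead:

    #include <stdio.h>

    struct ival { unsigned long start, last; };   /* inclusive bounds */

    /* Emit the sub-ranges of [start, last] not covered by iv[0..n-1]
     * (sorted, disjoint).  Returns the number of gaps written to out[]. */
    static int interval_diff(unsigned long start, unsigned long last,
                             const struct ival *iv, int n,
                             struct ival *out, int out_max)
    {
        unsigned long pivot = start;
        int gaps = 0;

        for (int i = 0; i < n && pivot <= last; i++) {
            if (iv[i].last < pivot || iv[i].start > last)
                continue;                         /* no intersection */
            if (iv[i].start > pivot) {            /* gap before this interval */
                if (gaps < out_max)
                    out[gaps] = (struct ival){ pivot, iv[i].start - 1 };
                gaps++;
            }
            if (iv[i].last >= last)
                return gaps;                      /* covered through the end */
            pivot = iv[i].last + 1;
        }
        if (gaps < out_max)
            out[gaps] = (struct ival){ pivot, last };
        return gaps + 1;
    }

    int main(void)
    {
        struct ival set[] = { { 5, 9 }, { 15, 20 } }, gaps[4];
        int n = interval_diff(0, 25, set, 2, gaps, 4);

        for (int i = 0; i < n; i++)
            printf("[%lu, %lu]\n", gaps[i].start, gaps[i].last);  /* [0,4] [10,14] [21,25] */
        return 0;
    }
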
/linux/arch/mips/include/uapi/asm/

msgbuf.h
    19  long msg_stime;    /* last msgsnd time */
    20  long msg_rtime;    /* last msgrcv time */
    21  long msg_ctime;    /* last change time */
    25  __kernel_pid_t msg_lspid;    /* pid of last msgsnd */
    26  __kernel_pid_t msg_lrpid;    /* last receive pid */
    34  unsigned long msg_stime;    /* last msgsnd time */
    36  unsigned long msg_rtime;    /* last msgrcv time */
    38  unsigned long msg_ctime;    /* last change time */
    42  __kernel_pid_t msg_lspid;    /* pid of last msgsnd */
    43  __kernel_pid_t msg_lrpid;    /* last receive pid */
    [all …]

/linux/tools/include/linux/

interval_tree_generic.h
    18  * ITSUBTREE: name of ITTYPE field within ITSTRUCT holding last-in-subtree
    20  * ITLAST(n): last endpoint of ITSTRUCT node n
    42  ITTYPE start = ITSTART(node), last = ITLAST(node); \
    49  if (parent->ITSUBTREE < last) \
    50  parent->ITSUBTREE = last; \
    59  node->ITSUBTREE = last; \
    72  * Iterate over intervals intersecting [start;last] \
    74  * Note that a node's interval intersects [start;last] iff: \
    75  * Cond1: ITSTART(node) <= last \
    81  ITPREFIX ## _subtree_search(ITSTRUCT *node, ITTYPE start, ITTYPE last) \
    [all …]

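Line 75 shows the first half of the closed-interval overlap test used by the generated iterator; the excerpt is cut off before the second condition, but for two inclusive ranges the full predicate is the standard "each one starts no later than the other ends" test. A small sketch with illustrative names (these are not the macros interval_tree_generic.h generates):

    #include <assert.h>

    struct interval { unsigned long start, last; };   /* [start, last], inclusive */

    /* Cond1 from the excerpt (node->start <= last) plus the symmetric
     * condition on the other side. */
    static int intervals_intersect(const struct interval *node,
                                   unsigned long start, unsigned long last)
    {
        return node->start <= last && start <= node->last;
    }

    int main(void)
    {
        struct interval n = { 10, 20 };

        assert(intervals_intersect(&n, 20, 30));    /* touching endpoints overlap */
        assert(!intervals_intersect(&n, 21, 30));   /* strictly after: no overlap */
        return 0;
    }
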
/linux/include/linux/

interval_tree_generic.h
    18  * ITSUBTREE: name of ITTYPE field within ITSTRUCT holding last-in-subtree
    20  * ITLAST(n): last endpoint of ITSTRUCT node n
    42  ITTYPE start = ITSTART(node), last = ITLAST(node); \
    49  if (parent->ITSUBTREE < last) \
    50  parent->ITSUBTREE = last; \
    59  node->ITSUBTREE = last; \
    72  * Iterate over intervals intersecting [start;last] \
    74  * Note that a node's interval intersects [start;last] iff: \
    75  * Cond1: ITSTART(node) <= last \
    81  ITPREFIX ## _subtree_search(ITSTRUCT *node, ITTYPE start, ITTYPE last) \
    [all …]

/linux/io_uring/

slist.h
    30  list->last = node;    in wq_list_add_after()
    38  list->last = node;    in wq_list_add_tail()
    41  list->last->next = node;    in wq_list_add_tail()
    42  list->last = node;    in wq_list_add_tail()
    51  list->last = node;    in wq_list_add_head()
    56  struct io_wq_work_node *last,    in wq_list_cut() argument
    61  WRITE_ONCE(list->first, last->next);    in wq_list_cut()
    63  prev->next = last->next;    in wq_list_cut()
    65  if (last == list->last)    in wq_list_cut()
    66  list->last = prev;    in wq_list_cut()
    [all …]

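These hits show the usual bookkeeping for a singly linked list that also tracks its tail: add_tail either starts a new list or links through list->last for O(1) append (lines 38-42), and wq_list_cut must repair list->last when the cut reaches the tail (lines 65-66). A minimal sketch of the same pattern; the types and function names below are invented, not the io_uring ones:

    #include <stddef.h>

    struct node { struct node *next; };
    struct slist { struct node *first, *last; };

    static void slist_add_tail(struct slist *l, struct node *n)
    {
        n->next = NULL;
        if (!l->first)              /* empty list: node becomes first and last */
            l->first = n;
        else
            l->last->next = n;      /* O(1) append thanks to the tail pointer */
        l->last = n;
    }

    /* Detach the nodes after *prev up to and including *last (prev == NULL
     * cuts from the head), fixing up the tail pointer when needed. */
    static void slist_cut(struct slist *l, struct node *last, struct node *prev)
    {
        if (!prev)
            l->first = last->next;
        else
            prev->next = last->next;
        if (last == l->last)
            l->last = prev;
        last->next = NULL;
    }
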
/linux/drivers/video/fbdev/core/

sysfillrect.c
    28  unsigned long first, last;    in bitfill_aligned() local
    34  last = ~(FB_SHIFT_HIGH(p, ~0UL, (dst_idx+n) % bits));    in bitfill_aligned()
    38  if (last)    in bitfill_aligned()
    39  first &= last;    in bitfill_aligned()
    57  if (last)    in bitfill_aligned()
    58  *dst = comp(pat, *dst, last);    in bitfill_aligned()
    74  unsigned long first, last;    in bitfill_unaligned() local
    80  last = ~(FB_SHIFT_HIGH(p, ~0UL, (dst_idx+n) % bits));    in bitfill_unaligned()
    84  if (last)    in bitfill_unaligned()
    85  first &= last;    in bitfill_unaligned()
    [all …]

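In these fill routines "first" and "last" are boundary masks for the partial words at either end of the bit range: a zero "last" means the fill ends exactly on a word boundary, and when everything fits in one word the two masks are ANDed together (lines 38-39). A hedged sketch of the same idea assuming LSB-first bit numbering (the direction FB_SHIFT_HIGH abstracts per pixel format); all names below are local to the sketch:

    #include <stdio.h>

    #define BITS (8 * (int)sizeof(unsigned long))

    /* Fill n bits starting at bit offset dst_idx in a word-aligned buffer. */
    static void bitfill(unsigned long *dst, int dst_idx, int n, unsigned long pat)
    {
        unsigned long first = ~0UL << (dst_idx % BITS);          /* head mask */
        unsigned long last = ~(~0UL << ((dst_idx + n) % BITS));  /* tail mask; 0 if aligned */

        dst += dst_idx / BITS;
        n += dst_idx % BITS;       /* total bits measured from the start of *dst */

        if (n <= BITS) {           /* whole fill sits in one word */
            if (last)
                first &= last;
            *dst = (*dst & ~first) | (pat & first);
            return;
        }
        *dst = (*dst & ~first) | (pat & first);   /* leading partial word */
        dst++;
        n -= BITS;
        for (; n >= BITS; n -= BITS)              /* full words */
            *dst++ = pat;
        if (last)                                 /* trailing partial word */
            *dst = (*dst & ~last) | (pat & last);
    }

    int main(void)
    {
        unsigned long buf[3] = { 0, 0, 0 };

        bitfill(buf, 4, 2 * BITS, ~0UL);          /* spans three words */
        printf("%lx %lx %lx\n", buf[0], buf[1], buf[2]);
        return 0;
    }
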
cfbfillrect.c
    38   unsigned long first, last;    in bitfill_aligned() local
    44   last = ~fb_shifted_pixels_mask_long(p, (dst_idx+n) % bits, bswapmask);    in bitfill_aligned()
    48   if (last)    in bitfill_aligned()
    49   first &= last;    in bitfill_aligned()
    78   if (last)    in bitfill_aligned()
    79   FB_WRITEL(comp(pat, FB_READL(dst), last), dst);    in bitfill_aligned()
    95   unsigned long first, last;    in bitfill_unaligned() local
    101  last = ~(FB_SHIFT_HIGH(p, ~0UL, (dst_idx+n) % bits));    in bitfill_unaligned()
    105  if (last)    in bitfill_unaligned()
    106  first &= last;    in bitfill_unaligned()
    [all …]

/linux/kernel/bpf/

range_tree.c
    108  #define LAST(node) ((node)->rn_last)    macro
    111  __rn_subtree_last, START, LAST,    in INTERVAL_TREE_DEFINE() argument
    131  range_it_iter_first(struct range_tree *rt, u32 start, u32 last)    in range_it_iter_first() argument
    133  return __range_it_iter_first(&rt->it_root, start, last);    in range_it_iter_first()
    139  u32 last = start + len - 1;    in range_tree_clear() local
    143  while ((rn = range_it_iter_first(rt, start, last))) {    in range_tree_clear()
    144  if (rn->rn_start < start && rn->rn_last > last) {    in range_tree_clear()
    158  new_rn->rn_start = last + 1;    in range_tree_clear()
    166  } else if (rn->rn_last > last) {    in range_tree_clear()
    169  rn->rn_start = last + 1;    in range_tree_clear()
    [all …]

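The range_tree_clear() hits show the case analysis for carving [start, last] out of an overlapping stored range: the cleared range sits strictly inside a node (split it in two, lines 144 and 158), only the node's tail extends past the cleared range (its start becomes last + 1, lines 166-169), only its head does (its end is trimmed, elided in this excerpt), or it is covered entirely and removed. A hedged sketch of those cases on a single inclusive range, without the interval-tree bookkeeping; names are mine:

    #include <stdint.h>

    /* Clear [start, last] out of one stored inclusive range [*lo, *hi].
     * Returns how many ranges remain (0, 1 or 2); a split writes the second
     * piece to lo2/hi2. */
    static int clear_from_range(uint32_t *lo, uint32_t *hi,
                                uint32_t start, uint32_t last,
                                uint32_t *lo2, uint32_t *hi2)
    {
        if (last < *lo || start > *hi)
            return 1;                       /* no overlap: range unchanged */

        if (*lo < start && *hi > last) {    /* cleared range strictly inside: split */
            *lo2 = last + 1;
            *hi2 = *hi;
            *hi = start - 1;
            return 2;
        }
        if (*lo < start) {                  /* right part cleared: keep the left */
            *hi = start - 1;
            return 1;
        }
        if (*hi > last) {                   /* left part cleared: keep the right */
            *lo = last + 1;
            return 1;
        }
        return 0;                           /* fully covered: range goes away */
    }
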
/linux/drivers/gpu/drm/amd/amdkfd/

kfd_svm.c
    77   svm_range_check_vm(struct kfd_process *p, uint64_t start, uint64_t last,
    95   prange, prange->start, prange->last);    in svm_range_unlink()
    104  if (prange->it_node.start != 0 && prange->it_node.last != 0)    in svm_range_unlink()
    112  prange, prange->start, prange->last);    in svm_range_add_notifier_locked()
    131  prange, prange->start, prange->last);    in svm_range_add_to_svms()
    135  prange->it_node.last = prange->last;    in svm_range_add_to_svms()
    144  prange->notifier.interval_tree.last >> PAGE_SHIFT);    in svm_range_remove_notifier()
    147  prange->notifier.interval_tree.last != 0)    in svm_range_remove_notifier()
    281  uint64_t size = (prange->last - prange->start + 1) << PAGE_SHIFT;    in svm_range_free()
    286  prange->start, prange->last);    in svm_range_free()
    [all …]

/linux/tools/perf/util/

ordered-events.c
    20  struct ordered_event *last = oe->last;    in queue_event() local
    25  oe->last = new;    in queue_event()
    29  if (!last) {    in queue_event()
    36  * last event might point to some random place in the list as it's    in queue_event()
    37  * the last queued event. We expect that the new event is close to    in queue_event()
    40  if (last->timestamp <= timestamp) {    in queue_event()
    41  while (last->timestamp <= timestamp) {    in queue_event()
    42  p = last->list.next;    in queue_event()
    48  last = list_entry(p, struct ordered_event, list);    in queue_event()
    50  list_add_tail(&new->list, &last->list);    in queue_event()
    [all …]

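The comment at lines 36-37 explains the queueing heuristic: timestamps arrive nearly sorted, so instead of searching from the head, queue_event() starts at the most recently queued event and walks forward or backward from there. A hedged sketch of the same idea on a sentinel-based doubly linked list with a remembered "last" cursor; the list helpers and names are stand-ins, not the kernel's list.h:

    #include <stdio.h>

    struct event {
        unsigned long long ts;
        struct event *prev, *next;
    };

    struct queue {
        struct event head;    /* sentinel: head.next is oldest, head.prev newest */
        struct event *last;   /* most recently inserted event, or NULL */
    };

    static void insert_after(struct event *pos, struct event *new)
    {
        new->prev = pos;
        new->next = pos->next;
        pos->next->prev = new;
        pos->next = new;
    }

    static void queue_event(struct queue *q, struct event *new)
    {
        struct event *last = q->last;

        q->last = new;
        if (!last) {                               /* first event */
            insert_after(&q->head, new);
            return;
        }
        if (last->ts <= new->ts) {                 /* walk forward from last */
            while (last->next != &q->head && last->next->ts <= new->ts)
                last = last->next;
            insert_after(last, new);
        } else {                                   /* walk backward from last */
            while (last->prev != &q->head && last->prev->ts > new->ts)
                last = last->prev;
            insert_after(last->prev, new);
        }
    }

    int main(void)
    {
        struct queue q = { .head = { .prev = &q.head, .next = &q.head } };
        struct event e[3] = { { .ts = 10 }, { .ts = 30 }, { .ts = 20 } };

        for (int i = 0; i < 3; i++)
            queue_event(&q, &e[i]);
        for (struct event *p = q.head.next; p != &q.head; p = p->next)
            printf("%llu\n", p->ts);               /* 10 20 30 */
        return 0;
    }
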
/linux/arch/mips/sibyte/common/

cfe_console.c
    16  int i, last, written;    in cfe_console_write() local
    18  for (i=0, last=0; i<count; i++) {    in cfe_console_write()
    24  written = cfe_write(cfe_cons_handle, &str[last], i-last);    in cfe_console_write()
    27  last += written;    in cfe_console_write()
    28  } while (last < i);    in cfe_console_write()
    33  if (last != count) {    in cfe_console_write()
    35  written = cfe_write(cfe_cons_handle, &str[last], count-last);    in cfe_console_write()
    38  last += written;    in cfe_console_write()
    39  } while (last < count);    in cfe_console_write()

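Here "last" tracks how much of the buffer has actually been accepted by the firmware console: cfe_write() may take fewer bytes than requested, so each chunk is retried in a do/while until last catches up (lines 27-28 and 38-39). The same shape applies to any short-write interface; a hedged POSIX sketch using write(2):

    #include <errno.h>
    #include <stddef.h>
    #include <unistd.h>

    /* Keep writing until every byte of buf has been handed to fd. */
    static int write_all(int fd, const char *buf, size_t count)
    {
        size_t last = 0;           /* bytes successfully written so far */

        while (last < count) {
            ssize_t written = write(fd, buf + last, count - last);

            if (written < 0) {
                if (errno == EINTR)
                    continue;      /* interrupted: retry */
                return -1;
            }
            last += (size_t)written;
        }
        return 0;
    }
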
/linux/drivers/gpu/drm/xe/

xe_range_fence.c
    14   #define XE_RANGE_TREE_LAST(_node) ((_node)->last)
    49   * @last: last address of range fence
    57   u64 start, u64 last, struct dma_fence *fence)    in xe_range_fence_insert() argument
    68   rfence->last = last;    in xe_range_fence_insert()
    129  * @last: last address of range fence
    135  u64 last)    in xe_range_fence_tree_first() argument
    137  return xe_range_fence_tree_iter_first(&tree->root, start, last);    in xe_range_fence_tree_first()
    144  * @last: last address of range fence
    149  xe_range_fence_tree_next(struct xe_range_fence *rfence, u64 start, u64 last)    in xe_range_fence_tree_next() argument
    151  return xe_range_fence_tree_iter_next(rfence, start, last);    in xe_range_fence_tree_next()

/linux/include/net/

nl802154.h
    182  /* keep last */
    237  /* keep last */
    277  /* keep last */
    306  /* keep last */
    325  /* keep last */
    352  /* keep last */
    367  /* keep last */
    389  /* keep last */
    400  /* keep last */
    415  /* keep last */
    [all …]

/linux/drivers/crypto/marvell/cesa/

tdma.c
    76   dreq->chain.last = NULL;    in mv_cesa_dma_cleanup()
    99   if (engine->chain.first == NULL && engine->chain.last == NULL) {    in mv_cesa_tdma_chain()
    101  engine->chain.last = dreq->chain.last;    in mv_cesa_tdma_chain()
    103  struct mv_cesa_tdma_desc *last;    in mv_cesa_tdma_chain() local
    105  last = engine->chain.last;    in mv_cesa_tdma_chain()
    106  last->next = dreq->chain.first;    in mv_cesa_tdma_chain()
    107  engine->chain.last = dreq->chain.last;    in mv_cesa_tdma_chain()
    111  * the last element of the current chain, or if the request    in mv_cesa_tdma_chain()
    115  if (!(last->flags & CESA_TDMA_BREAK_CHAIN) &&    in mv_cesa_tdma_chain()
    117  last->next_dma = cpu_to_le32(dreq->chain.first->cur_dma);    in mv_cesa_tdma_chain()
    [all …]

/linux/include/trace/events/

maple_tree.h
    24   __field(unsigned long, last)
    33   __entry->last = mas->last;
    43   (unsigned long) __entry->last
    57   __field(unsigned long, last)
    66   __entry->last = mas->last;
    76   (unsigned long) __entry->last
    92   __field(unsigned long, last)
    103  __entry->last = mas->last;
    115  (unsigned long) __entry->last,

/linux/lib/math/

prime_numbers.c
    11   unsigned long last, sz;    member
    17   .last = 61,
    42   .last = 31,
    133  if (x < p->last) {    in expand_to_next_prime()
    145  new->last = clear_multiples(y, new->primes, p->sz, sz);    in expand_to_next_prime()
    148  BUG_ON(new->last <= x);    in expand_to_next_prime()
    192  while (x >= p->last) {    in next_prime_number()
    201  x = find_next_bit(p->primes, p->last, x + 1);    in next_prime_number()
    254  pr_info("primes.{last=%lu, .sz=%lu, .primes[]=...x%lx} = %s\n",    in dump_primes()
    255  p->last, p->sz, p->primes[BITS_TO_LONGS(p->sz) - 1], buf);    in dump_primes()
    [all …]

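In this cache, "last" is the highest value the sieve bitmap currently covers: next_prime_number() expands the sieve while x >= p->last (line 192) and then jumps to the next set bit with find_next_bit() (line 201). A hedged userspace sketch of the same "sieve plus next set entry" lookup, using a fixed-size sieve rather than the kernel's on-demand expansion; all names here are local to the sketch:

    #include <stdbool.h>
    #include <stdio.h>
    #include <string.h>

    #define LAST 128                      /* sieve covers values 0..LAST-1 */

    static bool is_prime[LAST];

    static void build_sieve(void)
    {
        memset(is_prime, true, sizeof(is_prime));
        is_prime[0] = is_prime[1] = false;
        for (unsigned long i = 2; i * i < LAST; i++)
            if (is_prime[i])
                for (unsigned long j = i * i; j < LAST; j += i)
                    is_prime[j] = false;  /* clear multiples of i */
    }

    /* Next prime strictly greater than x, or 0 if the sieve is too small. */
    static unsigned long next_prime_number(unsigned long x)
    {
        for (unsigned long n = x + 1; n < LAST; n++)
            if (is_prime[n])
                return n;
        return 0;
    }

    int main(void)
    {
        build_sieve();
        printf("%lu %lu %lu\n", next_prime_number(1),
               next_prime_number(61), next_prime_number(100));   /* 2 67 101 */
        return 0;
    }
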
/linux/arch/um/drivers/

umcast_kern.c
    74   char *last;    in mcast_setup() local
    90   init->lport = simple_strtoul(port_str, &last, 10);    in mcast_setup()
    91   if ((*last != '\0') || (last == port_str)) {    in mcast_setup()
    99   init->ttl = simple_strtoul(ttl_str, &last, 10);    in mcast_setup()
    100  if ((*last != '\0') || (last == ttl_str)) {    in mcast_setup()
    120  char *last;    in ucast_setup() local
    136  init->lport = simple_strtoul(lport_str, &last, 10);    in ucast_setup()
    137  if ((*last != '\0') || (last == lport_str)) {    in ucast_setup()
    145  init->rport = simple_strtoul(rport_str, &last, 10);    in ucast_setup()
    146  if ((*last != '\0') || (last == rport_str)) {    in ucast_setup()

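Here "last" is the end pointer filled in by simple_strtoul(); the value is rejected unless parsing consumed at least one character (last != start) and stopped exactly at the terminating NUL (*last == '\0'), as at lines 90-91. The same check works with the standard C strtoul(); a small hedged sketch with an invented helper name:

    #include <stdio.h>
    #include <stdlib.h>

    /* Accept only strings that are non-empty and contain nothing but digits. */
    static int parse_port(const char *s, unsigned long *out)
    {
        char *last;
        unsigned long v = strtoul(s, &last, 10);

        if (*last != '\0' || last == s)
            return -1;              /* empty string or trailing junk */
        *out = v;
        return 0;
    }

    int main(void)
    {
        unsigned long port;

        printf("%d\n", parse_port("8080", &port));   /* 0: ok     */
        printf("%d\n", parse_port("80x", &port));    /* -1: junk  */
        printf("%d\n", parse_port("", &port));       /* -1: empty */
        return 0;
    }
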
/linux/drivers/staging/media/ipu3/

ipu3-css-pool.c
    49  pool->last = IPU3_CSS_POOL_SIZE;    in imgu_css_pool_init()
    64  u32 n = (pool->last + 1) % IPU3_CSS_POOL_SIZE;    in imgu_css_pool_get()
    67  pool->last = n;    in imgu_css_pool_get()
    75  pool->entry[pool->last].valid = false;    in imgu_css_pool_put()
    76  pool->last = (pool->last + IPU3_CSS_POOL_SIZE - 1) % IPU3_CSS_POOL_SIZE;    in imgu_css_pool_put()
    80  * imgu_css_pool_last - Retrieve the nth pool entry from last
    83  * @n: the distance to the last index.
    86  * The nth entry from last or null map to indicate no frame stored.
    92  int i = (pool->last + IPU3_CSS_POOL_SIZE - n) % IPU3_CSS_POOL_SIZE;    in imgu_css_pool_last()

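The pool keeps its most recent slot index in pool->last: get advances it modulo the pool size, put steps it back, and the "nth entry from last" helper indexes backwards with (last + SIZE - n) % SIZE (line 92). A hedged sketch of that ring indexing; POOL_SIZE, struct ring and the stored values are inventions of this sketch:

    #include <stdio.h>

    #define POOL_SIZE 4

    struct ring {
        int value[POOL_SIZE];
        int valid[POOL_SIZE];
        unsigned int last;        /* index of the most recently stored entry */
    };

    static void ring_put(struct ring *r, int v)
    {
        r->last = (r->last + 1) % POOL_SIZE;    /* advance, wrapping around */
        r->value[r->last] = v;
        r->valid[r->last] = 1;
    }

    /* nth entry counting back from the latest (n == 0 is the latest);
     * NULL if that slot has never been filled. */
    static int *ring_nth_from_last(struct ring *r, unsigned int n)
    {
        unsigned int i = (r->last + POOL_SIZE - n) % POOL_SIZE;

        return r->valid[i] ? &r->value[i] : NULL;
    }

    int main(void)
    {
        struct ring r = { .last = POOL_SIZE - 1 };   /* first put lands at index 0 */

        for (int v = 1; v <= 3; v++)
            ring_put(&r, v);
        printf("%d %d\n", *ring_nth_from_last(&r, 0),
               *ring_nth_from_last(&r, 2));          /* 3 1 */
        return 0;
    }
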
/linux/arch/powerpc/include/uapi/asm/

msgbuf.h
    16  long msg_stime;    /* last msgsnd time */
    17  long msg_rtime;    /* last msgrcv time */
    18  long msg_ctime;    /* last change time */
    21  unsigned long msg_stime;    /* last msgsnd time */
    23  unsigned long msg_rtime;    /* last msgrcv time */
    25  unsigned long msg_ctime;    /* last change time */
    30  __kernel_pid_t msg_lspid;    /* pid of last msgsnd */
    31  __kernel_pid_t msg_lrpid;    /* last receive pid */

/linux/lib/

interval_tree.c
    8    #define LAST(node) ((node)->last)    macro
    12   START, LAST,, interval_tree)
    22   * span of nodes. This makes nodes[0]->last the end of that contiguous used span
    24   * first node starting the next used span. A hole span is between nodes[0]->last
    34   if (cur->last > state->nodes[0]->last)    in interval_tree_span_iter_next_gap()
    38   } while (cur && (state->nodes[0]->last >= cur->start ||    in interval_tree_span_iter_next_gap()
    39   state->nodes[0]->last + 1 == cur->start));    in interval_tree_span_iter_next_gap()
    73   iter->last_used = iter->nodes[0]->last;    in interval_tree_span_iter_first()
    91   iter->last_used = iter->nodes[0]->last;    in interval_tree_span_iter_next()
    103  iter->start_hole = iter->nodes[0]->last + 1;    in interval_tree_span_iter_next()
    [all …]

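The span iterator comments describe coalescing overlapping or directly adjacent intervals into one contiguous "used" span: the merge condition at lines 38-39 treats last >= next.start and last + 1 == next.start the same, and the following hole starts at last + 1 (line 103). A hedged sketch that applies the same rule to a sorted array of inclusive intervals (the kernel iterates an interval tree instead; names here are mine):

    #include <stdio.h>

    struct ival { unsigned long start, last; };

    /* Print maximal "used" spans and the holes between them. */
    static void print_spans(const struct ival *iv, int n)
    {
        int i = 0;

        while (i < n) {
            unsigned long start = iv[i].start, last = iv[i].last;

            /* extend the span while the next interval overlaps or touches */
            while (i + 1 < n &&
                   (last >= iv[i + 1].start || last + 1 == iv[i + 1].start)) {
                if (iv[i + 1].last > last)
                    last = iv[i + 1].last;
                i++;
            }
            printf("used [%lu, %lu]\n", start, last);
            if (i + 1 < n)
                printf("hole [%lu, %lu]\n", last + 1, iv[i + 1].start - 1);
            i++;
        }
    }

    int main(void)
    {
        struct ival iv[] = { { 0, 4 }, { 5, 9 }, { 12, 14 }, { 13, 20 } };

        print_spans(iv, 4);    /* used [0,9], hole [10,11], used [12,20] */
        return 0;
    }
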