| /linux/net/netfilter/ |
| nft_last.c |
    17   struct nft_last *last;                                  /* struct member */
    29   struct nft_last *last;                                  /* local in nft_last_init() */
    33   last = kzalloc(sizeof(*last), GFP_KERNEL_ACCOUNT);
    34   if (!last)
    38   last->set = ntohl(nla_get_be32(tb[NFTA_LAST_SET]));
    40   if (last->set && tb[NFTA_LAST_MSECS]) {
    45   last->jiffies = jiffies - (unsigned long)last_jiffies;
    47   priv->last = last;
    51   kfree(last);
    60   struct nft_last *last = priv->last;                     /* local in nft_last_eval() */
    [all …]
|
| nft_set_pipapo_avx2.c |
    154  union nft_pipapo_map_bucket *mt, bool last)             /* arg of nft_pipapo_avx2_refill() */
    164  if (last)                                               \
    217  bool first, bool last)                                  /* arg of nft_pipapo_avx2_lookup_4b_2() */
    243  b = nft_pipapo_avx2_refill(i_ul, &map[i_ul], fill, f->mt, last);
    244  if (last)
    280  bool first, bool last)                                  /* arg of nft_pipapo_avx2_lookup_4b_4() */
    320  b = nft_pipapo_avx2_refill(i_ul, &map[i_ul], fill, f->mt, last);
    321  if (last)
    357  bool first, bool last)                                  /* arg of nft_pipapo_avx2_lookup_4b_8() */
    415  b = nft_pipapo_avx2_refill(i_ul, &map[i_ul], fill, f->mt, last);
    [all …]
|
| /linux/scripts/ |
| decode_stacktrace.sh |
    271  local last=$(( ${#words[@]} - 1 ))
    286  if [[ ${words[$last]} =~ \([A-Z]*\) ]]; then
    287      info_str=${words[$last]}
    288      unset words[$last] spaces[$last]
    289      last=$(( $last - 1 ))
    294  if [[ ${words[$last]} =~ ^[0-9a-f]+\] ]]; then
    295      words[$last-1]="${words[$last-1]} ${words[$last]}"
    296      unset words[$last] spaces[$last]
    297      last=$(( $last - 1 ))
    300  if [[ ${words[$last]} =~ \[([^]]+)\] ]]; then
    [all …]
|
| /linux/drivers/infiniband/hw/usnic/ |
| usnic_uiom_interval_tree.c |
    43   #define LAST(node) ((node)->last)
    70   usnic_uiom_interval_node_alloc(long int start, long int last, int ref_cnt,   /* args */
    79       interval->last = last;
    105  unsigned long start, unsigned long last,                /* args of find_intervals_intersection_sorted() */
    112  for (node = usnic_uiom_interval_tree_iter_first(root, start, last);
    114       node = usnic_uiom_interval_tree_iter_next(node, start, last))
    120  int usnic_uiom_get_intervals_diff(unsigned long start, unsigned long last,
    132  find_intervals_intersection_sorted(root, start, last,
    148  if (pivot > interval->last) {
    150  } else if (pivot <= interval->last &&
    [all …]
|
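The usnic_uiom_get_intervals_diff() fragments above walk a sorted set of existing intervals and advance a pivot to find the sub-ranges of an inclusive [start, last] request that are not yet covered. A minimal standalone sketch of that walk over a sorted array instead of the kernel's interval tree; struct ival, intervals_diff() and print_gap() are illustrative names, not the driver's API:

    #include <stdio.h>

    struct ival { unsigned long start, last; };     /* inclusive bounds */

    static void print_gap(unsigned long s, unsigned long l)
    {
            printf("uncovered: [%lu, %lu]\n", s, l);
    }

    /* existing[] must be sorted by start and non-overlapping */
    static void intervals_diff(unsigned long start, unsigned long last,
                               const struct ival *existing, int n)
    {
            unsigned long pivot = start;            /* next uncovered candidate */
            int i;

            for (i = 0; i < n && pivot <= last; i++) {
                    if (pivot > existing[i].last)   /* interval entirely below pivot */
                            continue;
                    if (existing[i].start > pivot)  /* gap before this interval */
                            print_gap(pivot, existing[i].start - 1 < last ?
                                             existing[i].start - 1 : last);
                    pivot = existing[i].last + 1;   /* skip the covered span */
            }
            if (pivot <= last)                      /* tail gap after the last interval */
                    print_gap(pivot, last);
    }

    int main(void)
    {
            const struct ival existing[] = { { 10, 19 }, { 30, 39 } };

            intervals_diff(0, 50, existing, 2);     /* prints [0,9] [20,29] [40,50] */
            return 0;
    }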
| /linux/rust/syn/ |
| punctuated.rs |
    51   last: Option<Box<T>>,                                   // field
    59   last: None,                                             // in new()
    66   self.inner.len() == 0 && self.last.is_none()            // in is_empty()
    74   self.inner.len() + if self.last.is_some() { 1 } else { 0 }   // in len()
    88   pub fn last(&self) -> Option<&T> {                      // method
    102  self.last.as_deref()                                    // in get()
    114  self.last.as_deref_mut()                                // in get_mut()
    125  last: self.last.as_ref().map(Box::as_ref).into_iter(),  // in iter()
    136  last: self.last.as_mut().map(Box::as_mut).into_iter(),  // in iter_mut()
    146  last: self.last.as_ref().map(Box::as_ref).into_iter(),  // in pairs()
    [all …]
|
| /linux/arch/mips/sibyte/common/ |
| cfe_console.c |
    16   int i, last, written;                                   /* locals in cfe_console_write() */
    18   for (i = 0, last = 0; i < count; i++) {
    24       written = cfe_write(cfe_cons_handle, &str[last], i - last);
    27       last += written;
    28   } while (last < i);
    33   if (last != count) {
    35       written = cfe_write(cfe_cons_handle, &str[last], count - last);
    38       last += written;
    39   } while (last < count);
|
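cfe_console_write() above is the classic short-write retry loop: cfe_write() may accept fewer bytes than requested, so `last` tracks how much of the buffer has been consumed and the call repeats until everything is out. A userspace sketch of the same pattern over POSIX write(2); write_all() is an illustrative name, not a CFE or kernel API:

    #include <stddef.h>
    #include <unistd.h>

    static int write_all(int fd, const char *str, size_t count)
    {
            size_t last = 0;                /* bytes written so far */

            while (last < count) {
                    ssize_t written = write(fd, &str[last], count - last);

                    if (written < 0)
                            return -1;      /* real error: give up */
                    last += (size_t)written; /* short write: retry the rest */
            }
            return 0;
    }

    int main(void)
    {
            return write_all(1, "hello\n", 6);
    }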
| /linux/drivers/vhost/ |
| iotlb.c |
    17   #define LAST(map) ((map)->last)
    51   u64 start, u64 last,                                    /* args of vhost_iotlb_add_range_ctx() */
    57   if (last < start)
    63   if (start == 0 && last == ULONG_MAX) {
    64       u64 mid = last / 2;
    87   map->size = last - start + 1;
    88   map->last = last;
    104  u64 start, u64 last,                                    /* args of vhost_iotlb_add_range() */
    107  return vhost_iotlb_add_range_ctx(iotlb, start, last,
    118  void vhost_iotlb_del_range(struct vhost_iotlb *iotlb, u64 start, u64 last)
    [all …]
|
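The iotlb.c fragments above show why vhost keeps ranges as inclusive [start, last] pairs rather than start plus size: the full 64-bit range [0, ULONG_MAX] has a size of 2^64, which does not fit in a u64, so the kernel special-cases it (splitting it at `mid`). A sketch of the bookkeeping under that convention; struct map and add_range() are illustrative, and unlike the kernel code this sketch simply rejects the full range instead of inserting two halves:

    #include <stdint.h>
    #include <stdio.h>

    struct map { uint64_t start, last, size; };

    static int add_range(struct map *m, uint64_t start, uint64_t last)
    {
            if (last < start)
                    return -1;      /* rejected, as in the kernel code */
            if (start == 0 && last == UINT64_MAX)
                    return -1;      /* size would overflow; kernel splits this case */
            m->start = start;
            m->last = last;
            m->size = last - start + 1;     /* safe for every remaining case */
            return 0;
    }

    int main(void)
    {
            struct map m;

            if (add_range(&m, 0x1000, 0x1fff) == 0)
                    printf("size = %llu\n", (unsigned long long)m.size); /* 4096 */
            return 0;
    }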
| /linux/tools/testing/selftests/vfio/lib/ |
| iova_allocator.c |
    58   iova_t iova, last;                                      /* locals in iova_allocator_alloc() */
    67   if (check_add_overflow(iova, size - 1, &last) ||
    68       last > range->last)
    72   iova = last & ~(size - 1);
    75   if (check_add_overflow(iova, size - 1, &last) ||
    76       last > range->last)
    79   if (last == range->last) {
    83   allocator->range_offset = last - range->start + 1;
|
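iova_allocator_alloc() above bump-allocates power-of-two-sized, naturally aligned IOVA ranges. check_add_overflow() catches `iova + size - 1` wrapping around instead of silently accepting it, and `last & ~(size - 1)` rounds the candidate up to the required alignment. A standalone sketch using __builtin_add_overflow(), the GCC/Clang primitive behind the kernel's check_add_overflow(); struct range, alloc_iova() and the cursor are illustrative:

    #include <stdint.h>
    #include <stdio.h>

    struct range { uint64_t start, last; };         /* inclusive bounds */

    /* returns 0 and *out on success; size must be a nonzero power of two */
    static int alloc_iova(struct range *r, uint64_t *cursor,
                          uint64_t size, uint64_t *out)
    {
            uint64_t iova = r->start + *cursor;
            uint64_t last;

            if (__builtin_add_overflow(iova, size - 1, &last) || last > r->last)
                    return -1;                      /* does not fit at all */

            iova = last & ~(size - 1);              /* round up to alignment */
            if (__builtin_add_overflow(iova, size - 1, &last) || last > r->last)
                    return -1;                      /* alignment pushed it out */

            *cursor = last - r->start + 1;          /* bump for the next caller */
            *out = iova;
            return 0;
    }

    int main(void)
    {
            struct range r = { 0x1000, 0x1ffff };
            uint64_t cursor = 0, iova;

            if (alloc_iova(&r, &cursor, 0x2000, &iova) == 0)
                    printf("iova = 0x%llx\n", (unsigned long long)iova); /* 0x2000 */
            return 0;
    }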
| /linux/kernel/bpf/ |
| range_tree.c |
    131  range_it_iter_first(struct range_tree *rt, u32 start, u32 last)
    132      return __range_it_iter_first(&rt->it_root, start, last);    /* in range_it_iter_first() */
    139  u32 last = start + len - 1;                             /* local in range_tree_clear() */
    142  while ((rn = range_it_iter_first(rt, start, last))) {
    143      if (rn->rn_start < start && rn->rn_last > last) {
    155          new_rn->rn_start = last + 1;
    163      } else if (rn->rn_last > last) {
    166          rn->rn_start = last + 1;
    185  left = range_it_iter_first(rt, start, last);            /* in is_range_tree_set() */
    186  u32 last = start + len - 1;                             /* local in is_range_tree_set() */
    199  u32 last = start + len - 1;                             /* local in range_tree_set() */
    [all …]
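range_tree_clear() above must handle three shapes of overlap between the cleared [start, last] and an existing free range: trim on the left, trim on the right, or split one node into two when the cleared span is strictly inside it. A minimal sketch of that case analysis on a plain struct, without the kernel's interval tree; clear_from() is an illustrative name:

    #include <stdint.h>
    #include <stdio.h>

    struct rnode { uint32_t rn_start, rn_last; };   /* inclusive free range */

    static void clear_from(struct rnode *rn, uint32_t start, uint32_t last,
                           struct rnode *split_out, int *nsplit)
    {
            *nsplit = 0;
            if (rn->rn_start < start && rn->rn_last > last) {
                    /* strict containment: split into two remainders */
                    split_out->rn_start = last + 1;
                    split_out->rn_last = rn->rn_last;
                    rn->rn_last = start - 1;
                    *nsplit = 1;
            } else if (rn->rn_last > last) {
                    rn->rn_start = last + 1;        /* trim the left edge */
            } else if (rn->rn_start < start) {
                    rn->rn_last = start - 1;        /* trim the right edge */
            }
            /* else: fully covered; the kernel removes the node entirely */
    }

    int main(void)
    {
            struct rnode rn = { 0, 99 }, extra;
            int nsplit;

            clear_from(&rn, 10, 19, &extra, &nsplit);
            printf("[%u,%u]\n", (unsigned)rn.rn_start, (unsigned)rn.rn_last);
            if (nsplit)     /* prints the second remainder [20,99] */
                    printf("[%u,%u]\n", (unsigned)extra.rn_start,
                           (unsigned)extra.rn_last);
            return 0;
    }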
| /linux/io_uring/ |
| slist.h |
    30   list->last = node;                                      /* in wq_list_add_after() */
    38   list->last = node;                                      /* in wq_list_add_tail() */
    41   list->last->next = node;
    42   list->last = node;
    51   list->last = node;                                      /* in wq_list_add_head() */
    56   struct io_wq_work_node *last,                           /* arg of wq_list_cut() */
    61   WRITE_ONCE(list->first, last->next);
    63   prev->next = last->next;
    65   if (last == list->last)
    [all …]
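The slist.h fragments above are a singly linked list that carries a `last` pointer next to `first`, which is what makes wq_list_add_tail() O(1). A standalone sketch of that shape; the names echo the kernel's, but this is a re-implementation, not the io_uring API:

    #include <stddef.h>
    #include <stdio.h>

    struct node { struct node *next; int val; };
    struct list { struct node *first, *last; };

    static void list_add_tail(struct list *l, struct node *n)
    {
            n->next = NULL;
            if (!l->first) {
                    l->first = n;       /* empty list: node is both ends */
                    l->last = n;
            } else {
                    l->last->next = n;  /* O(1) append via the tail pointer */
                    l->last = n;
            }
    }

    int main(void)
    {
            struct list l = { NULL, NULL };
            struct node a = { NULL, 1 }, b = { NULL, 2 };
            struct node *it;

            list_add_tail(&l, &a);
            list_add_tail(&l, &b);
            for (it = l.first; it; it = it->next)
                    printf("%d\n", it->val);
            return 0;
    }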
| /linux/drivers/gpu/drm/amd/amdkfd/ |
| kfd_svm.c |
    77   svm_range_check_vm(struct kfd_process *p, uint64_t start, uint64_t last,
    95   prange, prange->start, prange->last);                   /* in svm_range_unlink() */
    104  if (prange->it_node.start != 0 && prange->it_node.last != 0)
    112  prange, prange->start, prange->last);                   /* in svm_range_add_notifier_locked() */
    131  prange, prange->start, prange->last);                   /* in svm_range_add_to_svms() */
    135  prange->it_node.last = prange->last;
    144  prange->notifier.interval_tree.last >> PAGE_SHIFT);     /* in svm_range_remove_notifier() */
    147  prange->notifier.interval_tree.last != 0)
    281  uint64_t size = (prange->last - prange->start + 1) << PAGE_SHIFT;   /* in svm_range_free() */
    286  prange->start, prange->last);
    [all …]
|
| /linux/include/trace/events/ |
| maple_tree.h |
    24   __field(unsigned long, last)
    33   __entry->last = mas->last;
    43   (unsigned long) __entry->last
    57   __field(unsigned long, last)
    66   __entry->last = mas->last;
    76   (unsigned long) __entry->last
    92   __field(unsigned long, last)
    103  __entry->last = mas->last;
    115  (unsigned long) __entry->last,
|
| /linux/tools/perf/util/ |
| ordered-events.c |
    20   struct ordered_event *last = oe->last;                  /* local in queue_event() */
    25   oe->last = new;
    29   if (!last) {
    40   if (last->timestamp <= timestamp) {
    41       while (last->timestamp <= timestamp) {
    42           p = last->list.next;
    48           last = list_entry(p, struct ordered_event, list);
    50       list_add_tail(&new->list, &last->list);
    52   while (last->timestamp > timestamp) {
    53       p = last->list.prev;
    [all …]
|
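queue_event() above exploits the fact that perf timestamps arrive almost sorted: instead of scanning the whole list, it starts at the most recently queued event (oe->last) and walks forward or backward from there to find the insertion point. A standalone sketch of that locality trick on a circular doubly linked list with a sentinel; struct ev and the layout are illustrative, not the kernel's list.h:

    #include <stdio.h>

    struct ev { unsigned long long ts; struct ev *prev, *next; };

    static struct ev head = { 0, &head, &head };    /* circular sentinel */
    static struct ev *last;                         /* most recent insert */

    static void queue_event(struct ev *new)
    {
            struct ev *pos;

            if (!last) {
                    pos = &head;                    /* first event ever */
            } else if (last->ts <= new->ts) {
                    pos = last;                     /* walk forward from last */
                    while (pos->next != &head && pos->next->ts <= new->ts)
                            pos = pos->next;
            } else {
                    pos = last->prev;               /* walk backward from last */
                    while (pos != &head && pos->ts > new->ts)
                            pos = pos->prev;
            }
            new->prev = pos;                        /* insert after pos */
            new->next = pos->next;
            pos->next->prev = new;
            pos->next = new;
            last = new;
    }

    int main(void)
    {
            struct ev a = { 10 }, b = { 30 }, c = { 20 };
            struct ev *it;

            queue_event(&a);
            queue_event(&b);
            queue_event(&c);
            for (it = head.next; it != &head; it = it->next)
                    printf("%llu\n", it->ts);       /* 10 20 30 */
            return 0;
    }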
| /linux/drivers/video/fbdev/core/ |
| fb_fillrect.h |
    111  unsigned long first, last;                              /* locals in bitfill() */
    115  last = ~fb_pixel_mask(end & (BITS_PER_LONG-1), pattern->reverse);
    118  last = last ? (last & first) : first;
    120  if (last == ~0UL)
    122  else if (last)
    123      fb_modify_offset(first, last, 0, dst);
    147  if (last)
    148      fb_modify_offset(get(pattern), last, offset, dst);
    159  unsigned long first, last;                              /* locals in bitinvert() */
    164  last = ~fb_pixel_mask(end & (BITS_PER_LONG-1), pattern->reverse);
    [all …]
|
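bitfill() above fills an unaligned bit span as a partial first word, a run of whole middle words, and a partial last word, with `first` and `last` masks selecting the edge bits (and `first & last` when the whole span fits in one word). A standalone sketch for an LSB-first 64-bit word bitmap, assuming end > start; fb_pixel_mask() and the framebuffer specifics are left out:

    #include <stdint.h>
    #include <stdio.h>

    #define BITS_PER_LONG 64

    static void bitfill(uint64_t *dst, unsigned start, unsigned end)
    {
            unsigned word = start / BITS_PER_LONG;
            unsigned lastw = (end - 1) / BITS_PER_LONG;
            uint64_t first = ~0ULL << (start % BITS_PER_LONG);  /* upper bits of first word */
            uint64_t last = end % BITS_PER_LONG ?
                            ~(~0ULL << (end % BITS_PER_LONG)) : ~0ULL; /* lower bits of last word */

            if (word == lastw) {
                    dst[word] |= first & last;  /* span fits in one word */
                    return;
            }
            dst[word++] |= first;               /* partial first word */
            while (word < lastw)
                    dst[word++] = ~0ULL;        /* full middle words */
            dst[word] |= last;                  /* partial last word */
    }

    int main(void)
    {
            uint64_t bits[3] = { 0, 0, 0 };

            bitfill(bits, 60, 132);             /* crosses two word boundaries */
            printf("%016llx %016llx %016llx\n",
                   (unsigned long long)bits[0], (unsigned long long)bits[1],
                   (unsigned long long)bits[2]);
            return 0;
    }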
| fbsysfs.c |
    146  char **last = NULL;                                     /* local in store_bpp() */
    150  var.bits_per_pixel = simple_strtoul(buf, last, 0);
    169  char **last = NULL;                                     /* local in store_rotate() */
    173  var.rotate = simple_strtoul(buf, last, 0);
    196  char *last = NULL;                                      /* local in store_virtual() */
    200  var.xres_virtual = simple_strtoul(buf, &last, 0);
    201  last++;
    202  if (last - buf >= count)
    204  var.yres_virtual = simple_strtoul(last, &last, 0);
    231  char *last = NULL;                                      /* local in store_blank() */
    [all …]
|
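store_virtual() above parses a two-number attribute string by reading the first value with simple_strtoul(), stepping past the separator via the returned end pointer, and reading the second value from there. A userspace sketch with strtoul(); parse_pair() is an illustrative name, and unlike the kernel code it insists on a ',' separator rather than skipping a single arbitrary character:

    #include <stdio.h>
    #include <stdlib.h>

    static int parse_pair(const char *buf, unsigned long *x, unsigned long *y)
    {
            char *last = NULL;

            *x = strtoul(buf, &last, 0);
            if (last == buf || *last != ',')
                    return -1;      /* no number, or missing separator */
            *y = strtoul(last + 1, &last, 0);
            return 0;
    }

    int main(void)
    {
            unsigned long x, y;

            if (parse_pair("1024,768", &x, &y) == 0)
                    printf("%lu x %lu\n", x, y);
            return 0;
    }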
| /linux/mm/ |
| interval_tree.c |
    34   unsigned long last = vma_last_pgoff(node);              /* local in vma_interval_tree_insert_after() */
    44   if (parent->shared.rb_subtree_last < last)
    45       parent->shared.rb_subtree_last = last;
    49   if (parent->shared.rb_subtree_last < last)
    50       parent->shared.rb_subtree_last = last;
    55   node->shared.rb_subtree_last = last;
    93   unsigned long first, unsigned long last)                /* args of anon_vma_interval_tree_iter_first() */
    95   return __anon_vma_interval_tree_iter_first(root, first, last);
    100  unsigned long first, unsigned long last)                /* args of anon_vma_interval_tree_iter_next() */
    102  return __anon_vma_interval_tree_iter_next(node, first, last);
|
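The vma_interval_tree_insert_after() fragments above maintain the augmentation that makes interval stabbing queries fast: every node caches the maximum interval endpoint in its subtree (rb_subtree_last), and an insert raises that cache on each node it passes on the way down. A sketch of the same rule on a plain unbalanced BST keyed by interval start; the kernel uses an augmented red-black tree generated by its interval-tree template:

    #include <stdio.h>
    #include <stdlib.h>

    struct node {
            unsigned long start, last;      /* the interval itself */
            unsigned long subtree_last;     /* max `last` in this subtree */
            struct node *left, *right;
    };

    static struct node *insert(struct node *root, struct node *n)
    {
            if (!root) {
                    n->subtree_last = n->last;
                    return n;
            }
            if (root->subtree_last < n->last)
                    root->subtree_last = n->last;   /* maintain the cache */
            if (n->start < root->start)
                    root->left = insert(root->left, n);
            else
                    root->right = insert(root->right, n);
            return root;
    }

    static struct node *mknode(unsigned long start, unsigned long last)
    {
            struct node *n = calloc(1, sizeof(*n));

            n->start = start;
            n->last = last;
            return n;
    }

    int main(void)
    {
            struct node *root = NULL;

            root = insert(root, mknode(10, 20));
            root = insert(root, mknode(5, 99));
            root = insert(root, mknode(30, 40));
            printf("root subtree_last = %lu\n", root->subtree_last); /* 99 */
            return 0;
    }

The cache pays off on lookup: any subtree whose subtree_last falls below the query start cannot contain an overlapping interval and is pruned without being visited.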
| /linux/net/sunrpc/xprtrdma/ |
| frwr_ops.c |
    499  struct ib_send_wr *first, **prev, *last;                /* locals in frwr_unmap_sync() */
    516  last = &mr->mr_invwr;
    517  last->next = NULL;
    518  last->wr_cqe = &mr->mr_cqe;
    519  last->sg_list = NULL;
    520  last->num_sge = 0;
    521  last->opcode = IB_WR_LOCAL_INV;
    522  last->send_flags = IB_SEND_SIGNALED;
    523  last->ex.invalidate_rkey = mr->mr_handle;
    525  last->wr_cqe->done = frwr_wc_localinv;
    [all …]
|
| /linux/crypto/asymmetric_keys/ |
| pkcs7_trust.c |
    27   struct x509_certificate *x509, *last = NULL, *p;        /* locals in pkcs7_validate_trust_one() */
    74   last = x509;
    75   sig = last->sig;
    81   if (last && (last->sig->auth_ids[0] || last->sig->auth_ids[1])) {
    83       last->sig->auth_ids[0],
    84       last->sig->auth_ids[1],
    87   x509 = last;
|
| /linux/drivers/gpu/drm/i915/gt/uc/ |
| selftest_guc.c |
    54   struct i915_request *last[3] = {NULL, NULL, NULL}, *rq; /* locals in intel_guc_scrub_ctbs() */
    95   last[i] = rq;
    99   ret = i915_request_wait(last[i], 0, HZ);
    104  i915_request_put(last[i]);
    105  last[i] = NULL;
    124  if (last[i])
    125      i915_request_put(last[i]);
    153  struct i915_request *spin_rq = NULL, *rq, *last = NULL; /* locals in intel_guc_steal_guc_ids() */
    207  if ((ret != -EAGAIN) || !last) {
    209      last ? "" : "first ", context_index, ERR_PTR(ret));
    [all …]
|
| /linux/drivers/memory/tegra/ |
| tegra210-emc-cc-r21021.c |
    136  struct tegra210_emc_timing *curr = emc->last;           /* in tegra210_emc_get_clktree_delay() */
    183  struct tegra210_emc_timing *last,                       /* arg of periodic_compensation_handler() */
    198  if (last->periodic_training &&
    207      __COPY_EMA(next, last, idx);
    258  struct tegra210_emc_timing *last = emc->last;           /* local in tegra210_emc_r21021_periodic_compensation() */
    261  if (last->periodic_training) {
    304      last, last)) {
    310  value = tegra210_emc_compensate(last, list[i]);
    359  struct tegra210_emc_timing *fake, *last = emc->last, *next = emc->next;   /* locals in tegra210_emc_r21021_set_clock() */
    379  fake = tegra210_emc_find_timing(emc, last->rate * 1000UL);
    [all …]
|
| /linux/drivers/infiniband/hw/cxgb4/ |
| id_table.c |
    51   obj = find_next_zero_bit(alloc->table, alloc->max, alloc->last);   /* in c4iw_id_alloc() */
    57   alloc->last += get_random_u32_below(RANDOM_SKIP);
    59   alloc->last = obj + 1;
    60   if (alloc->last >= alloc->max)
    61       alloc->last = 0;
    88   alloc->last = get_random_u32_below(RANDOM_SKIP);        /* in c4iw_id_table_alloc() */
    90   alloc->last = 0;
|
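c4iw_id_alloc() above searches a bitmap for a free id starting at alloc->last, wraps around, and can push `last` forward by a random amount (RANDOM_SKIP) so ids are not handed out in a guessable sequence. A standalone sketch with a byte-per-id table instead of find_next_zero_bit(); id_alloc() and the constants are illustrative:

    #include <stdio.h>
    #include <stdlib.h>

    #define MAX_IDS     64
    #define RANDOM_SKIP 16

    static unsigned char table[MAX_IDS];    /* 1 = allocated */
    static unsigned int last;               /* where the next search starts */

    static int id_alloc(void)
    {
            unsigned int obj = last, scanned = 0;

            while (scanned < MAX_IDS && table[obj]) {
                    obj = (obj + 1) % MAX_IDS;      /* wrap-around search */
                    scanned++;
            }
            if (scanned == MAX_IDS)
                    return -1;                      /* table full */
            table[obj] = 1;
            /* randomized skip: the next search starts a little further on */
            last = (obj + 1 + rand() % RANDOM_SKIP) % MAX_IDS;
            return (int)obj;
    }

    int main(void)
    {
            printf("%d %d %d\n", id_alloc(), id_alloc(), id_alloc());
            return 0;
    }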
| /linux/drivers/iommu/iommufd/ |
| io_pagetable.h |
    90   return area->pages_node.last;                           /* in iopt_area_last_index() */
    100  return area->node.last;                                 /* in iopt_area_last_iova() */
    105  return (area->node.last - area->node.start) + 1;        /* in iopt_area_length() */
    132  unsigned long last) \
    138      last); \
    145  unsigned long last) \
    149  node = interval_tree_iter_next(&last_node->node, start, last); \
    284  unsigned long last, struct page **out_pages);
    286  unsigned long last, struct page **out_pages);
    288  unsigned long last);
    [all …]
|
| /linux/drivers/staging/media/ipu3/ |
| ipu3-css-pool.c |
    49   pool->last = IPU3_CSS_POOL_SIZE;                        /* in imgu_css_pool_init() */
    64   u32 n = (pool->last + 1) % IPU3_CSS_POOL_SIZE;          /* in imgu_css_pool_get() */
    67   pool->last = n;
    75   pool->entry[pool->last].valid = false;                  /* in imgu_css_pool_put() */
    76   pool->last = (pool->last + IPU3_CSS_POOL_SIZE - 1) % IPU3_CSS_POOL_SIZE;
    92   int i = (pool->last + IPU3_CSS_POOL_SIZE - n) % IPU3_CSS_POOL_SIZE;   /* in imgu_css_pool_last() */
|
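The ipu3-css-pool.c fragments above implement a small ring of buffers: `last` indexes the most recently claimed slot, get() advances it, put() steps it back, and last(n) looks back n allocations. A standalone model of that index arithmetic; note that pool_init() here seeds `last` to POOL_SIZE - 1 so the first get() claims slot 0, whereas the driver uses the out-of-range value IPU3_CSS_POOL_SIZE as its "empty" marker:

    #include <stdbool.h>
    #include <stdio.h>

    #define POOL_SIZE 4

    struct pool {
            struct { bool valid; } entry[POOL_SIZE];
            unsigned int last;              /* newest claimed slot */
    };

    static void pool_init(struct pool *p)
    {
            for (unsigned int i = 0; i < POOL_SIZE; i++)
                    p->entry[i].valid = false;
            p->last = POOL_SIZE - 1;        /* so the first get() claims slot 0 */
    }

    static int pool_get(struct pool *p)
    {
            unsigned int n = (p->last + 1) % POOL_SIZE;

            if (p->entry[n].valid)
                    return -1;              /* oldest slot still in use */
            p->entry[n].valid = true;
            p->last = n;
            return (int)n;
    }

    static void pool_put(struct pool *p)
    {
            p->entry[p->last].valid = false;
            p->last = (p->last + POOL_SIZE - 1) % POOL_SIZE;
    }

    /* slot claimed n get()s ago (0 = newest) */
    static int pool_last(const struct pool *p, unsigned int n)
    {
            return (int)((p->last + POOL_SIZE - n) % POOL_SIZE);
    }

    int main(void)
    {
            struct pool p;

            pool_init(&p);
            printf("%d %d %d\n", pool_get(&p), pool_get(&p), pool_last(&p, 1));
            pool_put(&p);
            return 0;
    }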
| /linux/drivers/net/team/ |
| team_mode_broadcast.c |
    17   struct team_port *last = NULL;                          /* local in bc_transmit() */
    24   if (last) {
    27       ret = !team_dev_queue_xmit(team, last,
    33   last = cur;
    36   if (last) {
    37       ret = !team_dev_queue_xmit(team, last, skb);
|
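bc_transmit() above broadcasts one skb to every active port while allocating one copy fewer than the port count: a clone is sent for each port once a later active port is found, and the original skb goes to the last port. A sketch of that deferred-send shape with FILE handles standing in for ports; xmit() replaces team_dev_queue_xmit() and there is no real skb clone here:

    #include <stdio.h>

    static void xmit(FILE *port, const char *buf)
    {
            fputs(buf, port);       /* stands in for team_dev_queue_xmit() */
    }

    static void bc_transmit(FILE *const *ports, int nports, const char *buf)
    {
            FILE *last = NULL;
            int i;

            for (i = 0; i < nports; i++) {
                    if (!ports[i])
                            continue;       /* skip inactive ports */
                    if (last)
                            xmit(last, buf); /* a "clone" for the earlier port */
                    last = ports[i];
            }
            if (last)
                    xmit(last, buf);        /* the original goes to the last port */
    }

    int main(void)
    {
            FILE *ports[3] = { stdout, NULL, stderr };

            bc_transmit(ports, 3, "broadcast\n");
            return 0;
    }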
| /linux/drivers/gpu/drm/xe/ |
| xe_range_fence.c |
    14   #define XE_RANGE_TREE_LAST(_node) ((_node)->last)
    57   u64 start, u64 last, struct dma_fence *fence)           /* args of xe_range_fence_insert() */
    68   rfence->last = last;
    135  u64 last)                                               /* arg of xe_range_fence_tree_first() */
    137  return xe_range_fence_tree_iter_first(&tree->root, start, last);
    149  xe_range_fence_tree_next(struct xe_range_fence *rfence, u64 start, u64 last)
    151  return xe_range_fence_tree_iter_next(rfence, start, last);
|