/linux/tools/perf/util/

intel-pt.c
    183  struct intel_pt *pt;                                                  (member)
    233  static void intel_pt_dump(struct intel_pt *pt __maybe_unused,         (in intel_pt_dump())
    273  static void intel_pt_dump_event(struct intel_pt *pt, unsigned char *buf,   (argument)
    277  intel_pt_dump(pt, buf, len);                                          (in intel_pt_dump_event())
    293  struct intel_pt *pt = container_of(session->auxtrace, struct intel_pt,   (local, in intel_pt_dump_sample())
    297  intel_pt_dump(pt, sample->aux_sample.data, sample->aux_sample.size);  (in intel_pt_dump_sample())
    300  static bool intel_pt_log_events(struct intel_pt *pt, u64 tm)          (argument, in intel_pt_log_events())
    302  struct perf_time_interval *range = pt->synth_opts.ptime_range;        (in intel_pt_log_events())
    303  int n = pt->synth_opts.range_num;                                     (in intel_pt_log_events())
    305  if (pt->synth_opts.log_plus_flags & AUXTRACE_LOG_FLG_ALL_PERF_EVTS)   (in intel_pt_log_events())
    [all …]
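
The hit at line 293 uses the kernel's container_of() idiom: given a pointer to a member embedded in a larger structure, recover the enclosing structure. A minimal userspace sketch of the same idiom (the struct names here are hypothetical stand-ins, not the perf types):

    #include <stddef.h>
    #include <stdio.h>

    /* Map a pointer to an embedded member back to its containing object. */
    #define container_of(ptr, type, member) \
            ((type *)((char *)(ptr) - offsetof(type, member)))

    struct auxtrace { int kind; };          /* embedded "base" member */
    struct outer {                          /* hypothetical enclosing type */
            long flags;
            struct auxtrace auxtrace;
    };

    int main(void)
    {
            struct outer o = { .flags = 7 };
            struct auxtrace *base = &o.auxtrace;
            struct outer *back = container_of(base, struct outer, auxtrace);
            printf("flags = %ld\n", back->flags);   /* prints 7 */
            return 0;
    }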
|
/linux/sound/i2c/other/

pt2258.c  (all hits in snd_pt2258_reset())
    32   int snd_pt2258_reset(struct snd_pt2258 *pt)                 (argument)
    39   snd_i2c_lock(pt->i2c_bus);
    40   if (snd_i2c_sendbytes(pt->i2c_dev, bytes, 1) != 1)
    42   snd_i2c_unlock(pt->i2c_bus);
    45   pt->mute = 1;
    47   snd_i2c_lock(pt->i2c_bus);
    48   if (snd_i2c_sendbytes(pt->i2c_dev, bytes, 1) != 1)
    50   snd_i2c_unlock(pt->i2c_bus);
    54   pt->volume[i] = 0;
    57   snd_i2c_lock(pt->i2c_bus);
    [all …]
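
Each transfer above is bracketed by snd_i2c_lock()/snd_i2c_unlock(), and the failure path has to drop the lock too. A sketch of that discipline with stubbed bus primitives (every name below is hypothetical):

    #include <stdio.h>

    static void bus_lock(void)   { /* take the bus mutex */ }
    static void bus_unlock(void) { /* release the bus mutex */ }
    /* Pretend the transfer succeeds and report the byte count. */
    static int bus_send(const unsigned char *b, int n) { (void)b; return n; }

    static int chip_reset(void)
    {
            unsigned char bytes[1] = { 0xc0 };   /* hypothetical reset opcode */

            bus_lock();
            if (bus_send(bytes, 1) != 1)
                    goto error;                  /* still holding the lock here */
            bus_unlock();
            return 0;

    error:
            bus_unlock();                        /* never leave the bus locked */
            fprintf(stderr, "chip reset failed\n");
            return -1;
    }

    int main(void) { return chip_reset() ? 1 : 0; }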
|
/linux/drivers/gpu/drm/i915/gt/

intel_ppgtt.c
    18   struct i915_page_table *pt;                                 (local, in alloc_pt())
    20   pt = kmalloc(sizeof(*pt), I915_GFP_ALLOW_FAIL);             (in alloc_pt())
    21   if (unlikely(!pt))                                          (in alloc_pt())
    24   pt->base = vm->alloc_pt_dma(vm, sz);                        (in alloc_pt())
    25   if (IS_ERR(pt->base)) {                                     (in alloc_pt())
    26   kfree(pt);                                                  (in alloc_pt())
    30   pt->is_compact = false;                                     (in alloc_pt())
    31   atomic_set(&pt->used, 0);                                   (in alloc_pt())
    32   return pt;                                                  (in alloc_pt())
    61   pd->pt.base = vm->alloc_pt_dma(vm, I915_GTT_PAGE_SIZE_4K);  (in alloc_pd())
    [all …]
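
alloc_pt() above is a two-stage allocation: first the metadata, then the backing resource, with the metadata freed again if the second stage fails. A reduced userspace sketch of the same unwind (malloc stands in for kmalloc and for vm->alloc_pt_dma()):

    #include <stdlib.h>

    struct page_table { void *base; int used; };

    static struct page_table *alloc_pt_like(size_t sz)
    {
            struct page_table *pt = malloc(sizeof(*pt));
            if (!pt)
                    return NULL;

            pt->base = malloc(sz);       /* second-stage allocation */
            if (!pt->base) {
                    free(pt);            /* unwind the first stage */
                    return NULL;
            }

            pt->used = 0;
            return pt;
    }

    int main(void)
    {
            struct page_table *pt = alloc_pt_like(4096);
            if (pt) {
                    free(pt->base);
                    free(pt);
            }
            return 0;
    }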
|
gen6_ppgtt.c
    21   const struct i915_page_table *pt)                           (argument, in gen6_write_pde())
    23   dma_addr_t addr = pt ? px_dma(pt) : px_dma(ppgtt->base.vm.scratch[1]);   (in gen6_write_pde())
    85   struct i915_page_table * const pt =                         (local, in gen6_ppgtt_clear_range())
    92   GEM_BUG_ON(count > atomic_read(&pt->used));                 (in gen6_ppgtt_clear_range())
    93   if (!atomic_sub_return(count, &pt->used))                   (in gen6_ppgtt_clear_range())
    103  vaddr = px_vaddr(pt);                                       (in gen6_ppgtt_clear_range())
    153  struct i915_page_table *pt;                                 (local, in gen6_flush_pd())
    161  gen6_for_each_pde(pt, pd, start, end, pde)                  (in gen6_flush_pd())
    162  gen6_write_pde(ppgtt, pde, pt);                             (in gen6_flush_pd())
    178  struct i915_page_table *pt;                                 (local, in gen6_alloc_va_range())
    [all …]
|
gen8_ppgtt.c
    145  #define as_pd(x) container_of((x), typeof(struct i915_page_directory), pt)
    219  free_px(vm, &pd->pt, lvl);                                  (in __gen8_ppgtt_cleanup())
    255  struct i915_page_table *pt = pd->entry[idx];                (local, in __gen8_ppgtt_clear())
    257  if (atomic_fetch_inc(&pt->used) >> gen8_pd_shift(1) &&      (in __gen8_ppgtt_clear())
    262  __gen8_ppgtt_cleanup(vm, as_pd(pt), I915_PDES, lvl);        (in __gen8_ppgtt_clear())
    268  start = __gen8_ppgtt_clear(vm, as_pd(pt),                   (in __gen8_ppgtt_clear())
    280  atomic_read(&pt->used));                                    (in __gen8_ppgtt_clear())
    281  GEM_BUG_ON(!count || count >= atomic_read(&pt->used));      (in __gen8_ppgtt_clear())
    284  if (pt->is_compact) {                                       (in __gen8_ppgtt_clear())
    291  vaddr = px_vaddr(pt);                                       (in __gen8_ppgtt_clear())
    [all …]
|
/linux/drivers/gpu/drm/nouveau/nvkm/subdev/mmu/

base.c
    34   struct nvkm_mmu_pt *pt;                                     (member)
    42   nvkm_mmu_ptp_put(struct nvkm_mmu *mmu, bool force, struct nvkm_mmu_pt *pt)   (argument, in nvkm_mmu_ptp_put())
    44   const int slot = pt->base >> pt->ptp->shift;                (in nvkm_mmu_ptp_put())
    45   struct nvkm_mmu_ptp *ptp = pt->ptp;                         (in nvkm_mmu_ptp_put())
    56   nvkm_mmu_ptc_put(mmu, force, &ptp->pt);                     (in nvkm_mmu_ptp_put())
    61   kfree(pt);                                                  (in nvkm_mmu_ptp_put())
    67   struct nvkm_mmu_pt *pt;                                     (local, in nvkm_mmu_ptp_get())
    71   if (!(pt = kzalloc(sizeof(*pt), GFP_KERNEL)))               (in nvkm_mmu_ptp_get())
    78   kfree(pt);                                                  (in nvkm_mmu_ptp_get())
    82   ptp->pt = nvkm_mmu_ptc_get(mmu, 0x1000, 0x1000, false);     (in nvkm_mmu_ptp_get())
    [all …]
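
nvkm_mmu_ptp_get()/nvkm_mmu_ptp_put() above carve small page tables out of one 0x1000-byte allocation, recovering the slot index from the byte offset with a shift. A reduced sketch using a free bitmap (the sizes and names are hypothetical, chosen only to mirror the shift arithmetic):

    #include <stdio.h>

    #define PAGE  0x1000u
    #define SHIFT 8                       /* each sub-table is 0x100 bytes */
    #define SLOTS (PAGE >> SHIFT)         /* 16 slots per page */

    struct ptp { unsigned free; };        /* bit i set => slot i is free */

    static int ptp_get(struct ptp *p, unsigned *base)
    {
            for (unsigned i = 0; i < SLOTS; i++) {
                    if (p->free & (1u << i)) {
                            p->free &= ~(1u << i);
                            *base = i << SHIFT;    /* byte offset of the slot */
                            return 0;
                    }
            }
            return -1;                    /* page is full */
    }

    static void ptp_put(struct ptp *p, unsigned base)
    {
            p->free |= 1u << (base >> SHIFT);      /* invert the shift */
    }

    int main(void)
    {
            struct ptp p = { (1u << SLOTS) - 1 };
            unsigned base;
            if (!ptp_get(&p, &base))
                    printf("sub-table at offset 0x%x\n", base);
            ptp_put(&p, base);
            return 0;
    }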
|
vmmnv44.c
    27   nv44_vmm_pgt_fill(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt,   (argument, in nv44_vmm_pgt_fill())
    33   tmp[0] = nvkm_ro32(pt->memory, pteo + 0x0);                 (in nv44_vmm_pgt_fill())
    34   tmp[1] = nvkm_ro32(pt->memory, pteo + 0x4);                 (in nv44_vmm_pgt_fill())
    35   tmp[2] = nvkm_ro32(pt->memory, pteo + 0x8);                 (in nv44_vmm_pgt_fill())
    36   tmp[3] = nvkm_ro32(pt->memory, pteo + 0xc);                 (in nv44_vmm_pgt_fill())
    66   VMM_WO032(pt, vmm, pteo + 0x0, tmp[0]);                     (in nv44_vmm_pgt_fill())
    67   VMM_WO032(pt, vmm, pteo + 0x4, tmp[1]);                     (in nv44_vmm_pgt_fill())
    68   VMM_WO032(pt, vmm, pteo + 0x8, tmp[2]);                     (in nv44_vmm_pgt_fill())
    69   VMM_WO032(pt, vmm, pteo + 0xc, tmp[3] | 0x40000000);        (in nv44_vmm_pgt_fill())
    73   nv44_vmm_pgt_pte(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt,   (argument, in nv44_vmm_pgt_pte())
    [all …]
|
vmmgf100.c
    32   gf100_vmm_pgt_pte(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt,   (argument, in gf100_vmm_pgt_pte())
    44   VMM_WO064(pt, vmm, ptei++ * 8, data);                       (in gf100_vmm_pgt_pte())
    51   VMM_WO064(pt, vmm, ptei++ * 8, data);                       (in gf100_vmm_pgt_pte())
    58   gf100_vmm_pgt_sgl(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt,   (argument, in gf100_vmm_pgt_sgl())
    61   VMM_MAP_ITER_SGL(vmm, pt, ptei, ptes, map, gf100_vmm_pgt_pte);   (in gf100_vmm_pgt_sgl())
    65   gf100_vmm_pgt_dma(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt,   (argument, in gf100_vmm_pgt_dma())
    70   nvkm_kmap(pt->memory);                                      (in gf100_vmm_pgt_dma())
    73   VMM_WO064(pt, vmm, ptei++ * 8, data);                       (in gf100_vmm_pgt_dma())
    76   nvkm_done(pt->memory);                                      (in gf100_vmm_pgt_dma())
    80   VMM_MAP_ITER_DMA(vmm, pt, ptei, ptes, map, gf100_vmm_pgt_pte);   (in gf100_vmm_pgt_dma())
    [all …]
|
vmmnv41.c
    27   nv41_vmm_pgt_pte(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt,   (argument, in nv41_vmm_pgt_pte())
    32   VMM_WO032(pt, vmm, ptei++ * 4, data);                       (in nv41_vmm_pgt_pte())
    38   nv41_vmm_pgt_sgl(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt,   (argument, in nv41_vmm_pgt_sgl())
    41   VMM_MAP_ITER_SGL(vmm, pt, ptei, ptes, map, nv41_vmm_pgt_pte);   (in nv41_vmm_pgt_sgl())
    45   nv41_vmm_pgt_dma(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt,   (argument, in nv41_vmm_pgt_dma())
    49   nvkm_kmap(pt->memory);                                      (in nv41_vmm_pgt_dma())
    52   VMM_WO032(pt, vmm, ptei++ * 4, data);                       (in nv41_vmm_pgt_dma())
    54   nvkm_done(pt->memory);                                      (in nv41_vmm_pgt_dma())
    56   VMM_MAP_ITER_DMA(vmm, pt, ptei, ptes, map, nv41_vmm_pgt_pte);   (in nv41_vmm_pgt_dma())
    62   struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes)                 (argument, in nv41_vmm_pgt_unmap())
    [all …]
|
vmmnv04.c
    28   nv04_vmm_pgt_pte(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt,   (argument, in nv04_vmm_pgt_pte())
    33   VMM_WO032(pt, vmm, 8 + ptei++ * 4, data);                   (in nv04_vmm_pgt_pte())
    39   nv04_vmm_pgt_sgl(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt,   (argument, in nv04_vmm_pgt_sgl())
    42   VMM_MAP_ITER_SGL(vmm, pt, ptei, ptes, map, nv04_vmm_pgt_pte);   (in nv04_vmm_pgt_sgl())
    46   nv04_vmm_pgt_dma(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt,   (argument, in nv04_vmm_pgt_dma())
    50   nvkm_kmap(pt->memory);                                      (in nv04_vmm_pgt_dma())
    52   VMM_WO032(pt, vmm, 8 + (ptei++ * 4), *map->dma++ | 0x00000003);   (in nv04_vmm_pgt_dma())
    53   nvkm_done(pt->memory);                                      (in nv04_vmm_pgt_dma())
    55   VMM_MAP_ITER_DMA(vmm, pt, ptei, ptes, map, nv04_vmm_pgt_pte);   (in nv04_vmm_pgt_dma())
    61   struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes)                 (argument, in nv04_vmm_pgt_unmap())
    [all …]
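
The nv04/nv41/gf100 helpers above all share one shape: write one page-table entry per page, where each entry is a DMA address OR'd with format bits, stored at ptei * entry_size. A sketch of that fill loop over a plain array (the 0x00000003 flag value is taken from the nv04 hit above; everything else is a stand-in):

    #include <stdint.h>
    #include <stdio.h>

    /* Fill `ptes` consecutive 32-bit entries starting at index `ptei`. */
    static void pgt_fill(uint32_t *pgt, uint32_t ptei, uint32_t ptes,
                         uint32_t addr, uint32_t page)
    {
            while (ptes--) {
                    pgt[ptei++] = addr | 0x00000003;   /* address + valid/rw bits */
                    addr += page;                      /* next physical page */
            }
    }

    int main(void)
    {
            uint32_t pgt[8] = { 0 };
            pgt_fill(pgt, 2, 3, 0x10000, 0x1000);
            for (int i = 0; i < 8; i++)
                    printf("pte[%d] = 0x%08x\n", i, pgt[i]);
            return 0;
    }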
|
/linux/tools/testing/selftests/bpf/progs/

kfunc_call_fail.c
    41   struct prog_test_ref_kfunc *pt;                             (local, in kfunc_call_test_get_mem_fail_rdonly())
    46   pt = bpf_kfunc_call_test_acquire(&s);                       (in kfunc_call_test_get_mem_fail_rdonly())
    47   if (pt) {                                                   (in kfunc_call_test_get_mem_fail_rdonly())
    48   p = bpf_kfunc_call_test_get_rdonly_mem(pt, 2 * sizeof(int));   (in kfunc_call_test_get_mem_fail_rdonly())
    54   bpf_kfunc_call_test_release(pt);                            (in kfunc_call_test_get_mem_fail_rdonly())
    62   struct prog_test_ref_kfunc *pt;                             (local, in kfunc_call_test_get_mem_fail_use_after_free())
    67   pt = bpf_kfunc_call_test_acquire(&s);                       (in kfunc_call_test_get_mem_fail_use_after_free())
    68   if (pt) {                                                   (in kfunc_call_test_get_mem_fail_use_after_free())
    69   p = bpf_kfunc_call_test_get_rdwr_mem(pt, 2 * sizeof(int));  (in kfunc_call_test_get_mem_fail_use_after_free())
    77   bpf_kfunc_call_test_release(pt);                            (in kfunc_call_test_get_mem_fail_use_after_free())
    [all …]
|
kfunc_call_test.c
    63   struct prog_test_ref_kfunc *pt;                             (local, in kfunc_call_test_ref_btf_id())
    67   pt = bpf_kfunc_call_test_acquire(&s);                       (in kfunc_call_test_ref_btf_id())
    68   if (pt) {                                                   (in kfunc_call_test_ref_btf_id())
    69   if (pt->a != 42 || pt->b != 108)                            (in kfunc_call_test_ref_btf_id())
    71   bpf_kfunc_call_test_release(pt);                            (in kfunc_call_test_ref_btf_id())
    142  struct prog_test_ref_kfunc *pt;                             (local, in kfunc_call_test_get_mem())
    147  pt = bpf_kfunc_call_test_acquire(&s);                       (in kfunc_call_test_get_mem())
    148  if (pt) {                                                   (in kfunc_call_test_get_mem())
    149  p = bpf_kfunc_call_test_get_rdwr_mem(pt, 2 * sizeof(int));  (in kfunc_call_test_get_mem())
    158  p = bpf_kfunc_call_test_get_rdonly_mem(pt, 2 * sizeof(int));   (in kfunc_call_test_get_mem())
    [all …]
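
These selftests exercise the acquire/use/release contract that the BPF verifier enforces on referenced kfunc pointers: every successful bpf_kfunc_call_test_acquire() must be paired with a release on all paths. A plain-C analogue of the same discipline with a manual refcount (all names hypothetical; this is not the kfunc API itself):

    #include <stdio.h>
    #include <stdlib.h>

    struct ref_obj { int refcnt; int a, b; };

    static struct ref_obj *obj_acquire(void)
    {
            struct ref_obj *o = malloc(sizeof(*o));
            if (!o)
                    return NULL;            /* acquire may fail: caller must check */
            o->refcnt = 1;
            o->a = 42;
            o->b = 108;
            return o;
    }

    static void obj_release(struct ref_obj *o)
    {
            if (o && --o->refcnt == 0)
                    free(o);
    }

    int main(void)
    {
            struct ref_obj *pt = obj_acquire();
            if (pt) {
                    if (pt->a != 42 || pt->b != 108)
                            fprintf(stderr, "unexpected contents\n");
                    obj_release(pt);        /* paired with the acquire above */
            }
            return 0;
    }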
|
/linux/drivers/dma/amd/ptdma/

ptdma-dmaengine.c
    60   ae4_log_error(cmd_q->pt, cmd_q->cmd_error);                 (in ae4_check_status_error())
    92   struct pt_device *pt = desc->pt;                            (local, in pt_do_cleanup())
    94   kmem_cache_free(pt->dma_desc_cache, desc);                  (in pt_do_cleanup())
    97   static struct pt_cmd_queue *pt_get_cmd_queue(struct pt_device *pt, struct pt_dma_chan *chan)   (argument, in pt_get_cmd_queue())
    103  if (pt->ver == AE4_DMA_VERSION) {                           (in pt_get_cmd_queue())
    104  ae4 = container_of(pt, struct ae4_device, pt);              (in pt_get_cmd_queue())
    108  cmd_q = &pt->cmd_q;                                         (in pt_get_cmd_queue())
    164  struct pt_device *pt;                                       (local, in pt_dma_start_desc())
    171  pt = pt_cmd->pt;                                            (in pt_dma_start_desc())
    173  cmd_q = pt_get_cmd_queue(pt, chan);                         (in pt_dma_start_desc())
    [all …]
|
ptdma.h
    165  struct pt_device *pt;                                       (member)
    177  struct pt_device *pt;                                       (member)
    186  struct pt_device *pt;                                       (member)
    191  struct pt_device *pt;                                       (member)
    315  int pt_dmaengine_register(struct pt_device *pt);
    316  void pt_dmaengine_unregister(struct pt_device *pt);
    318  void ptdma_debugfs_setup(struct pt_device *pt);
    319  int pt_core_init(struct pt_device *pt);
    320  void pt_core_destroy(struct pt_device *pt);
    325  void pt_check_status_trans(struct pt_device *pt, struct pt_cmd_queue *cmd_q);
    [all …]
|
/linux/drivers/hv/

mshv_eventfd.c
    227  struct mshv_partition *pt = rp->rsmplr_partn;               (local, in mshv_irqfd_resampler_shutdown())
    229  mutex_lock(&pt->irqfds_resampler_lock);                     (in mshv_irqfd_resampler_shutdown())
    232  synchronize_srcu(&pt->pt_irq_srcu);                         (in mshv_irqfd_resampler_shutdown())
    236  mshv_unregister_irq_ack_notifier(pt, &rp->rsmplr_notifier); (in mshv_irqfd_resampler_shutdown())
    240  mutex_unlock(&pt->irqfds_resampler_lock);                   (in mshv_irqfd_resampler_shutdown())
    301  struct mshv_partition *pt = irqfd->irqfd_partn;             (local, in mshv_irqfd_wakeup())
    308  idx = srcu_read_lock(&pt->pt_irq_srcu);                     (in mshv_irqfd_wakeup())
    318  srcu_read_unlock(&pt->pt_irq_srcu, idx);                    (in mshv_irqfd_wakeup())
    327  spin_lock_irqsave(&pt->pt_irqfds_lock, flags);              (in mshv_irqfd_wakeup())
    341  spin_unlock_irqrestore(&pt->pt_irqfds_lock, flags);         (in mshv_irqfd_wakeup())
    [all …]
|
/linux/drivers/dma-buf/

sw_sync.c
    155  struct sync_pt *pt = dma_fence_to_sync_pt(fence);           (local, in timeline_fence_release())
    160  if (!list_empty(&pt->link)) {                               (in timeline_fence_release())
    161  list_del(&pt->link);                                        (in timeline_fence_release())
    162  rb_erase(&pt->node, &parent->pt_tree);                      (in timeline_fence_release())
    179  struct sync_pt *pt = dma_fence_to_sync_pt(fence);           (local, in timeline_fence_set_deadline())
    184  if (ktime_before(deadline, pt->deadline))                   (in timeline_fence_set_deadline())
    185  pt->deadline = deadline;                                    (in timeline_fence_set_deadline())
    187  pt->deadline = deadline;                                    (in timeline_fence_set_deadline())
    212  struct sync_pt *pt, *next;                                  (local, in sync_timeline_signal())
    220  list_for_each_entry_safe(pt, next, &obj->pt_list, link) {   (in sync_timeline_signal())
    [all …]
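
timeline_fence_set_deadline() above only ever moves a sync point's deadline earlier: a later request is ignored. A minimal sketch of that "deadlines may only tighten" rule (plain integers instead of ktime_t; the field names are stand-ins):

    #include <stdio.h>

    struct sync_pt_like {
            long long deadline_ns;
            int has_deadline;
    };

    static void set_deadline(struct sync_pt_like *pt, long long ns)
    {
            if (!pt->has_deadline || ns < pt->deadline_ns) {
                    pt->deadline_ns = ns;    /* keep the earliest request */
                    pt->has_deadline = 1;
            }
    }

    int main(void)
    {
            struct sync_pt_like pt = { 0, 0 };
            set_deadline(&pt, 2000);
            set_deadline(&pt, 1000);     /* earlier: wins */
            set_deadline(&pt, 3000);     /* later: ignored */
            printf("deadline = %lld\n", pt.deadline_ns);   /* 1000 */
            return 0;
    }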
|
/linux/lib/

nlattr.c
    119  void nla_get_range_unsigned(const struct nla_policy *pt,    (argument, in nla_get_range_unsigned())
    122  WARN_ON_ONCE(pt->validation_type != NLA_VALIDATE_RANGE_PTR &&   (in nla_get_range_unsigned())
    123  (pt->min < 0 || pt->max < 0));                              (in nla_get_range_unsigned())
    127  switch (pt->type) {                                         (in nla_get_range_unsigned())
    150  switch (pt->validation_type) {                              (in nla_get_range_unsigned())
    153  range->min = pt->min;                                       (in nla_get_range_unsigned())
    154  range->max = pt->max;                                       (in nla_get_range_unsigned())
    157  *range = *pt->range;                                        (in nla_get_range_unsigned())
    160  range->min = pt->min;                                       (in nla_get_range_unsigned())
    163  range->max = pt->max;                                       (in nla_get_range_unsigned())
    [all …]
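
nla_get_range_unsigned() starts from the full width of the attribute type and then narrows min and/or max depending on the policy's validation type. A compact sketch of that dispatch (the enum and struct here are simplified stand-ins for the nla_policy machinery):

    #include <stdint.h>
    #include <stdio.h>

    enum validate { VALIDATE_RANGE, VALIDATE_MIN, VALIDATE_MAX, VALIDATE_NONE };

    struct policy { enum validate validation_type; int64_t min, max; };
    struct range  { uint64_t min, max; };

    static void get_range_unsigned(const struct policy *pt, struct range *r,
                                   uint64_t type_max)
    {
            r->min = 0;
            r->max = type_max;            /* default: whole range of the type */

            switch (pt->validation_type) {
            case VALIDATE_RANGE:
                    r->min = pt->min;
                    r->max = pt->max;
                    break;
            case VALIDATE_MIN:
                    r->min = pt->min;     /* max stays at the type limit */
                    break;
            case VALIDATE_MAX:
                    r->max = pt->max;     /* min stays at zero */
                    break;
            case VALIDATE_NONE:
                    break;
            }
    }

    int main(void)
    {
            struct policy p = { VALIDATE_MAX, 0, 300 };
            struct range r;
            get_range_unsigned(&p, &r, UINT16_MAX);
            printf("[%llu, %llu]\n", (unsigned long long)r.min,
                   (unsigned long long)r.max);    /* [0, 300] */
            return 0;
    }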
|
/linux/net/netlink/

policy.c
    210  int netlink_policy_dump_attr_size_estimate(const struct nla_policy *pt)   (argument, in netlink_policy_dump_attr_size_estimate())
    215  switch (pt->type) {                                         (in netlink_policy_dump_attr_size_estimate())
    256  const struct nla_policy *pt,                                (argument, in __netlink_policy_dump_write_attr())
    259  int estimate = netlink_policy_dump_attr_size_estimate(pt);  (in __netlink_policy_dump_write_attr())
    267  switch (pt->type) {                                         (in __netlink_policy_dump_write_attr())
    278  if (pt->type == NLA_NESTED_ARRAY)                           (in __netlink_policy_dump_write_attr())
    280  if (state && pt->nested_policy && pt->len &&                (in __netlink_policy_dump_write_attr())
    283  pt->nested_policy,                                          (in __netlink_policy_dump_write_attr())
    284  pt                                                          (in __netlink_policy_dump_write_attr())
    417  netlink_policy_dump_write_attr(struct sk_buff *skb, const struct nla_policy *pt, int nestattr)   (argument, in netlink_policy_dump_write_attr())
    433  const struct nla_policy *pt;                                (local, in netlink_policy_dump_write())
    [all …]
/linux/arch/alpha/kernel/

process.c  (all hits in dump_elf_thread())
    286  dump_elf_thread(elf_greg_t *dest, struct pt_regs *pt, struct thread_info *ti)   (argument)
    289  struct switch_stack *sw = ((struct switch_stack *) pt) - 1;
    291  dest[ 0] = pt->r0;
    292  dest[ 1] = pt->r1;
    293  dest[ 2] = pt->r2;
    294  dest[ 3] = pt->r3;
    295  dest[ 4] = pt->r4;
    296  dest[ 5] = pt->r5;
    297  dest[ 6] = pt->r6;
    298  dest[ 7] = pt->r7;
    [all …]
|
/linux/drivers/media/common/saa7146/

saa7146_core.c  (all hits in saa7146_vmalloc_build_pgtable())
    166  void *saa7146_vmalloc_build_pgtable(struct pci_dev *pci, long length, struct saa7146_pgtable *pt)   (argument)
    175  if (!(pt->slist = vmalloc_to_sg(mem, pages)))
    178  if (saa7146_pgtable_alloc(pci, pt))
    181  pt->nents = pages;
    182  slen = dma_map_sg(&pci->dev, pt->slist, pt->nents, DMA_FROM_DEVICE);
    186  if (0 != saa7146_pgtable_build_single(pci, pt, pt->slist, slen))
    192  dma_unmap_sg(&pci->dev, pt->slist, pt->nents, DMA_FROM_DEVICE);
    194  saa7146_pgtable_free(pci, pt);
    196  kfree(pt->slist);
    197  pt->slist = NULL;
    [all …]
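
The function above builds several resources in sequence (scatterlist, page table, DMA mapping) and the hits at lines 192-197 are its error path, which tears them down in reverse order. A sketch of that goto-ladder unwind style (stub allocators; all names hypothetical):

    #include <stdlib.h>

    static void *step_a(void) { return malloc(16); }   /* e.g. build scatterlist */
    static void *step_b(void) { return malloc(16); }   /* e.g. alloc page table */
    static void *step_c(void) { return NULL; }         /* pretend DMA map fails */

    static int build(void)
    {
            void *a, *b, *c;

            a = step_a();
            if (!a)
                    goto err;
            b = step_b();
            if (!b)
                    goto err_free_a;
            c = step_c();
            if (!c)
                    goto err_free_b;

            free(c);                /* success path (freed here for the demo) */
            free(b);
            free(a);
            return 0;

    err_free_b:
            free(b);                /* undo in reverse order of construction */
    err_free_a:
            free(a);
    err:
            return -1;
    }

    int main(void) { return build() ? 1 : 0; }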
|
/linux/arch/sparc/lib/

NG2memcpy.S  (here "pt" is the SPARC V9 branch-prediction suffix "predict taken", not an identifier)
    188  ba,pt %xcc, __restore_asi
    192  ba,pt %xcc, __restore_asi
    196  ba,pt %xcc, __restore_asi
    200  ba,pt %xcc, __restore_asi
    205  ba,pt %xcc, __restore_asi
    210  ba,pt %xcc, __restore_asi
    215  ba,pt %xcc, __restore_asi
    219  ba,pt %xcc, __restore_fp
    224  ba,pt %xcc, __restore_fp
    229  ba,pt %xcc, __restore_asi
    [all …]
|
/linux/tools/perf/util/intel-pt-decoder/

Build
    1    perf-util-y += intel-pt-pkt-decoder.o intel-pt-insn-decoder.o intel-pt-log.o intel-pt-decoder.o
    6    $(OUTPUT)util/intel-pt-decoder/inat-tables.c: $(inat_tables_script) $(inat_tables_maps)
    12   $(OUTPUT)util/intel-pt-decoder/inat.o: $(srctree)/tools/arch/x86/lib/inat.c $(OUTPUT)util/intel-pt-…
    16   CFLAGS_inat.o += -I$(OUTPUT)util/intel-pt-decoder
    18   $(OUTPUT)util/intel-pt-decoder/insn.o: $(srctree)/tools/arch/x86/lib/insn.c
|
/linux/drivers/gpu/drm/xe/

xe_pt.c
    28   struct xe_pt pt;                                            (member)
    48   static struct xe_pt_dir *as_xe_pt_dir(struct xe_pt *pt)     (argument, in as_xe_pt_dir())
    50   return container_of(pt, struct xe_pt_dir, pt);              (in as_xe_pt_dir())
    77   static void xe_pt_free(struct xe_pt *pt)                    (argument, in xe_pt_free())
    79   if (pt->level)                                              (in xe_pt_free())
    80   kfree(as_xe_pt_dir(pt));                                    (in xe_pt_free())
    82   kfree(pt);                                                  (in xe_pt_free())
    104  struct xe_pt *pt;                                           (local, in xe_pt_create())
    112  pt = (dir) ? &dir->pt : NULL;                               (in xe_pt_create())
    114  pt = kzalloc(sizeof(*pt), GFP_KERNEL);                      (in xe_pt_create())
    [all …]
|
/linux/arch/sparc/kernel/

cherrs.S  (again the SPARC ",pt" predict-taken branch suffix)
    107  ba,pt %xcc, etrap_irq
    116  ba,a,pt %xcc, rtrap_irq
    149  ba,pt %xcc, etrap_irq
    158  ba,a,pt %xcc, rtrap_irq
    192  ble,pt %icc, 1b          ! Not yet
    213  bge,pt %icc, 2b
    216  bge,pt %icc, 1b
    218  ba,a,pt %xcc, dcpe_icpe_tl1_common
    222  ba,pt %xcc, etraptl1
    227  ba,a,pt %xcc, rtrap
    [all …]
|
/linux/tools/lib/bpf/

nlattr.c  (all hits in validate_nla())
    48   struct libbpf_nla_policy *pt;                               (local)
    55   pt = &policy[type];
    57   if (pt->type > LIBBPF_NLA_TYPE_MAX)
    60   if (pt->minlen)
    61   minlen = pt->minlen;
    62   else if (pt->type != LIBBPF_NLA_UNSPEC)
    63   minlen = nla_attr_minlen[pt->type];
    68   if (pt->maxlen && libbpf_nla_len(nla) > pt->maxlen)
    71   if (pt
    [all …]
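
validate_nla() above checks an attribute's length against a per-type minimum, where an explicit policy minlen overrides the type's default. A compact userspace sketch of that rule (simplified types; the constants mirror the libbpf names but are stand-ins):

    #include <stdio.h>

    enum { NLA_UNSPEC, NLA_U8, NLA_U16, NLA_U32, NLA_TYPE_MAX = NLA_U32 };

    static const int nla_attr_minlen[NLA_TYPE_MAX + 1] = {
            [NLA_U8] = 1, [NLA_U16] = 2, [NLA_U32] = 4,
    };

    struct policy { int type; int minlen; int maxlen; };

    static int validate_len(const struct policy *pt, int len)
    {
            int minlen = 0;

            if (pt->type > NLA_TYPE_MAX)
                    return 0;              /* unknown type: let it through */
            if (pt->minlen)
                    minlen = pt->minlen;   /* explicit minimum wins */
            else if (pt->type != NLA_UNSPEC)
                    minlen = nla_attr_minlen[pt->type];

            if (len < minlen)
                    return -1;             /* too short for the declared type */
            if (pt->maxlen && len > pt->maxlen)
                    return -1;             /* longer than the policy allows */
            return 0;
    }

    int main(void)
    {
            struct policy p = { NLA_U32, 0, 0 };
            printf("%d\n", validate_len(&p, 2));   /* -1: too short for u32 */
            printf("%d\n", validate_len(&p, 4));   /*  0: ok */
            return 0;
    }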