Searched refs: allocated_stack (results 1 – 4 of 4, sorted by relevance)
Result 1: include/linux/bpf_verifier.h
   342  int allocated_stack;                                          member
   469  (((slot < frame->allocated_stack / BPF_REG_SIZE) && \
   476  iter < frame->allocated_stack / BPF_REG_SIZE; \
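The hit at 342 is the allocated_stack member of struct bpf_func_state; the hits at 469/476 are from the bpf_get_spilled_reg()/bpf_for_each_spilled_reg() helpers that iterate over it. The key arithmetic: allocated_stack counts bytes, while the verifier tracks the stack in BPF_REG_SIZE (8-byte) slots, so allocated_stack / BPF_REG_SIZE is the number of valid slot indices. A minimal userspace sketch of that iteration pattern (toy_frame and toy_for_each_slot are simplified stand-ins, not the kernel types):

#include <stdio.h>

#define BPF_REG_SIZE 8	/* bytes tracked per verifier stack slot */

struct toy_frame {
	int allocated_stack;	/* bytes of stack state currently allocated */
};

/* Simplified analogue of bpf_for_each_spilled_reg(): visit every slot
 * index below allocated_stack / BPF_REG_SIZE. */
#define toy_for_each_slot(iter, frame) \
	for ((iter) = 0; (iter) < (frame)->allocated_stack / BPF_REG_SIZE; (iter)++)

int main(void)
{
	struct toy_frame frame = { .allocated_stack = 24 };	/* 3 slots */
	int i;

	/* Slot i corresponds to frame-pointer offset -(i + 1) * BPF_REG_SIZE. */
	toy_for_each_slot(i, &frame)
		printf("slot %d -> fp-%d\n", i, (i + 1) * BPF_REG_SIZE);
	return 0;
}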
Result 2: tools/testing/selftests/bpf/progs/verifier_search_pruning.c
   277  __naked void allocated_stack(void)                            in allocated_stack()
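The lone selftest hit is a __naked BPF program named allocated_stack(). Verifier selftests of this kind are written as inline BPF assembly with expectation macros from bpf_misc.h; a rough skeleton of the form (the program body here is illustrative, not the actual test):

#include <linux/bpf.h>
#include <bpf/bpf_helpers.h>
#include "bpf_misc.h"

SEC("socket")
__description("allocated_stack")
__success
/* The body is BPF inline assembly; spilling through the frame pointer
 * (r10) is what forces the verifier to grow allocated_stack. */
__naked void allocated_stack(void)
{
	asm volatile (
	"r0 = 0;"
	"*(u64 *)(r10 - 8) = r0;"	/* spill: allocated_stack becomes 8 */
	"r0 = *(u64 *)(r10 - 8);"	/* fill it back */
	"exit;"
	::: __clobber_all);
}

char _license[] SEC("license") = "GPL";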
Result 3: kernel/bpf/verifier.c
   612  int allocated_slots = state->allocated_stack / BPF_REG_SIZE;      in is_spi_bounds_valid()
   856  for (i = 1; i < state->allocated_stack / BPF_REG_SIZE; i++) {     in unmark_stack_slots_dynptr()
  1446  size_t n = src->allocated_stack / BPF_REG_SIZE;                   in copy_stack_state()
  1453  dst->allocated_stack = src->allocated_stack;                      in copy_stack_state()
  1473  size_t old_n = state->allocated_stack / BPF_REG_SIZE, n;          in grow_stack_state()
  1486  state->allocated_stack = size;                                    in grow_stack_state()
  4635  for (j = 0; j < func->allocated_stack / BPF_REG_SIZE; j++) {      in mark_all_scalars_precise()
  4665  for (j = 0; j < func->allocated_stack / BPF_REG_SIZE; j++) {      in mark_all_scalars_imprecise()
  4899  if (verifier_bug_if(i >= func->allocated_stack / BPF_REG_SIZE,    in __mark_chain_precision()
  4901  i, func->allocated_stack / BPF_REG_SIZE))                         in __mark_chain_precision()
  [all …]
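The copy_stack_state()/grow_stack_state() hits show the two operations that maintain the invariant: stack state grows in whole BPF_REG_SIZE slots, and allocated_stack is copied along with the slot array when verifier states are duplicated. A hedged userspace sketch of that resize-and-copy logic (toy_func_state, toy_stack_slot, and the realloc/memset details are simplified stand-ins for the kernel's helpers):

#include <stdlib.h>
#include <string.h>

#define BPF_REG_SIZE 8

struct toy_stack_slot {
	char slot_type[BPF_REG_SIZE];	/* per-byte contents tag */
};

struct toy_func_state {
	int allocated_stack;		/* always a multiple of BPF_REG_SIZE */
	struct toy_stack_slot *stack;	/* one element per slot */
};

/* Sketch of the grow_stack_state() idea: round the requested byte size
 * up to whole slots, reallocate, zero the new tail, record the new size. */
static int toy_grow_stack_state(struct toy_func_state *state, int size)
{
	size_t old_n = state->allocated_stack / BPF_REG_SIZE;
	size_t n = (size + BPF_REG_SIZE - 1) / BPF_REG_SIZE;	/* round up */
	struct toy_stack_slot *stack;

	if (n <= old_n)
		return 0;		/* already large enough */

	stack = realloc(state->stack, n * sizeof(*stack));
	if (!stack)
		return -1;
	memset(stack + old_n, 0, (n - old_n) * sizeof(*stack));

	state->stack = stack;
	state->allocated_stack = n * BPF_REG_SIZE;
	return 0;
}

/* Sketch of copy_stack_state(): duplicate every allocated slot and
 * mirror allocated_stack itself, matching the hits at 1446/1453. */
static int toy_copy_stack_state(struct toy_func_state *dst,
				const struct toy_func_state *src)
{
	size_t n = src->allocated_stack / BPF_REG_SIZE;

	if (toy_grow_stack_state(dst, src->allocated_stack))
		return -1;
	memcpy(dst->stack, src->stack, n * sizeof(*src->stack));
	dst->allocated_stack = src->allocated_stack;
	return 0;
}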
Result 4: kernel/bpf/log.c
   777  for (i = 0; i < state->allocated_stack / BPF_REG_SIZE; i++) {     in print_verifier_state()
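print_verifier_state() walks the same allocated_stack / BPF_REG_SIZE range when dumping stack state to the verifier log, printing occupied slots by their frame-pointer offset (fp-8, fp-16, ...). A toy version of that loop (the slot-type tags and names are illustrative stand-ins for the verifier's STACK_* markings):

#include <stdio.h>

#define BPF_REG_SIZE 8

enum toy_slot { TOY_INVALID, TOY_SPILL, TOY_MISC };	/* stand-in tags */

struct toy_state {
	int allocated_stack;
	enum toy_slot slot_type[64];	/* one tag per 8-byte slot */
};

/* Toy version of the loop at 777: slot i sits at fp-(i + 1) * 8, and
 * only slots with tracked contents are printed. */
static void toy_print_stack(const struct toy_state *state)
{
	for (int i = 0; i < state->allocated_stack / BPF_REG_SIZE; i++) {
		if (state->slot_type[i] == TOY_INVALID)
			continue;
		printf(" fp-%d=%s", (i + 1) * BPF_REG_SIZE,
		       state->slot_type[i] == TOY_SPILL ? "spill" : "misc");
	}
	printf("\n");
}

int main(void)
{
	struct toy_state st = { .allocated_stack = 16 };

	st.slot_type[0] = TOY_SPILL;	/* fp-8 holds a spilled register */
	toy_print_stack(&st);		/* prints " fp-8=spill" */
	return 0;
}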