Searched refs:AARCH64_INSN_SIZE (Results 1 – 10 of 10) sorted by relevance

/linux/arch/arm64/include/asm/
uprobes.h
    14   #define UPROBE_SWBP_INSN_SIZE AARCH64_INSN_SIZE
    15   #define UPROBE_XOL_SLOT_BYTES AARCH64_INSN_SIZE

insn-def.h
    9    #define AARCH64_INSN_SIZE 4    (macro definition)

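Note: insn-def.h is the lone definition site; every other hit consumes the constant. AArch64 is a fixed-width ISA, so a single 4-byte constant is enough to convert between byte ranges and instruction counts. A minimal userspace sketch of that arithmetic (insn_count() and next_insn() are illustrative names, not kernel helpers):

/*
 * Standalone sketch, not kernel code: shows the count/step arithmetic
 * that a fixed AARCH64_INSN_SIZE makes possible.
 */
#include <stdint.h>
#include <stdio.h>

#define AARCH64_INSN_SIZE 4	/* mirrors arch/arm64/include/asm/insn-def.h */

/* Number of instructions covered by the byte range [start, end). */
static unsigned int insn_count(uintptr_t start, uintptr_t end)
{
	return (end - start) / AARCH64_INSN_SIZE;
}

/* Address of the instruction that follows the one at @addr. */
static uintptr_t next_insn(uintptr_t addr)
{
	return addr + AARCH64_INSN_SIZE;
}

int main(void)
{
	uintptr_t start = 0x1000, end = 0x1010;	/* hypothetical code range */

	printf("%u instructions in range\n", insn_count(start, end));
	printf("next insn after 0x%lx is at 0x%lx\n",
	       (unsigned long)start, (unsigned long)next_insn(start));
	return 0;
}
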
jump_label.h
    17   #define JUMP_LABEL_NOP_SIZE AARCH64_INSN_SIZE

alternative-macros.h
    203  nops (662b-661b) / AARCH64_INSN_SIZE

/linux/arch/arm64/kernel/
patching.c
    59   ret = copy_from_kernel_nofault(&val, addr, AARCH64_INSN_SIZE);    in aarch64_insn_read()
    75   ret = copy_to_kernel_nofault(waddr, &insn, AARCH64_INSN_SIZE);    in __aarch64_insn_write()
    192  (uintptr_t)tp + AARCH64_INSN_SIZE);    in aarch64_insn_patch_text_nosync()

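Note: the patching.c hits read and write kernel text strictly in whole-instruction units, so a u32 always holds exactly one instruction. A standalone sketch of that pattern, with memcpy() standing in for the kernel's nofault copy helpers (read_insn() and write_insn() are illustrative names):

/*
 * Standalone sketch, not kernel code: instruction text is accessed in
 * AARCH64_INSN_SIZE (4-byte) units, one instruction per access.
 */
#include <stdint.h>
#include <string.h>
#include <stdio.h>

#define AARCH64_INSN_SIZE 4

static int read_insn(const void *addr, uint32_t *insn)
{
	/* kernel: copy_from_kernel_nofault() */
	memcpy(insn, addr, AARCH64_INSN_SIZE);
	return 0;
}

static int write_insn(void *addr, uint32_t insn)
{
	/* kernel: copy_to_kernel_nofault(), followed by cache maintenance */
	memcpy(addr, &insn, AARCH64_INSN_SIZE);
	return 0;
}

int main(void)
{
	uint32_t text[2] = { 0xd503201f, 0xd65f03c0 };	/* NOP; RET encodings */
	uint32_t insn;

	read_insn(&text[0], &insn);
	printf("slot 0 before patch: 0x%08x\n", (unsigned)insn);
	write_insn(&text[0], 0xd503201f);	/* patch a NOP into slot 0 */
	return 0;
}
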
alternative.c
    167  nr_inst = alt->orig_len / AARCH64_INSN_SIZE;    in __apply_alternatives()

cpufeature.c
    3906 arm64_skip_faulting_instruction(regs, AARCH64_INSN_SIZE);    in do_emulate_mrs()

/linux/arch/arm64/kernel/probes/
uprobes.c
    50   else if (!IS_ALIGNED(addr, AARCH64_INSN_SIZE))    in arch_uprobe_analyze_insn()

simulate-insn.c
    203  arm64_skip_faulting_instruction(regs, AARCH64_INSN_SIZE);    in simulate_nop()

/linux/arch/arm64/net/
bpf_jit_comp.c
    855  if ((ctx->idx + PLT_TARGET_OFFSET / AARCH64_INSN_SIZE) % 2)    in build_plt()
    860  emit(A64_LDR64LIT(tmp, 2 * AARCH64_INSN_SIZE), ctx);    in build_plt()
    958  fixup_offset = (long)&ex->fixup - (pc + AARCH64_INSN_SIZE);    in add_exception_handler()
    1940 ctx.offset[i] *= AARCH64_INSN_SIZE;    in bpf_int_jit_compile()
    2260 emit(A64_ADR(A64_LR, AARCH64_INSN_SIZE * 2), ctx);    in prepare_trampoline()
    2361 return ret < 0 ? ret : ret * AARCH64_INSN_SIZE;    in arch_bpf_trampoline_size()
    2416 ret *= AARCH64_INSN_SIZE;    in arch_prepare_bpf_trampoline()
    2561 ip = image + POKE_OFFSET * AARCH64_INSN_SIZE;    in bpf_arch_text_poke()

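Note: the BPF JIT hits all scale instruction counts into byte offsets and sizes by multiplying by AARCH64_INSN_SIZE. A standalone sketch of that conversion (struct jit_ctx and emitted_bytes() here are simplified stand-ins, not the kernel's definitions):

/*
 * Standalone sketch, not kernel code: the JIT counts emitted 32-bit
 * instructions and derives byte offsets by scaling with AARCH64_INSN_SIZE.
 */
#include <stddef.h>
#include <stdio.h>

#define AARCH64_INSN_SIZE 4

struct jit_ctx {
	size_t idx;	/* number of instructions emitted so far */
};

/* Byte size of the code emitted so far. */
static size_t emitted_bytes(const struct jit_ctx *ctx)
{
	return ctx->idx * AARCH64_INSN_SIZE;
}

int main(void)
{
	struct jit_ctx ctx = { .idx = 37 };	/* hypothetical trampoline length */

	printf("%zu instructions -> %zu bytes of code\n",
	       ctx.idx, emitted_bytes(&ctx));
	return 0;
}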