/linux/arch/arc/mm/mmap.c
    67: [VM_EXEC] = PAGE_U_X_R,
    68: [VM_EXEC | VM_READ] = PAGE_U_X_R,
    69: [VM_EXEC | VM_WRITE] = PAGE_U_X_R,
    70: [VM_EXEC | VM_WRITE | VM_READ] = PAGE_U_X_R,
    75: [VM_SHARED | VM_EXEC] = PAGE_U_X_R,
    76: [VM_SHARED | VM_EXEC | VM_READ] = PAGE_U_X_R,
    77: [VM_SHARED | VM_EXEC | VM_WRITE] = PAGE_U_X_W_R,
    78: [VM_SHARED | VM_EXEC | VM_WRITE | VM_READ] = PAGE_U_X_W_R

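These arc entries (and the microblaze, sh, openrisc, loongarch, m68k, and arm mmu.c tables below) are the per-architecture protection_map[] initializers: each combination of VM_READ/VM_WRITE/VM_EXEC/VM_SHARED maps to that architecture's page-protection bits. A minimal sketch of how the table is consumed, modeled on the generic vm_get_page_prot() in mm/mmap.c (architectures selecting CONFIG_ARCH_HAS_VM_GET_PAGE_PROT supply their own version):

    /* The four access bits form the index into protection_map[16]. */
    pgprot_t vm_get_page_prot(unsigned long vm_flags)
    {
            return protection_map[vm_flags &
                            (VM_READ | VM_WRITE | VM_EXEC | VM_SHARED)];
    }
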
/linux/arch/microblaze/mm/init.c
    248: [VM_EXEC] = PAGE_READONLY,
    249: [VM_EXEC | VM_READ] = PAGE_READONLY_X,
    250: [VM_EXEC | VM_WRITE] = PAGE_COPY,
    251: [VM_EXEC | VM_WRITE | VM_READ] = PAGE_COPY_X,
    256: [VM_SHARED | VM_EXEC] = PAGE_READONLY,
    257: [VM_SHARED | VM_EXEC | VM_READ] = PAGE_READONLY_X,
    258: [VM_SHARED | VM_EXEC | VM_WRITE] = PAGE_SHARED,
    259: [VM_SHARED | VM_EXEC | VM_WRITE | VM_READ] = PAGE_SHARED_X

/linux/arch/sh/mm/mmap.c
    27: [VM_EXEC] = PAGE_EXECREAD,
    28: [VM_EXEC | VM_READ] = PAGE_EXECREAD,
    29: [VM_EXEC | VM_WRITE] = PAGE_COPY,
    30: [VM_EXEC | VM_WRITE | VM_READ] = PAGE_COPY,
    35: [VM_SHARED | VM_EXEC] = PAGE_EXECREAD,
    36: [VM_SHARED | VM_EXEC | VM_READ] = PAGE_EXECREAD,
    37: [VM_SHARED | VM_EXEC | VM_WRITE] = PAGE_RWX,
    38: [VM_SHARED | VM_EXEC | VM_WRITE | VM_READ] = PAGE_RWX

/linux/arch/openrisc/mm/init.c
    247: [VM_EXEC] = PAGE_READONLY,
    248: [VM_EXEC | VM_READ] = PAGE_READONLY_X,
    249: [VM_EXEC | VM_WRITE] = PAGE_COPY,
    250: [VM_EXEC | VM_WRITE | VM_READ] = PAGE_COPY_X,
    255: [VM_SHARED | VM_EXEC] = PAGE_READONLY,
    256: [VM_SHARED | VM_EXEC | VM_READ] = PAGE_READONLY_X,
    257: [VM_SHARED | VM_EXEC | VM_WRITE] = PAGE_SHARED,
    258: [VM_SHARED | VM_EXEC | VM_WRITE | VM_READ] = PAGE_SHARED_X

/linux/arch/loongarch/mm/cache.c
    174: [VM_EXEC] = __pgprot(_CACHE_CC | _PAGE_VALID | ...
    176: [VM_EXEC | VM_READ] = __pgprot(_CACHE_CC | _PAGE_VALID | ...
    178: [VM_EXEC | VM_WRITE] = __pgprot(_CACHE_CC | _PAGE_VALID | ...
    180: [VM_EXEC | VM_WRITE | VM_READ] = __pgprot(_CACHE_CC | _PAGE_VALID | ...
    194: [VM_SHARED | VM_EXEC] = __pgprot(_CACHE_CC | _PAGE_VALID | ...
    196: [VM_SHARED | VM_EXEC | VM_READ] = __pgprot(_CACHE_CC | _PAGE_VALID | ...
    198: [VM_SHARED | VM_EXEC | VM_WRITE] = __pgprot(_CACHE_CC | _PAGE_VALID | ...
    201: [VM_SHARED | VM_EXEC | VM_WRITE | VM_READ] = __pgprot(_CACHE_CC | _PAGE_VALID | ...
    (each __pgprot() initializer continues on the following source line; the search output truncates them)

/linux/arch/m68k/mm/motorola.c
    411: [VM_EXEC] = PAGE_READONLY_C,
    412: [VM_EXEC | VM_READ] = PAGE_READONLY_C,
    413: [VM_EXEC | VM_WRITE] = PAGE_COPY_C,
    414: [VM_EXEC | VM_WRITE | VM_READ] = PAGE_COPY_C,
    419: [VM_SHARED | VM_EXEC] = PAGE_READONLY_C,
    420: [VM_SHARED | VM_EXEC | VM_READ] = PAGE_READONLY_C,
    421: [VM_SHARED | VM_EXEC | VM_WRITE] = PAGE_SHARED_C,
    422: [VM_SHARED | VM_EXEC | VM_WRITE | VM_READ] = PAGE_SHARED_C

/linux/arch/nios2/mm/cacheflush.c
    138: if (vma == NULL || (vma->vm_flags & VM_EXEC))  [in flush_cache_range()]
    159: if (vma->vm_flags & VM_EXEC)  [in flush_cache_page()]
    236: if (vma->vm_flags & VM_EXEC)  [in update_mmu_cache_range()]
    268: if (vma->vm_flags & VM_EXEC)  [in copy_from_user_page()]
    279: if (vma->vm_flags & VM_EXEC)  [in copy_to_user_page()]

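In nios2 (and likewise in the csky and alpha hits below), VM_EXEC gates instruction-cache maintenance: only mappings that can be executed from need their icache lines invalidated when the underlying page changes. A hedged sketch of the pattern (the function name follows the alpha flush_icache_user_page() match below; the flush call stands in for whatever the architecture provides):

    /* Data-only VMAs skip icache maintenance entirely. */
    static void flush_icache_user_page(struct vm_area_struct *vma,
                                       struct page *page,
                                       unsigned long addr, int len)
    {
            if (vma->vm_flags & VM_EXEC)
                    flush_icache_range(addr, addr + len);
    }
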
/linux/arch/arm/mm/cache-v4wt.S
    67: mov r2, #VM_EXEC
    70: tst r2, #VM_EXEC
    92: tst r2, #VM_EXEC

/linux/arch/arm/mm/cache-fa.S
    64: mov r2, #VM_EXEC
    67: tst r2, #VM_EXEC
    91: 1: tst r2, #VM_EXEC
    97: tst r2, #VM_EXEC

/linux/arch/arm/mm/proc-arm922.S
    131: mov r2, #VM_EXEC
    141: tst r2, #VM_EXEC
    164: tst r2, #VM_EXEC
    169: tst r2, #VM_EXEC

/linux/arch/arm/mm/proc-feroceon.S
    150: mov r2, #VM_EXEC
    162: tst r2, #VM_EXEC
    184: 1: tst r2, #VM_EXEC
    193: tst r2, #VM_EXEC
    464: @ if r2 contains the VM_EXEC bit then the next 2 ops are done already
    465: tst r2, #VM_EXEC

/linux/arch/arm/mm/proc-arm946.S
    105: mov r2, #VM_EXEC
    119: tst r2, #VM_EXEC
    142: 1: tst r2, #VM_EXEC
    160: tst r2, #VM_EXEC

/linux/arch/arm/mm/proc-mohawk.S
    114: mov r2, #VM_EXEC
    118: tst r2, #VM_EXEC
    141: 1: tst r2, #VM_EXEC
    150: tst r2, #VM_EXEC

/linux/arch/arm/mm/proc-arm920.S
    129: mov r2, #VM_EXEC
    139: tst r2, #VM_EXEC
    162: tst r2, #VM_EXEC
    167: tst r2, #VM_EXEC

/linux/arch/arm/mm/proc-arm925.S
    165: mov r2, #VM_EXEC
    177: tst r2, #VM_EXEC
    198: 1: tst r2, #VM_EXEC
    216: tst r2, #VM_EXEC

/linux/arch/arm/mm/proc-arm926.S
    131: mov r2, #VM_EXEC
    140: tst r2, #VM_EXEC
    161: 1: tst r2, #VM_EXEC
    179: tst r2, #VM_EXEC

/linux/arch/arm/mm/cache-v4wb.S
    112: tst r2, #VM_EXEC @ executable region?
    123: tst r2, #VM_EXEC

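Across these arm cache-*.S and proc-*.S files, r2 holds the VMA's vm_flags on entry to the per-CPU flush_user_cache_range op, so `tst r2, #VM_EXEC` selects the extra I-cache work for executable regions; the `mov r2, #VM_EXEC` hits typically sit in whole-cache flush paths that force the executable case. A sketch of the C-side caller, modeled on arch/arm/include/asm/cacheflush.h:

    /* The third argument lands in r2, where the assembly tests VM_EXEC. */
    static inline void flush_cache_range(struct vm_area_struct *vma,
                                         unsigned long start, unsigned long end)
    {
            __cpuc_flush_user_range(start & PAGE_MASK, PAGE_ALIGN(end),
                                    vma->vm_flags);
    }
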
/linux/arch/arm/mm/mmu.c
    424: [VM_EXEC] = __PAGE_READONLY_EXEC,
    425: [VM_EXEC | VM_READ] = __PAGE_READONLY_EXEC,
    426: [VM_EXEC | VM_WRITE] = __PAGE_COPY_EXEC,
    427: [VM_EXEC | VM_WRITE | VM_READ] = __PAGE_COPY_EXEC,
    432: [VM_SHARED | VM_EXEC] = __PAGE_READONLY_EXEC,
    433: [VM_SHARED | VM_EXEC | VM_READ] = __PAGE_READONLY_EXEC,
    434: [VM_SHARED | VM_EXEC | VM_WRITE] = __PAGE_SHARED_EXEC,
    435: [VM_SHARED | VM_EXEC | VM_WRITE | VM_READ] = __PAGE_SHARED_EXEC

/linux/arch/csky/abiv1/cacheflush.c
    63: if (vma->vm_flags & VM_EXEC)  [in update_mmu_cache_range()]
    73: if (vma->vm_flags & VM_EXEC)  [in flush_cache_range()]

/linux/tools/testing/selftests/bpf/progs/find_vma.c
    13: #define VM_EXEC 0x00000004  [macro]
    31: if (vma->vm_flags & VM_EXEC)  [in check_vma()]

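find_vma.c exercises the bpf_find_vma() helper; since the kernel's VM_* bits are not exported to BPF programs, the test redefines VM_EXEC locally before testing vma->vm_flags in its callback. A minimal sketch along the same lines (the globals `target_addr` and `found_exec` are illustrative, not part of the selftest):

    #include "vmlinux.h"
    #include <bpf/bpf_helpers.h>

    #define VM_EXEC 0x00000004      /* not exported to BPF; redefined locally */

    unsigned long target_addr;      /* illustrative: set from userspace */
    int found_exec;

    static long check_vma(struct task_struct *task, struct vm_area_struct *vma,
                          void *data)
    {
            if (vma->vm_flags & VM_EXEC)
                    found_exec = 1;
            return 0;
    }

    SEC("perf_event")
    int handle_pe(void *ctx)
    {
            struct task_struct *task = bpf_get_current_task_btf();

            /* find the VMA covering target_addr and run check_vma() on it */
            bpf_find_vma(task, target_addr, check_vma, NULL, 0);
            return 0;
    }

    char LICENSE[] SEC("license") = "GPL";
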
/linux/arch/arm64/kernel/vdso.c
    132: VM_READ|VM_EXEC|gp_flags| ...  [in __setup_additional_pages()]
    259: VM_READ | VM_EXEC | ...  [in aarch32_kuser_helpers_setup()]
    283: VM_READ | VM_EXEC | VM_MAYREAD | ...  [in aarch32_sigreturn_setup()]

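The arm64 vDSO code installs its pages with _install_special_mapping(), combining VM_READ | VM_EXEC with VM_MAY* bits that cap what mprotect() can later grant. A hedged sketch of the call shape (`vdso_base` and `vdso_text_mapping` are placeholder names):

    /* Sketch: map one read+exec page as a special mapping. VM_MAYWRITE
     * is kept so ptrace can still plant breakpoints, even though
     * PROT_WRITE itself is never granted. */
    vma = _install_special_mapping(mm, vdso_base, PAGE_SIZE,
                                   VM_READ | VM_EXEC |
                                   VM_MAYREAD | VM_MAYWRITE | VM_MAYEXEC,
                                   &vdso_text_mapping);
    if (IS_ERR(vma))
            return PTR_ERR(vma);
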
/linux/tools/testing/vma/vma_internal.h
    50: #define VM_EXEC 0x00000004  [macro]
    72: #define VM_ACCESS_FLAGS (VM_READ | VM_WRITE | VM_EXEC)
    92: #define TASK_EXEC ((current->personality & READ_IMPLIES_EXEC) ? VM_EXEC : 0)
    696: ((flags & (VM_EXEC | VM_WRITE | VM_STACK)) == VM_EXEC)
    1362: if (!(new & VM_EXEC))  [in map_deny_write_exec()]
    1370: if (!(old & VM_EXEC))  [in map_deny_write_exec()]

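vma_internal.h mirrors kernel definitions so the VMA unit tests build in userspace; the map_deny_write_exec() matches at 1362/1370 implement the MDWE (memory-deny-write-execute) rules. A sketch of the check, modeled on include/linux/mm.h (the test that MDWE is actually enabled for the process is elided here):

    /* MDWE policy: deny mappings that are writable and executable at
     * once, and deny turning a non-executable mapping executable. */
    static inline bool map_deny_write_exec(unsigned long old, unsigned long new)
    {
            if (!(new & VM_EXEC))
                    return false;   /* not executable: nothing to deny */
            if (new & VM_WRITE)
                    return true;    /* writable + executable at once: denied */
            if (!(old & VM_EXEC))
                    return true;    /* gaining exec it never had: denied */
            return false;
    }
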
/linux/arch/sparc/mm/fault_64.c
    402: if ((fault_code & FAULT_CODE_ITLB) && !(vma->vm_flags & VM_EXEC)) {  [in do_sparc64_fault()]
    417: (vma->vm_flags & VM_EXEC) != 0 &&  [in do_sparc64_fault()]
    425: if (!(vma->vm_flags & (VM_READ | VM_EXEC)))  [in do_sparc64_fault()]

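The sparc64 fault handler uses VM_EXEC in its access checks: an instruction-TLB fault against a non-executable VMA is a protection violation, while plain reads are allowed on anything readable or executable. The pattern, restated as a hedged C sketch (`access_ok_for_fault` is a hypothetical name, not the kernel's):

    /* Sketch of the access checks in do_sparc64_fault(). */
    static bool access_ok_for_fault(unsigned int fault_code,
                                    struct vm_area_struct *vma)
    {
            if ((fault_code & FAULT_CODE_ITLB) && !(vma->vm_flags & VM_EXEC))
                    return false;   /* executing from a non-exec mapping */
            if (fault_code & FAULT_CODE_WRITE)
                    return vma->vm_flags & VM_WRITE;
            return vma->vm_flags & (VM_READ | VM_EXEC);
    }
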
/linux/arch/x86/um/mem_32.c
    19: vm_flags_init(&gate_vma, VM_READ | VM_MAYREAD | VM_EXEC | VM_MAYEXEC);  [in gate_vma_init()]

/linux/arch/alpha/include/asm/cacheflush.h
    41: if (vma->vm_flags & VM_EXEC) {  [in flush_icache_user_page()]