/linux/arch/x86/power/

hibernate_asm_64.S:
     33  movq mmu_cr4_features(%rip), %rax
     34  movq %rax, %rdx
     39  movq %rax, %cr4; # turn PGE back on
     42  movq $saved_context, %rax
     43  movq pt_regs_sp(%rax), %rsp
     44  movq pt_regs_bp(%rax), %rbp
     45  movq pt_regs_si(%rax), %rsi
     46  movq pt_regs_di(%rax), %rdi
     47  movq pt_regs_bx(%rax), %rbx
     48  movq pt_regs_cx(%rax), %rcx
    [all …]

/linux/arch/x86/kernel/

relocate_kernel_64.S:
     70  movq %cr0, %rax
     71  movq %rax, CR0(%r11)
     72  movq %cr3, %rax
     73  movq %rax, CR3(%r11)
     74  movq %cr4, %rax
     75  movq %rax, CR4(%r11)
     78  movq %rax, %r13
    129  movq %cr4, %rax
    130  andq $~(X86_CR4_CET), %rax
    131  movq %rax, %cr4
    [all …]

head_64.S:
     75  leaq .Lon_kernel_cs(%rip), %rax
     76  pushq %rax
    109  addq %rcx, %rax
    112  mov %rax, %rdi
    127  movq %rax, %cr3
    173  movq phys_base(%rip), %rax
    174  addq $(init_top_pgt - __START_KERNEL_map), %rax
    181  addq sme_me_mask(%rip), %rax
    187  movq %rax, %cr3
    296  movq trampoline_lock(%rip), %rax
    [all …]

ftrace_64.S:
     83  movq %rax, RAX(%rsp)
    128  movq RAX(%rsp), %rax
    183  movq RIP(%rsp), %rax
    184  movq %rax, MCOUNT_REG_SIZE(%rsp)
    247  movq EFLAGS(%rsp), %rax
    248  movq %rax, MCOUNT_REG_SIZE(%rsp)
    251  movq RIP(%rsp), %rax
    252  movq %rax, MCOUNT_REG_SIZE+8(%rsp)
    262  movq ORIG_RAX(%rsp), %rax
    263  movq %rax, MCOUNT_REG_SIZE-8(%rsp)
    [all …]

/linux/tools/arch/x86/lib/

memset_64.S:
     38  movq %r9,%rax
     51  movabs $0x0101010101010101,%rax
     52  imulq %rcx,%rax
     67  movq %rax,(%rdi)
     68  movq %rax,8(%rdi)
     69  movq %rax,16(%rdi)
     70  movq %rax,24(%rdi)
     71  movq %rax,32(%rdi)
     72  movq %rax,40(%rdi)
     73  movq %rax,48(%rdi)
    [all …]

/linux/arch/x86/lib/

memset_64.S:
     38  movq %r9,%rax
     51  movabs $0x0101010101010101,%rax
     52  imulq %rcx,%rax
     67  movq %rax,(%rdi)
     68  movq %rax,8(%rdi)
     69  movq %rax,16(%rdi)
     70  movq %rax,24(%rdi)
     71  movq %rax,32(%rdi)
     72  movq %rax,40(%rdi)
     73  movq %rax,48(%rdi)
    [all …]
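
Both memset_64.S copies above (the arch/x86/lib original and its tools/ mirror) broadcast the fill byte into every byte of %rax by multiplying it with 0x0101010101010101, then store the result in an unrolled run of 8-byte movq writes. A minimal C sketch of that broadcast-and-store idea, without the alignment and tail handling of the real routine (fill_broadcast is an illustrative name, not a kernel symbol):

    #include <stdint.h>
    #include <stddef.h>

    /* Multiplying a byte by 0x0101010101010101 replicates it into all
     * eight bytes of a 64-bit word (the movabs + imulq pair above);
     * the loop body stands in for the unrolled movq %rax,N(%rdi) run. */
    static void fill_broadcast(uint64_t *dst, unsigned char c, size_t nwords)
    {
        uint64_t pattern = (uint64_t)c * 0x0101010101010101ULL;
        size_t i;

        for (i = 0; i < nwords; i++)
            dst[i] = pattern;
    }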

hweight.S:
     49  movabsq $0x5555555555555555, %rax
     51  andq %rdx, %rax  # t &= 0x5555555555555555
     53  subq %rax, %rdi  # w -= t
     55  movq %rdi, %rax  # w -> t
     57  andq %rdx, %rax  # t &= 0x3333333333333333
     59  addq %rdx, %rax  # w = w_tmp + t
     61  movq %rax, %rdx  # w -> t
     63  addq %rdx, %rax  # w_tmp += t
     65  andq %rdx, %rax  # w_tmp &= 0x0f0f0f0f0f0f0f0f
     67  imulq %rdx, %rax # w_tmp *= 0x0101010101010101
    [all …]
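
hweight.S is the classic SWAR population count, and the matched comments name each mask. The same steps in C, as a sketch rather than a transcription of the kernel's __sw_hweight64:

    #include <stdint.h>

    /* Pair counts via 0x5555..., nibble counts via 0x3333... and
     * 0x0f0f..., then one multiply by 0x0101... (the imulq above)
     * accumulates the total into the top byte. */
    static unsigned int popcount64_sketch(uint64_t w)
    {
        w -= (w >> 1) & 0x5555555555555555ULL;               /* w -= t */
        w = (w & 0x3333333333333333ULL) +
            ((w >> 2) & 0x3333333333333333ULL);
        w = (w + (w >> 4)) & 0x0f0f0f0f0f0f0f0fULL;
        return (w * 0x0101010101010101ULL) >> 56;
    }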

clear_page_64.S:
     31  #define PUT(x) movq %rax,x*8(%rdi)
     32  movq %rax,(%rdi)
     86  1: movq %rax,(%rdi)
     96  10: movq %rax,(%rdi)
     97  11: movq %rax,8(%rdi)
     98  12: movq %rax,16(%rdi)
     99  13: movq %rax,24(%rdi)
    100  14: movq %rax,32(%rdi)
    101  15: movq %rax,40(%rdi)
    102  16: movq %rax,48(%rdi)
    [all …]

csum-copy_64.S:
     95  adcq %rbx, %rax
     96  adcq %r8, %rax
     97  adcq %r11, %rax
     98  adcq %rdx, %rax
     99  adcq %r10, %rax
    100  adcq %r15, %rax
    101  adcq %r14, %rax
    102  adcq %r13, %rax
    129  adcq %r9, %rax
    143  adcq %rbx, %rax
    [all …]
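
csum-copy_64.S accumulates the Internet checksum with long adcq chains, letting the carry flag ripple from one 64-bit add into the next. C has no carry flag; the usual portable rendering recovers it by testing for wraparound. A sketch under that rendering (csum_words is an illustrative name, and the fold down to 16 bits is omitted):

    #include <stdint.h>
    #include <stddef.h>

    /* After an unsigned add that wraps, sum < p[i] is exactly the
     * carry bit that adcq would have consumed. */
    static uint64_t csum_words(const uint64_t *p, size_t n)
    {
        uint64_t sum = 0;
        size_t i;

        for (i = 0; i < n; i++) {
            sum += p[i];
            sum += (sum < p[i]);   /* add the carry back, like adcq */
        }
        return sum;
    }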

/linux/arch/x86/kernel/cpu/sgx/

encls.h:
     67  #define __encls_ret_N(rax, inputs...) \
     75  : "a"(rax), inputs \
     80  #define __encls_ret_1(rax, rcx) \
     82  __encls_ret_N(rax, "c"(rcx)); \
     85  #define __encls_ret_2(rax, rbx, rcx) \
     87  __encls_ret_N(rax, "b"(rbx), "c"(rcx)); \
     90  #define __encls_ret_3(rax, rbx, rcx, rdx) \
     92  __encls_ret_N(rax, "b"(rbx), "c"(rcx), "d"(rdx)); \
    110  #define __encls_N(rax, rbx_out, inputs...) \
    119  : "a"(rax), inputs \
    [all …]
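
The encls.h macros all pass the ENCLS leaf number through an "a"(rax) input constraint so it lands in %rax before the instruction executes. ENCLS is privileged, so as a stand-in the same constraint shape is shown below with the unprivileged CPUID; this illustrates only the asm-constraint pattern, not the SGX macros themselves:

    #include <stdint.h>

    /* "a"(leaf) mirrors encls.h's "a"(rax): the compiler loads the
     * function code into %eax/%rax first. CPUID stands in for ENCLS. */
    static uint32_t cpuid_ebx(uint32_t leaf)
    {
        uint32_t eax, ebx, ecx, edx;

        __asm__ __volatile__("cpuid"
                             : "=a"(eax), "=b"(ebx), "=c"(ecx), "=d"(edx)
                             : "a"(leaf), "c"(0));
        return ebx;
    }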

/linux/arch/x86/platform/efi/

efi_thunk_64.S:
     37  movq %rsp, %rax
     39  push %rax
     45  movq 0x18(%rax), %rbp
     46  movq 0x20(%rax), %rbx
     47  movq 0x28(%rax), %rax
     55  movq $__START_KERNEL_map, %rax
     56  subq phys_base(%rip), %rax
     60  subq %rax, %rbp
     61  subq %rax, %rbx
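
Lines 55-61 compute the kernel's virtual-to-physical delta as __START_KERNEL_map - phys_base and subtract it from the saved pointers, apparently converting them to physical addresses before the thunk drops to a 1:1 mapping. The arithmetic as a hedged sketch (the parameters stand in for the real linker symbols):

    #include <stdint.h>

    /* movq $__START_KERNEL_map, %rax; subq phys_base(%rip), %rax
     * builds the delta; subq %rax, ... applies it per pointer. */
    static uint64_t virt_to_phys_sketch(uint64_t va,
                                        uint64_t start_kernel_map,
                                        uint64_t phys_base)
    {
        return va - (start_kernel_map - phys_base);
    }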

/linux/arch/x86/entry/

entry_64.S:
    107  pushq %rax /* pt_regs->orig_ax */
    109  PUSH_AND_CLEAR_REGS rax=$-ENOSYS
    240  movq %rax, %rdi /* prev */
    264  pushq %rax
    270  popq %rax
    484  movq %rax, %rsp /* Switch to new stack */
    610  push %rax
    611  SWITCH_TO_USER_CR3 scratch_reg=%rdi scratch_reg2=%rax
    612  pop %rax
    689  movq %rax, (0*8)(%rdi) /* user RAX */
    [all …]

entry_64_compat.S:
     56  pushq %rax
     57  SWITCH_TO_KERNEL_CR3 scratch_reg=%rax
     58  popq %rax
     86  pushq %rax /* pt_regs->orig_ax */
     87  PUSH_AND_CLEAR_REGS rax=$-ENOSYS
    209  pushq %rax /* pt_regs->orig_ax */
    210  PUSH_AND_CLEAR_REGS rcx=%rbp rax=$-ENOSYS
    240  popq %rax /* pt_regs->rax */

calling.h:
     68  .macro PUSH_REGS rdx=%rdx rcx=%rcx rax=%rax save_ret=0 unwind_hint=1
     79  pushq \rax /* pt_regs->ax */
    125  .macro PUSH_AND_CLEAR_REGS rdx=%rdx rcx=%rcx rax=%rax save_ret=0 clear_bp=1 unwind_hint=1
    126  PUSH_REGS rdx=\rdx, rcx=\rcx, rax=\rax, save_ret=\save_ret unwind_hint=\unwind_hint
    141  popq %rax
    223  pushq %rax
    224  SWITCH_TO_USER_CR3 scratch_reg=\scratch_reg scratch_reg2=%rax
    225  popq %rax
    312  or %rdx, %rax
    313  mov %rax, \save_reg
    [all …]

/linux/arch/x86/crypto/

poly1305-x86_64-cryptogams.pl:
    187  mov %rax,$d2
    188  mov $r0,%rax
    192  mov %rax,$h0  # future $h0
    193  mov $r0,%rax
    197  add %rax,$d2
    198  mov $s1,%rax
    203  add %rax,$h0
    213  mov \$-4,%rax  # mask value
    216  and $d3,%rax  # last reduction step
    220  add $d3,%rax
    [all …]
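
The "mask value" lines (the $-prefixed names are Perl variables the cryptogams script expands to registers) look like the standard partial reduction mod 2^130 - 5: the bits of the top limb above bit 1 are worth 2^130 ≡ 5, and 5*(h2 >> 2) can be formed as t + (t >> 2) with t = h2 & -4. A C sketch of that step in isolation, assuming as the real code does that the top limb holds only a few carry bits (limb and function names are mine, and some of the surrounding asm is elided above):

    #include <stdint.h>

    /* h = h[0] + h[1]*2^64 + h[2]*2^128, reduced mod p = 2^130 - 5.
     * t = h[2] & -4 is the masked move above; t + (t >> 2) equals
     * 5 * (h2 >> 2), the value of the bits at and above 2^130. */
    static void poly1305_partial_reduce(uint64_t h[3])
    {
        uint64_t t = h[2] & ~3ULL;    /* mov $-4,%rax; and $d3,%rax */
        uint64_t add = t + (t >> 2);  /* h[2] assumed small: no overflow */
        uint64_t c;

        h[2] &= 3;                    /* keep only bits 128..129 */
        h[0] += add;
        c = (h[0] < add);
        h[1] += c;
        h[2] += (h[1] < c);           /* ripple the carries up */
    }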

blake2s-core.S:
     71  movd (%rsi,%rax,4),%xmm4
     73  movd (%rsi,%rax,4),%xmm5
     75  movd (%rsi,%rax,4),%xmm6
     77  movd (%rsi,%rax,4),%xmm7
     92  movd (%rsi,%rax,4),%xmm5
     94  movd (%rsi,%rax,4),%xmm6
     96  movd (%rsi,%rax,4),%xmm7
     98  movd (%rsi,%rax,4),%xmm4
    116  movd (%rsi,%rax,4),%xmm6
    118  movd (%rsi,%rax,4),%xmm7
    [all …]

aes-gcm-aesni-x86_64.S:
    216  mov (\src, %rcx), %rax  // Load last 8 bytes
    219  shr %cl, %rax  // Discard overlapping bytes
    220  pinsrq $1, %rax, \dst
    241  or \tmp64, %rax  // Combine the two parts
    243  movq %rax, \dst
    254  pextrq $1, \src, %rax
    257  ror %cl, %rax
    258  mov %rax, (\dst, %rsi)  // Store last LEN - 8 bytes
    436  movdqa OFFSETOF_H_POWERS(KEY,%rax,2), TMP0
    440  movdqu (DST,%rax,2), TMP1
    [all …]
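
Lines 216-241 use the overlapping-load trick for partial blocks: read 8 bytes ending exactly at the end of the data, then shift out the bytes that overlap what was already consumed, so nothing past the buffer is ever touched. One half of that trick as a C sketch (load_tail is an illustrative name; assumes a little-endian machine and a buffer of at least 8 bytes):

    #include <stdint.h>
    #include <stddef.h>
    #include <string.h>

    /* Fetch the last n (1..8) bytes of buf without reading past its
     * end: an 8-byte load ending at buf + size, then a right shift
     * drops the 8 - n overlapping low bytes (shr %cl, %rax above). */
    static uint64_t load_tail(const unsigned char *buf, size_t size,
                              unsigned int n)
    {
        uint64_t v;

        memcpy(&v, buf + size - 8, 8);
        return v >> (8 * (8 - n));
    }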

chacha-avx2-x86_64.S:
     67  mov %rcx,%rax
    142  cmp $0x10,%rax
    149  cmp $0x20,%rax
    156  cmp $0x30,%rax
    163  cmp $0x40,%rax
    171  cmp $0x50,%rax
    177  cmp $0x60,%rax
    183  cmp $0x70,%rax
    189  cmp $0x80,%rax
    200  mov %rax,%r9
    [all …]

aes-gcm-avx10-x86_64.S:
    351  lea 6*16(KEY,%rax,4), RNDKEYLAST_PTR
    744  lea 16(KEY), %rax
    746  vbroadcasti32x4 (%rax), RNDKEY
    748  add $16, %rax
    749  cmp %rax, RNDKEYLAST_PTR
    883  neg %rax
    884  and $~15, %rax  // -round_up(DATALEN, 16)
    885  lea OFFSETOFEND_H_POWERS(KEY,%rax), POWERS_PTR
    908  mov $-1, %rax
    909  bzhi DATALEN64, %rax, %rax
    [all …]
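
Lines 908-909 build an all-ones register and apply BZHI to clear every bit at position DATALEN and above, leaving a mask with exactly the low DATALEN bits set. The same operation with the BMI2 intrinsic (requires -mbmi2; the function name is mine):

    #include <stdint.h>
    #include <immintrin.h>

    /* _bzhi_u64(~0, len) == "mov $-1, %rax; bzhi len, %rax, %rax":
     * a mask of the len lowest bits, for len <= 64. */
    static uint64_t low_bits_mask(unsigned int len)
    {
        return _bzhi_u64(~0ULL, len);
    }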

/linux/tools/testing/selftests/kvm/x86_64/

xen_vmcall_test.c:
     31  unsigned long rax = INPUTVALUE;
     42  "=a"(rax) :
     43  "a"(rax), "D"(rdi), "S"(rsi), "d"(rdx),
     45  GUEST_ASSERT(rax == RETVALUE);
     63  __asm__ __volatile__("call *%1" : "=a"(rax) :
     65  "a"(rax), "D"(rdi), "S"(rsi), "d"(rdx),
     67  GUEST_ASSERT(rax == RETVALUE);
     70  rax = 0;
     73  __asm__ __volatile__("call *%1" : "=a"(rax) :
     75  "a"(rax), "c"(rcx), "d"(rdx),
    [all …]
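
The selftest issues its hypercalls with GCC extended asm, pinning the arguments to the hypercall ABI registers through the "a", "D", "S", "d" (and "c") constraints and reading the result back via "=a". A standalone sketch of the same shape (hypercall4 is an illustrative name; it only makes sense inside a guest, since VMCALL faults on bare metal):

    /* Constraint layout as in xen_vmcall_test.c's guest_code():
     * nr in %rax, args in %rdi/%rsi/%rdx, result back through %rax.
     * Must run inside a KVM guest. */
    static unsigned long hypercall4(unsigned long nr, unsigned long a0,
                                    unsigned long a1, unsigned long a2)
    {
        unsigned long ret;

        __asm__ __volatile__("vmcall"
                             : "=a"(ret)
                             : "a"(nr), "D"(a0), "S"(a1), "d"(a2)
                             : "memory");
        return ret;
    }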

/linux/arch/x86/boot/compressed/

head_64.S:
    327  addq %rax, %rbp
    328  notq %rax
    329  andq %rax, %rbp
    366  leaq gdt64(%rip), %rax
    367  addq %rax, 2(%rax)
    368  lgdt (%rax)
    372  leaq .Lon_kernel_cs(%rip), %rax
    373  pushq %rax
    402  movq %cr4, %rax
    404  movq %rax, %cr4
    [all …]
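
Lines 327-329 round %rbp up to an alignment boundary, assuming %rax holds alignment-minus-one on entry (it is set just above the matched lines): add the mask, invert it, and AND. The identity in C, valid for power-of-two alignments:

    #include <stdint.h>

    /* (x + (a - 1)) & ~(a - 1): addq %rax, %rbp; notq %rax;
     * andq %rax, %rbp, with %rax = a - 1 on entry. */
    static uint64_t round_up_pow2(uint64_t x, uint64_t align)
    {
        uint64_t mask = align - 1;

        return (x + mask) & ~mask;
    }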

/linux/tools/testing/selftests/sgx/

test_encl_bootstrap.S:
     47  lea __encl_base(%rip), %rax
     48  sub %rax, %rbx
     49  lea encl_stack(%rip), %rax
     50  add %rbx, %rax
     55  lea -1(%rbx), %rax
     57  xchg %rsp, %rax
     58  push %rax
     72  pop %rax
     73  mov %rax, %rsp
     76  mov $4, %rax
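
Lines 47-50 locate the per-TCS stack inside the enclave: subtract the link-time enclave base from an absolute runtime address (by the EENTER convention, %rbx holds the TCS address) to get an offset, then add that offset to the encl_stack symbol. A C sketch of the rebasing arithmetic (parameter names are mine, and the %rbx interpretation is an assumption drawn from that convention):

    #include <stdint.h>

    /* sub %rax, %rbx turns the absolute TCS address into an offset
     * from __encl_base; lea + add applies it to encl_stack. */
    static uint64_t rebase_stack(uint64_t tcs, uint64_t encl_base,
                                 uint64_t encl_stack)
    {
        return encl_stack + (tcs - encl_base);
    }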

/linux/arch/x86/virt/vmx/tdx/

tdxcall.S:
     49  mov %rdi, %rax
    123  pushq %rax /* save the TDCALL/SEAMCALL return code */
    124  movq 8(%rsp), %rax /* restore the structure pointer */
    125  movq %rsi, TDX_MODULE_rsi(%rax) /* save RSI */
    126  popq %rax /* restore the return code */
    195  mov $TDX_SEAMCALL_VMFAILINVALID, %rax
    208  orq %rdi, %rax

/linux/arch/x86/entry/vdso/

vsgx.S:
    107  mov %rbx, %rax
    118  push %rax
    121  push %rax
    130  mov SGX_ENCLAVE_RUN_USER_HANDLER(%rax), %rax
    132  call *%rax

/linux/arch/x86/mm/

mem_encrypt_boot.S:
     37  movq %rcx, %rax /* Workarea stack page */
     38  leaq PAGE_SIZE(%rax), %rsp /* Set new stack pointer */
     39  addq $PAGE_SIZE, %rax /* Workarea encryption routine */
     47  movq %rax, %rdi /* Workarea encryption routine */
     57  movq %rax, %r8 /* Workarea encryption routine */
     61  call *%rax /* Call the encryption routine */