/linux/arch/x86/lib/

memmove_64.S
    37: add %rdx, %r8
    41: #define CHECK_LEN cmp $0x20, %rdx; jb 1f
    42: #define MEMMOVE_BYTES movq %rdx, %rcx; rep movsb; RET
    52: cmp $680, %rdx
    61: sub $0x20, %rdx
    66: sub $0x20, %rdx
    79: addq $0x20, %rdx
    86: movq %rdx, %rcx
    87: movq -8(%rsi, %rdx), %r11
    88: lea -8(%rdi, %rdx), %r10
    [all …]

memcpy_64.S
    37: movq %rdx, %rcx
    49: cmpq $0x20, %rdx
    58: subq $0x20, %rdx
    60: subq $0x20, %rdx
    84: addq %rdx, %rsi
    85: addq %rdx, %rdi
    86: subq $0x20, %rdx
    93: subq $0x20, %rdx
    110: subq %rdx, %rsi
    111: subq %rdx, %rdi
    [all …]

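Why "movq %rdx, %rcx" precedes every rep movsb in these two files: the SysV AMD64 ABI delivers the length argument in %rdx, but rep movsb takes its byte count from %rcx. A minimal user-space sketch of the idiom in C with inline assembly (a hedged illustration, not the kernel's implementation; the function name is made up):

    #include <stddef.h>

    static void *memcpy_rep_movsb(void *dst, const void *src, size_t len)
    {
        void *ret = dst;

        /* DF is clear on function entry per the ABI, so this copies
         * forward; %rdi, %rsi and %rcx are updated in place. */
        asm volatile("rep movsb"
                     : "+D"(dst), "+S"(src), "+c"(len)
                     : : "memory");
        return ret;
    }
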
hweight.S
    46: pushq %rdx
    48: movq %rdi, %rdx # w -> t
    50: shrq %rdx # t >>= 1
    51: andq %rdx, %rax # t &= 0x5555555555555555
    52: movabsq $0x3333333333333333, %rdx
    57: andq %rdx, %rax # t &= 0x3333333333333333
    58: andq %rdi, %rdx # w_tmp &= 0x3333333333333333
    59: addq %rdx, %rax # w = w_tmp + t
    61: movq %rax, %rdx # w -> t
    62: shrq $4, %rdx # t >>= 4
    [all …]

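The 0x5555…/0x3333… masks identify this as the classic SWAR population count, with %rdx serving as the temporary "t" named in the comments. The same algorithm in C; note the final 0x0f0f… and 0x0101… steps fall past the snippet's truncation, so they are the standard continuation rather than something visible above:

    #include <stdint.h>

    static unsigned int hweight64_c(uint64_t w)
    {
        uint64_t t = (w >> 1) & 0x5555555555555555ULL; /* t >>= 1; t &= 0x5555... */

        w -= t;                                     /* 2-bit partial sums          */
        w = (w & 0x3333333333333333ULL)
          + ((w >> 2) & 0x3333333333333333ULL);     /* 4-bit partial sums          */
        w = (w + (w >> 4)) & 0x0f0f0f0f0f0f0f0fULL; /* 8-bit sums (assumed step)   */
        return (w * 0x0101010101010101ULL) >> 56;   /* total in top byte (assumed) */
    }
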
getuser.S
    42: movq $0x0123456789abcdef,%rdx
    47: cmp %rax, %rdx
    48: sbb %rdx, %rdx
    49: or %rdx, %rax
    102: UACCESS movq (%_ASM_AX),%rdx
    148: UACCESS movq (%_ASM_AX),%rdx

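The cmp/sbb/or triple at lines 47-49 is a branchless range check: sbb of a register with itself yields 0 or ~0 straight from the carry flag, and OR-ing that mask into the pointer turns an out-of-range address into all-ones (non-canonical, guaranteed to fault). What it computes, expressed in C:

    #include <stdint.h>

    /* cmp %rax, %rdx sets CF when %rdx (the limit) is below %rax (the
     * address); sbb %rdx, %rdx then evaluates to 0 - CF, i.e. 0 or ~0. */
    static uint64_t mask_address(uint64_t addr, uint64_t limit)
    {
        uint64_t mask = (limit < addr) ? ~(uint64_t)0 : 0; /* the sbb result */

        return addr | mask; /* bad pointers become all-ones */
    }
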
memset_64.S
    36: movq %rdx,%rcx
    60: movq %rdx,%rcx
    108: cmpq $7,%rdx
    114: subq %r8,%rdx

copy_page_64.S
    35: movq 0x8*2(%rsi), %rdx
    46: movq %rdx, 0x8*2(%rdi)
    65: movq 0x8*2(%rsi), %rdx
    74: movq %rdx, 0x8*2(%rdi)

/linux/tools/arch/x86/lib/

memcpy_64.S
    36: movq %rdx, %rcx
    48: cmpq $0x20, %rdx
    57: subq $0x20, %rdx
    59: subq $0x20, %rdx
    83: addq %rdx, %rsi
    84: addq %rdx, %rdi
    85: subq $0x20, %rdx
    92: subq $0x20, %rdx
    109: subq %rdx, %rsi
    110: subq %rdx, %rdi
    [all …]

memset_64.S
    36: movq %rdx,%rcx
    60: movq %rdx,%rcx
    108: cmpq $7,%rdx
    114: subq %r8,%rdx

/linux/arch/x86/crypto/

sm4-aesni-avx-asm_64.S
    157: vmovdqu 0*16(%rdx), RA0;
    163: vmovdqu 1*16(%rdx), RA1;
    165: vmovdqu 2*16(%rdx), RA2;
    168: vmovdqu 3*16(%rdx), RA3;
    376: vmovdqu (0 * 16)(%rdx), RA0;
    377: vmovdqu (1 * 16)(%rdx), RA1;
    378: vmovdqu (2 * 16)(%rdx), RA2;
    379: vmovdqu (3 * 16)(%rdx), RA3;
    380: vmovdqu (4 * 16)(%rdx), RB0;
    385: vmovdqu (5 * 16)(%rdx), RB1;
    [all …]

sm4-aesni-avx2-asm_64.S
    367: vpxor (0 * 32)(%rdx), RA0, RA0;
    368: vpxor (1 * 32)(%rdx), RA1, RA1;
    369: vpxor (2 * 32)(%rdx), RA2, RA2;
    370: vpxor (3 * 32)(%rdx), RA3, RA3;
    371: vpxor (4 * 32)(%rdx), RB0, RB0;
    372: vpxor (5 * 32)(%rdx), RB1, RB1;
    373: vpxor (6 * 32)(%rdx), RB2, RB2;
    374: vpxor (7 * 32)(%rdx), RB3, RB3;
    405: vmovdqu (0 * 32)(%rdx), RA0;
    406: vmovdqu (1 * 32)(%rdx), RA1;
    [all …]

cast5-avx-x86_64-asm_64.S
    63: #define RGI1 %rdx
    376: vmovdqu (0*4*4)(%rdx), RL1;
    377: vmovdqu (1*4*4)(%rdx), RR1;
    378: vmovdqu (2*4*4)(%rdx), RL2;
    379: vmovdqu (3*4*4)(%rdx), RR2;
    380: vmovdqu (4*4*4)(%rdx), RL3;
    381: vmovdqu (5*4*4)(%rdx), RR3;
    382: vmovdqu (6*4*4)(%rdx), RL4;
    383: vmovdqu (7*4*4)(%rdx), RR4;
    414: vmovdqu (0*4*4)(%rdx), RL1;
    [all …]

chacha-avx512vl-x86_64.S
    30: # %rdx: up to 2 data blocks input, i
    117: vpxord 0x00(%rdx),%xmm7,%xmm6
    124: vpxord 0x10(%rdx),%xmm7,%xmm6
    131: vpxord 0x20(%rdx),%xmm7,%xmm6
    138: vpxord 0x30(%rdx),%xmm7,%xmm6
    146: vpxord 0x40(%rdx),%xmm7,%xmm6
    152: vpxord 0x50(%rdx),%xmm7,%xmm6
    158: vpxord 0x60(%rdx),%xmm7,%xmm6
    164: vpxord 0x70(%rdx),%xmm7,%xmm6
    184: vmovdqu8 (%rdx,%r9),%xmm1{%k1}{z}
    [all …]

chacha-avx2-x86_64.S
    40: # %rdx: up to 2 data blocks input, i
    144: vpxor 0x00(%rdx),%xmm7,%xmm6
    151: vpxor 0x10(%rdx),%xmm7,%xmm6
    158: vpxor 0x20(%rdx),%xmm7,%xmm6
    165: vpxor 0x30(%rdx),%xmm7,%xmm6
    173: vpxor 0x40(%rdx),%xmm7,%xmm6
    179: vpxor 0x50(%rdx),%xmm7,%xmm6
    185: vpxor 0x60(%rdx),%xmm7,%xmm6
    191: vpxor 0x70(%rdx),%xmm7,%xmm6
    211: lea (%rdx,%rax),%rsi
    [all …]

chacha-ssse3-x86_64.S
    117: # %rdx: up to 1 data block input, i
    139: movdqu 0x00(%rdx),%xmm4
    147: movdqu 0x10(%rdx),%xmm0
    155: movdqu 0x20(%rdx),%xmm0
    163: movdqu 0x30(%rdx),%xmm0
    184: lea (%rdx,%rax),%rsi
    226: # %rdx: up to 4 data blocks input, i
    654: movdqu 0x00(%rdx),%xmm1
    661: movdqu 0x10(%rdx),%xmm1
    668: movdqu 0x20(%rdx),%xmm1
    [all …]

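Across all three chacha files, %rdx is the input pointer (the "i" in the header comments) and the vpxor/movdqu hits are the encryption step itself: XOR the generated keystream into the input, 16 or 32 bytes per instruction. A scalar C equivalent of just that step (names made up):

    #include <stddef.h>
    #include <stdint.h>

    static void chacha_xor_stream(uint8_t *dst, const uint8_t *src,
                                  const uint8_t *keystream, size_t len)
    {
        /* One byte per iteration where the vector code does 16 or 32. */
        for (size_t i = 0; i < len; i++)
            dst[i] = src[i] ^ keystream[i];
    }
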
/linux/arch/x86/mm/

mem_encrypt_boot.S
    44: movq %rdx, %r12 /* Area length */
    55: movq %r8, %rdx /* Pagetables used for encryption */
    99: mov %rdx, %cr3
    102: mov %cr4, %rdx
    103: andq $~X86_CR4_PGE, %rdx
    104: mov %rdx, %cr4
    105: orq $X86_CR4_PGE, %rdx
    106: mov %rdx, %cr4
    118: mov %rdx, %r15 /* Save original PAT value */
    151: mov %r15, %rdx /* Restore original PAT value */

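Lines 102-106 clear and then re-set CR4.PGE, which flushes every TLB entry, global ones included; hibernate_asm_64.S and madt_playdead.S below lean on the same idiom. A ring-0-only C sketch of the toggle (assumes PGE was set on entry; PGE is bit 7 of CR4 per the SDM):

    #define X86_CR4_PGE (1UL << 7) /* bit 7 of CR4 */

    static inline void flush_tlb_all_sketch(void)
    {
        unsigned long cr4;

        asm volatile("mov %%cr4, %0" : "=r"(cr4));
        /* Dropping PGE invalidates global TLB entries... */
        asm volatile("mov %0, %%cr4" : : "r"(cr4 & ~X86_CR4_PGE) : "memory");
        /* ...and restoring CR4 turns global pages back on. */
        asm volatile("mov %0, %%cr4" : : "r"(cr4) : "memory");
    }
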
/linux/arch/x86/power/

hibernate_asm_64.S
    34: movq %rax, %rdx
    35: andq $~(X86_CR4_PGE), %rdx
    36: movq %rdx, %cr4; # turn off PGE
    49: movq pt_regs_dx(%rax), %rdx
    80: movq %rdx, pt_regs_dx(%rax)
    112: movq restore_pblist(%rip), %rdx
    132: testq %rdx, %rdx
    136: movq pbe_address(%rdx), %rsi
    137: movq pbe_orig_address(%rdx), %rdi
    143: movq pbe_next(%rdx), %rdx

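Lines 112-143 walk the restore_pblist linked list with %rdx as the cursor: each page backup entry records where the saved copy lives, where it originally belonged, and the next entry, with a null next pointer terminating the loop (the testq). A hedged C analogue; the field layout mirrors the kernel's struct pbe, and the 4096-byte page size is an assumption:

    #include <string.h>

    struct pbe_sketch {
        void *address;           /* pbe_address: where the saved copy lives */
        void *orig_address;      /* pbe_orig_address: where it must go back */
        struct pbe_sketch *next; /* pbe_next: NULL ends the list            */
    };

    static void restore_pages_sketch(struct pbe_sketch *p)
    {
        for (; p; p = p->next)                         /* movq pbe_next(%rdx), %rdx */
            memcpy(p->orig_address, p->address, 4096); /* assumed PAGE_SIZE         */
    }
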
/linux/arch/x86/entry/

calling.h
    68: .macro PUSH_REGS rdx=%rdx rcx=%rcx rax=%rax save_ret=0 unwind_hint=1
    77: pushq \rdx /* pt_regs->dx */
    125: .macro PUSH_AND_CLEAR_REGS rdx=%rdx rcx=%rcx rax=%rax save_ret=0 clear_bp=1 unwind_hint=1
    126: PUSH_REGS rdx=\rdx, rcx=\rcx, rax=\rax, save_ret=\save_ret unwind_hint=\unwind_hint
    143: popq %rdx
    311: shl $32, %rdx
    312: or %rdx, %rax
    321: movq PER_CPU_VAR(x86_spec_ctrl_current), %rdx
    323: shr $32, %rdx
    339: mov \save_reg, %rdx
    [all …]

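The shl $32/or pair and the shr $32 hit are the EDX:EAX convention around rdmsr and wrmsr: a 64-bit MSR value travels as two 32-bit halves, with %rdx carrying the high half. A ring-0-only sketch of both directions (function names made up):

    #include <stdint.h>

    static inline uint64_t rdmsr_sketch(uint32_t msr)
    {
        uint32_t lo, hi;

        asm volatile("rdmsr" : "=a"(lo), "=d"(hi) : "c"(msr));
        return ((uint64_t)hi << 32) | lo; /* shl $32, %rdx; or %rdx, %rax */
    }

    static inline void wrmsr_sketch(uint32_t msr, uint64_t val)
    {
        /* shr $32, %rdx: split the value back into EDX:EAX. */
        asm volatile("wrmsr" : : "c"(msr),
                     "a"((uint32_t)val), "d"((uint32_t)(val >> 32)));
    }
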
entry_64.S
    242: movq %rbx, %rdx /* fn */
    1150: pushq %rdx
    1168: SWITCH_TO_KERNEL_CR3 scratch_reg=%rdx
    1169: movq %rsp, %rdx
    1171: UNWIND_HINT_IRET_REGS base=%rdx offset=8
    1172: pushq 5*8(%rdx) /* pt_regs->ss */
    1173: pushq 4*8(%rdx) /* pt_regs->rsp */
    1174: pushq 3*8(%rdx) /* pt_regs->flags */
    1175: pushq 2*8(%rdx) /* pt_regs->cs */
    1176: pushq 1*8(%rdx) /* pt_regs->rip */
    [all …]

/linux/arch/x86/kernel/

sev_verify_cbit.S
    38: movq %rsi, %rdx
    39: andq $(~X86_CR4_PGE), %rdx
    40: movq %rdx, %cr4
    49: 1: rdrand %rdx
    53: movq %rdx, sev_check_data(%rip)
    65: cmpq %rdx, sev_check_data(%rip)

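The "1:" label before rdrand at line 49 exists because rdrand can transiently fail and reports success in the carry flag; the random value is then stored to sev_check_data and compared back to verify that the SEV C-bit (memory encryption) round-trips. A GCC flag-output sketch of the retry loop (function name made up):

    #include <stdint.h>

    static inline uint64_t rdrand64_sketch(void)
    {
        uint64_t v;
        unsigned char ok;

        do {
            /* "=@ccc" captures the carry flag rdrand sets on success. */
            asm volatile("rdrand %0" : "=r"(v), "=@ccc"(ok));
        } while (!ok);
        return v;
    }
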
ftrace_64.S
    85: movq %rdx, RDX(%rsp)
    96: movq MCOUNT_REG_SIZE-8(%rsp), %rdx
    98: movq %rbp, %rdx
    100: movq %rdx, RBP(%rsp)
    126: movq RDX(%rsp), %rdx
    167: movq function_trace_op(%rip), %rdx
    212: movq function_trace_op(%rip), %rdx
    355: movq %rdx, 8(%rsp)
    362: movq 8(%rsp), %rdx

relocate_kernel_64.S
    122: pushq %rdx
    214: popq %rdx
    217: call *%rdx
    293: movq %rdi, %rdx /* Save destination page to %rdx */
    303: movq %rdx, %rsi
    308: movq %rdx, %rdi

/linux/arch/x86/kernel/acpi/

madt_playdead.S
    18: movq %cr4, %rdx
    19: andq $~(X86_CR4_PGE), %rdx
    20: movq %rdx, %cr4

/linux/arch/x86/kvm/svm/

vmenter.S
    321: mov %rbp, SEV_ES_RBP (%rdx)
    322: mov %r15, SEV_ES_R15 (%rdx)
    323: mov %r14, SEV_ES_R14 (%rdx)
    324: mov %r13, SEV_ES_R13 (%rdx)
    325: mov %r12, SEV_ES_R12 (%rdx)
    326: mov %rbx, SEV_ES_RBX (%rdx)
    332: mov %rdi, SEV_ES_RDI (%rdx)
    333: mov %rsi, SEV_ES_RSI (%rdx)

/linux/tools/testing/selftests/kvm/x86_64/

xen_vmcall_test.c
    34: unsigned long rdx = ARGVALUE(3);  [in guest_code(), local]
    43: "a"(rax), "D"(rdi), "S"(rsi), "d"(rdx),  [in guest_code()]
    65: "a"(rax), "D"(rdi), "S"(rsi), "d"(rdx),  [in guest_code()]
    72: rdx = 0x5a5a5a5a; /* ingpa (badly aligned) */  [in guest_code()]
    75: "a"(rax), "c"(rcx), "d"(rdx),  [in guest_code()]

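The single-letter constraints in these hits pin C values to fixed registers for the hypercall ABI: "a" is %rax, "D" %rdi, "S" %rsi, "d" %rdx, "c" %rcx. A hypothetical guest-side wrapper in the same style (only meaningful inside a VM; the name and argument set are made up, not the test's actual helper):

    static inline unsigned long hypercall_sketch(unsigned long nr,
                                                 unsigned long a0,
                                                 unsigned long a1,
                                                 unsigned long a2)
    {
        unsigned long ret;

        asm volatile("vmcall" /* VMX; AMD guests use vmmcall instead */
                     : "=a"(ret)
                     : "a"(nr), "D"(a0), "S"(a1), "d"(a2)
                     : "memory");
        return ret;
    }
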
/linux/arch/x86/kernel/cpu/

vmware.c
    73: unsigned long out0, rbx, rcx, rdx, rsi, rdi;  [in vmware_hypercall_slow(), local]
    79: "=d" (rdx), "=S" (rsi), "=D" (rdi)  [in vmware_hypercall_slow()]
    91: "=d" (rdx), "=S" (rsi), "=D" (rdi)  [in vmware_hypercall_slow()]
    103: "=d" (rdx), "=S" (rsi), "=D" (rdi)  [in vmware_hypercall_slow()]
    120: *out3 = rdx;  [in vmware_hypercall_slow()]
    516: args.rdx = in3;  [in vmware_tdx_hypercall()]
    533: *out3 = args.rdx;  [in vmware_tdx_hypercall()]