/linux/arch/x86/lib/

atomic64_386_32.S
    38  movl 4(v), %edx
    54  movl 4(v), %edx
    64  adcl %edx, 4(v)
    72  adcl 4(v), %edx
    74  movl %edx, 4(v)
    82  sbbl %edx, 4(v)
    89  negl %edx
    91  sbbl $0, %edx
    93  adcl 4(v), %edx
    95  movl %edx, 4(v)
    [all …]
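The adcl/sbbl pairs against (v) and 4(v) above are how the 386/486 fallback emulates 64-bit atomics with plain 32-bit instructions: operate on the low half, then fold the carry or borrow into the high half. A minimal C sketch of that split, assuming a value stored as two 32-bit words (an illustration, not the kernel routine, which also has to keep the two halves atomic as a pair):

    #include <stdint.h>

    /* Illustrative only: add a 64-bit delta to a value kept as two 32-bit
     * halves, propagating the carry from the low word into the high word,
     * which is the job addl/adcl do on (v) and 4(v) in atomic64_386_32.S. */
    struct split64 {
        uint32_t lo;    /* (v)  */
        uint32_t hi;    /* 4(v) */
    };

    static void split64_add(struct split64 *v, uint64_t delta)
    {
        uint32_t lo = v->lo + (uint32_t)delta;
        uint32_t carry = lo < v->lo;               /* carry out of the low half */
        v->lo = lo;
        v->hi += (uint32_t)(delta >> 32) + carry;  /* the adcl step */
    }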
copy_user_uncached_64.S
    35  cmp $64,%edx
    59  sub $64,%edx
    60  cmp $64,%edx
    100  cmp $8,%edx
    106  sub $8,%edx
    125  sub $4,%edx
    134  sub $2,%edx
    140  dec %edx
    142  mov %edx,%eax
    168  test %edx,%edx
    [all …]
getuser.S
    53  sbb %edx, %edx /* array_index_mask_nospec() */
    54  and %edx, %eax
    68  UACCESS movzbl (%_ASM_AX),%edx
    78  UACCESS movzwl (%_ASM_AX),%edx
    88  UACCESS movl (%_ASM_AX),%edx
    104  UACCESS movl (%_ASM_AX),%edx
    117  UACCESS movzbl (%_ASM_AX),%edx
    127  UACCESS movzwl (%_ASM_AX),%edx
    137  UACCESS movl (%_ASM_AX),%edx
    151  UACCESS movl (%_ASM_AX),%edx
    [all …]
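The sbb/and pair at lines 53-54 is the array_index_mask_nospec() pattern: the preceding compare leaves the carry flag set only for an in-range user pointer, sbb %edx,%edx turns that flag into an all-ones or all-zero mask, and the and clamps the pointer to 0 when it is out of range, so a mispredicted path cannot speculatively load from a kernel address. A hedged C sketch of the same branchless-clamp idea (generic form, not the kernel helper):

    #include <stdint.h>

    /* Sketch of the Spectre-v1 clamp: build a mask that is ~0 when the
     * index is in range and 0 otherwise, then AND it in before the
     * dependent access.  The kernel derives the mask from the carry flag
     * (cmp + sbb) so it is data-dependent rather than branch-predicted. */
    static uint32_t index_nospec_sketch(uint32_t index, uint32_t size)
    {
        uint32_t mask = 0u - (uint32_t)(index < size);  /* 0xffffffff or 0 */
        return index & mask;
    }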
atomic64_cx8_32.S
    13  movl %ecx, %edx
    21  movl 4(\reg), %edx
    56  movl %edx, %edi
    62  movl %edx, %ecx
    71  movl %ecx, %edx
    90  movl %edx, %ecx
    99  movl %ecx, %edx
    114  movl %edx, %ecx
    124  movl %ecx, %edx
    137  movl %edx, %edi
    [all …]
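The _cx8 variant targets CPUs that do have cmpxchg8b: the current value is held in edx:eax, the proposed new value in ecx:ebx, and the operation retries until the compare-exchange succeeds, which is what the movl shuffling between those registers above serves. A rough C rendering of that retry loop using compiler atomics (an illustration of the pattern, not the kernel code):

    #include <stdint.h>

    /* 64-bit atomic add expressed as a compare-and-swap retry loop, the
     * shape a cmpxchg8b-based atomic64_add_return takes. */
    static uint64_t atomic64_add_return_sketch(uint64_t *v, uint64_t delta)
    {
        uint64_t old = __atomic_load_n(v, __ATOMIC_RELAXED);
        uint64_t new;
        do {
            new = old + delta;
            /* On failure, 'old' is reloaded with the current value. */
        } while (!__atomic_compare_exchange_n(v, &old, new, 1,
                                              __ATOMIC_SEQ_CST,
                                              __ATOMIC_RELAXED));
        return new;
    }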
hweight.S
    17  movl %eax, %edx  # w -> t
    18  shrl %edx  # t >>= 1
    19  andl $0x55555555, %edx  # t &= 0x55555555
    20  subl %edx, %eax  # w -= t
    22  movl %eax, %edx  # w -> t
    24  andl $0x33333333, %edx  # t &= 0x33333333
    26  addl %edx, %eax  # w = w_tmp + t
    28  movl %eax, %edx  # w -> t
    29  shrl $4, %edx  # t >>= 4
    30  addl %edx, %eax  # w_tmp += t
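The comments above spell out the classic SWAR population count used by the software hweight fallback: fold bit pairs, then nibbles, then sum the per-byte counts. A C transcription of those steps for 32 bits (the last two steps, masking with 0x0f0f0f0f and summing the bytes, lie past the matches shown and are the standard completion of the algorithm):

    #include <stdint.h>

    /* Standard 32-bit population count, matching the comments in hweight.S. */
    static unsigned int popcount32(uint32_t w)
    {
        w -= (w >> 1) & 0x55555555;                       /* count bits per pair    */
        w  = (w & 0x33333333) + ((w >> 2) & 0x33333333);  /* ... per nibble         */
        w  = (w + (w >> 4)) & 0x0f0f0f0f;                 /* ... per byte           */
        return (w * 0x01010101) >> 24;                    /* sum of the byte counts */
    }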
copy_mc_64.S
    21  cmpl $8, %edx
    35  subl %ecx, %edx
    46  movl %edx, %ecx
    47  andl $7, %edx
    62  andl %edx, %edx
    66  movl %edx, %ecx
    90  addl %edx, %ecx
    102  addl %edx, %ecx
    103  movl %ecx, %edx
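The subl/andl $7 arithmetic above is the machine-check-safe copy splitting its length into a byte-wise head up to 8-byte alignment, an 8-byte-word body, and a byte-wise tail; if a poisoned load faults, the routine reports how many bytes were left uncopied. A plain C sketch of just that chunking, with the fault handling left out:

    #include <stddef.h>
    #include <stdint.h>
    #include <string.h>

    /* Sketch of the head/body/tail split used by chunked copy routines:
     * byte copies until aligned, 8-byte word copies, then the remainder.
     * The real copy_mc code also has exception-table entries so a machine
     * check mid-copy returns the count of bytes NOT copied; omitted here. */
    static size_t copy_chunked_sketch(void *dst, const void *src, size_t len)
    {
        unsigned char *d = dst;
        const unsigned char *s = src;

        size_t head = (8 - ((uintptr_t)s & 7)) & 7;   /* bytes to reach alignment */
        if (head > len)
            head = len;
        memcpy(d, s, head);
        d += head; s += head; len -= head;

        size_t body = len & ~(size_t)7;               /* whole 8-byte words */
        memcpy(d, s, body);
        d += body; s += body; len -= body;

        memcpy(d, s, len);                            /* byte tail */
        return 0;                                     /* 0 bytes left uncopied */
    }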
/linux/arch/x86/math-emu/

wm_shrx.S
    48  movl 4(%esi),%edx /* msl */
    51  shrd %cl,%edx,%ebx
    52  shr %cl,%edx
    54  movl %edx,4(%esi)
    66  movl 4(%esi),%edx /* msl */
    67  shrd %cl,%edx,%eax
    68  shr %cl,%edx
    69  movl %edx,(%esi)
    82  xorl %edx,%edx
    83  movl %edx,(%esi)
    [all …]
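wm_shrx right-shifts a 64-bit significand by an arbitrary count, with separate branches (visible at lines 48, 66 and 82) for shifts of 0-31, 32-63, and 64 or more bits; within a word-sized shift, shrd pulls the outgoing bits of the most significant word into the word below it. A C sketch of that double-word shift for counts of 1..31:

    #include <stdint.h>

    /* Shift a 64-bit value held as two 32-bit words right by n (1..31).
     * The low word receives the bits shifted out of the high word, which
     * is exactly what the shrd/shr pair does. */
    static void shr64_sketch(uint32_t *hi, uint32_t *lo, unsigned int n)
    {
        *lo = (*lo >> n) | (*hi << (32 - n));   /* shrd %cl,%edx,%ebx */
        *hi >>= n;                              /* shr  %cl,%edx      */
    }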
reg_u_div.S
    93  movswl EXP(%esi),%edx
    95  subl %eax,%edx
    96  addl EXP_BIAS,%edx
    99  cmpl EXP_WAY_UNDER,%edx
    103  movl EXP_WAY_UNDER,%edx
    122  movl SIGH(%esi),%edx /* Dividend */
    125  cmpl %ecx,%edx
    129  subl %ecx,%edx /* Prevent the overflow */
    191  movl SIGH(%esi),%edx
    198  cmpl SIGH(%ebx),%edx /* Test for imminent overflow */
    [all …]
round_Xsig.S
    35  movl 8(%esi),%edx
    41  orl %edx,%edx /* ms bits */
    45  movl %ebx,%edx
    52  bsrl %edx,%ecx /* get the required shift in %ecx */
    56  shld %cl,%ebx,%edx
    65  adcl $0,%edx
    68  movl $0x80000000,%edx
    72  movl %edx,8(%esi)
    95  movl 8(%esi),%edx
    101  orl %edx,%edx /* ms bits */
    [all …]
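round_Xsig normalizes an extended 96-bit significand (bsrl finds the leading set bit, shld shifts it up) and then rounds it down to 64 bits, letting adcl carry the rounding increment into the upper word; if the increment overflows the whole significand, the top word falls back to 0x80000000 and the caller adjusts the exponent. A simplified C sketch of the round-and-carry part, assuming plain round-to-nearest (the emulator's full rounding rules are more involved):

    #include <stdint.h>

    /* Round a 96-bit significand hi:mid:lo down to 64 bits (hi:mid).
     * Returns 1 if the increment overflowed, in which case the caller
     * bumps the exponent and the significand becomes 0x80000000:0. */
    static int round_sig96_sketch(uint32_t *hi, uint32_t *mid, uint32_t lo)
    {
        if (lo & 0x80000000u) {                 /* round bit set: round up  */
            if (++*mid == 0 && ++*hi == 0) {    /* adcl-style carry chain   */
                *hi = 0x80000000u;              /* significand overflowed   */
                return 1;
            }
        }
        return 0;
    }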
div_Xsig.S
    130  movl FPU_accum_3,%edx
    143  mov %edx,%eax
    156  sbbl %edx,FPU_accum_3
    162  sbbl %edx,FPU_accum_2
    174  movl XsigH(%ebx),%edx
    176  sbbl %edx,FPU_accum_2
    188  movl FPU_accum_2,%edx /* get the reduced num */
    192  cmpl XsigH(%ebx),%edx
    203  sbbl XsigH(%ebx),%edx
    204  movl %edx,FPU_accum_2
    [all …]
reg_u_sub.S
    68  movl PARAM6,%edx
    71  xorl %edx,%edx /* register extension */
    83  shrd %cl,%ebx,%edx
    95  shrd %cl,%eax,%edx
    100  orl $1,%edx /* record the fact in the extension */
    108  movl %ebx,%edx
    118  movl %eax,%edx
    122  orl $1,%edx
    130  movl %eax,%edx
    131  rcrl %edx
    [all …]
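Before the significands are combined, reg_u_sub (and reg_u_add below) shifts the operand with the smaller exponent right to align it, and any non-zero bits that fall off are remembered by OR-ing a 1 into the low "extension" word (the orl $1,%edx matches), so later rounding still knows the result is inexact. A small C sketch of a right shift that keeps such a sticky bit:

    #include <stdint.h>

    /* Shift an extension word right by n (1..31), OR-ing a sticky 1 into
     * the result if any set bit would otherwise be lost. */
    static uint32_t shr_sticky_sketch(uint32_t ext, unsigned int n)
    {
        uint32_t lost = ext & ((1u << n) - 1);   /* bits about to fall off */
        ext >>= n;
        if (lost)
            ext |= 1;                            /* "record the fact"      */
        return ext;
    }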
reg_round.S
    122  movl PARAM2,%edx
    205  orl %edx,%ecx
    222  orl %edx,%edx
    241  orl %edx,%ecx
    284  orl %edx,%ecx
    298  orl %edx,%edx
    316  orl %edx,%ecx
    349  orl %edx,%edx
    357  orl %edx,%edx
    362  cmpl $0x80000000,%edx
    [all …]
mul_Xsig.S
    43  movl %edx,-12(%ebp)
    48  adcl %edx,-8(%ebp)
    54  adcl %edx,-4(%ebp)
    84  movl %edx,-12(%ebp)
    88  addl %edx,-12(%ebp)
    95  adcl %edx,-8(%ebp)
    101  adcl %edx,-8(%ebp)
    107  adcl %edx,-4(%ebp)
    138  movl %edx,-12(%ebp)
    142  addl %edx,-12(%ebp)
    [all …]
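mul_Xsig builds its result from 32x32->64 partial products: each mull leaves a product in edx:eax, the pieces are added into a wide accumulator on the stack (the -12/-8/-4(%ebp) slots), and adcl moves the carries up between the words. A C sketch of the same accumulation for a 64x32 multiply, using 64-bit arithmetic to stand in for mull:

    #include <stdint.h>

    /* Multiply a 64-bit significand (hi:lo) by a 32-bit factor into a
     * 96-bit result acc[0..2], least significant word first.  Each 32x32
     * product corresponds to one mull (edx:eax); the carries into the
     * higher accumulator words correspond to the adcl instructions. */
    static void mul64x32_sketch(uint32_t lo, uint32_t hi, uint32_t f,
                                uint32_t acc[3])
    {
        uint64_t p0 = (uint64_t)lo * f;
        uint64_t p1 = (uint64_t)hi * f;

        acc[0] = (uint32_t)p0;
        uint64_t t = (p0 >> 32) + (uint32_t)p1;              /* add low parts */
        acc[1] = (uint32_t)t;
        acc[2] = (uint32_t)(p1 >> 32) + (uint32_t)(t >> 32); /* carry upward  */
    }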
wm_sqrt.S
    92  xorl %edx,%edx
    101  rcrl $1,%edx
    109  movl %edx,FPU_fsqrt_arg_0
    116  shll %edx /* max result was 7fff... */
    117  testl $0x80000000,%edx /* but min was 3fff... */
    120  movl $0x80000000,%edx /* round up */
    123  movl %edx,%esi /* Our first guess */
    140  movl %ecx,%edx /* msw of the arg / 2 */
    145  movl %ecx,%edx
    150  movl %ecx,%edx
    [all …]
shr_Xsig.S
    38  movl 8(%esi),%edx /* msl */
    40  shrd %cl,%edx,%ebx
    41  shr %cl,%edx
    44  movl %edx,8(%esi)
    56  movl 8(%esi),%edx /* msl */
    57  shrd %cl,%edx,%eax
    58  shr %cl,%edx
    60  movl %edx,4(%esi)
    73  xorl %edx,%edx
    75  movl %edx,4(%esi)
    [all …]
reg_norm.S
    32  movl SIGH(%ebx),%edx
    35  orl %edx,%edx /* ms bits */
    42  movl %eax,%edx
    48  bsrl %edx,%ecx /* get the required shift in %ecx */
    51  shld %cl,%eax,%edx
    55  movl %edx,SIGH(%ebx)
    110  movl SIGH(%ebx),%edx
    113  orl %edx,%edx /* ms bits */
    120  movl %eax,%edx
    126  bsrl %edx,%ecx /* get the required shift in %ecx */
    [all …]
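reg_norm shifts the significand left until its top bit is set: bsrl locates the highest set bit, shld performs the multi-word left shift, and the exponent is reduced by the same count. The same normalization on a 64-bit significand, expressed with the compiler's count-leading-zeros builtin:

    #include <stdint.h>

    /* Normalize a non-zero 64-bit significand so bit 63 is set, lowering
     * the exponent by the number of positions shifted (the bsrl + shld +
     * exponent adjustment of reg_norm.S in one line of C). */
    static void normalize_sketch(uint64_t *sig, int *exp)
    {
        int shift = __builtin_clzll(*sig);   /* sig must be non-zero */
        *sig <<= shift;
        *exp -= shift;
    }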
reg_u_add.S
    46  movl %ecx,%edx
    55  movl PARAM7,%edx
    68  xorl %edx,%edx /* clear the extension */
    83  shrd %cl,%ebx,%edx
    95  shrd %cl,%eax,%edx
    100  orl $1,%edx /* record the fact in the extension */
    108  movl %ebx,%edx
    117  movl %eax,%edx
    121  orl $1,%edx
    125  movl $1,%edx /* The shifted nr always at least one '1' */
    [all …]
/linux/arch/x86/um/

setjmp_32.S
    24  movl %eax,%edx
    26  movl 4(%esp),%edx
    30  movl %ebx,(%edx)
    31  movl %esp,4(%edx) # Post-return %esp!
    33  movl %ebp,8(%edx)
    34  movl %esi,12(%edx)
    35  movl %edi,16(%edx)
    36  movl %ecx,20(%edx) # Return address
    47  xchgl %eax,%edx
    49  movl 4(%esp),%edx # jmp_ptr address
    [all …]
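The stores at offsets 0 through 20 of the buffer give the 32-bit UML setjmp layout directly: callee-saved %ebx, the post-return %esp, %ebp, %esi, %edi, and the saved return address, in that order; the longjmp path reloads the same slots. An illustrative C view of that layout (a sketch of the slot order seen above, not a definition taken from the kernel headers):

    /* Slot order of the movl stores at 0(%edx) .. 20(%edx) in setjmp_32.S. */
    struct uml_jmp_buf_sketch {
        unsigned int ebx;   /*  0: callee-saved               */
        unsigned int esp;   /*  4: stack pointer after return */
        unsigned int ebp;   /*  8: frame pointer              */
        unsigned int esi;   /* 12: callee-saved               */
        unsigned int edi;   /* 16: callee-saved               */
        unsigned int eip;   /* 20: saved return address       */
    };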
/linux/arch/x86/include/asm/

cpuid.h
    14  u32 eax, ebx, ecx, edx;  (struct member)
    33  unsigned int *ecx, unsigned int *edx)  (argument, in native_cpuid())
    40  "=d" (*edx)  (in native_cpuid())
    48  unsigned int eax = op, ebx, ecx = 0, edx; \
    50  native_cpuid(&eax, &ebx, &ecx, &edx); \
    61  native_cpuid_reg(edx)  (in native_cpuid_reg())
    76  unsigned int *ecx, unsigned int *edx)  (in native_cpuid_reg())
    80  __cpuid(eax, ebx, ecx, edx);  (in native_cpuid_reg())
    86  unsigned int *ecx, unsigned int *edx)  (argument, in cpuid_count())
    90  __cpuid(eax, ebx, ecx, edx);  (in cpuid_count())
    [all …]
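native_cpuid() is a thin inline-asm wrapper: the leaf goes in through eax (and the sub-leaf through ecx), and all four output registers come back through the pointer arguments; the native_cpuid_reg() macro then stamps out per-register helpers. A standalone C sketch of such a wrapper (the kernel's own version lives in the header above; this is just the bare pattern):

    /* Minimal cpuid wrapper in GCC/Clang inline asm: *eax/*ecx select the
     * leaf and sub-leaf on entry, all four registers are filled on exit. */
    static inline void cpuid_sketch(unsigned int *eax, unsigned int *ebx,
                                    unsigned int *ecx, unsigned int *edx)
    {
        asm volatile("cpuid"
                     : "=a" (*eax), "=b" (*ebx), "=c" (*ecx), "=d" (*edx)
                     : "0" (*eax), "2" (*ecx));
    }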
/linux/arch/x86/platform/efi/

efi_stub_32.S
    23  push %edx
    30  movl $1f, %edx
    31  subl $__PAGE_OFFSET, %edx
    32  jmp *%edx
    36  movl %cr0, %edx
    37  andl $0x7fffffff, %edx
    38  movl %edx, %cr0
    49  movl 36(%esp), %edx // &efi.runtime
    50  movl %ecx, (%edx)
    53  movl %cr0, %edx
    [all …]
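The andl $0x7fffffff,%edx on the value read from %cr0 clears bit 31, which is CR0.PG (paging enable): the 32-bit EFI stub jumps to a physical-address label (the subl $__PAGE_OFFSET), turns paging off around the firmware call, and restores CR0 afterwards. The bit arithmetic, spelled out as a tiny C sketch:

    /* CR0 bit 31 is PG (paging enable); 0x7fffffff is ~(1u << 31), so the
     * andl masks PG off and leaves every other CR0 bit alone. */
    #define CR0_PG  (1u << 31)              /* 0x80000000 */

    static unsigned int cr0_without_paging(unsigned int cr0)
    {
        return cr0 & ~CR0_PG;               /* same as cr0 & 0x7fffffff */
    }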
/linux/arch/x86/boot/compressed/

mem_encrypt.S
    97  pushl %edx
    107  movl %ebx, %edx # CPUID fn
    111  movl %edx, 12(%esp) # Store result
    114  movl %ebx, %edx # CPUID fn
    118  movl %edx, 8(%esp) # Store result
    121  movl %ebx, %edx # CPUID fn
    125  movl %edx, 4(%esp) # Store result
    128  movl %ebx, %edx # CPUID fn
    132  movl %edx, 0(%esp) # Store result
    154  popl %edx
    [all …]
/linux/tools/arch/x86/kcpuid/

cpuid.csv
    18  0, 0, edx, 31:0, cpu_vendorid_1 , CPU vendor ID string bytes 4 - 7
    64  1, 0, edx, 0, fpu , Floating-Point Unit on-chip (x87)
    65  1, 0, edx, 1, vme , Virtual-8086 Mode Extensions
    66  1, 0, edx, 2, de , Debugging Extensions
    67  1, 0, edx, 3, pse , Page Size Extension
    68  1, 0, edx, 4, tsc , Time Stamp Counter
    69  …1, 0, edx, 5, msr , Model-Specific Registers (RDMSR and WRMSR…
    70  1, 0, edx, 6, pae , Physical Address Extensions
    71  1, 0, edx, 7, mce , Machine Check Exception
    72  1, 0, edx, 8, cx8 , CMPXCHG8B instruction
    [all …]
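Each cpuid.csv row is: leaf, sub-leaf, output register, bit field, short name, description; kcpuid uses the table to decode raw CPUID output into readable flags. The rows above, for instance, say that in CPUID leaf 1 the EDX bits 0, 4 and 6 are fpu, tsc and pae, which can be checked directly with the compiler's <cpuid.h> helper:

    #include <stdio.h>
    #include <cpuid.h>      /* GCC/Clang helper providing __get_cpuid() */

    int main(void)
    {
        unsigned int eax, ebx, ecx, edx;

        if (!__get_cpuid(1, &eax, &ebx, &ecx, &edx))
            return 1;       /* leaf 1 not supported */

        /* Bit positions taken from the cpuid.csv rows above. */
        printf("fpu: %u\n", (edx >> 0) & 1);
        printf("tsc: %u\n", (edx >> 4) & 1);
        printf("pae: %u\n", (edx >> 6) & 1);
        return 0;
    }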
/linux/arch/x86/kernel/

ftrace_32.S
    46  pushl %edx
    51  movl 4*4(%esp), %edx
    54  lea 4*4(%esp), %edx
    59  movl 4(%edx), %edx /* edx has ebp */
    69  popl %edx
    127  pushl %edx
    135  movl 21*4(%esp), %edx # 2nd argument: parent ip
    155  popl %edx
    175  pushl %edx
    178  lea 4*4(%esp), %edx
    [all …]
/linux/tools/testing/selftests/mm/

pkey-x86.h
    47  unsigned int eax, edx;  (local, in __read_pkey_reg())
    52  : "=a" (eax), "=d" (edx)  (in __read_pkey_reg())
    62  unsigned int edx = 0;  (local, in __write_pkey_reg())
    67  : : "a" (eax), "c" (ecx), "d" (edx));  (in __write_pkey_reg())
    80  unsigned int edx;  (local, in cpu_has_pkeys())
    82  __cpuid_count(0x7, 0x0, eax, ebx, ecx, edx);  (in cpu_has_pkeys())
    101  unsigned int edx;  (local, in cpu_max_xsave_size())
    103  __cpuid_count(XSTATE_CPUID, 0, eax, ebx, ecx, edx);  (in cpu_max_xsave_size())
    121  unsigned int edx;  (local, in pkey_reg_xstate_offset())
    130  __cpuid_count(XSTATE_CPUID, leaf, eax, ebx, ecx, edx);  (in pkey_reg_xstate_offset())
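__read_pkey_reg() and __write_pkey_reg() wrap the RDPKRU/WRPKRU instructions: both expect ecx = 0, RDPKRU returns the PKRU register in eax and clears edx, and WRPKRU takes the new value in eax with edx also required to be 0, which is why the selftest zeroes edx explicitly. A standalone sketch of the same wrappers, assuming an assembler recent enough to know the mnemonics and a CPU with OSPKE enabled:

    /* PKRU access via inline asm.
     * rdpkru: ecx must be 0; PKRU returned in eax, edx cleared.
     * wrpkru: ecx and edx must be 0; new PKRU value taken from eax. */
    static inline unsigned int rdpkru_sketch(void)
    {
        unsigned int eax, edx, ecx = 0;
        asm volatile("rdpkru" : "=a" (eax), "=d" (edx) : "c" (ecx));
        return eax;
    }

    static inline void wrpkru_sketch(unsigned int pkru)
    {
        unsigned int ecx = 0, edx = 0;
        asm volatile("wrpkru" : : "a" (pkru), "c" (ecx), "d" (edx));
    }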
/linux/arch/x86/realmode/rm/

reboot.S
    39  xorl %edx, %edx
    99  movl %cr0, %edx
    100  andl $0x00000011, %edx
    101  orl $0x60000000, %edx
    102  movl %edx, %cr0
    104  movl %cr0, %edx
    105  testl $0x60000000, %edx /* If no cache bits -> no wbinvd */
    110  movl %edx, %cr0