/illumos-gate/usr/src/test/util-tests/tests/dis/i386/ |
H A D | 64.vbmi.s |
    29  vpermb (%rax), %xmm1, %xmm2
    30  vpermb 0x10(%rax), %xmm11, %xmm12{%k1}
    31  vpermb 0x12345(%rax), %xmm11, %xmm22{%k2}{z}
    32  vpermb (%rax,%rbx,4), %xmm1, %xmm2
    33  vpermb 0x14(%rax,%rcx,8), %xmm11, %xmm12{%k1}
    38  vpermb (%rax), %ymm1, %ymm2
    39  vpermb 0x10(%rax), %ymm11, %ymm12{%k1}
    40  vpermb 0x12345(%rax), %ymm11, %ymm22{%k2}{z}
    41  vpermb (%rax,%rbx,4), %ymm1, %ymm2
    42  vpermb 0x14(%rax,%rcx,8), %ymm11, %ymm12{%k1}
    [all …]
|
H A D | 64.bmi1.s |
    26  andn (%rax), %ebx, %edx
    27  andn 0x40(%rax), %ebx, %edx
    29  bextr %ebx, (%rax), %edx
    30  bextr %ebx, 0x40(%rax), %edx
    32  blsi (%rax), %edx
    33  blsi 0x40(%rax), %edx
    35  blsmsk (%rax), %edx
    36  blsmsk 0x40(%rax), %edx
    38  blsr (%rax), %edx
    39  blsr 0x40(%rax), %edx
    [all …]
|
H A D | 64.ssse3.s |
    26  pabsb (%rax), %mm1
    28  pabsb (%rax), %xmm1
    30  pabsd (%rax), %mm1
    32  pabsd (%rax), %xmm1
    34  pabsw (%rax), %mm1
    36  pabsw (%rax), %xmm1
    38  palignr $0x23, (%rax), %mm1
    40  palignr $0x23, (%rax), %xmm1
    42  phaddd (%rax), %mm1
    44  phaddd (%rax), %xmm1
    [all …]
|
H A D | 64.fma-pd.s |
    26  vfmadd132pd (%rax), %xmm1, %xmm2
    28  vfmadd132pd (%rax), %ymm1, %ymm2
    30  vfmadd213pd (%rax), %xmm1, %xmm2
    32  vfmadd213pd (%rax), %ymm1, %ymm2
    34  vfmadd231pd (%rax), %xmm1, %xmm2
    36  vfmadd231pd (%rax), %ymm1, %ymm2
    38  vfmaddsub132pd (%rax), %xmm1, %xmm2
    40  vfmaddsub132pd (%rax), %ymm1, %ymm2
    42  vfmaddsub213pd (%rax), %xmm1, %xmm2
    44  vfmaddsub213pd (%rax), %ymm1, %ymm2
    [all …]
|
H A D | 64.fma-ps.s |
    26  vfmadd132ps (%rax), %xmm1, %xmm2
    28  vfmadd132ps (%rax), %ymm1, %ymm2
    30  vfmadd213ps (%rax), %xmm1, %xmm2
    32  vfmadd213ps (%rax), %ymm1, %ymm2
    34  vfmadd231ps (%rax), %xmm1, %xmm2
    36  vfmadd231ps (%rax), %ymm1, %ymm2
    38  vfmaddsub132ps (%rax), %xmm1, %xmm2
    40  vfmaddsub132ps (%rax), %ymm1, %ymm2
    42  vfmaddsub213ps (%rax), %xmm1, %xmm2
    44  vfmaddsub213ps (%rax), %ymm1, %ymm2
    [all …]
|
H A D | 64.avx2.s |
    25  vbroadcasti128 (%rax), %ymm0
    29  vinserti128 $0x42, (%rax), %ymm3, %ymm4
    30  vmovntdqa (%rax), %ymm0
    32  vpblendd $0x42, (%rax), %xmm3, %xmm4
    34  vpblendd $0x42, (%rax), %ymm3, %ymm4
    36  vpbroadcastb (%rax), %xmm2
    38  vpbroadcastb (%rax), %ymm2
    40  vpbroadcastd (%rax), %xmm2
    42  vpbroadcastd (%rax), %ymm2
    44  vpbroadcastq (%rax), %xmm2
    [all …]
|
H A D | 64.vbmi2.s |
    28  vpshldw $0x42, (%rax), %xmm4, %xmm5
    29  vpshldw $0x42, 0x23(%rax), %xmm4, %xmm5{%k3}
    35  vpshldw $0x42, (%rax), %ymm4, %ymm5
    36  vpshldw $0x42, 0x23(%rax), %ymm4, %ymm5{%k3}
    42  vpshldw $0x42, (%rax), %zmm4, %zmm5
    43  vpshldw $0x42, 0x23(%rax), %zmm4, %zmm5{%k3}
    49  vpshldd $0x42, (%rax), %xmm4, %xmm5
    50  vpshldd $0x42, 0x23(%rax), %xmm4, %xmm5{%k3}
    58  vpshldd $0x42, (%rax), %ymm4, %ymm5
    59  vpshldd $0x42, 0x23(%rax), %ymm4, %ymm5{%k3}
    [all …]
|
H A D | 64.ifma.s |
    28  vpmadd52huq (%rax), %xmm16, %xmm17{%k5}{z}
    29  vpmadd52huq 0x23(%rax), %xmm18, %xmm19{%k3}
    30  vpmadd52huq 0x123(%rax,%rbx,4), %xmm3, %xmm30
    31  vpmadd52huq (%rax){1to2}, %xmm6, %xmm7
    32  vpmadd52huq 0x54(%rax){1to2}, %xmm6, %xmm7{%k4}
    37  vpmadd52huq (%rax), %ymm16, %ymm17{%k5}{z}
    38  vpmadd52huq 0x23(%rax), %ymm18, %ymm19{%k3}
    39  vpmadd52huq 0x123(%rax,%rbx,4), %ymm3, %ymm30
    40  vpmadd52huq (%rax){1to4}, %ymm6, %ymm7
    41  vpmadd52huq 0x54(%rax){1to4}, %ymm6, %ymm7{%k4}
    [all …]
|
H A D | 64.bmi1.out |
    2   libdis_test+0x5:  c4 e2 60 f2 10      andn (%rax),%ebx,%edx
    3   libdis_test+0xa:  c4 e2 60 f2 50 40   andn 0x40(%rax),%ebx,%edx
    5   libdis_test+0x15: c4 e2 60 f7 10      bextr %ebx,(%rax),%edx
    6   libdis_test+0x1a: c4 e2 60 f7 50 40   bextr %ebx,0x40(%rax),%edx
    8   libdis_test+0x25: c4 e2 68 f3 18      blsi (%rax),%edx
    9   libdis_test+0x2a: c4 e2 68 f3 58 40   blsi 0x40(%rax),%edx
    11  libdis_test+0x35: c4 e2 68 f3 10      blsmsk (%rax),%edx
    12  libdis_test+0x3a: c4 e2 68 f3 50 40   blsmsk 0x40(%rax),%edx
    14  libdis_test+0x45: c4 e2 68 f3 08      blsr (%rax),%edx
    15  libdis_test+0x4a: c4 e2 68 f3 48 40   blsr 0x40(%rax),%edx
    [all …]
|
H A D | 64.bmi2.s |
    28  mulx (%rax), %ebx, %ecx
    30  pdep (%rax), %ebx, %ecx
    32  pext (%rax), %ebx, %ecx
    34  rorx $0x3, (%rax), %ebx
    42  bzhi %rax, %rbx, %rcx
    43  bzhi %rax, (%rbx), %rcx
    44  mulx %rax, %rbx, %rcx
    45  mulx (%rax), %rbx, %rcx
    46  pdep %rax, %rbx, %rcx
    47  pdep (%rax), %rbx, %rcx
    [all …]
|
H A D | 64.vbmi.out |
    4   libdis_test+0x12: 62 f2 75 08 8d 10   vpermb (%rax),%xmm1,%xmm2
    5   libdis_test+0x18: 62 72 25 09 8d 60   vpermb 0x10(%rax),%xmm11,%xmm12{%k1}
    7   libdis_test+0x1f: 62 e2 25 8a 8d b0   vpermb 0x12345(%rax),%xmm11,%xmm22{%k2}{z}
    9   libdis_test+0x29: 62 f2 75 08 8d 14   vpermb (%rax,%rbx,4),%xmm1,%xmm2
    11  libdis_test+0x30: 62 72 25 09 8d a4   vpermb 0x14(%rax,%rcx,8),%xmm11,%xmm12{%k1}
    16  libdis_test+0x4d: 62 f2 75 28 8d 10   vpermb (%rax),%ymm1,%ymm2
    17  libdis_test+0x53: 62 72 25 29 8d a0   vpermb 0x10(%rax),%ymm11,%ymm12{%k1}
    19  libdis_test+0x5d: 62 e2 25 aa 8d b0   vpermb 0x12345(%rax),%ymm11,%ymm22{%k2}{z}
    21  libdis_test+0x67: 62 f2 75 28 8d 14   vpermb (%rax,%rbx,4),%ymm1,%ymm2
    23  libdis_test+0x6e: 62 72 25 29 8d a4   vpermb 0x14(%rax,%rcx,8),%ymm11,%ymm12{%k1}
    [all …]
|
H A D | 64.avx512cd.s |
    28  vpconflictd (%rax), %xmm6
    29  vpconflictd 0x167(%rax), %xmm16{%k3}
    30  vpconflictd -0x23(%rax,%rbx,4), %xmm17{%k4}
    38  vpconflictd (%rax), %ymm6
    39  vpconflictd 0x167(%rax), %ymm16{%k3}
    40  vpconflictd -0x23(%rax,%rbx,4), %ymm17{%k4}
    48  vpconflictd (%rax), %zmm6
    49  vpconflictd 0x167(%rax), %zmm16{%k3}
    50  vpconflictd -0x23(%rax,%rbx,4), %zmm17{%k4}
    58  vpconflictq (%rax), %xmm6
    [all …]
|
H A D | 64.ssse3.out |
    2   libdis_test+0x4:  0f 38 1c 08         pabsb (%rax),%mm1
    4   libdis_test+0xd:  66 0f 38 1c 08      pabsb (%rax),%xmm1
    6   libdis_test+0x16: 0f 38 1e 08         pabsd (%rax),%mm1
    8   libdis_test+0x1f: 66 0f 38 1e 08      pabsd (%rax),%xmm1
    10  libdis_test+0x28: 0f 38 1d 08         pabsw (%rax),%mm1
    12  libdis_test+0x31: 66 0f 38 1d 08      pabsw (%rax),%xmm1
    14  libdis_test+0x3b: 0f 3a 0f 08 23      palignr $0x23,(%rax),%mm1
    16  libdis_test+0x46: 66 0f 3a 0f 08 23   palignr $0x23,(%rax),%xmm1
    18  libdis_test+0x50: 0f 38 02 08         phaddd (%rax),%mm1
    20  libdis_test+0x59: 66 0f 38 02 08      phaddd (%rax),%xmm1
    [all …]
|
H A D | 64.avx.s |
    26  vaddpd (%rax), %xmm3, %xmm4
    33  vaddps (%rax), %xmm3, %xmm4
    40  vaddsd (%rax), %xmm3, %xmm4
    44  vaddss (%rax), %xmm3, %xmm4
    48  vaddsubpd (%rax), %xmm3, %xmm4
    55  vaddsubps (%rax), %xmm3, %xmm4
    62  vaesdec (%rax), %xmm3, %xmm4
    66  vaesdeclast (%rax), %xmm3, %xmm4
    70  vaesenc (%rax), %xmm3, %xmm4
    74  vaesenclast (%rax), %xmm3, %xmm4
    [all …]
|
H A D | 64.avx2.out |
    1   libdis_test:      c4 e2 7d 5a 00      vbroadcasti128 (%rax),%ymm0
    5   libdis_test+0x17: c4 e3 65 38 20 42   vinserti128 $0x42,(%rax),%ymm3,%ymm4
    6   libdis_test+0x1d: c4 e2 7d 2a 00      vmovntdqa (%rax),%ymm0
    8   libdis_test+0x28: c4 e3 61 02 20 42   vpblendd $0x42,(%rax),%xmm3,%xmm4
    10  libdis_test+0x34: c4 e3 65 02 20 42   vpblendd $0x42,(%rax),%ymm3,%ymm4
    12  libdis_test+0x3f: c4 e2 79 78 10      vpbroadcastb (%rax),%xmm2
    14  libdis_test+0x49: c4 e2 7d 78 10      vpbroadcastb (%rax),%ymm2
    16  libdis_test+0x53: c4 e2 79 58 10      vpbroadcastd (%rax),%xmm2
    18  libdis_test+0x5d: c4 e2 7d 58 10      vpbroadcastd (%rax),%ymm2
    20  libdis_test+0x67: c4 e2 79 59 10      vpbroadcastq (%rax),%xmm2
    [all …]
|
/illumos-gate/usr/src/common/bignum/amd64/ |
H A D | bignum_amd64_asm.S |
    238  xorq %rax, %rax          / if (len == 0) return (0)
    248  movq 0(%rsi), %rax       / rax = a[0]
    252  addq %r10, %rax
    255  addq %r9, %rax
    257  movq %rax, 0(%rdi)       / r[0] = lo(p)
    260  movq %r11, %rax
    263  addq %r10, %rax
    266  addq %r9, %rax
    268  movq %rax, 8(%rdi)       / r[1] = lo(p)
    271  movq %r11, %rax
    [all …]
|
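The comments in the bignum_amd64_asm.S hits above ("rax = a[0]", "r[0] = lo(p)", "if (len == 0) return (0)") describe a digit-by-digit multiply with carry propagation. A minimal C sketch of that loop structure follows; the function name, signature, and the use of a 128-bit intermediate are illustrative assumptions, not the file's actual entry points.

    #include <stdint.h>

    /*
     * Hedged sketch: multiply each 64-bit digit of 'a' by a single digit,
     * store the low half of each product in 'r', and return the final carry.
     * unsigned __int128 is a GCC/Clang extension used here for brevity.
     */
    uint64_t
    mul_set_vec_sketch(uint64_t *r, const uint64_t *a, int len, uint64_t digit)
    {
    	uint64_t cy = 0;
    	int i;

    	if (len == 0)
    		return (0);			/* if (len == 0) return (0) */

    	for (i = 0; i < len; i++) {
    		unsigned __int128 p;

    		p = (unsigned __int128)a[i] * digit + cy;
    		r[i] = (uint64_t)p;		/* r[i] = lo(p) */
    		cy = (uint64_t)(p >> 64);	/* carry into the next digit */
    	}
    	return (cy);
    }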
/illumos-gate/usr/src/uts/intel/kdi/ |
H A D | kdi_asm.S |
    52   movq %rbx, %rax; \
    57   addq (%rdx), %rax
    70   movq %r11, KRS_IDT(%rax); \
    72   movq %r11, KRS_GDT(%rax); \
    89   orq %rax, %rdx; \
    94   orq %rax, %rdx; \
    102  movq %rdx, %rax; \
    119  movq %rax, REG_OFF(KDIREG_RAX)(base); \
    129  movq REG_OFF(KDIREG_RIP)(base), %rax; \
    130  movq %rax, REG_OFF(KDIREG_SAVPC)(base); \
    [all …]
|
/illumos-gate/usr/src/uts/intel/ml/ |
H A D | swtch.S |
    125  movq %rax, %r14;
    140  movq T_INTR_START(thread_t), %rax; \
    148  movq %gs:CPU_THREAD, %rax
    171  movq %rsi, T_USERACC(%rax)
    198  SAVE_REGS(%rax, %r11)
    238  movq CPU_IDLE_THREAD(%r15), %rax    /* idle thread pointer */
    243  movq T_SP(%rax), %rsp               /* It is safe to set rsp */
    244  movq %rax, CPU_THREAD(%r15)
    311  leaq CPU_KPTI_TR_RSP(%r13), %rax
    314  movq T_STACK(%r12), %rax
    [all …]
|
H A D | ddi_i86_asm.S |
    42  xorq %rax, %rax
    48  movzbq (%rsi), %rax
    51  movq ACC_GETB(%rdi), %rax
    52  INDIRECT_JMP_REG(rax)
    65  xorq %rax, %rax
    71  movzwq (%rsi), %rax
    74  movq ACC_GETW(%rdi), %rax
    75  INDIRECT_JMP_REG(rax)
    96  movq ACC_GETL(%rdi), %rax
    97  INDIRECT_JMP_REG(rax)
    [all …]
|
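The ddi_i86_asm.S hits show two paths through the DDI access routines: a direct load from the target address (movzbq/movzwq) or an indirect jump through the access function stored in the handle (the ACC_GETB/ACC_GETW/ACC_GETL slots). A rough C sketch of that dispatch, with hypothetical structure and member names rather than the real ddi_acc_impl_t layout:

    #include <stdint.h>

    /* Illustrative handle type; member names are made up for this sketch. */
    typedef struct acc_handle acc_handle_t;
    struct acc_handle {
    	int	ah_direct;				/* directly mapped? */
    	uint8_t	(*ah_get8)(acc_handle_t *, uint8_t *);	/* ACC_GETB-style slot */
    };

    uint8_t
    get8_sketch(acc_handle_t *hdl, uint8_t *addr)
    {
    	if (hdl->ah_direct)
    		return (*addr);			/* direct load, like movzbq (%rsi), %rax */
    	return (hdl->ah_get8(hdl, addr));	/* indirect call through the handle */
    }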
H A D | copy.S |
    197  movq (src, cnt, 8), %rax; \
    199  movnti %rax, (dst, cnt, 8); \
    953  L(P0Q9): mov %rax, -0x48(%rdi)
    954  L(P0Q8): mov %rax, -0x40(%rdi)
    955  L(P0Q7): mov %rax, -0x38(%rdi)
    956  L(P0Q6): mov %rax, -0x30(%rdi)
    957  L(P0Q5): mov %rax, -0x28(%rdi)
    958  L(P0Q4): mov %rax, -0x20(%rdi)
    959  L(P0Q3): mov %rax, -0x18(%rdi)
    960  L(P0Q2): mov %rax, -0x10(%rdi)
    [all …]
|
/illumos-gate/usr/src/boot/libsa/amd64/ |
H A D | _setjmp.S |
    50  movq %rdi,%rax
    52  movq %rdx, 0(%rax)   /* 0; retval */
    53  movq %rbx, 8(%rax)   /* 1; rbx */
    54  movq %rsp,16(%rax)   /* 2; rsp */
    55  movq %rbp,24(%rax)   /* 3; rbp */
    56  movq %r12,32(%rax)   /* 4; r12 */
    57  movq %r13,40(%rax)   /* 5; r13 */
    58  movq %r14,48(%rax)   /* 6; r14 */
    59  movq %r15,56(%rax)   /* 7; r15 */
    60  fnstcw 64(%rax)      /* 8; fpu cw */
    [all …]
|
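The _setjmp.S hits store the callee-saved state (%rbx, %rsp, %rbp, %r12-%r15, and the FPU control word) into the buffer passed in %rdi, which is the state a later longjmp restores. A small, self-contained C usage sketch using the standard <setjmp.h> interface (the boot loader's own entry points are _setjmp/_longjmp; the helper names below are made up):

    #include <setjmp.h>

    static jmp_buf env;

    static void
    fail_path(void)
    {
    	longjmp(env, 1);	/* unwind back to the setjmp() call below */
    }

    int
    example(void)
    {
    	if (setjmp(env) == 0) {
    		/* Direct return: callee-saved registers were just stored in env. */
    		fail_path();
    		return (0);	/* not reached in this sketch */
    	}
    	/* Returned via longjmp(): saved %rsp/%rbp/%rbx/%r12-%r15 are restored. */
    	return (1);
    }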
/illumos-gate/usr/src/boot/efi/loader/arch/amd64/ |
H A D | exc.S |
    37  pushq %rax
    76  movq (6*8)(%rsp),%rax    /* saved %rsp value, AKA old stack */
    77  subq (5*8),%rax
    79  movq %rdx,(%rax)
    81  movq %rdx,(1*8)(%rax)
    83  movq %rdx,(2*8)(%rax)
    85  movq %rdx,(3*8)(%rax)
    87  movq %rdx,(4*8)(%rax)
    94  subq $16,%rax
    97  movq %rdx,8(%rax)
    [all …]
|
/illumos-gate/usr/src/lib/libc/amd64/unwind/ |
H A D | unwind_frame.S |
    69  movq %rsi,%rax
    70  movq 40(%rax),%rdi    /* set handler parameters */
    71  movq 32(%rax),%rsi
    72  movq 8(%rax),%rdx
    73  movq 16(%rax),%rcx
    74  movq 24(%rax),%rbx    /* restore preserved registers */
    75  movq 96(%rax),%r12
    76  movq 104(%rax),%r13
    77  movq 112(%rax),%r14
    78  movq 120(%rax),%r15
    [all …]
|
/illumos-gate/usr/src/lib/commpage/amd64/ |
H A D | cp_subr.S |
    61   orq %rdx, %rax
    81   addq %rdx, %rax
    153  orq %rdx, %rax
    170  addq %rdx, %rax
    234  movq CP_TSC_LAST(%rdi), %rax
    236  movq %rax, (%rsp)
    245  testq %rax, %rax
    280  subq %rdx, %rax    /* delta = tsc - tsc_last */
    293  shrdq $_CONST(32 - NSEC_SHIFT), %rdx, %rax
    295  addq %r8, %rax
    [all …]
|
/illumos-gate/usr/src/lib/libc/amd64/gen/ |
H A D | memcmp.S |
    103  mov (%rsi), %rax
    104  cmp (%rdi), %rax
    138  mov (%rsi), %rax
    142  sub (%rdi), %rax
    147  or %rax, %r8
    213  mov (%rsi), %rax
    215  sub (%rdi), %rax
    217  or %r8, %rax
    225  or %r9, %rax
    228  mov 32 (%rsi), %rax
    [all …]
|
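The memcmp.S hits compare the buffers a 64-bit word at a time (mov (%rsi), %rax; cmp (%rdi), %rax) and only examine individual bytes once a difference shows up. A simplified C sketch of that word-at-a-time idea, assuming nothing about the library's actual alignment handling or return-value construction:

    #include <stddef.h>
    #include <stdint.h>
    #include <string.h>

    /*
     * Hedged sketch: scan 8 bytes per iteration, then fall back to a byte
     * loop to locate the first differing byte.  memcpy() is used to avoid
     * strict-aliasing and unaligned-load issues; this is not the optimized
     * assembly algorithm itself.
     */
    int
    memcmp_sketch(const void *s1, const void *s2, size_t n)
    {
    	const unsigned char *p1 = s1, *p2 = s2;

    	while (n >= 8) {
    		uint64_t w1, w2;

    		(void) memcpy(&w1, p1, 8);
    		(void) memcpy(&w2, p2, 8);
    		if (w1 != w2)
    			break;		/* differing byte is somewhere in this word */
    		p1 += 8;
    		p2 += 8;
    		n -= 8;
    	}
    	while (n-- > 0) {		/* byte loop pins down the exact difference */
    		if (*p1 != *p2)
    			return (*p1 < *p2 ? -1 : 1);
    		p1++;
    		p2++;
    	}
    	return (0);
    }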