/toolchain/binutils/binutils-2.25/gas/testsuite/gas/i386/ |
D | hle.s |
    8: xacquire lock adcb $100,(%ecx)
    9: lock xacquire adcb $100,(%ecx)
    10: xrelease lock adcb $100,(%ecx)
    11: lock xrelease adcb $100,(%ecx)
    12: .byte 0xf0; .byte 0xf2; adcb $100,(%ecx)
    13: .byte 0xf0; .byte 0xf3; adcb $100,(%ecx)
    14: xacquire lock addb $100,(%ecx)
    15: lock xacquire addb $100,(%ecx)
    16: xrelease lock addb $100,(%ecx)
    17: lock xrelease addb $100,(%ecx)
    [all …]
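Note: hle.s exercises both prefix orders for the HLE hints (xacquire is the 0xf2 prefix, xrelease 0xf3, lock 0xf0), plus the same byte sequences spelled out with .byte directives. A minimal standalone sketch of the legal forms, assuming a 32-bit target (as --32) and with label names of my own choosing; the hints only make sense together with lock on a read-modify-write memory operand:

        .text
hle_elide_acquire:
        xacquire lock adcb $100,(%ecx)   # 0xf2 prefix before 0xf0
        lock xacquire adcb $100,(%ecx)   # same bytes, prefixes swapped
hle_elide_release:
        xrelease lock adcb $100,(%ecx)   # 0xf3 prefix
        lock xrelease adcb $100,(%ecx)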
|
D | sse2avx.s |
    7: ldmxcsr (%ecx)
    8: stmxcsr (%ecx)
    12: cvtdq2ps (%ecx),%xmm4
    14: cvtpd2dq (%ecx),%xmm4
    16: cvtpd2ps (%ecx),%xmm4
    18: cvtps2dq (%ecx),%xmm4
    20: cvttpd2dq (%ecx),%xmm4
    22: cvttps2dq (%ecx),%xmm4
    24: movapd (%ecx),%xmm4
    26: movaps (%ecx),%xmm4
    [all …]
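Note: sse2avx.s feeds plain SSE mnemonics to the assembler, while the matching sse2avx.d patterns further down expect VEX-encoded forms (vldmxcsr, vcvtdq2ps, ...). The test is presumably assembled with gas's -msse2avx option, which re-encodes legacy SSE instructions with a VEX prefix. A rough sketch of the idea; the file name and exact invocation are illustrative, not taken from the test:

        # sse.s -- legacy SSE source
        .text
        cvtdq2ps (%ecx),%xmm4     # SSE2 encoding by default
        movaps   (%ecx),%xmm4

        # as --32 -msse2avx sse.s -o sse.o
        # objdump should then show vcvtdq2ps / vmovaps, which is what
        # the sse2avx.d patterns check for.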
|
D | avx.s |
    11: vldmxcsr (%ecx)
    12: vstmxcsr (%ecx)
    16: vmaskmovpd (%ecx),%ymm4,%ymm6
    17: vmaskmovpd %ymm4,%ymm6,(%ecx)
    18: vmaskmovps (%ecx),%ymm4,%ymm6
    19: vmaskmovps %ymm4,%ymm6,(%ecx)
    23: vpermilpd $7,(%ecx),%ymm6
    25: vpermilps $7,(%ecx),%ymm6
    27: vroundpd $7,(%ecx),%ymm6
    29: vroundps $7,(%ecx),%ymm6
    [all …]
|
D | fma.s |
    9: vfmadd132pd (%ecx),%ymm6,%ymm2
    11: vfmadd132ps (%ecx),%ymm6,%ymm2
    13: vfmadd213pd (%ecx),%ymm6,%ymm2
    15: vfmadd213ps (%ecx),%ymm6,%ymm2
    17: vfmadd231pd (%ecx),%ymm6,%ymm2
    19: vfmadd231ps (%ecx),%ymm6,%ymm2
    21: vfmaddsub132pd (%ecx),%ymm6,%ymm2
    23: vfmaddsub132ps (%ecx),%ymm6,%ymm2
    25: vfmaddsub213pd (%ecx),%ymm6,%ymm2
    27: vfmaddsub213ps (%ecx),%ymm6,%ymm2
    [all …]
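Note: the 132/213/231 suffixes encode which operands are multiplied and which is added; the test only checks encodings, so the semantics below are my reading of the Intel FMA3 definitions, stated here as a reminder. In AT&T order the operands of these lines are src3 (memory), src2, dst:

        .text
        # dst = dst  * src3 + src2
        vfmadd132pd (%ecx),%ymm6,%ymm2    # ymm2 = ymm2*(mem) + ymm6
        # dst = src2 * dst  + src3
        vfmadd213pd (%ecx),%ymm6,%ymm2    # ymm2 = ymm6*ymm2 + (mem)
        # dst = src2 * src3 + dst
        vfmadd231pd (%ecx),%ymm6,%ymm2    # ymm2 = ymm6*(mem) + ymm2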
|
D | avx256int.s |
    8: vpmovmskb %ymm4,%ecx
    24: vpshufd $7,(%ecx),%ymm6
    26: vpshufhw $7,(%ecx),%ymm6
    28: vpshuflw $7,(%ecx),%ymm6
    32: vpackssdw (%ecx),%ymm6,%ymm2
    34: vpacksswb (%ecx),%ymm6,%ymm2
    36: vpackusdw (%ecx),%ymm6,%ymm2
    38: vpackuswb (%ecx),%ymm6,%ymm2
    40: vpaddb (%ecx),%ymm6,%ymm2
    42: vpaddw (%ecx),%ymm6,%ymm2
    [all …]
|
D | avx-scalar.s |
    9: vcomisd (%ecx),%xmm4
    11: vucomisd (%ecx),%xmm4
    14: vmovsd (%ecx),%xmm4
    17: vmovsd %xmm4,(%ecx)
    21: vmovq %xmm4,(%ecx)
    22: vmovq (%ecx),%xmm4
    25: vcvtsd2si %xmm4,%ecx
    26: vcvtsd2si (%ecx),%ecx
    27: vcvttsd2si %xmm4,%ecx
    28: vcvttsd2si (%ecx),%ecx
    [all …]
|
D | hle.d |
    10: [ ]*[a-f0-9]+: f2 f0 80 11 64 xacquire lock adcb \$0x64,\(%ecx\)
    11: [ ]*[a-f0-9]+: f2 f0 80 11 64 xacquire lock adcb \$0x64,\(%ecx\)
    12: [ ]*[a-f0-9]+: f3 f0 80 11 64 xrelease lock adcb \$0x64,\(%ecx\)
    13: [ ]*[a-f0-9]+: f3 f0 80 11 64 xrelease lock adcb \$0x64,\(%ecx\)
    14: [ ]*[a-f0-9]+: f0 f2 80 11 64 lock xacquire adcb \$0x64,\(%ecx\)
    15: [ ]*[a-f0-9]+: f0 f3 80 11 64 lock xrelease adcb \$0x64,\(%ecx\)
    16: [ ]*[a-f0-9]+: f2 f0 80 01 64 xacquire lock addb \$0x64,\(%ecx\)
    17: [ ]*[a-f0-9]+: f2 f0 80 01 64 xacquire lock addb \$0x64,\(%ecx\)
    18: [ ]*[a-f0-9]+: f3 f0 80 01 64 xrelease lock addb \$0x64,\(%ecx\)
    19: [ ]*[a-f0-9]+: f3 f0 80 01 64 xrelease lock addb \$0x64,\(%ecx\)
    [all …]
|
D | sse4_1.s |
    5: blendpd $0,(%ecx),%xmm0
    7: blendps $0,(%ecx),%xmm0
    9: blendvpd %xmm0,(%ecx),%xmm0
    11: blendvpd (%ecx),%xmm0
    13: blendvps %xmm0,(%ecx),%xmm0
    15: blendvps (%ecx),%xmm0
    17: dppd $0,(%ecx),%xmm0
    19: dpps $0,(%ecx),%xmm0
    21: extractps $0,%xmm0,%ecx
    22: extractps $0,%xmm0,(%ecx)
    [all …]
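Note: blendvpd and blendvps take %xmm0 as an implicit blend mask, and the test apparently accepts both spellings; the sse4_1.d pattern (%xmm0,)?\(%ecx\) further down matches either disassembly. A small sketch of the two equivalent forms (register choices mirror the test):

        .text
        blendvpd %xmm0,(%ecx),%xmm0   # mask %xmm0 written explicitly
        blendvpd (%ecx),%xmm0         # implicit %xmm0 mask, same instruction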
|
D | inval-reg.s |
    6: imul %bx,%ecx
    7: imul $10,%bx,%ecx
    8: imul $0x200,%bx,%ecx
    10: shld $0x90, %bx,%ecx
    11: shld %cl, %bx,%ecx
    12: shld %bx,%ecx
    14: shrd $0x90, %bx,%ecx
    15: shrd %cl, %bx,%ecx
    16: shrd %bx,%ecx
    18: bsf %bx,%ecx
    [all …]
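Note: inval-reg.s deliberately mixes a 16-bit source (%bx) with a 32-bit destination (%ecx), which the assembler rejects; the inval-reg.l entry below lists the same lines as they appear in the error listing. For contrast, a sketch of size-consistent forms that should assemble cleanly (these examples are mine, not part of the test):

        .text                   # assemble with: as --32
        imul %bx,%cx            # 16-bit * 16-bit
        imul %ebx,%ecx          # 32-bit * 32-bit
        shld %cl,%ebx,%ecx      # count in %cl, both data operands 32-bit
        bsf  %ebx,%ecx          # matching operand sizes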
|
D | hle-intel.d |
    11: [ ]*[a-f0-9]+: f2 f0 80 11 64 xacquire lock adc BYTE PTR \[ecx\],0x64
    12: [ ]*[a-f0-9]+: f2 f0 80 11 64 xacquire lock adc BYTE PTR \[ecx\],0x64
    13: [ ]*[a-f0-9]+: f3 f0 80 11 64 xrelease lock adc BYTE PTR \[ecx\],0x64
    14: [ ]*[a-f0-9]+: f3 f0 80 11 64 xrelease lock adc BYTE PTR \[ecx\],0x64
    15: [ ]*[a-f0-9]+: f0 f2 80 11 64 lock xacquire adc BYTE PTR \[ecx\],0x64
    16: [ ]*[a-f0-9]+: f0 f3 80 11 64 lock xrelease adc BYTE PTR \[ecx\],0x64
    17: [ ]*[a-f0-9]+: f2 f0 80 01 64 xacquire lock add BYTE PTR \[ecx\],0x64
    18: [ ]*[a-f0-9]+: f2 f0 80 01 64 xacquire lock add BYTE PTR \[ecx\],0x64
    19: [ ]*[a-f0-9]+: f3 f0 80 01 64 xrelease lock add BYTE PTR \[ecx\],0x64
    20: [ ]*[a-f0-9]+: f3 f0 80 01 64 xrelease lock add BYTE PTR \[ecx\],0x64
    [all …]
|
D | avx2.s |
    9: vpmaskmovd (%ecx),%ymm4,%ymm6
    10: vpmaskmovd %ymm4,%ymm6,(%ecx)
    11: vpmaskmovq (%ecx),%ymm4,%ymm6
    12: vpmaskmovq %ymm4,%ymm6,(%ecx)
    16: vpermpd $7,(%ecx),%ymm6
    18: vpermq $7,(%ecx),%ymm6
    22: vpermd (%ecx),%ymm6,%ymm2
    24: vpermps (%ecx),%ymm6,%ymm2
    26: vpsllvd (%ecx),%ymm6,%ymm2
    28: vpsllvq (%ecx),%ymm6,%ymm2
    [all …]
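Note: vpsllvd/vpsllvq are the AVX2 per-element variable shifts. The test only checks their encoding; to my understanding of the architectural definition, each lane of the second source is shifted by the count in the corresponding lane of the memory operand. A rough illustration with the same register choices as the test:

        .text
        vpsllvd (%ecx),%ymm6,%ymm2   # per 32-bit lane: ymm2[i] = ymm6[i] << mem[i]
        vpsllvq (%ecx),%ymm6,%ymm2   # same idea over 64-bit lanes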
|
D | inval-reg.l |
    59: [ ]*6[ ]+imul %bx,%ecx
    60: [ ]*7[ ]+imul \$10,%bx,%ecx
    61: [ ]*8[ ]+imul \$0x200,%bx,%ecx
    63: [ ]*10[ ]+shld \$0x90, %bx,%ecx
    64: [ ]*11[ ]+shld %cl, %bx,%ecx
    65: [ ]*12[ ]+shld %bx,%ecx
    67: [ ]*14[ ]+shrd \$0x90, %bx,%ecx
    68: [ ]*15[ ]+shrd %cl, %bx,%ecx
    69: [ ]*16[ ]+shrd %bx,%ecx
    71: [ ]*18[ ]+bsf %bx,%ecx
    [all …]
|
D | hlebad.s |
    38: xacquire adcb $100,(%ecx)
    39: xrelease adcb $100,(%ecx)
    48: xacquire adcw $1000,(%ecx)
    49: xrelease adcw $1000,(%ecx)
    52: xacquire adcl $10000000,%ecx
    53: xacquire lock adcl $10000000,%ecx
    54: lock xacquire adcl $10000000,%ecx
    55: xrelease adcl $10000000,%ecx
    56: xrelease lock adcl $10000000,%ecx
    57: lock xrelease adcl $10000000,%ecx
    [all …]
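Note: hlebad.s collects forms that the assembler is expected to diagnose: xacquire/xrelease without lock (lines 38-49), and, from line 52 on, HLE prefixes on an instruction whose destination is a register rather than memory. The corresponding diagnostics show up in the hlebad.l entry below. For contrast, a form that should be accepted silently (same instruction, memory destination plus lock); the comment lines are my annotation:

        .text
        lock xacquire adcl $10000000,(%ecx)   # memory destination: accepted
        # lock xacquire adcl $10000000,%ecx   # register destination: diagnosed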
|
D | fma-scalar.s |
    9: vfmadd132sd (%ecx),%xmm6,%xmm2
    11: vfmadd213sd (%ecx),%xmm6,%xmm2
    13: vfmadd231sd (%ecx),%xmm6,%xmm2
    15: vfmsub132sd (%ecx),%xmm6,%xmm2
    17: vfmsub213sd (%ecx),%xmm6,%xmm2
    19: vfmsub231sd (%ecx),%xmm6,%xmm2
    21: vfnmadd132sd (%ecx),%xmm6,%xmm2
    23: vfnmadd213sd (%ecx),%xmm6,%xmm2
    25: vfnmadd231sd (%ecx),%xmm6,%xmm2
    27: vfnmsub132sd (%ecx),%xmm6,%xmm2
    [all …]
|
D | fma4.s |
    8: vfmaddpd (%ecx),%ymm6,%ymm2,%ymm7
    10: vfmaddps (%ecx),%ymm6,%ymm2,%ymm7
    12: vfmaddps %xmm7,0x80(%ecx,%eax,4),%xmm6,%xmm1
    14: vfmaddsubpd (%ecx),%ymm6,%ymm2,%ymm7
    16: vfmaddsubps (%ecx),%ymm6,%ymm2,%ymm7
    18: vfmsubaddpd (%ecx),%ymm6,%ymm2,%ymm7
    20: vfmsubaddps (%ecx),%ymm6,%ymm2,%ymm7
    22: vfmsubpd (%ecx),%ymm6,%ymm2,%ymm7
    24: vfmsubps (%ecx),%ymm6,%ymm2,%ymm7
    26: vfmaddpd (%ecx),%xmm6,%xmm2,%xmm7
    [all …]
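Note: unlike the three-operand FMA3 forms in fma.s, these AMD FMA4 mnemonics take four operands, so the destination can be a register separate from all three sources. The test only checks encoding; the semantics below are my hedged reading of the FMA4 definition in AT&T operand order:

        .text
        # ymm7 = ymm2 * ymm6 + (mem), with ymm7 untouched as an input
        vfmaddpd (%ecx),%ymm6,%ymm2,%ymm7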
|
D | hlebad.l |
    394: [ ]*38[ ]+xacquire adcb \$100,\(%ecx\)
    395: [ ]*39[ ]+xrelease adcb \$100,\(%ecx\)
    404: [ ]*48[ ]+xacquire adcw \$1000,\(%ecx\)
    405: [ ]*49[ ]+xrelease adcw \$1000,\(%ecx\)
    408: [ ]*52[ ]+xacquire adcl \$10000000,%ecx
    409: [ ]*53[ ]+xacquire lock adcl \$10000000,%ecx
    410: [ ]*54[ ]+lock xacquire adcl \$10000000,%ecx
    411: [ ]*55[ ]+xrelease adcl \$10000000,%ecx
    412: [ ]*56[ ]+xrelease lock adcl \$10000000,%ecx
    413: [ ]*57[ ]+lock xrelease adcl \$10000000,%ecx
    [all …]
|
D | sse4_1.d |
    9: [ ]*[0-9a-f]+: 66 0f 3a 0d 01 00 blendpd \$0x0,\(%ecx\),%xmm0
    11: [ ]*[0-9a-f]+: 66 0f 3a 0c 01 00 blendps \$0x0,\(%ecx\),%xmm0
    13: [ ]*[0-9a-f]+: 66 0f 38 15 01 blendvpd (%xmm0,)?\(%ecx\),%xmm0
    15: [ ]*[0-9a-f]+: 66 0f 38 15 01 blendvpd (%xmm0,)?\(%ecx\),%xmm0
    17: [ ]*[0-9a-f]+: 66 0f 38 14 01 blendvps (%xmm0,)?\(%ecx\),%xmm0
    19: [ ]*[0-9a-f]+: 66 0f 38 14 01 blendvps (%xmm0,)?\(%ecx\),%xmm0
    21: [ ]*[0-9a-f]+: 66 0f 3a 41 01 00 dppd \$0x0,\(%ecx\),%xmm0
    23: [ ]*[0-9a-f]+: 66 0f 3a 40 01 00 dpps \$0x0,\(%ecx\),%xmm0
    25: [ ]*[0-9a-f]+: 66 0f 3a 17 c1 00 extractps \$0x0,%xmm0,%ecx
    26: [ ]*[0-9a-f]+: 66 0f 3a 17 01 00 extractps \$0x0,%xmm0,\(%ecx\)
    [all …]
|
D | sse2avx.d |
    10: [ ]*[a-f0-9]+: c5 f8 ae 11 vldmxcsr \(%ecx\)
    11: [ ]*[a-f0-9]+: c5 f8 ae 19 vstmxcsr \(%ecx\)
    13: [ ]*[a-f0-9]+: c5 f8 5b 21 vcvtdq2ps \(%ecx\),%xmm4
    15: [ ]*[a-f0-9]+: c5 fb e6 21 vcvtpd2dqx \(%ecx\),%xmm4
    17: [ ]*[a-f0-9]+: c5 f9 5a 21 vcvtpd2psx \(%ecx\),%xmm4
    19: [ ]*[a-f0-9]+: c5 f9 5b 21 vcvtps2dq \(%ecx\),%xmm4
    21: [ ]*[a-f0-9]+: c5 f9 e6 21 vcvttpd2dqx \(%ecx\),%xmm4
    23: [ ]*[a-f0-9]+: c5 fa 5b 21 vcvttps2dq \(%ecx\),%xmm4
    25: [ ]*[a-f0-9]+: c5 f9 28 21 vmovapd \(%ecx\),%xmm4
    27: [ ]*[a-f0-9]+: c5 f8 28 21 vmovaps \(%ecx\),%xmm4
    [all …]
|
D | avx.d |
    11: [ ]*[a-f0-9]+: c5 f8 ae 11 vldmxcsr \(%ecx\)
    12: [ ]*[a-f0-9]+: c5 f8 ae 19 vstmxcsr \(%ecx\)
    13: [ ]*[a-f0-9]+: c4 e2 5d 2d 31 vmaskmovpd \(%ecx\),%ymm4,%ymm6
    14: [ ]*[a-f0-9]+: c4 e2 4d 2f 21 vmaskmovpd %ymm4,%ymm6,\(%ecx\)
    15: [ ]*[a-f0-9]+: c4 e2 5d 2c 31 vmaskmovps \(%ecx\),%ymm4,%ymm6
    16: [ ]*[a-f0-9]+: c4 e2 4d 2e 21 vmaskmovps %ymm4,%ymm6,\(%ecx\)
    18: [ ]*[a-f0-9]+: c4 e3 7d 05 31 07 vpermilpd \$0x7,\(%ecx\),%ymm6
    20: [ ]*[a-f0-9]+: c4 e3 7d 04 31 07 vpermilps \$0x7,\(%ecx\),%ymm6
    22: [ ]*[a-f0-9]+: c4 e3 7d 09 31 07 vroundpd \$0x7,\(%ecx\),%ymm6
    24: [ ]*[a-f0-9]+: c4 e3 7d 08 31 07 vroundps \$0x7,\(%ecx\),%ymm6
    [all …]
|
D | mpx.s |
    9: bndmk (%eax,%ecx), %bnd1
    10: bndmk (,%ecx,1), %bnd1
    11: bndmk 0x3(%ecx,%eax,1), %bnd1
    19: bndmov 0x3(%ecx,%eax,1), %bnd1
    27: bndmov %bnd1, 0x3(%ecx,%eax,1)
    31: bndcl (%ecx), %bnd1
    32: bndcl %ecx, %bnd1
    35: bndcl (%eax,%ecx), %bnd1
    36: bndcl (,%ecx,1), %bnd1
    37: bndcl 0x3(%ecx,%eax,1), %bnd1
    [all …]
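Note: the MPX test touches the three bounds operations that can involve %ecx in an address: bndmk builds a bounds register from an effective address, bndmov spills and reloads it, and bndcl checks an address against the lower bound. A short sketch in the same syntax; the semantics in the comments are paraphrased from the MPX description, not from the test:

        .text
        bndmk  0x3(%ecx,%eax,1), %bnd1    # derive a lower/upper bound pair from the EA
        bndmov %bnd1, 0x3(%ecx,%eax,1)    # store the bounds pair to memory
        bndmov 0x3(%ecx,%eax,1), %bnd1    # and load it back
        bndcl  (%ecx), %bnd1              # check the address against the lower bound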
|
D | avx256int.d |
    10: [ ]*[a-f0-9]+: c5 fd d7 cc vpmovmskb %ymm4,%ecx
    22: [ ]*[a-f0-9]+: c5 fd 70 31 07 vpshufd \$0x7,\(%ecx\),%ymm6
    24: [ ]*[a-f0-9]+: c5 fe 70 31 07 vpshufhw \$0x7,\(%ecx\),%ymm6
    26: [ ]*[a-f0-9]+: c5 ff 70 31 07 vpshuflw \$0x7,\(%ecx\),%ymm6
    28: [ ]*[a-f0-9]+: c5 cd 6b 11 vpackssdw \(%ecx\),%ymm6,%ymm2
    30: [ ]*[a-f0-9]+: c5 cd 63 11 vpacksswb \(%ecx\),%ymm6,%ymm2
    32: [ ]*[a-f0-9]+: c4 e2 4d 2b 11 vpackusdw \(%ecx\),%ymm6,%ymm2
    34: [ ]*[a-f0-9]+: c5 cd 67 11 vpackuswb \(%ecx\),%ymm6,%ymm2
    36: [ ]*[a-f0-9]+: c5 cd fc 11 vpaddb \(%ecx\),%ymm6,%ymm2
    38: [ ]*[a-f0-9]+: c5 cd fd 11 vpaddw \(%ecx\),%ymm6,%ymm2
    [all …]
|
D | avx2.d |
    10: [ ]*[a-f0-9]+: c4 e2 5d 8c 31 vpmaskmovd \(%ecx\),%ymm4,%ymm6
    11: [ ]*[a-f0-9]+: c4 e2 4d 8e 21 vpmaskmovd %ymm4,%ymm6,\(%ecx\)
    12: [ ]*[a-f0-9]+: c4 e2 dd 8c 31 vpmaskmovq \(%ecx\),%ymm4,%ymm6
    13: [ ]*[a-f0-9]+: c4 e2 cd 8e 21 vpmaskmovq %ymm4,%ymm6,\(%ecx\)
    15: [ ]*[a-f0-9]+: c4 e3 fd 01 31 07 vpermpd \$0x7,\(%ecx\),%ymm6
    17: [ ]*[a-f0-9]+: c4 e3 fd 00 31 07 vpermq \$0x7,\(%ecx\),%ymm6
    19: [ ]*[a-f0-9]+: c4 e2 4d 36 11 vpermd \(%ecx\),%ymm6,%ymm2
    21: [ ]*[a-f0-9]+: c4 e2 4d 16 11 vpermps \(%ecx\),%ymm6,%ymm2
    23: [ ]*[a-f0-9]+: c4 e2 4d 47 11 vpsllvd \(%ecx\),%ymm6,%ymm2
    25: [ ]*[a-f0-9]+: c4 e2 cd 47 11 vpsllvq \(%ecx\),%ymm6,%ymm2
    [all …]
|
D | avx-scalar.d |
    12: [ ]*[a-f0-9]+: c5 fd 2f 21 vcomisd \(%ecx\),%xmm4
    14: [ ]*[a-f0-9]+: c5 fd 2e 21 vucomisd \(%ecx\),%xmm4
    15: [ ]*[a-f0-9]+: c5 ff 10 21 vmovsd \(%ecx\),%xmm4
    16: [ ]*[a-f0-9]+: c5 ff 11 21 vmovsd %xmm4,\(%ecx\)
    17: [ ]*[a-f0-9]+: c5 fd d6 21 vmovq %xmm4,\(%ecx\)
    18: [ ]*[a-f0-9]+: c5 fe 7e 21 vmovq \(%ecx\),%xmm4
    19: [ ]*[a-f0-9]+: c5 ff 2d cc vcvtsd2si %xmm4,%ecx
    20: [ ]*[a-f0-9]+: c5 ff 2d 09 vcvtsd2si \(%ecx\),%ecx
    21: [ ]*[a-f0-9]+: c5 ff 2c cc vcvttsd2si %xmm4,%ecx
    22: [ ]*[a-f0-9]+: c5 ff 2c 09 vcvttsd2si \(%ecx\),%ecx
    [all …]
|
D | ssse3.s |
    5: phaddw (%ecx),%mm0
    7: phaddw (%ecx),%xmm0
    9: phaddd (%ecx),%mm0
    11: phaddd (%ecx),%xmm0
    13: phaddsw (%ecx),%mm0
    15: phaddsw (%ecx),%xmm0
    17: phsubw (%ecx),%mm0
    19: phsubw (%ecx),%xmm0
    21: phsubd (%ecx),%mm0
    23: phsubd (%ecx),%xmm0
    [all …]
|
D | sse4_2.s |
    7: crc32 %ecx,%ebx
    8: crc32b (%ecx),%ebx
    9: crc32w (%ecx),%ebx
    10: crc32l (%ecx),%ebx
    13: crc32l %ecx,%ebx
    14: pcmpgtq (%ecx),%xmm0
    16: pcmpestri $0x0,(%ecx),%xmm0
    18: pcmpestrm $0x1,(%ecx),%xmm0
    20: pcmpistri $0x2,(%ecx),%xmm0
    22: pcmpistrm $0x3,(%ecx),%xmm0
    [all …]
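Note: crc32 accumulates a CRC-32C value in the destination register, and the b/w/l suffixes select the width of the memory source, which is presumably why the test spells all three against the same (%ecx) operand. A minimal sketch of the accumulation pattern, using the same registers as the test:

        .text
        crc32b (%ecx),%ebx     # fold in 8 bits from memory
        crc32w (%ecx),%ebx     # fold in 16 bits
        crc32l (%ecx),%ebx     # fold in 32 bits
        crc32  %ecx,%ebx       # register source; width taken from %ecx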
|