
Searched for refs:movb (results 1 – 25 of 414), sorted by relevance

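For readers skimming the matches below: every hit is the x86 byte-sized move, movb, written in AT&T syntax, which copies an 8-bit immediate, register, or memory operand. The snippet below is a minimal illustration of the operand forms that recur in these results; it is not taken from any of the indexed files, and the label name is made up.

	# Illustrative only: common movb operand forms, GNU as / AT&T syntax, x86-64.
	.text
	.globl	movb_examples
movb_examples:
	movb	$0x12, (%rdi)        # byte immediate to memory
	movb	%al, 2(%rdi)         # low byte of %rax to memory at offset 2
	movb	-8(%rsp), %al        # byte load from the stack into %al
	movb	%sil, (%rdi)         # %sil is the byte form of %rsi (REX-encoded)
	ret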

/external/llvm/test/CodeGen/X86/
vector-compare-results.ll
403 ; SSE2-NEXT: movb -{{[0-9]+}}(%rsp), %al
405 ; SSE2-NEXT: movb %al, 2(%rdi)
407 ; SSE2-NEXT: movb -{{[0-9]+}}(%rsp), %al
409 ; SSE2-NEXT: movb %al, (%rdi)
410 ; SSE2-NEXT: movb -{{[0-9]+}}(%rsp), %al
412 ; SSE2-NEXT: movb %al, 2(%rdi)
413 ; SSE2-NEXT: movb -{{[0-9]+}}(%rsp), %al
415 ; SSE2-NEXT: movb %al, (%rdi)
416 ; SSE2-NEXT: movb -{{[0-9]+}}(%rsp), %al
418 ; SSE2-NEXT: movb %al, 2(%rdi)
[all …]
no-sse2-avg.ll
8 ; CHECK-NEXT: movb $0, 15(%rdi)
9 ; CHECK-NEXT: movb $0, 14(%rdi)
10 ; CHECK-NEXT: movb $0, 13(%rdi)
11 ; CHECK-NEXT: movb $0, 12(%rdi)
12 ; CHECK-NEXT: movb $0, 11(%rdi)
13 ; CHECK-NEXT: movb $0, 10(%rdi)
14 ; CHECK-NEXT: movb $0, 9(%rdi)
15 ; CHECK-NEXT: movb $0, 8(%rdi)
16 ; CHECK-NEXT: movb $0, 7(%rdi)
17 ; CHECK-NEXT: movb $0, 6(%rdi)
[all …]
h-register-store.ll
3 ; X64-NEXT: movb %ah, (%rsi)
5 ; X64-NEXT: movb %ah, (%rsi)
7 ; X64-NEXT: movb %ah, (%rsi)
12 ; X32-NEXT: movb %ah, (%esi)
14 ; X32-NEXT: movb %ah, (%esi)
16 ; X32-NEXT: movb %ah, (%esi)
21 ; W64: movb %ch, (%rdx)
23 ; W64: movb %ch, (%rdx)
25 ; W64: movb %ch, (%rdx)
30 ; X86: movb %ah, (%e
[all …]
atomic8.ll
160 ; X64: movb
161 ; X64: movb
166 ; X32: movb
167 ; X32: movb
180 ; X64: movb
181 ; X64: movb
186 ; X32: movb
187 ; X32: movb
200 ; X64: movb
201 ; X64: movb
[all …]
avx512vl-intrinsics-fast-isel.ll
29 ; X32-NEXT: movb {{[0-9]+}}(%esp), %al
31 ; X32-NEXT: movb %al, (%esp)
41 ; X64-NEXT: movb %dil, -{{[0-9]+}}(%rsp)
62 ; X32-NEXT: movb {{[0-9]+}}(%esp), %al
64 ; X32-NEXT: movb %al, (%esp)
74 ; X64-NEXT: movb %dil, -{{[0-9]+}}(%rsp)
107 ; X32-NEXT: movb {{[0-9]+}}(%esp), %al
129 ; X32-NEXT: movb {{[0-9]+}}(%esp), %al
167 ; X32-NEXT: movb {{[0-9]+}}(%esp), %al
169 ; X32-NEXT: movb %al, {{[0-9]+}}(%esp)
[all …]
store-narrow.ll
18 ; X64: movb %sil, (%rdi)
21 ; X32: movb 8(%esp), %al
22 ; X32: movb %al, (%{{.*}})
35 ; X64: movb %sil, 1(%rdi)
38 ; X32: movb 8(%esp), %[[REG:[abcd]]]l
39 ; X32: movb %[[REG]]l, 1(%{{.*}})
105 ; X64: movb %sil, 5(%rdi)
109 ; X32: movb 8(%esp), %[[REG:[abcd]l]]
110 ; X32: movb %[[REG]], 5(%{{.*}})
124 ; X64: movb %sil, 5(%rdi)
[all …]
/external/swiftshader/third_party/llvm-7.0/llvm/test/CodeGen/X86/
avx512-load-trunc-store-i1.ll
8 ; AVX512-ALL-NEXT: movb (%rdi), %al
9 ; AVX512-ALL-NEXT: movb %al, (%rsi)
14 ; AVX512-ONLY-NEXT: movb (%rdi), %al
15 ; AVX512-ONLY-NEXT: movb %al, (%rsi)
25 ; AVX512-ALL-NEXT: movb (%rdi), %al
26 ; AVX512-ALL-NEXT: movb %al, (%rsi)
31 ; AVX512-ONLY-NEXT: movb (%rdi), %al
32 ; AVX512-ONLY-NEXT: movb %al, (%rsi)
42 ; AVX512-ALL-NEXT: movb (%rdi), %al
43 ; AVX512-ALL-NEXT: movb %al, (%rsi)
[all …]
unfold-masked-merge-vector-variablemask.ll
111 ; CHECK-BASELINE-NEXT: movb {{[0-9]+}}(%rsp), %r10b
112 ; CHECK-BASELINE-NEXT: movb {{[0-9]+}}(%rsp), %r11b
113 ; CHECK-BASELINE-NEXT: movb {{[0-9]+}}(%rsp), %al
114 ; CHECK-BASELINE-NEXT: movb {{[0-9]+}}(%rsp), %bl
131 ; CHECK-BASELINE-NEXT: movb %bl, 3(%rdi)
132 ; CHECK-BASELINE-NEXT: movb %al, 2(%rdi)
133 ; CHECK-BASELINE-NEXT: movb %r11b, 1(%rdi)
134 ; CHECK-BASELINE-NEXT: movb %r10b, (%rdi)
142 ; CHECK-SSE1-NEXT: movb {{[0-9]+}}(%rsp), %r10b
143 ; CHECK-SSE1-NEXT: movb {{[0-9]+}}(%rsp), %r11b
[all …]
h-register-store.ll
3 ; X64-NEXT: movb %ah, (%rsi)
5 ; X64-NEXT: movb %ah, (%rsi)
7 ; X64-NEXT: movb %ah, (%rsi)
12 ; X32-NEXT: movb %ah, (%esi)
14 ; X32-NEXT: movb %ah, (%esi)
16 ; X32-NEXT: movb %ah, (%esi)
21 ; W64: movb %ch, (%rdx)
23 ; W64: movb %ch, (%rdx)
25 ; W64: movb %ch, (%rdx)
30 ; X86: movb %ah, (%e
[all …]
avoid-sfb-overlaps.ll
26 ; CHECK-NEXT: movb $0, -1(%rdi)
33 ; CHECK-NEXT: movb -2(%rdi), %al
34 ; CHECK-NEXT: movb %al, 30(%rdi)
35 ; CHECK-NEXT: movb -1(%rdi), %al
36 ; CHECK-NEXT: movb %al, 31(%rdi)
47 ; DISABLED-NEXT: movb $0, -1(%rdi)
64 ; CHECK-AVX2-NEXT: movb $0, -1(%rdi)
71 ; CHECK-AVX2-NEXT: movb -2(%rdi), %al
72 ; CHECK-AVX2-NEXT: movb %al, 30(%rdi)
73 ; CHECK-AVX2-NEXT: movb -1(%rdi), %al
[all …]
atomic8.ll
160 ; X64: movb
161 ; X64: movb
166 ; X32: movb
167 ; X32: movb
180 ; X64: movb
181 ; X64: movb
186 ; X32: movb
187 ; X32: movb
200 ; X64: movb
201 ; X64: movb
[all …]
cmov-promotion.ll
9 ; CMOV-NEXT: movb $117, %al
12 ; CMOV-NEXT: movb $-19, %al
21 ; NO_CMOV-NEXT: movb $117, %al
24 ; NO_CMOV-NEXT: movb $-19, %al
38 ; CMOV-NEXT: movb $126, %al
41 ; CMOV-NEXT: movb $-1, %al
49 ; NO_CMOV-NEXT: movb $126, %al
52 ; NO_CMOV-NEXT: movb $-1, %al
65 ; CMOV-NEXT: movb $126, %al
68 ; CMOV-NEXT: movb $-1, %al
[all …]
pr32256.ll
13 ; CHECK-NEXT: movb %al, %cl
14 ; CHECK-NEXT: movb c, %dl
17 ; CHECK-NEXT: movb %cl, (%esp) # 1-byte Spill
22 ; CHECK-NEXT: movb %al, %cl
23 ; CHECK-NEXT: movb %cl, (%esp) # 1-byte Spill
26 ; CHECK-NEXT: movb (%esp), %al # 1-byte Reload
28 ; CHECK-NEXT: movb %al, {{[0-9]+}}(%esp)
pr32241.ll
14 ; CHECK-NEXT: movb $1, %cl
16 ; CHECK-NEXT: movb %cl, {{[0-9]+}}(%esp) # 1-byte Spill
20 ; CHECK-NEXT: movb %al, %cl
21 ; CHECK-NEXT: movb %cl, {{[0-9]+}}(%esp) # 1-byte Spill
24 ; CHECK-NEXT: movb {{[0-9]+}}(%esp), %al # 1-byte Reload
34 ; CHECK-NEXT: movb $1, %al
35 ; CHECK-NEXT: movb %al, {{[0-9]+}}(%esp) # 1-byte Spill
39 ; CHECK-NEXT: movb %al, %cl
40 ; CHECK-NEXT: movb %cl, {{[0-9]+}}(%esp) # 1-byte Spill
43 ; CHECK-NEXT: movb {{[0-9]+}}(%esp), %al # 1-byte Reload
store-narrow.ll
17 ; X64: movb %sil, (%rdi)
20 ; X32: movb 8(%esp), %al
21 ; X32: movb %al, (%{{.*}})
34 ; X64: movb %sil, 1(%rdi)
37 ; X32: movb 8(%esp), %[[REG:[abcd]]]l
38 ; X32: movb %[[REG]]l, 1(%{{.*}})
104 ; X64: movb %sil, 5(%rdi)
108 ; X32: movb 8(%esp), %[[REG:[abcd]l]]
109 ; X32: movb %[[REG]], 5(%{{.*}})
123 ; X64: movb %sil, 5(%rdi)
[all …]
/external/llvm/test/MC/MachO/
darwin-x86_64-reloc-offsets.s
23 movb $0x12, _d(%rip)
26 movb $0x12, _d + 1(%rip)
43 movb %al, _d(%rip)
44 movb %al, _d + 1(%rip)
60 movb $0x12, L0(%rip)
63 movb $0x12, L0 + 1(%rip)
80 movb %al, L0(%rip)
81 movb %al, L0 + 1(%rip)
89 movb $0x12, L1(%rip)
92 movb $0x12, L1 + 1(%rip)
[all …]
/external/swiftshader/third_party/llvm-7.0/llvm/test/MC/MachO/
darwin-x86_64-reloc-offsets.s
23 movb $0x12, _d(%rip)
26 movb $0x12, _d + 1(%rip)
43 movb %al, _d(%rip)
44 movb %al, _d + 1(%rip)
60 movb $0x12, L0(%rip)
63 movb $0x12, L0 + 1(%rip)
80 movb %al, L0(%rip)
81 movb %al, L0 + 1(%rip)
89 movb $0x12, L1(%rip)
92 movb $0x12, L1 + 1(%rip)
[all …]
/external/swiftshader/third_party/LLVM/test/MC/MachO/
darwin-x86_64-reloc-offsets.s
23 movb $0x12, _d(%rip)
26 movb $0x12, _d + 1(%rip)
43 movb %al, _d(%rip)
44 movb %al, _d + 1(%rip)
60 movb $0x12, L0(%rip)
63 movb $0x12, L0 + 1(%rip)
80 movb %al, L0(%rip)
81 movb %al, L0 + 1(%rip)
89 movb $0x12, L1(%rip)
92 movb $0x12, L1 + 1(%rip)
[all …]
/external/swiftshader/third_party/llvm-7.0/llvm/test/MC/X86/
address-size.s
4 movb $0x0, (%esi)
6 movb $0x0, (%rsi)
10 movb $0x0, (%si)
12 movb $0x0, (%esi)
22 movb $0x0, (%si)
24 movb $0x0, (%esi)
26 movb $0x5a, (%bp,%di,1)
28 movb $0x5a, (%bp,%si,1)
/external/llvm/test/MC/X86/
address-size.s
4 movb $0x0, (%esi)
6 movb $0x0, (%rsi)
10 movb $0x0, (%si)
12 movb $0x0, (%esi)
22 movb $0x0, (%si)
24 movb $0x0, (%esi)
26 movb $0x5a, (%di,%bp,1)
/external/swiftshader/third_party/LLVM/test/CodeGen/X86/
h-register-store.ll
3 ; X64-NEXT: movb %ah, (%rsi)
5 ; X64-NEXT: movb %ah, (%rsi)
7 ; X64-NEXT: movb %ah, (%rsi)
12 ; W64: movb %ch, (%rdx)
14 ; W64: movb %ch, (%rdx)
16 ; W64: movb %ch, (%rdx)
21 ; X32: movb %ah, (%e
23 ; X32: movb %ah, (%e
25 ; X32: movb %ah, (%e
store-narrow.ll
17 ; X64: movb %sil, (%rdi)
20 ; X32: movb 8(%esp), %al
21 ; X32: movb %al, (%{{.*}})
34 ; X64: movb %sil, 1(%rdi)
37 ; X32: movb 8(%esp), %al
38 ; X32: movb %al, 1(%{{.*}})
101 ; X64: movb %sil, 5(%rdi)
105 ; X32: movb 8(%esp), %al
106 ; X32: movb %al, 5(%{{.*}})
120 ; X64: movb %sil, 5(%rdi)
[all …]
/external/zlib/src/contrib/inflate86/
inffast.S
342 movb (in_r), %al
449 movb bits_r, %cl /* cl = bits, needs it for shifting */
467 movb %ah, %cl /* cl = this.bits */
502 movb %al, %cl
511 movb %cl, %ch /* stash op in ch, freeing cl */
514 movb bits_r, %cl /* cl = bits, needs it for shifting */
518 movb %ch, %cl /* move op back to ecx */
552 movb bits_r, %cl /* cl = bits, needs it for shifting */
567 movb %ah, %cl
582 movb %al, %cl /* cl = this.op */
[all …]
/external/capstone/suite/MC/X86/
address-size.s.cs
2 0x67,0xc6,0x06,0x00 = movb $0x0, (%esi)
3 0xc6,0x06,0x00 = movb $0x0, (%rsi)
4 0x67,0xc6,0x06,0x00 = movb $0x0, (%si)
5 0xc6,0x06,0x00 = movb $0x0, (%esi)
/external/boringssl/mac-x86/crypto/fipsmodule/
ghash-x86.S
27 movb %dl,%cl
42 movb (%edi,%ebp,1),%cl
139 movb %dl,(%esp)
149 movb %dl,1(%esp)
163 movb %dl,2(%esp)
177 movb %dl,3(%esp)
191 movb %dl,4(%esp)
205 movb %dl,5(%esp)
219 movb %dl,6(%esp)
233 movb %dl,7(%esp)
[all …]
