; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc < %s -mtriple=i386-unknown | FileCheck %s --check-prefix=X32
; RUN: llc < %s -mtriple=x86_64-unknown | FileCheck %s --check-prefix=X64

target datalayout = "e-m:e-i64:64-f80:128-n8:16:32:64-S128"

; Full 256-bit multiply (i256 = i256 * i256), checked on both 32-bit and
; 64-bit x86. The CHECK blocks below are autogenerated — regenerate with
; utils/update_llc_test_checks.py rather than editing them by hand.
define void @test(i256* %a, i256* %b, i256* %out) #0 {
; X32-LABEL: test:
; X32:       # %bb.0: # %entry
; X32-NEXT:    pushl %ebp
; X32-NEXT:    .cfi_def_cfa_offset 8
; X32-NEXT:    pushl %ebx
; X32-NEXT:    .cfi_def_cfa_offset 12
; X32-NEXT:    pushl %edi
; X32-NEXT:    .cfi_def_cfa_offset 16
; X32-NEXT:    pushl %esi
; X32-NEXT:    .cfi_def_cfa_offset 20
; X32-NEXT:    subl $88, %esp
; X32-NEXT:    .cfi_def_cfa_offset 108
; X32-NEXT:    .cfi_offset %esi, -20
; X32-NEXT:    .cfi_offset %edi, -16
; X32-NEXT:    .cfi_offset %ebx, -12
; X32-NEXT:    .cfi_offset %ebp, -8
; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X32-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X32-NEXT:    movl 12(%ecx), %ebp
; X32-NEXT:    movl 8(%ecx), %edi
; X32-NEXT:    movl %edi, {{[0-9]+}}(%esp) # 4-byte Spill
; X32-NEXT:    movl (%eax), %ebx
; X32-NEXT:    movl %ebx, {{[0-9]+}}(%esp) # 4-byte Spill
; X32-NEXT:    movl %edi, %eax
; X32-NEXT:    mull %ebx
; X32-NEXT:    movl %edx, %ecx
; X32-NEXT:    movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill
; X32-NEXT:    movl %ebp, %eax
; X32-NEXT:    movl %ebp, {{[0-9]+}}(%esp) # 4-byte Spill
; X32-NEXT:    mull %ebx
; X32-NEXT:    movl %edx, %esi
; X32-NEXT:    movl %eax, %ebx
; X32-NEXT:    addl %ecx, %ebx
; X32-NEXT:    adcl $0, %esi
; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X32-NEXT:    movl 4(%eax), %ecx
; X32-NEXT:    movl %edi, %eax
; X32-NEXT:    mull %ecx
; X32-NEXT:    movl %ecx, %edi
; X32-NEXT:    movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill
; X32-NEXT:    movl %edx, %ecx
; X32-NEXT:    addl %ebx, %eax
; X32-NEXT:    movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill
; X32-NEXT:    adcl %esi, %ecx
; X32-NEXT:    setb {{[0-9]+}}(%esp) # 1-byte Folded Spill
; X32-NEXT:    movl %ebp, %eax
; X32-NEXT:    mull %edi
; X32-NEXT:    movl %edx, %ebx
; X32-NEXT:    movl %eax, %edi
; X32-NEXT:    addl %ecx, %edi
; X32-NEXT:    movzbl {{[0-9]+}}(%esp), %eax # 1-byte Folded Reload
; X32-NEXT:    adcl %eax, %ebx
; X32-NEXT:    xorl %edx, %edx
; X32-NEXT:    movl {{[0-9]+}}(%esp), %ecx # 4-byte Reload
; X32-NEXT:    movl %ecx, %eax
; X32-NEXT:    mull %edx
; X32-NEXT:    movl %edx, %ebp
; X32-NEXT:    movl %edx, {{[0-9]+}}(%esp) # 4-byte Spill
; X32-NEXT:    movl %eax, %esi
; X32-NEXT:    movl %eax, (%esp) # 4-byte Spill
; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax # 4-byte Reload
; X32-NEXT:    xorl %edx, %edx
; X32-NEXT:    mull %edx
; X32-NEXT:    movl %edx, {{[0-9]+}}(%esp) # 4-byte Spill
; X32-NEXT:    movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill
; X32-NEXT:    addl %esi, %eax
; X32-NEXT:    adcl %ebp, %edx
; X32-NEXT:    addl %edi, %eax
; X32-NEXT:    movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill
; X32-NEXT:    adcl %ebx, %edx
; X32-NEXT:    movl %edx, {{[0-9]+}}(%esp) # 4-byte Spill
; X32-NEXT:    movl {{[0-9]+}}(%esp), %esi
; X32-NEXT:    movl (%esi), %ebp
; X32-NEXT:    movl %ebp, %eax
; X32-NEXT:    movl %ebp, {{[0-9]+}}(%esp) # 4-byte Spill
; X32-NEXT:    mull %ecx
; X32-NEXT:    movl %ecx, %edi
; X32-NEXT:    movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill
; X32-NEXT:    movl %edx, %ecx
; X32-NEXT:    movl 4(%esi), %esi
; X32-NEXT:    movl %esi, %eax
; X32-NEXT:    movl %esi, {{[0-9]+}}(%esp) # 4-byte Spill
; X32-NEXT:    mull %edi
; X32-NEXT:    movl %edx, %edi
; X32-NEXT:    movl %eax, %ebx
; X32-NEXT:    addl %ecx, %ebx
; X32-NEXT:    adcl $0, %edi
; X32-NEXT:    movl %ebp, %eax
; X32-NEXT:    movl {{[0-9]+}}(%esp), %ebp # 4-byte Reload
; X32-NEXT:    mull %ebp
; X32-NEXT:    movl %edx, %ecx
; X32-NEXT:    addl %ebx, %eax
; X32-NEXT:    movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill
; X32-NEXT:    adcl %edi, %ecx
; X32-NEXT:    setb %bl
; X32-NEXT:    movl %esi, %eax
; X32-NEXT:    mull %ebp
; X32-NEXT:    movl %edx, %edi
; X32-NEXT:    movl %eax, %ebp
; X32-NEXT:    addl %ecx, %ebp
; X32-NEXT:    movzbl %bl, %eax
; X32-NEXT:    adcl %eax, %edi
; X32-NEXT:    movl {{[0-9]+}}(%esp), %esi # 4-byte Reload
; X32-NEXT:    movl %esi, %eax
; X32-NEXT:    xorl %edx, %edx
; X32-NEXT:    mull %edx
; X32-NEXT:    movl %edx, {{[0-9]+}}(%esp) # 4-byte Spill
; X32-NEXT:    movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill
; X32-NEXT:    movl (%esp), %ecx # 4-byte Reload
; X32-NEXT:    addl %eax, %ecx
; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax # 4-byte Reload
; X32-NEXT:    adcl %edx, %eax
; X32-NEXT:    addl %ebp, %ecx
; X32-NEXT:    adcl %edi, %eax
; X32-NEXT:    addl {{[0-9]+}}(%esp), %ecx # 4-byte Folded Reload
; X32-NEXT:    movl %ecx, (%esp) # 4-byte Spill
; X32-NEXT:    adcl {{[0-9]+}}(%esp), %eax # 4-byte Folded Reload
; X32-NEXT:    movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill
; X32-NEXT:    adcl $0, {{[0-9]+}}(%esp) # 4-byte Folded Spill
; X32-NEXT:    adcl $0, {{[0-9]+}}(%esp) # 4-byte Folded Spill
; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X32-NEXT:    movl %eax, %ecx
; X32-NEXT:    movl 8(%eax), %ebx
; X32-NEXT:    movl %esi, %eax
; X32-NEXT:    movl %esi, %edi
; X32-NEXT:    mull %ebx
; X32-NEXT:    movl %edx, {{[0-9]+}}(%esp) # 4-byte Spill
; X32-NEXT:    movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill
; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax # 4-byte Reload
; X32-NEXT:    mull %ebx
; X32-NEXT:    movl %edx, %ebp
; X32-NEXT:    movl %eax, %esi
; X32-NEXT:    addl {{[0-9]+}}(%esp), %esi # 4-byte Folded Reload
; X32-NEXT:    adcl $0, %ebp
; X32-NEXT:    movl 12(%ecx), %ecx
; X32-NEXT:    movl %edi, %eax
; X32-NEXT:    mull %ecx
; X32-NEXT:    movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill
; X32-NEXT:    movl %edx, %edi
; X32-NEXT:    addl %esi, %eax
; X32-NEXT:    movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill
; X32-NEXT:    adcl %ebp, %edi
; X32-NEXT:    setb {{[0-9]+}}(%esp) # 1-byte Folded Spill
; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax # 4-byte Reload
; X32-NEXT:    mull %ecx
; X32-NEXT:    movl %edx, %esi
; X32-NEXT:    movl %eax, %ebp
; X32-NEXT:    addl %edi, %ebp
; X32-NEXT:    movzbl {{[0-9]+}}(%esp), %eax # 1-byte Folded Reload
; X32-NEXT:    adcl %eax, %esi
; X32-NEXT:    movl %ebx, %edi
; X32-NEXT:    movl %ebx, %eax
; X32-NEXT:    xorl %ecx, %ecx
; X32-NEXT:    mull %ecx
; X32-NEXT:    movl %edx, {{[0-9]+}}(%esp) # 4-byte Spill
; X32-NEXT:    movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill
; X32-NEXT:    movl {{[0-9]+}}(%esp), %ebx # 4-byte Reload
; X32-NEXT:    addl %eax, %ebx
; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax # 4-byte Reload
; X32-NEXT:    adcl %edx, %eax
; X32-NEXT:    addl %ebp, %ebx
; X32-NEXT:    adcl %esi, %eax
; X32-NEXT:    movl (%esp), %ecx # 4-byte Reload
; X32-NEXT:    addl %ecx, {{[0-9]+}}(%esp) # 4-byte Folded Spill
; X32-NEXT:    movl {{[0-9]+}}(%esp), %ecx # 4-byte Reload
; X32-NEXT:    adcl %ecx, {{[0-9]+}}(%esp) # 4-byte Folded Spill
; X32-NEXT:    adcl $0, %ebx
; X32-NEXT:    adcl $0, %eax
; X32-NEXT:    addl {{[0-9]+}}(%esp), %ebx # 4-byte Folded Reload
; X32-NEXT:    movl %ebx, {{[0-9]+}}(%esp) # 4-byte Spill
; X32-NEXT:    adcl {{[0-9]+}}(%esp), %eax # 4-byte Folded Reload
; X32-NEXT:    movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill
; X32-NEXT:    setb (%esp) # 1-byte Folded Spill
; X32-NEXT:    movl {{[0-9]+}}(%esp), %esi # 4-byte Reload
; X32-NEXT:    movl %esi, %eax
; X32-NEXT:    movl %edi, {{[0-9]+}}(%esp) # 4-byte Spill
; X32-NEXT:    mull %edi
; X32-NEXT:    movl %edx, %ebx
; X32-NEXT:    movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill
; X32-NEXT:    movl {{[0-9]+}}(%esp), %ecx # 4-byte Reload
; X32-NEXT:    movl %ecx, %eax
; X32-NEXT:    mull %edi
; X32-NEXT:    movl %edx, %edi
; X32-NEXT:    movl %eax, %ebp
; X32-NEXT:    addl %ebx, %ebp
; X32-NEXT:    adcl $0, %edi
; X32-NEXT:    movl %esi, %eax
; X32-NEXT:    movl {{[0-9]+}}(%esp), %ebx # 4-byte Reload
; X32-NEXT:    mull %ebx
; X32-NEXT:    movl %edx, %esi
; X32-NEXT:    addl %ebp, %eax
; X32-NEXT:    movl %eax, %ebp
; X32-NEXT:    adcl %edi, %esi
; X32-NEXT:    setb {{[0-9]+}}(%esp) # 1-byte Folded Spill
; X32-NEXT:    movl %ecx, %eax
; X32-NEXT:    mull %ebx
; X32-NEXT:    addl %esi, %eax
; X32-NEXT:    movzbl {{[0-9]+}}(%esp), %esi # 1-byte Folded Reload
; X32-NEXT:    adcl %esi, %edx
; X32-NEXT:    movl {{[0-9]+}}(%esp), %esi # 4-byte Reload
; X32-NEXT:    addl {{[0-9]+}}(%esp), %esi # 4-byte Folded Reload
; X32-NEXT:    movl {{[0-9]+}}(%esp), %ecx # 4-byte Reload
; X32-NEXT:    adcl {{[0-9]+}}(%esp), %ecx # 4-byte Folded Reload
; X32-NEXT:    addl %eax, %esi
; X32-NEXT:    adcl %edx, %ecx
; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax # 4-byte Reload
; X32-NEXT:    addl %eax, {{[0-9]+}}(%esp) # 4-byte Folded Spill
; X32-NEXT:    adcl {{[0-9]+}}(%esp), %ebp # 4-byte Folded Reload
; X32-NEXT:    movl %ebp, {{[0-9]+}}(%esp) # 4-byte Spill
; X32-NEXT:    movzbl (%esp), %eax # 1-byte Folded Reload
; X32-NEXT:    adcl %eax, %esi
; X32-NEXT:    movl %esi, {{[0-9]+}}(%esp) # 4-byte Spill
; X32-NEXT:    adcl $0, %ecx
; X32-NEXT:    movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill
; X32-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X32-NEXT:    movl 16(%ecx), %esi
; X32-NEXT:    imull %esi, %ebx
; X32-NEXT:    movl %esi, %eax
; X32-NEXT:    movl {{[0-9]+}}(%esp), %edi # 4-byte Reload
; X32-NEXT:    mull %edi
; X32-NEXT:    movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill
; X32-NEXT:    addl %ebx, %edx
; X32-NEXT:    movl 20(%ecx), %eax
; X32-NEXT:    movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill
; X32-NEXT:    imull %eax, %edi
; X32-NEXT:    addl %edx, %edi
; X32-NEXT:    movl %edi, {{[0-9]+}}(%esp) # 4-byte Spill
; X32-NEXT:    movl 24(%ecx), %eax
; X32-NEXT:    movl %ecx, %ebp
; X32-NEXT:    movl %eax, %edi
; X32-NEXT:    movl {{[0-9]+}}(%esp), %ecx # 4-byte Reload
; X32-NEXT:    imull %ecx, %edi
; X32-NEXT:    movl {{[0-9]+}}(%esp), %ebx # 4-byte Reload
; X32-NEXT:    mull %ebx
; X32-NEXT:    movl %eax, (%esp) # 4-byte Spill
; X32-NEXT:    addl %edi, %edx
; X32-NEXT:    movl 28(%ebp), %ebp
; X32-NEXT:    imull %ebx, %ebp
; X32-NEXT:    addl %edx, %ebp
; X32-NEXT:    movl {{[0-9]+}}(%esp), %edx # 4-byte Reload
; X32-NEXT:    addl %edx, (%esp) # 4-byte Folded Spill
; X32-NEXT:    adcl {{[0-9]+}}(%esp), %ebp # 4-byte Folded Reload
; X32-NEXT:    movl %ebx, %eax
; X32-NEXT:    mull %esi
; X32-NEXT:    movl %edx, %ebx
; X32-NEXT:    movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill
; X32-NEXT:    movl %ecx, %eax
; X32-NEXT:    mull %esi
; X32-NEXT:    movl %edx, %ecx
; X32-NEXT:    movl %eax, %edi
; X32-NEXT:    addl %ebx, %edi
; X32-NEXT:    adcl $0, %ecx
; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax # 4-byte Reload
; X32-NEXT:    movl {{[0-9]+}}(%esp), %ebx # 4-byte Reload
; X32-NEXT:    mull %ebx
; X32-NEXT:    movl %edx, %esi
; X32-NEXT:    addl %edi, %eax
; X32-NEXT:    movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill
; X32-NEXT:    adcl %ecx, %esi
; X32-NEXT:    setb %cl
; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax # 4-byte Reload
; X32-NEXT:    mull %ebx
; X32-NEXT:    addl %esi, %eax
; X32-NEXT:    movzbl %cl, %ecx
; X32-NEXT:    adcl %ecx, %edx
; X32-NEXT:    addl (%esp), %eax # 4-byte Folded Reload
; X32-NEXT:    movl %eax, (%esp) # 4-byte Spill
; X32-NEXT:    adcl %ebp, %edx
; X32-NEXT:    movl %edx, {{[0-9]+}}(%esp) # 4-byte Spill
; X32-NEXT:    movl {{[0-9]+}}(%esp), %ebx
; X32-NEXT:    movl 28(%ebx), %ecx
; X32-NEXT:    movl {{[0-9]+}}(%esp), %esi # 4-byte Reload
; X32-NEXT:    imull %esi, %ecx
; X32-NEXT:    movl 24(%ebx), %edi
; X32-NEXT:    movl %esi, %eax
; X32-NEXT:    mull %edi
; X32-NEXT:    movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill
; X32-NEXT:    addl %ecx, %edx
; X32-NEXT:    imull {{[0-9]+}}(%esp), %edi # 4-byte Folded Reload
; X32-NEXT:    addl %edx, %edi
; X32-NEXT:    movl 16(%ebx), %ebp
; X32-NEXT:    movl 20(%ebx), %ebx
; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax # 4-byte Reload
; X32-NEXT:    movl %eax, %ecx
; X32-NEXT:    imull %ebx, %ecx
; X32-NEXT:    movl %ebx, {{[0-9]+}}(%esp) # 4-byte Spill
; X32-NEXT:    mull %ebp
; X32-NEXT:    addl %ecx, %edx
; X32-NEXT:    movl {{[0-9]+}}(%esp), %ecx # 4-byte Reload
; X32-NEXT:    imull %ebp, %ecx
; X32-NEXT:    addl %edx, %ecx
; X32-NEXT:    addl {{[0-9]+}}(%esp), %eax # 4-byte Folded Reload
; X32-NEXT:    movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill
; X32-NEXT:    adcl %edi, %ecx
; X32-NEXT:    movl %ecx, {{[0-9]+}}(%esp) # 4-byte Spill
; X32-NEXT:    movl %ebp, %eax
; X32-NEXT:    mull %esi
; X32-NEXT:    movl %edx, %edi
; X32-NEXT:    movl %eax, {{[0-9]+}}(%esp) # 4-byte Spill
; X32-NEXT:    movl %ebx, %eax
; X32-NEXT:    mull %esi
; X32-NEXT:    movl %edx, %ecx
; X32-NEXT:    movl %eax, %ebx
; X32-NEXT:    addl %edi, %ebx
; X32-NEXT:    adcl $0, %ecx
; X32-NEXT:    movl %ebp, %eax
; X32-NEXT:    movl {{[0-9]+}}(%esp), %ebp # 4-byte Reload
; X32-NEXT:    mull %ebp
; X32-NEXT:    movl %edx, %edi
; X32-NEXT:    movl %eax, %esi
; X32-NEXT:    addl %ebx, %esi
; X32-NEXT:    adcl %ecx, %edi
; X32-NEXT:    setb %cl
; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax # 4-byte Reload
; X32-NEXT:    mull %ebp
; X32-NEXT:    addl %edi, %eax
; X32-NEXT:    movzbl %cl, %ecx
; X32-NEXT:    adcl %ecx, %edx
; X32-NEXT:    addl {{[0-9]+}}(%esp), %eax # 4-byte Folded Reload
; X32-NEXT:    adcl {{[0-9]+}}(%esp), %edx # 4-byte Folded Reload
; X32-NEXT:    movl {{[0-9]+}}(%esp), %ecx # 4-byte Reload
; X32-NEXT:    addl {{[0-9]+}}(%esp), %ecx # 4-byte Folded Reload
; X32-NEXT:    adcl {{[0-9]+}}(%esp), %esi # 4-byte Folded Reload
; X32-NEXT:    adcl (%esp), %eax # 4-byte Folded Reload
; X32-NEXT:    adcl {{[0-9]+}}(%esp), %edx # 4-byte Folded Reload
; X32-NEXT:    addl {{[0-9]+}}(%esp), %ecx # 4-byte Folded Reload
; X32-NEXT:    movl %ecx, %ebx
; X32-NEXT:    adcl {{[0-9]+}}(%esp), %esi # 4-byte Folded Reload
; X32-NEXT:    adcl {{[0-9]+}}(%esp), %eax # 4-byte Folded Reload
; X32-NEXT:    adcl {{[0-9]+}}(%esp), %edx # 4-byte Folded Reload
; X32-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X32-NEXT:    movl {{[0-9]+}}(%esp), %edi # 4-byte Reload
; X32-NEXT:    movl %edi, (%ecx)
; X32-NEXT:    movl {{[0-9]+}}(%esp), %edi # 4-byte Reload
; X32-NEXT:    movl %edi, 4(%ecx)
; X32-NEXT:    movl {{[0-9]+}}(%esp), %edi # 4-byte Reload
; X32-NEXT:    movl %edi, 8(%ecx)
; X32-NEXT:    movl {{[0-9]+}}(%esp), %edi # 4-byte Reload
; X32-NEXT:    movl %edi, 12(%ecx)
; X32-NEXT:    movl %ebx, 16(%ecx)
; X32-NEXT:    movl %esi, 20(%ecx)
; X32-NEXT:    movl %eax, 24(%ecx)
; X32-NEXT:    movl %edx, 28(%ecx)
; X32-NEXT:    addl $88, %esp
; X32-NEXT:    .cfi_def_cfa_offset 20
; X32-NEXT:    popl %esi
; X32-NEXT:    .cfi_def_cfa_offset 16
; X32-NEXT:    popl %edi
; X32-NEXT:    .cfi_def_cfa_offset 12
; X32-NEXT:    popl %ebx
; X32-NEXT:    .cfi_def_cfa_offset 8
; X32-NEXT:    popl %ebp
; X32-NEXT:    .cfi_def_cfa_offset 4
; X32-NEXT:    retl
;
; X64-LABEL: test:
; X64:       # %bb.0: # %entry
; X64-NEXT:    pushq %r15
; X64-NEXT:    .cfi_def_cfa_offset 16
; X64-NEXT:    pushq %r14
; X64-NEXT:    .cfi_def_cfa_offset 24
; X64-NEXT:    pushq %rbx
; X64-NEXT:    .cfi_def_cfa_offset 32
; X64-NEXT:    .cfi_offset %rbx, -32
; X64-NEXT:    .cfi_offset %r14, -24
; X64-NEXT:    .cfi_offset %r15, -16
; X64-NEXT:    movq %rdx, %r9
; X64-NEXT:    movq (%rdi), %r11
; X64-NEXT:    movq 8(%rdi), %r8
; X64-NEXT:    movq 16(%rdi), %rbx
; X64-NEXT:    movq 16(%rsi), %r10
; X64-NEXT:    movq (%rsi), %rcx
; X64-NEXT:    movq 8(%rsi), %r15
; X64-NEXT:    movq 24(%rdi), %rdi
; X64-NEXT:    imulq %rcx, %rdi
; X64-NEXT:    movq %rcx, %rax
; X64-NEXT:    mulq %rbx
; X64-NEXT:    movq %rax, %r14
; X64-NEXT:    addq %rdi, %rdx
; X64-NEXT:    imulq %r15, %rbx
; X64-NEXT:    addq %rdx, %rbx
; X64-NEXT:    movq %r10, %rdi
; X64-NEXT:    imulq %r8, %rdi
; X64-NEXT:    movq %r10, %rax
; X64-NEXT:    mulq %r11
; X64-NEXT:    movq %rax, %r10
; X64-NEXT:    addq %rdi, %rdx
; X64-NEXT:    movq 24(%rsi), %rdi
; X64-NEXT:    imulq %r11, %rdi
; X64-NEXT:    addq %rdx, %rdi
; X64-NEXT:    addq %r14, %r10
; X64-NEXT:    adcq %rbx, %rdi
; X64-NEXT:    movq %r11, %rax
; X64-NEXT:    mulq %rcx
; X64-NEXT:    movq %rdx, %rsi
; X64-NEXT:    movq %rax, %r14
; X64-NEXT:    movq %r8, %rax
; X64-NEXT:    mulq %rcx
; X64-NEXT:    movq %rdx, %rcx
; X64-NEXT:    movq %rax, %rbx
; X64-NEXT:    addq %rsi, %rbx
; X64-NEXT:    adcq $0, %rcx
; X64-NEXT:    movq %r11, %rax
; X64-NEXT:    mulq %r15
; X64-NEXT:    movq %rdx, %rsi
; X64-NEXT:    movq %rax, %r11
; X64-NEXT:    addq %rbx, %r11
; X64-NEXT:    adcq %rcx, %rsi
; X64-NEXT:    setb %al
; X64-NEXT:    movzbl %al, %ecx
; X64-NEXT:    movq %r8, %rax
; X64-NEXT:    mulq %r15
; X64-NEXT:    addq %rsi, %rax
; X64-NEXT:    adcq %rcx, %rdx
; X64-NEXT:    addq %r10, %rax
; X64-NEXT:    adcq %rdi, %rdx
; X64-NEXT:    movq %r14, (%r9)
; X64-NEXT:    movq %r11, 8(%r9)
; X64-NEXT:    movq %rax, 16(%r9)
; X64-NEXT:    movq %rdx, 24(%r9)
; X64-NEXT:    popq %rbx
; X64-NEXT:    .cfi_def_cfa_offset 24
; X64-NEXT:    popq %r14
; X64-NEXT:    .cfi_def_cfa_offset 16
; X64-NEXT:    popq %r15
; X64-NEXT:    .cfi_def_cfa_offset 8
; X64-NEXT:    retq
entry:
  ; Load both 256-bit operands, form the full (truncated) 256-bit product,
  ; and store it through %out.
  %av = load i256, i256* %a
  %bv = load i256, i256* %b
  %r = mul i256 %av, %bv
  store i256 %r, i256* %out
  ret void
}

attributes #0 = { norecurse nounwind uwtable }