; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc -mtriple=i386-linux-gnu -mattr=+sse2  -global-isel -verify-machineinstrs < %s -o - | FileCheck %s --check-prefix=ALL --check-prefix=X32
; RUN: llc -mtriple=x86_64-linux-gnu             -global-isel -verify-machineinstrs < %s -o - | FileCheck %s --check-prefix=ALL --check-prefix=X64

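; The tests below cover GlobalISel call lowering on x86: return values, argument
; passing in registers and on the stack, split vector types, direct and indirect
; calls, ABI sign/zero extensions, and variadic calls.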
define i32 @test_ret_i32() {
; X32-LABEL: test_ret_i32:
; X32:       # %bb.0:
; X32-NEXT:    movl $20, %eax
; X32-NEXT:    retl
;
; X64-LABEL: test_ret_i32:
; X64:       # %bb.0:
; X64-NEXT:    movl $20, %eax
; X64-NEXT:    retq
  ret i32 20
}

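; An i64 return is split across EAX (low half) and EDX (high half) on i386;
; x86-64 returns it in RAX.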
define i64 @test_ret_i64() {
; X32-LABEL: test_ret_i64:
; X32:       # %bb.0:
; X32-NEXT:    movl $4294967295, %eax # imm = 0xFFFFFFFF
; X32-NEXT:    movl $15, %edx
; X32-NEXT:    retl
;
; X64-LABEL: test_ret_i64:
; X64:       # %bb.0:
; X64-NEXT:    movabsq $68719476735, %rax # imm = 0xFFFFFFFFF
; X64-NEXT:    retq
  ret i64 68719476735
}

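; Scalar arguments: i386 reloads them from the caller's stack, while the
; x86-64 SysV ABI passes the first integer argument in EDI.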
define i8 @test_arg_i8(i8 %a) {
; X32-LABEL: test_arg_i8:
; X32:       # %bb.0:
; X32-NEXT:    movb {{[0-9]+}}(%esp), %al
; X32-NEXT:    retl
;
; X64-LABEL: test_arg_i8:
; X64:       # %bb.0:
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    retq
  ret i8 %a
}

define i16 @test_arg_i16(i16 %a) {
; X32-LABEL: test_arg_i16:
; X32:       # %bb.0:
; X32-NEXT:    movzwl {{[0-9]+}}(%esp), %eax
; X32-NEXT:    retl
;
; X64-LABEL: test_arg_i16:
; X64:       # %bb.0:
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    retq
  ret i16 %a
}

define i32 @test_arg_i32(i32 %a) {
; X32-LABEL: test_arg_i32:
; X32:       # %bb.0:
; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X32-NEXT:    retl
;
; X64-LABEL: test_arg_i32:
; X64:       # %bb.0:
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    retq
  ret i32 %a
}

define i64 @test_arg_i64(i64 %a) {
; X32-LABEL: test_arg_i64:
; X32:       # %bb.0:
; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X32-NEXT:    movl {{[0-9]+}}(%esp), %edx
; X32-NEXT:    retl
;
; X64-LABEL: test_arg_i64:
; X64:       # %bb.0:
; X64-NEXT:    movq %rdi, %rax
; X64-NEXT:    retq
  ret i64 %a
}

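; With eight i64 arguments, x86-64 runs out of integer argument registers and
; the last one is loaded from the caller's stack; on i386 every argument is
; already on the stack.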
define i64 @test_i64_args_8(i64 %arg1, i64 %arg2, i64 %arg3, i64 %arg4, i64 %arg5, i64 %arg6, i64 %arg7, i64 %arg8) {
; X32-LABEL: test_i64_args_8:
; X32:       # %bb.0:
; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X32-NEXT:    movl {{[0-9]+}}(%esp), %edx
; X32-NEXT:    retl
;
; X64-LABEL: test_i64_args_8:
; X64:       # %bb.0:
; X64-NEXT:    movq {{[0-9]+}}(%rsp), %rax
; X64-NEXT:    retq
  ret i64 %arg8
}

define <4 x i32> @test_v4i32_args(<4 x i32> %arg1, <4 x i32> %arg2) {
; X32-LABEL: test_v4i32_args:
; X32:       # %bb.0:
; X32-NEXT:    movaps %xmm1, %xmm0
; X32-NEXT:    retl
;
; X64-LABEL: test_v4i32_args:
; X64:       # %bb.0:
; X64-NEXT:    movaps %xmm1, %xmm0
; X64-NEXT:    retq
  ret <4 x i32> %arg2
}

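; A <8 x i32> does not fit in one XMM register: the argument is split across two
; registers (with part of the i386 argument coming from the stack) and the
; return value is split across XMM0 and XMM1.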
define <8 x i32> @test_v8i32_args(<8 x i32> %arg1, <8 x i32> %arg2) {
; X32-LABEL: test_v8i32_args:
; X32:       # %bb.0:
; X32-NEXT:    subl $12, %esp
; X32-NEXT:    .cfi_def_cfa_offset 16
; X32-NEXT:    movups {{[0-9]+}}(%esp), %xmm1
; X32-NEXT:    movaps %xmm2, %xmm0
; X32-NEXT:    addl $12, %esp
; X32-NEXT:    .cfi_def_cfa_offset 4
; X32-NEXT:    retl
;
; X64-LABEL: test_v8i32_args:
; X64:       # %bb.0:
; X64-NEXT:    movaps %xmm2, %xmm0
; X64-NEXT:    movaps %xmm3, %xmm1
; X64-NEXT:    retq
  ret <8 x i32> %arg2
}

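; Calls must keep the stack 16-byte aligned at the call site: i386 reserves
; 12 bytes, x86-64 pushes a scratch register.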
declare void @trivial_callee()
define void @test_trivial_call() {
; X32-LABEL: test_trivial_call:
; X32:       # %bb.0:
; X32-NEXT:    subl $12, %esp
; X32-NEXT:    .cfi_def_cfa_offset 16
; X32-NEXT:    calll trivial_callee
; X32-NEXT:    addl $12, %esp
; X32-NEXT:    .cfi_def_cfa_offset 4
; X32-NEXT:    retl
;
; X64-LABEL: test_trivial_call:
; X64:       # %bb.0:
; X64-NEXT:    pushq %rax
; X64-NEXT:    .cfi_def_cfa_offset 16
; X64-NEXT:    callq trivial_callee
; X64-NEXT:    popq %rax
; X64-NEXT:    .cfi_def_cfa_offset 8
; X64-NEXT:    retq
  call void @trivial_callee()
  ret void
}

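; The caller forwards its two arguments to the callee in swapped order, so
; x86-64 exchanges EDI and ESI through EAX and i386 stores the values into the
; outgoing stack slots swapped.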
declare void @simple_arg_callee(i32 %in0, i32 %in1)
define void @test_simple_arg_call(i32 %in0, i32 %in1) {
; X32-LABEL: test_simple_arg_call:
; X32:       # %bb.0:
; X32-NEXT:    subl $12, %esp
; X32-NEXT:    .cfi_def_cfa_offset 16
; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X32-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X32-NEXT:    movl %ecx, (%esp)
; X32-NEXT:    movl %eax, {{[0-9]+}}(%esp)
; X32-NEXT:    calll simple_arg_callee
; X32-NEXT:    addl $12, %esp
; X32-NEXT:    .cfi_def_cfa_offset 4
; X32-NEXT:    retl
;
; X64-LABEL: test_simple_arg_call:
; X64:       # %bb.0:
; X64-NEXT:    pushq %rax
; X64-NEXT:    .cfi_def_cfa_offset 16
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    movl %esi, %edi
; X64-NEXT:    movl %eax, %esi
; X64-NEXT:    callq simple_arg_callee
; X64-NEXT:    popq %rax
; X64-NEXT:    .cfi_def_cfa_offset 8
; X64-NEXT:    retq
  call void @simple_arg_callee(i32 %in1, i32 %in0)
  ret void
}

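; With eight i32 arguments, x86-64 uses EDI, ESI, EDX, ECX, R8D and R9D and
; writes the remaining two to the outgoing stack area; i386 writes all eight
; to the stack.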
declare void @simple_arg8_callee(i32 %arg1, i32 %arg2, i32 %arg3, i32 %arg4, i32 %arg5, i32 %arg6, i32 %arg7, i32 %arg8)
define void @test_simple_arg8_call(i32 %in0) {
; X32-LABEL: test_simple_arg8_call:
; X32:       # %bb.0:
; X32-NEXT:    subl $44, %esp
; X32-NEXT:    .cfi_def_cfa_offset 48
; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X32-NEXT:    movl %eax, (%esp)
; X32-NEXT:    movl %eax, {{[0-9]+}}(%esp)
; X32-NEXT:    movl %eax, {{[0-9]+}}(%esp)
; X32-NEXT:    movl %eax, {{[0-9]+}}(%esp)
; X32-NEXT:    movl %eax, {{[0-9]+}}(%esp)
; X32-NEXT:    movl %eax, {{[0-9]+}}(%esp)
; X32-NEXT:    movl %eax, {{[0-9]+}}(%esp)
; X32-NEXT:    movl %eax, {{[0-9]+}}(%esp)
; X32-NEXT:    calll simple_arg8_callee
; X32-NEXT:    addl $44, %esp
; X32-NEXT:    .cfi_def_cfa_offset 4
; X32-NEXT:    retl
;
; X64-LABEL: test_simple_arg8_call:
; X64:       # %bb.0:
; X64-NEXT:    subq $24, %rsp
; X64-NEXT:    .cfi_def_cfa_offset 32
; X64-NEXT:    movl %edi, (%rsp)
; X64-NEXT:    movl %edi, {{[0-9]+}}(%rsp)
; X64-NEXT:    movl %edi, %esi
; X64-NEXT:    movl %edi, %edx
; X64-NEXT:    movl %edi, %ecx
; X64-NEXT:    movl %edi, %r8d
; X64-NEXT:    movl %edi, %r9d
; X64-NEXT:    callq simple_arg8_callee
; X64-NEXT:    addq $24, %rsp
; X64-NEXT:    .cfi_def_cfa_offset 8
; X64-NEXT:    retq
  call void @simple_arg8_callee(i32 %in0, i32 %in0, i32 %in0, i32 %in0, i32 %in0, i32 %in0, i32 %in0, i32 %in0)
  ret void
}

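; The i32 returned by the callee comes back in EAX and is doubled with an addl
; before being returned.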
declare i32 @simple_return_callee(i32 %in0)
define i32 @test_simple_return_callee() {
; X32-LABEL: test_simple_return_callee:
; X32:       # %bb.0:
; X32-NEXT:    subl $12, %esp
; X32-NEXT:    .cfi_def_cfa_offset 16
; X32-NEXT:    movl $5, %eax
; X32-NEXT:    movl %eax, (%esp)
; X32-NEXT:    calll simple_return_callee
; X32-NEXT:    addl %eax, %eax
; X32-NEXT:    addl $12, %esp
; X32-NEXT:    .cfi_def_cfa_offset 4
; X32-NEXT:    retl
;
; X64-LABEL: test_simple_return_callee:
; X64:       # %bb.0:
; X64-NEXT:    pushq %rax
; X64-NEXT:    .cfi_def_cfa_offset 16
; X64-NEXT:    movl $5, %edi
; X64-NEXT:    callq simple_return_callee
; X64-NEXT:    addl %eax, %eax
; X64-NEXT:    popq %rcx
; X64-NEXT:    .cfi_def_cfa_offset 8
; X64-NEXT:    retq
  %call = call i32 @simple_return_callee(i32 5)
  %r = add i32 %call, %call
  ret i32 %r
}

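; The <8 x i32> return comes back split across XMM0/XMM1; %arg1 is spilled
; around the call and the folded reloads feed the paddd instructions.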
declare <8 x i32> @split_return_callee(<8 x i32> %in0)
define <8 x i32> @test_split_return_callee(<8 x i32> %arg1, <8 x i32> %arg2) {
; X32-LABEL: test_split_return_callee:
; X32:       # %bb.0:
; X32-NEXT:    subl $44, %esp
; X32-NEXT:    .cfi_def_cfa_offset 48
; X32-NEXT:    movaps %xmm0, (%esp) # 16-byte Spill
; X32-NEXT:    movaps %xmm1, {{[0-9]+}}(%esp) # 16-byte Spill
; X32-NEXT:    movdqu {{[0-9]+}}(%esp), %xmm1
; X32-NEXT:    movdqa %xmm2, %xmm0
; X32-NEXT:    calll split_return_callee
; X32-NEXT:    paddd (%esp), %xmm0 # 16-byte Folded Reload
; X32-NEXT:    paddd {{[0-9]+}}(%esp), %xmm1 # 16-byte Folded Reload
; X32-NEXT:    addl $44, %esp
; X32-NEXT:    .cfi_def_cfa_offset 4
; X32-NEXT:    retl
;
; X64-LABEL: test_split_return_callee:
; X64:       # %bb.0:
; X64-NEXT:    subq $40, %rsp
; X64-NEXT:    .cfi_def_cfa_offset 48
; X64-NEXT:    movaps %xmm0, (%rsp) # 16-byte Spill
; X64-NEXT:    movaps %xmm1, {{[0-9]+}}(%rsp) # 16-byte Spill
; X64-NEXT:    movdqa %xmm2, %xmm0
; X64-NEXT:    movdqa %xmm3, %xmm1
; X64-NEXT:    callq split_return_callee
; X64-NEXT:    paddd (%rsp), %xmm0 # 16-byte Folded Reload
; X64-NEXT:    paddd {{[0-9]+}}(%rsp), %xmm1 # 16-byte Folded Reload
; X64-NEXT:    addq $40, %rsp
; X64-NEXT:    .cfi_def_cfa_offset 8
; X64-NEXT:    retq
  %call = call <8 x i32> @split_return_callee(<8 x i32> %arg2)
  %r = add <8 x i32> %arg1, %call
  ret <8 x i32> %r
}

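; Indirect calls go through the function pointer operand: loaded from the stack
; on i386, passed in RDI on x86-64.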
define void @test_indirect_call(void()* %func) {
; X32-LABEL: test_indirect_call:
; X32:       # %bb.0:
; X32-NEXT:    subl $12, %esp
; X32-NEXT:    .cfi_def_cfa_offset 16
; X32-NEXT:    calll *{{[0-9]+}}(%esp)
; X32-NEXT:    addl $12, %esp
; X32-NEXT:    .cfi_def_cfa_offset 4
; X32-NEXT:    retl
;
; X64-LABEL: test_indirect_call:
; X64:       # %bb.0:
; X64-NEXT:    pushq %rax
; X64-NEXT:    .cfi_def_cfa_offset 16
; X64-NEXT:    callq *%rdi
; X64-NEXT:    popq %rax
; X64-NEXT:    .cfi_def_cfa_offset 8
; X64-NEXT:    retq
  call void %func()
  ret void
}

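; The i8 argument is widened before each call: the signext call uses movsbl and
; the zeroext call uses movzbl; the unannotated call reuses the widened value.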
declare void @take_char(i8)
define void @test_abi_exts_call(i8* %addr) {
; X32-LABEL: test_abi_exts_call:
; X32:       # %bb.0:
; X32-NEXT:    pushl %ebx
; X32-NEXT:    .cfi_def_cfa_offset 8
; X32-NEXT:    pushl %esi
; X32-NEXT:    .cfi_def_cfa_offset 12
; X32-NEXT:    pushl %eax
; X32-NEXT:    .cfi_def_cfa_offset 16
; X32-NEXT:    .cfi_offset %esi, -12
; X32-NEXT:    .cfi_offset %ebx, -8
; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X32-NEXT:    movb (%eax), %bl
; X32-NEXT:    movzbl %bl, %esi
; X32-NEXT:    movl %esi, (%esp)
; X32-NEXT:    calll take_char
; X32-NEXT:    movsbl %bl, %eax
; X32-NEXT:    movl %eax, (%esp)
; X32-NEXT:    calll take_char
; X32-NEXT:    movl %esi, (%esp)
; X32-NEXT:    calll take_char
; X32-NEXT:    addl $4, %esp
; X32-NEXT:    .cfi_def_cfa_offset 12
; X32-NEXT:    popl %esi
; X32-NEXT:    .cfi_def_cfa_offset 8
; X32-NEXT:    popl %ebx
; X32-NEXT:    .cfi_def_cfa_offset 4
; X32-NEXT:    retl
;
; X64-LABEL: test_abi_exts_call:
; X64:       # %bb.0:
; X64-NEXT:    pushq %rbx
; X64-NEXT:    .cfi_def_cfa_offset 16
; X64-NEXT:    .cfi_offset %rbx, -16
; X64-NEXT:    movb (%rdi), %al
; X64-NEXT:    movzbl %al, %ebx
; X64-NEXT:    movl %ebx, %edi
; X64-NEXT:    callq take_char
; X64-NEXT:    movsbl %bl, %edi
; X64-NEXT:    callq take_char
; X64-NEXT:    movl %ebx, %edi
; X64-NEXT:    callq take_char
; X64-NEXT:    popq %rbx
; X64-NEXT:    .cfi_def_cfa_offset 8
; X64-NEXT:    retq
  %val = load i8, i8* %addr
  call void @take_char(i8 %val)
  call void @take_char(i8 signext %val)
  call void @take_char(i8 zeroext %val)
  ret void
}

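; Variadic calls on x86-64 set AL to the number of vector registers used:
; 0 for the integer-only call below and 1 when a double is passed in XMM0 in
; the next test.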
declare void @variadic_callee(i8*, ...)
define void @test_variadic_call_1(i8** %addr_ptr, i32* %val_ptr) {
; X32-LABEL: test_variadic_call_1:
; X32:       # %bb.0:
; X32-NEXT:    subl $12, %esp
; X32-NEXT:    .cfi_def_cfa_offset 16
; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X32-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X32-NEXT:    movl (%eax), %eax
; X32-NEXT:    movl (%ecx), %ecx
; X32-NEXT:    movl %eax, (%esp)
; X32-NEXT:    movl %ecx, {{[0-9]+}}(%esp)
; X32-NEXT:    calll variadic_callee
; X32-NEXT:    addl $12, %esp
; X32-NEXT:    .cfi_def_cfa_offset 4
; X32-NEXT:    retl
;
; X64-LABEL: test_variadic_call_1:
; X64:       # %bb.0:
; X64-NEXT:    pushq %rax
; X64-NEXT:    .cfi_def_cfa_offset 16
; X64-NEXT:    movq (%rdi), %rdi
; X64-NEXT:    movl (%rsi), %esi
; X64-NEXT:    movb $0, %al
; X64-NEXT:    callq variadic_callee
; X64-NEXT:    popq %rax
; X64-NEXT:    .cfi_def_cfa_offset 8
; X64-NEXT:    retq

  %addr = load i8*, i8** %addr_ptr
  %val = load i32, i32* %val_ptr
  call void (i8*, ...) @variadic_callee(i8* %addr, i32 %val)
  ret void
}

define void @test_variadic_call_2(i8** %addr_ptr, double* %val_ptr) {
; X32-LABEL: test_variadic_call_2:
; X32:       # %bb.0:
; X32-NEXT:    subl $12, %esp
; X32-NEXT:    .cfi_def_cfa_offset 16
; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X32-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X32-NEXT:    movl (%eax), %eax
; X32-NEXT:    movl (%ecx), %edx
; X32-NEXT:    movl 4(%ecx), %ecx
; X32-NEXT:    movl %eax, (%esp)
; X32-NEXT:    movl $4, %eax
; X32-NEXT:    leal (%esp,%eax), %eax
; X32-NEXT:    movl %edx, {{[0-9]+}}(%esp)
; X32-NEXT:    movl %ecx, 4(%eax)
; X32-NEXT:    calll variadic_callee
; X32-NEXT:    addl $12, %esp
; X32-NEXT:    .cfi_def_cfa_offset 4
; X32-NEXT:    retl
;
; X64-LABEL: test_variadic_call_2:
; X64:       # %bb.0:
; X64-NEXT:    pushq %rax
; X64-NEXT:    .cfi_def_cfa_offset 16
; X64-NEXT:    movq (%rdi), %rdi
; X64-NEXT:    movq (%rsi), %rax
; X64-NEXT:    movq %rax, %xmm0
; X64-NEXT:    movb $1, %al
; X64-NEXT:    callq variadic_callee
; X64-NEXT:    popq %rax
; X64-NEXT:    .cfi_def_cfa_offset 8
; X64-NEXT:    retq

  %addr = load i8*, i8** %addr_ptr
  %val = load double, double* %val_ptr
  call void (i8*, ...) @variadic_callee(i8* %addr, double %val)
  ret void
}