# (source-viewer navigation chrome removed during cleanup)
# NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
# RUN: llc -march=aarch64 -run-pass=aarch64-prelegalizer-combiner -verify-machineinstrs %s -o - | FileCheck %s
--- |
  ; Embedded IR module: provides the function symbols and %ir.dst value names
  ; referenced by the MIR bodies below. The pass under test is the AArch64
  ; pre-legalizer combiner's inlining of small constant-size G_MEMSETs.
  target datalayout = "e-m:e-i8:8:32-i16:16:32-i64:64-i128:128-n32:64-S128"
  target triple = "aarch64"

  define void @test_ms1(i8* nocapture %dst, i32 %c, i32 %len) local_unnamed_addr #0 {
  entry:
    %0 = trunc i32 %c to i8
    %conv = zext i32 %len to i64
    tail call void @llvm.memset.p0i8.i64(i8* align 1 %dst, i8 %0, i64 %conv, i1 false)
    ret void
  }

  declare void @llvm.memset.p0i8.i64(i8* nocapture writeonly, i8, i64, i1 immarg) #1

  define void @test_ms2_const(i8* nocapture %dst, i32 %c) local_unnamed_addr #0 {
  entry:
    %0 = trunc i32 %c to i8
    tail call void @llvm.memset.p0i8.i64(i8* align 1 %dst, i8 %0, i64 16, i1 false)
    ret void
  }

  define void @test_zero_const(i8* nocapture %dst) local_unnamed_addr #0 {
  entry:
    tail call void @llvm.memset.p0i8.i64(i8* align 1 %dst, i8 0, i64 64, i1 false)
    ret void
  }

  define void @test_ms3_const_both(i8* nocapture %dst) local_unnamed_addr #0 {
  entry:
    tail call void @llvm.memset.p0i8.i64(i8* align 1 %dst, i8 64, i64 16, i1 false)
    ret void
  }

  define void @test_ms_vector(i8* nocapture %dst, i32 %c) local_unnamed_addr #0 {
  entry:
    %0 = trunc i32 %c to i8
    tail call void @llvm.memset.p0i8.i64(i8* align 1 %dst, i8 %0, i64 16, i1 false)
    ret void
  }

  define void @test_ms4_const_both_unaligned(i8* nocapture %dst) local_unnamed_addr #0 {
  entry:
    tail call void @llvm.memset.p0i8.i64(i8* align 1 %dst, i8 64, i64 18, i1 false)
    ret void
  }

  declare void @llvm.stackprotector(i8*, i8**) #2

  attributes #0 = { nounwind ssp uwtable "correctly-rounded-divide-sqrt-fp-math"="false" "disable-tail-calls"="false" "less-precise-fpmad"="false" "min-legal-vector-width"="0" "frame-pointer"="all" "no-infs-fp-math"="false" "no-jump-tables"="false" "no-nans-fp-math"="false" "no-signed-zeros-fp-math"="false" "no-trapping-math"="false" "stack-protector-buffer-size"="8" "target-cpu"="cyclone" "target-features"="+aes,+crypto,+fp-armv8,+neon,+sha2,+zcm,+zcz" "unsafe-fp-math"="false" "use-soft-float"="false" }
  attributes #1 = { argmemonly nounwind }
  ; FIX: @llvm.stackprotector above references attribute group #2, which was
  ; missing from this file; an undefined group makes the IR module fail to parse.
  attributes #2 = { nounwind }

...
---
# Runtime (non-constant) length: the combiner cannot compute a store sequence,
# so the G_MEMSET must survive the pass unchanged.
name:            test_ms1
alignment:       4
tracksRegLiveness: true
body:             |
  bb.1.entry:
    liveins: $w1, $w2, $x0

    ; CHECK-LABEL: name: test_ms1
    ; CHECK: liveins: $w1, $w2, $x0
    ; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x0
    ; CHECK: [[COPY1:%[0-9]+]]:_(s32) = COPY $w1
    ; CHECK: [[COPY2:%[0-9]+]]:_(s32) = COPY $w2
    ; CHECK: [[TRUNC:%[0-9]+]]:_(s8) = G_TRUNC [[COPY1]](s32)
    ; CHECK: [[ZEXT:%[0-9]+]]:_(s64) = G_ZEXT [[COPY2]](s32)
    ; CHECK: G_MEMSET [[COPY]](p0), [[TRUNC]](s8), [[ZEXT]](s64), 1 :: (store 1 into %ir.dst)
    ; CHECK: RET_ReallyLR
    %0:_(p0) = COPY $x0
    %1:_(s32) = COPY $w1
    %2:_(s32) = COPY $w2
    %3:_(s8) = G_TRUNC %1(s32)
    %4:_(s64) = G_ZEXT %2(s32)
    G_MEMSET %0(p0), %3(s8), %4(s64), 1 :: (store 1 into %ir.dst)
    RET_ReallyLR

...
---
# Constant length 16, variable byte value: expect the value to be splatted
# across an s64 (zext then multiply by 0x0101010101010101) and stored as two
# 8-byte stores at offsets 0 and 8.
name:            test_ms2_const
alignment:       4
tracksRegLiveness: true
body:             |
  bb.1.entry:
    liveins: $w1, $x0

    ; CHECK-LABEL: name: test_ms2_const
    ; CHECK: liveins: $w1, $x0
    ; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x0
    ; CHECK: [[COPY1:%[0-9]+]]:_(s32) = COPY $w1
    ; CHECK: [[TRUNC:%[0-9]+]]:_(s8) = G_TRUNC [[COPY1]](s32)
    ; CHECK: [[ZEXT:%[0-9]+]]:_(s64) = G_ZEXT [[TRUNC]](s8)
    ; CHECK: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 72340172838076673
    ; CHECK: [[MUL:%[0-9]+]]:_(s64) = G_MUL [[ZEXT]], [[C]]
    ; CHECK: G_STORE [[MUL]](s64), [[COPY]](p0) :: (store 8 into %ir.dst, align 1)
    ; CHECK: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 8
    ; CHECK: [[PTR_ADD:%[0-9]+]]:_(p0) = G_PTR_ADD [[COPY]], [[C1]](s64)
    ; CHECK: G_STORE [[MUL]](s64), [[PTR_ADD]](p0) :: (store 8 into %ir.dst + 8, align 1)
    ; CHECK: RET_ReallyLR
    %0:_(p0) = COPY $x0
    %1:_(s32) = COPY $w1
    %3:_(s64) = G_CONSTANT i64 16
    %2:_(s8) = G_TRUNC %1(s32)
    G_MEMSET %0(p0), %2(s8), %3(s64), 1 :: (store 1 into %ir.dst)
    RET_ReallyLR

...
---
# Zero fill of 64 bytes: expect a <2 x s64> zero build-vector stored four
# times (16 bytes each) at offsets 0, 16, 32, and 48.
name:            test_zero_const
alignment:       4
tracksRegLiveness: true
body:             |
  bb.1.entry:
    liveins: $w1, $x0

    ; CHECK-LABEL: name: test_zero_const
    ; CHECK: liveins: $w1, $x0
    ; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x0
    ; CHECK: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 0
    ; CHECK: [[BUILD_VECTOR:%[0-9]+]]:_(<2 x s64>) = G_BUILD_VECTOR [[C]](s64), [[C]](s64)
    ; CHECK: G_STORE [[BUILD_VECTOR]](<2 x s64>), [[COPY]](p0) :: (store 16 into %ir.dst, align 1)
    ; CHECK: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
    ; CHECK: [[PTR_ADD:%[0-9]+]]:_(p0) = G_PTR_ADD [[COPY]], [[C1]](s64)
    ; CHECK: G_STORE [[BUILD_VECTOR]](<2 x s64>), [[PTR_ADD]](p0) :: (store 16 into %ir.dst + 16, align 1)
    ; CHECK: [[C2:%[0-9]+]]:_(s64) = G_CONSTANT i64 32
    ; CHECK: [[PTR_ADD1:%[0-9]+]]:_(p0) = G_PTR_ADD [[COPY]], [[C2]](s64)
    ; CHECK: G_STORE [[BUILD_VECTOR]](<2 x s64>), [[PTR_ADD1]](p0) :: (store 16 into %ir.dst + 32, align 1)
    ; CHECK: [[C3:%[0-9]+]]:_(s64) = G_CONSTANT i64 48
    ; CHECK: [[PTR_ADD2:%[0-9]+]]:_(p0) = G_PTR_ADD [[COPY]], [[C3]](s64)
    ; CHECK: G_STORE [[BUILD_VECTOR]](<2 x s64>), [[PTR_ADD2]](p0) :: (store 16 into %ir.dst + 48, align 1)
    ; CHECK: RET_ReallyLR
    %0:_(p0) = COPY $x0
    %1:_(s32) = G_CONSTANT i32 0
    %3:_(s64) = G_CONSTANT i64 64
    %2:_(s8) = G_TRUNC %1(s32)
    G_MEMSET %0(p0), %2(s8), %3(s64), 1 :: (store 1 into %ir.dst)
    RET_ReallyLR

...

---
# Constant value (0x40) and constant length (16): the splat is folded to the
# immediate 0x4040404040404040 (= 4629771061636907072) and stored as two
# 8-byte stores at offsets 0 and 8.
name:            test_ms3_const_both
alignment:       4
tracksRegLiveness: true
body:             |
  bb.1.entry:
    liveins: $x0

    ; CHECK-LABEL: name: test_ms3_const_both
    ; CHECK: liveins: $x0
    ; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x0
    ; CHECK: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 4629771061636907072
    ; CHECK: G_STORE [[C]](s64), [[COPY]](p0) :: (store 8 into %ir.dst, align 1)
    ; CHECK: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 8
    ; CHECK: [[PTR_ADD:%[0-9]+]]:_(p0) = G_PTR_ADD [[COPY]], [[C1]](s64)
    ; CHECK: G_STORE [[C]](s64), [[PTR_ADD]](p0) :: (store 8 into %ir.dst + 8, align 1)
    ; CHECK: RET_ReallyLR
    %0:_(p0) = COPY $x0
    %1:_(s8) = G_CONSTANT i8 64
    %2:_(s64) = G_CONSTANT i64 16
    G_MEMSET %0(p0), %1(s8), %2(s64), 1 :: (store 1 into %ir.dst)
    RET_ReallyLR

...
---
# 60-byte memset of a variable value: the splat is widened to a <2 x s64>
# build-vector and stored at offsets 0, 16, 32, and finally at 44 — the last
# store deliberately overlaps the previous one to cover the 60-byte tail
# without needing narrower stores.
name:            test_ms_vector
alignment:       4
tracksRegLiveness: true
body:             |
  bb.1.entry:
    liveins: $w1, $x0

    ; CHECK-LABEL: name: test_ms_vector
    ; CHECK: liveins: $w1, $x0
    ; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x0
    ; CHECK: [[COPY1:%[0-9]+]]:_(s32) = COPY $w1
    ; CHECK: [[TRUNC:%[0-9]+]]:_(s8) = G_TRUNC [[COPY1]](s32)
    ; CHECK: [[ZEXT:%[0-9]+]]:_(s64) = G_ZEXT [[TRUNC]](s8)
    ; CHECK: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 72340172838076673
    ; CHECK: [[MUL:%[0-9]+]]:_(s64) = G_MUL [[ZEXT]], [[C]]
    ; CHECK: [[BUILD_VECTOR:%[0-9]+]]:_(<2 x s64>) = G_BUILD_VECTOR [[MUL]](s64), [[MUL]](s64)
    ; CHECK: G_STORE [[BUILD_VECTOR]](<2 x s64>), [[COPY]](p0) :: (store 16 into %ir.dst, align 1)
    ; CHECK: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
    ; CHECK: [[PTR_ADD:%[0-9]+]]:_(p0) = G_PTR_ADD [[COPY]], [[C1]](s64)
    ; CHECK: G_STORE [[BUILD_VECTOR]](<2 x s64>), [[PTR_ADD]](p0) :: (store 16 into %ir.dst + 16, align 1)
    ; CHECK: [[C2:%[0-9]+]]:_(s64) = G_CONSTANT i64 32
    ; CHECK: [[PTR_ADD1:%[0-9]+]]:_(p0) = G_PTR_ADD [[COPY]], [[C2]](s64)
    ; CHECK: G_STORE [[BUILD_VECTOR]](<2 x s64>), [[PTR_ADD1]](p0) :: (store 16 into %ir.dst + 32, align 1)
    ; CHECK: [[C3:%[0-9]+]]:_(s64) = G_CONSTANT i64 44
    ; CHECK: [[PTR_ADD2:%[0-9]+]]:_(p0) = G_PTR_ADD [[COPY]], [[C3]](s64)
    ; CHECK: G_STORE [[BUILD_VECTOR]](<2 x s64>), [[PTR_ADD2]](p0) :: (store 16 into %ir.dst + 44, align 1)
    ; CHECK: RET_ReallyLR
    %0:_(p0) = COPY $x0
    %1:_(s32) = COPY $w1
    %3:_(s64) = G_CONSTANT i64 60
    %2:_(s8) = G_TRUNC %1(s32)
    G_MEMSET %0(p0), %2(s8), %3(s64), 1 :: (store 1 into %ir.dst)
    RET_ReallyLR

...
---
# 18-byte constant memset (not a multiple of 8): expect two 8-byte stores of
# the 0x40 splat at offsets 0 and 8, then a 2-byte truncated store at offset
# 16 to cover the remainder.
name:            test_ms4_const_both_unaligned
alignment:       4
tracksRegLiveness: true
body:             |
  bb.1.entry:
    liveins: $x0

    ; CHECK-LABEL: name: test_ms4_const_both_unaligned
    ; CHECK: liveins: $x0
    ; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x0
    ; CHECK: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 4629771061636907072
    ; CHECK: G_STORE [[C]](s64), [[COPY]](p0) :: (store 8 into %ir.dst, align 1)
    ; CHECK: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 8
    ; CHECK: [[PTR_ADD:%[0-9]+]]:_(p0) = G_PTR_ADD [[COPY]], [[C1]](s64)
    ; CHECK: G_STORE [[C]](s64), [[PTR_ADD]](p0) :: (store 8 into %ir.dst + 8, align 1)
    ; CHECK: [[TRUNC:%[0-9]+]]:_(s16) = G_TRUNC [[C]](s64)
    ; CHECK: [[C2:%[0-9]+]]:_(s64) = G_CONSTANT i64 16
    ; CHECK: [[PTR_ADD1:%[0-9]+]]:_(p0) = G_PTR_ADD [[COPY]], [[C2]](s64)
    ; CHECK: G_STORE [[TRUNC]](s16), [[PTR_ADD1]](p0) :: (store 2 into %ir.dst + 16, align 1)
    ; CHECK: RET_ReallyLR
    %0:_(p0) = COPY $x0
    %1:_(s8) = G_CONSTANT i8 64
    %2:_(s64) = G_CONSTANT i64 18
    G_MEMSET %0(p0), %1(s8), %2(s64), 1 :: (store 1 into %ir.dst)
    RET_ReallyLR

...
