// RUN: %clang_cc1 -triple arm64-none-linux-gnu -target-feature +neon \
// RUN:  -emit-llvm -o - %s | opt -S -mem2reg | FileCheck %s
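
// Each test below checks that a lane-indexed vdup-to-scalar intrinsic
// (vdup[bhsd]_lane / vdup[bhsd]_laneq) is lowered to an extractelement of
// the source vector at the requested lane.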

#include <arm_neon.h>

// CHECK-LABEL: define float @test_vdups_lane_f32(<2 x float> %a) #0 {
// CHECK:   [[TMP0:%.*]] = bitcast <2 x float> %a to <8 x i8>
// CHECK:   [[TMP1:%.*]] = bitcast <8 x i8> [[TMP0]] to <2 x float>
// CHECK:   [[VDUPS_LANE:%.*]] = extractelement <2 x float> [[TMP1]], i32 1
// CHECK:   ret float [[VDUPS_LANE]]
float32_t test_vdups_lane_f32(float32x2_t a) {
  return vdups_lane_f32(a, 1);
}


// CHECK-LABEL: define double @test_vdupd_lane_f64(<1 x double> %a) #0 {
// CHECK:   [[TMP0:%.*]] = bitcast <1 x double> %a to <8 x i8>
// CHECK:   [[TMP1:%.*]] = bitcast <8 x i8> [[TMP0]] to <1 x double>
// CHECK:   [[VDUPD_LANE:%.*]] = extractelement <1 x double> [[TMP1]], i32 0
// CHECK:   ret double [[VDUPD_LANE]]
float64_t test_vdupd_lane_f64(float64x1_t a) {
  return vdupd_lane_f64(a, 0);
}


// CHECK-LABEL: define float @test_vdups_laneq_f32(<4 x float> %a) #0 {
// CHECK:   [[TMP0:%.*]] = bitcast <4 x float> %a to <16 x i8>
// CHECK:   [[TMP1:%.*]] = bitcast <16 x i8> [[TMP0]] to <4 x float>
// CHECK:   [[VGETQ_LANE:%.*]] = extractelement <4 x float> [[TMP1]], i32 3
// CHECK:   ret float [[VGETQ_LANE]]
float32_t test_vdups_laneq_f32(float32x4_t a) {
  return vdups_laneq_f32(a, 3);
}


// CHECK-LABEL: define double @test_vdupd_laneq_f64(<2 x double> %a) #0 {
// CHECK:   [[TMP0:%.*]] = bitcast <2 x double> %a to <16 x i8>
// CHECK:   [[TMP1:%.*]] = bitcast <16 x i8> [[TMP0]] to <2 x double>
// CHECK:   [[VGETQ_LANE:%.*]] = extractelement <2 x double> [[TMP1]], i32 1
// CHECK:   ret double [[VGETQ_LANE]]
float64_t test_vdupd_laneq_f64(float64x2_t a) {
  return vdupd_laneq_f64(a, 1);
}


// CHECK-LABEL: define i8 @test_vdupb_lane_s8(<8 x i8> %a) #0 {
// CHECK:   [[VGET_LANE:%.*]] = extractelement <8 x i8> %a, i32 7
// CHECK:   ret i8 [[VGET_LANE]]
int8_t test_vdupb_lane_s8(int8x8_t a) {
  return vdupb_lane_s8(a, 7);
}


// CHECK-LABEL: define i16 @test_vduph_lane_s16(<4 x i16> %a) #0 {
// CHECK:   [[TMP0:%.*]] = bitcast <4 x i16> %a to <8 x i8>
// CHECK:   [[TMP1:%.*]] = bitcast <8 x i8> [[TMP0]] to <4 x i16>
// CHECK:   [[VGET_LANE:%.*]] = extractelement <4 x i16> [[TMP1]], i32 3
// CHECK:   ret i16 [[VGET_LANE]]
int16_t test_vduph_lane_s16(int16x4_t a) {
  return vduph_lane_s16(a, 3);
}


// CHECK-LABEL: define i32 @test_vdups_lane_s32(<2 x i32> %a) #0 {
// CHECK:   [[TMP0:%.*]] = bitcast <2 x i32> %a to <8 x i8>
// CHECK:   [[TMP1:%.*]] = bitcast <8 x i8> [[TMP0]] to <2 x i32>
// CHECK:   [[VGET_LANE:%.*]] = extractelement <2 x i32> [[TMP1]], i32 1
// CHECK:   ret i32 [[VGET_LANE]]
int32_t test_vdups_lane_s32(int32x2_t a) {
  return vdups_lane_s32(a, 1);
}


// CHECK-LABEL: define i64 @test_vdupd_lane_s64(<1 x i64> %a) #0 {
// CHECK:   [[TMP0:%.*]] = bitcast <1 x i64> %a to <8 x i8>
// CHECK:   [[TMP1:%.*]] = bitcast <8 x i8> [[TMP0]] to <1 x i64>
// CHECK:   [[VGET_LANE:%.*]] = extractelement <1 x i64> [[TMP1]], i32 0
// CHECK:   ret i64 [[VGET_LANE]]
int64_t test_vdupd_lane_s64(int64x1_t a) {
  return vdupd_lane_s64(a, 0);
}


// CHECK-LABEL: define i8 @test_vdupb_lane_u8(<8 x i8> %a) #0 {
// CHECK:   [[VGET_LANE:%.*]] = extractelement <8 x i8> %a, i32 7
// CHECK:   ret i8 [[VGET_LANE]]
uint8_t test_vdupb_lane_u8(uint8x8_t a) {
  return vdupb_lane_u8(a, 7);
}


// CHECK-LABEL: define i16 @test_vduph_lane_u16(<4 x i16> %a) #0 {
// CHECK:   [[TMP0:%.*]] = bitcast <4 x i16> %a to <8 x i8>
// CHECK:   [[TMP1:%.*]] = bitcast <8 x i8> [[TMP0]] to <4 x i16>
// CHECK:   [[VGET_LANE:%.*]] = extractelement <4 x i16> [[TMP1]], i32 3
// CHECK:   ret i16 [[VGET_LANE]]
uint16_t test_vduph_lane_u16(uint16x4_t a) {
  return vduph_lane_u16(a, 3);
}


// CHECK-LABEL: define i32 @test_vdups_lane_u32(<2 x i32> %a) #0 {
// CHECK:   [[TMP0:%.*]] = bitcast <2 x i32> %a to <8 x i8>
// CHECK:   [[TMP1:%.*]] = bitcast <8 x i8> [[TMP0]] to <2 x i32>
// CHECK:   [[VGET_LANE:%.*]] = extractelement <2 x i32> [[TMP1]], i32 1
// CHECK:   ret i32 [[VGET_LANE]]
uint32_t test_vdups_lane_u32(uint32x2_t a) {
  return vdups_lane_u32(a, 1);
}


// CHECK-LABEL: define i64 @test_vdupd_lane_u64(<1 x i64> %a) #0 {
// CHECK:   [[TMP0:%.*]] = bitcast <1 x i64> %a to <8 x i8>
// CHECK:   [[TMP1:%.*]] = bitcast <8 x i8> [[TMP0]] to <1 x i64>
// CHECK:   [[VGET_LANE:%.*]] = extractelement <1 x i64> [[TMP1]], i32 0
// CHECK:   ret i64 [[VGET_LANE]]
uint64_t test_vdupd_lane_u64(uint64x1_t a) {
  return vdupd_lane_u64(a, 0);
}

// CHECK-LABEL: define i8 @test_vdupb_laneq_s8(<16 x i8> %a) #0 {
// CHECK:   [[VGETQ_LANE:%.*]] = extractelement <16 x i8> %a, i32 15
// CHECK:   ret i8 [[VGETQ_LANE]]
int8_t test_vdupb_laneq_s8(int8x16_t a) {
  return vdupb_laneq_s8(a, 15);
}


// CHECK-LABEL: define i16 @test_vduph_laneq_s16(<8 x i16> %a) #0 {
// CHECK:   [[TMP0:%.*]] = bitcast <8 x i16> %a to <16 x i8>
// CHECK:   [[TMP1:%.*]] = bitcast <16 x i8> [[TMP0]] to <8 x i16>
// CHECK:   [[VGETQ_LANE:%.*]] = extractelement <8 x i16> [[TMP1]], i32 7
// CHECK:   ret i16 [[VGETQ_LANE]]
int16_t test_vduph_laneq_s16(int16x8_t a) {
  return vduph_laneq_s16(a, 7);
}


// CHECK-LABEL: define i32 @test_vdups_laneq_s32(<4 x i32> %a) #0 {
// CHECK:   [[TMP0:%.*]] = bitcast <4 x i32> %a to <16 x i8>
// CHECK:   [[TMP1:%.*]] = bitcast <16 x i8> [[TMP0]] to <4 x i32>
// CHECK:   [[VGETQ_LANE:%.*]] = extractelement <4 x i32> [[TMP1]], i32 3
// CHECK:   ret i32 [[VGETQ_LANE]]
int32_t test_vdups_laneq_s32(int32x4_t a) {
  return vdups_laneq_s32(a, 3);
}


// CHECK-LABEL: define i64 @test_vdupd_laneq_s64(<2 x i64> %a) #0 {
// CHECK:   [[TMP0:%.*]] = bitcast <2 x i64> %a to <16 x i8>
// CHECK:   [[TMP1:%.*]] = bitcast <16 x i8> [[TMP0]] to <2 x i64>
// CHECK:   [[VGETQ_LANE:%.*]] = extractelement <2 x i64> [[TMP1]], i32 1
// CHECK:   ret i64 [[VGETQ_LANE]]
int64_t test_vdupd_laneq_s64(int64x2_t a) {
  return vdupd_laneq_s64(a, 1);
}


// CHECK-LABEL: define i8 @test_vdupb_laneq_u8(<16 x i8> %a) #0 {
// CHECK:   [[VGETQ_LANE:%.*]] = extractelement <16 x i8> %a, i32 15
// CHECK:   ret i8 [[VGETQ_LANE]]
uint8_t test_vdupb_laneq_u8(uint8x16_t a) {
  return vdupb_laneq_u8(a, 15);
}


// CHECK-LABEL: define i16 @test_vduph_laneq_u16(<8 x i16> %a) #0 {
// CHECK:   [[TMP0:%.*]] = bitcast <8 x i16> %a to <16 x i8>
// CHECK:   [[TMP1:%.*]] = bitcast <16 x i8> [[TMP0]] to <8 x i16>
// CHECK:   [[VGETQ_LANE:%.*]] = extractelement <8 x i16> [[TMP1]], i32 7
// CHECK:   ret i16 [[VGETQ_LANE]]
uint16_t test_vduph_laneq_u16(uint16x8_t a) {
  return vduph_laneq_u16(a, 7);
}


// CHECK-LABEL: define i32 @test_vdups_laneq_u32(<4 x i32> %a) #0 {
// CHECK:   [[TMP0:%.*]] = bitcast <4 x i32> %a to <16 x i8>
// CHECK:   [[TMP1:%.*]] = bitcast <16 x i8> [[TMP0]] to <4 x i32>
// CHECK:   [[VGETQ_LANE:%.*]] = extractelement <4 x i32> [[TMP1]], i32 3
// CHECK:   ret i32 [[VGETQ_LANE]]
uint32_t test_vdups_laneq_u32(uint32x4_t a) {
  return vdups_laneq_u32(a, 3);
}


// CHECK-LABEL: define i64 @test_vdupd_laneq_u64(<2 x i64> %a) #0 {
// CHECK:   [[TMP0:%.*]] = bitcast <2 x i64> %a to <16 x i8>
// CHECK:   [[TMP1:%.*]] = bitcast <16 x i8> [[TMP0]] to <2 x i64>
// CHECK:   [[VGETQ_LANE:%.*]] = extractelement <2 x i64> [[TMP1]], i32 1
// CHECK:   ret i64 [[VGETQ_LANE]]
uint64_t test_vdupd_laneq_u64(uint64x2_t a) {
  return vdupd_laneq_u64(a, 1);
}

// CHECK-LABEL: define i8 @test_vdupb_lane_p8(<8 x i8> %a) #0 {
// CHECK:   [[VGET_LANE:%.*]] = extractelement <8 x i8> %a, i32 7
// CHECK:   ret i8 [[VGET_LANE]]
poly8_t test_vdupb_lane_p8(poly8x8_t a) {
  return vdupb_lane_p8(a, 7);
}

// CHECK-LABEL: define i16 @test_vduph_lane_p16(<4 x i16> %a) #0 {
// CHECK:   [[TMP0:%.*]] = bitcast <4 x i16> %a to <8 x i8>
// CHECK:   [[TMP1:%.*]] = bitcast <8 x i8> [[TMP0]] to <4 x i16>
// CHECK:   [[VGET_LANE:%.*]] = extractelement <4 x i16> [[TMP1]], i32 3
// CHECK:   ret i16 [[VGET_LANE]]
poly16_t test_vduph_lane_p16(poly16x4_t a) {
  return vduph_lane_p16(a, 3);
}

// CHECK-LABEL: define i8 @test_vdupb_laneq_p8(<16 x i8> %a) #0 {
// CHECK:   [[VGETQ_LANE:%.*]] = extractelement <16 x i8> %a, i32 15
// CHECK:   ret i8 [[VGETQ_LANE]]
poly8_t test_vdupb_laneq_p8(poly8x16_t a) {
  return vdupb_laneq_p8(a, 15);
}

// CHECK-LABEL: define i16 @test_vduph_laneq_p16(<8 x i16> %a) #0 {
// CHECK:   [[TMP0:%.*]] = bitcast <8 x i16> %a to <16 x i8>
// CHECK:   [[TMP1:%.*]] = bitcast <16 x i8> [[TMP0]] to <8 x i16>
// CHECK:   [[VGETQ_LANE:%.*]] = extractelement <8 x i16> [[TMP1]], i32 7
// CHECK:   ret i16 [[VGETQ_LANE]]
poly16_t test_vduph_laneq_p16(poly16x8_t a) {
  return vduph_laneq_p16(a, 7);
}