; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc < %s -mtriple=i686-unknown -mattr=+sse2 | FileCheck %s --check-prefix=X86-SSE
; RUN: llc < %s -mtriple=i686-unknown -mattr=+avx | FileCheck %s --check-prefix=X86-AVX
; RUN: llc < %s -mtriple=i686-unknown -mattr=+avx512f,+avx512vl | FileCheck %s --check-prefix=X86-AVX512F
; RUN: llc < %s -mtriple=i686-unknown -mattr=+avx512f,+avx512dq,+avx512vl | FileCheck %s --check-prefix=X86-AVX512DQ
; RUN: llc < %s -mtriple=x86_64-unknown -mattr=+sse2 | FileCheck %s --check-prefix=X64-SSE
; RUN: llc < %s -mtriple=x86_64-unknown -mattr=+avx | FileCheck %s --check-prefix=X64-AVX
; RUN: llc < %s -mtriple=x86_64-unknown -mattr=+avx512f,+avx512vl | FileCheck %s --check-prefix=X64-AVX512F
; RUN: llc < %s -mtriple=x86_64-unknown -mattr=+avx512f,+avx512dq,+avx512vl | FileCheck %s --check-prefix=X64-AVX512DQ

; PR29078

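; The mask leaves at most 16 significant bits in each i64 element, so the
; sitofp should narrow to a 32-bit cvtdq2pd after truncating the vector;
; only AVX512DQ is expected to keep a native 64-bit vcvtqq2pd.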
define <2 x double> @mask_sitofp_2i64_2f64(<2 x i64> %a) nounwind {
; X86-SSE-LABEL: mask_sitofp_2i64_2f64:
; X86-SSE:       # %bb.0:
; X86-SSE-NEXT:    pshufd {{.*#+}} xmm0 = xmm0[0,2,2,3]
; X86-SSE-NEXT:    pand {{\.LCPI.*}}, %xmm0
; X86-SSE-NEXT:    cvtdq2pd %xmm0, %xmm0
; X86-SSE-NEXT:    retl
;
; X86-AVX-LABEL: mask_sitofp_2i64_2f64:
; X86-AVX:       # %bb.0:
; X86-AVX-NEXT:    vpshufb {{.*#+}} xmm0 = xmm0[0],zero,zero,zero,xmm0[8,9],zero,zero,xmm0[u,u,u,u,u,u,u,u]
; X86-AVX-NEXT:    vcvtdq2pd %xmm0, %xmm0
; X86-AVX-NEXT:    retl
;
; X86-AVX512F-LABEL: mask_sitofp_2i64_2f64:
; X86-AVX512F:       # %bb.0:
; X86-AVX512F-NEXT:    vpshufb {{.*#+}} xmm0 = xmm0[0],zero,zero,zero,xmm0[8,9],zero,zero,xmm0[u,u,u,u,u,u,u,u]
; X86-AVX512F-NEXT:    vcvtdq2pd %xmm0, %xmm0
; X86-AVX512F-NEXT:    retl
;
; X86-AVX512DQ-LABEL: mask_sitofp_2i64_2f64:
; X86-AVX512DQ:       # %bb.0:
; X86-AVX512DQ-NEXT:    vandps {{\.LCPI.*}}, %xmm0, %xmm0
; X86-AVX512DQ-NEXT:    vcvtqq2pd %xmm0, %xmm0
; X86-AVX512DQ-NEXT:    retl
;
; X64-SSE-LABEL: mask_sitofp_2i64_2f64:
; X64-SSE:       # %bb.0:
; X64-SSE-NEXT:    pshufd {{.*#+}} xmm0 = xmm0[0,2,2,3]
; X64-SSE-NEXT:    pand {{.*}}(%rip), %xmm0
; X64-SSE-NEXT:    cvtdq2pd %xmm0, %xmm0
; X64-SSE-NEXT:    retq
;
; X64-AVX-LABEL: mask_sitofp_2i64_2f64:
; X64-AVX:       # %bb.0:
; X64-AVX-NEXT:    vpshufb {{.*#+}} xmm0 = xmm0[0],zero,zero,zero,xmm0[8,9],zero,zero,xmm0[u,u,u,u,u,u,u,u]
; X64-AVX-NEXT:    vcvtdq2pd %xmm0, %xmm0
; X64-AVX-NEXT:    retq
;
; X64-AVX512F-LABEL: mask_sitofp_2i64_2f64:
; X64-AVX512F:       # %bb.0:
; X64-AVX512F-NEXT:    vpshufb {{.*#+}} xmm0 = xmm0[0],zero,zero,zero,xmm0[8,9],zero,zero,xmm0[u,u,u,u,u,u,u,u]
; X64-AVX512F-NEXT:    vcvtdq2pd %xmm0, %xmm0
; X64-AVX512F-NEXT:    retq
;
; X64-AVX512DQ-LABEL: mask_sitofp_2i64_2f64:
; X64-AVX512DQ:       # %bb.0:
; X64-AVX512DQ-NEXT:    vandps {{.*}}(%rip), %xmm0, %xmm0
; X64-AVX512DQ-NEXT:    vcvtqq2pd %xmm0, %xmm0
; X64-AVX512DQ-NEXT:    retq
  %and = and <2 x i64> %a, <i64 255, i64 65535>
  %cvt = sitofp <2 x i64> %and to <2 x double>
  ret <2 x double> %cvt
}

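; Unsigned variant of the test above: the masked value is non-negative, so
; uitofp should lower to exactly the same sequences as the sitofp case.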
define <2 x double> @mask_uitofp_2i64_2f64(<2 x i64> %a) nounwind {
; X86-SSE-LABEL: mask_uitofp_2i64_2f64:
; X86-SSE:       # %bb.0:
; X86-SSE-NEXT:    pshufd {{.*#+}} xmm0 = xmm0[0,2,2,3]
; X86-SSE-NEXT:    pand {{\.LCPI.*}}, %xmm0
; X86-SSE-NEXT:    cvtdq2pd %xmm0, %xmm0
; X86-SSE-NEXT:    retl
;
; X86-AVX-LABEL: mask_uitofp_2i64_2f64:
; X86-AVX:       # %bb.0:
; X86-AVX-NEXT:    vpshufb {{.*#+}} xmm0 = xmm0[0],zero,zero,zero,xmm0[8,9],zero,zero,xmm0[u,u,u,u,u,u,u,u]
; X86-AVX-NEXT:    vcvtdq2pd %xmm0, %xmm0
; X86-AVX-NEXT:    retl
;
; X86-AVX512F-LABEL: mask_uitofp_2i64_2f64:
; X86-AVX512F:       # %bb.0:
; X86-AVX512F-NEXT:    vpshufb {{.*#+}} xmm0 = xmm0[0],zero,zero,zero,xmm0[8,9],zero,zero,xmm0[u,u,u,u,u,u,u,u]
; X86-AVX512F-NEXT:    vcvtdq2pd %xmm0, %xmm0
; X86-AVX512F-NEXT:    retl
;
; X86-AVX512DQ-LABEL: mask_uitofp_2i64_2f64:
; X86-AVX512DQ:       # %bb.0:
; X86-AVX512DQ-NEXT:    vandps {{\.LCPI.*}}, %xmm0, %xmm0
; X86-AVX512DQ-NEXT:    vcvtqq2pd %xmm0, %xmm0
; X86-AVX512DQ-NEXT:    retl
;
; X64-SSE-LABEL: mask_uitofp_2i64_2f64:
; X64-SSE:       # %bb.0:
; X64-SSE-NEXT:    pshufd {{.*#+}} xmm0 = xmm0[0,2,2,3]
; X64-SSE-NEXT:    pand {{.*}}(%rip), %xmm0
; X64-SSE-NEXT:    cvtdq2pd %xmm0, %xmm0
; X64-SSE-NEXT:    retq
;
; X64-AVX-LABEL: mask_uitofp_2i64_2f64:
; X64-AVX:       # %bb.0:
; X64-AVX-NEXT:    vpshufb {{.*#+}} xmm0 = xmm0[0],zero,zero,zero,xmm0[8,9],zero,zero,xmm0[u,u,u,u,u,u,u,u]
; X64-AVX-NEXT:    vcvtdq2pd %xmm0, %xmm0
; X64-AVX-NEXT:    retq
;
; X64-AVX512F-LABEL: mask_uitofp_2i64_2f64:
; X64-AVX512F:       # %bb.0:
; X64-AVX512F-NEXT:    vpshufb {{.*#+}} xmm0 = xmm0[0],zero,zero,zero,xmm0[8,9],zero,zero,xmm0[u,u,u,u,u,u,u,u]
; X64-AVX512F-NEXT:    vcvtdq2pd %xmm0, %xmm0
; X64-AVX512F-NEXT:    retq
;
; X64-AVX512DQ-LABEL: mask_uitofp_2i64_2f64:
; X64-AVX512DQ:       # %bb.0:
; X64-AVX512DQ-NEXT:    vandps {{.*}}(%rip), %xmm0, %xmm0
; X64-AVX512DQ-NEXT:    vcvtqq2pd %xmm0, %xmm0
; X64-AVX512DQ-NEXT:    retq
  %and = and <2 x i64> %a, <i64 255, i64 65535>
  %cvt = uitofp <2 x i64> %and to <2 x double>
  ret <2 x double> %cvt
}

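; <4 x i64> source: the truncation to i32 varies by target (shufps on SSE,
; vextractf128+vshufps on AVX, vpmovqd on AVX512F), while AVX512DQ should
; convert the 64-bit elements directly with vcvtqq2ps.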
define <4 x float> @mask_sitofp_4i64_4f32(<4 x i64> %a) nounwind {
; X86-SSE-LABEL: mask_sitofp_4i64_4f32:
; X86-SSE:       # %bb.0:
; X86-SSE-NEXT:    shufps {{.*#+}} xmm0 = xmm0[0,2],xmm1[0,2]
; X86-SSE-NEXT:    andps {{\.LCPI.*}}, %xmm0
; X86-SSE-NEXT:    cvtdq2ps %xmm0, %xmm0
; X86-SSE-NEXT:    retl
;
; X86-AVX-LABEL: mask_sitofp_4i64_4f32:
; X86-AVX:       # %bb.0:
; X86-AVX-NEXT:    vextractf128 $1, %ymm0, %xmm1
; X86-AVX-NEXT:    vshufps {{.*#+}} xmm0 = xmm0[0,2],xmm1[0,2]
; X86-AVX-NEXT:    vandps {{\.LCPI.*}}, %xmm0, %xmm0
; X86-AVX-NEXT:    vcvtdq2ps %xmm0, %xmm0
; X86-AVX-NEXT:    vzeroupper
; X86-AVX-NEXT:    retl
;
; X86-AVX512F-LABEL: mask_sitofp_4i64_4f32:
; X86-AVX512F:       # %bb.0:
; X86-AVX512F-NEXT:    vpmovqd %ymm0, %xmm0
; X86-AVX512F-NEXT:    vpand {{\.LCPI.*}}, %xmm0, %xmm0
; X86-AVX512F-NEXT:    vcvtdq2ps %xmm0, %xmm0
; X86-AVX512F-NEXT:    vzeroupper
; X86-AVX512F-NEXT:    retl
;
; X86-AVX512DQ-LABEL: mask_sitofp_4i64_4f32:
; X86-AVX512DQ:       # %bb.0:
; X86-AVX512DQ-NEXT:    vandps {{\.LCPI.*}}, %ymm0, %ymm0
; X86-AVX512DQ-NEXT:    vcvtqq2ps %ymm0, %xmm0
; X86-AVX512DQ-NEXT:    vzeroupper
; X86-AVX512DQ-NEXT:    retl
;
; X64-SSE-LABEL: mask_sitofp_4i64_4f32:
; X64-SSE:       # %bb.0:
; X64-SSE-NEXT:    shufps {{.*#+}} xmm0 = xmm0[0,2],xmm1[0,2]
; X64-SSE-NEXT:    andps {{.*}}(%rip), %xmm0
; X64-SSE-NEXT:    cvtdq2ps %xmm0, %xmm0
; X64-SSE-NEXT:    retq
;
; X64-AVX-LABEL: mask_sitofp_4i64_4f32:
; X64-AVX:       # %bb.0:
; X64-AVX-NEXT:    vextractf128 $1, %ymm0, %xmm1
; X64-AVX-NEXT:    vshufps {{.*#+}} xmm0 = xmm0[0,2],xmm1[0,2]
; X64-AVX-NEXT:    vandps {{.*}}(%rip), %xmm0, %xmm0
; X64-AVX-NEXT:    vcvtdq2ps %xmm0, %xmm0
; X64-AVX-NEXT:    vzeroupper
; X64-AVX-NEXT:    retq
;
; X64-AVX512F-LABEL: mask_sitofp_4i64_4f32:
; X64-AVX512F:       # %bb.0:
; X64-AVX512F-NEXT:    vpmovqd %ymm0, %xmm0
; X64-AVX512F-NEXT:    vpand {{.*}}(%rip), %xmm0, %xmm0
; X64-AVX512F-NEXT:    vcvtdq2ps %xmm0, %xmm0
; X64-AVX512F-NEXT:    vzeroupper
; X64-AVX512F-NEXT:    retq
;
; X64-AVX512DQ-LABEL: mask_sitofp_4i64_4f32:
; X64-AVX512DQ:       # %bb.0:
; X64-AVX512DQ-NEXT:    vandps {{.*}}(%rip), %ymm0, %ymm0
; X64-AVX512DQ-NEXT:    vcvtqq2ps %ymm0, %xmm0
; X64-AVX512DQ-NEXT:    vzeroupper
; X64-AVX512DQ-NEXT:    retq
  %and = and <4 x i64> %a, <i64 127, i64 255, i64 4095, i64 65535>
  %cvt = sitofp <4 x i64> %and to <4 x float>
  ret <4 x float> %cvt
}

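; Unsigned variant of the <4 x i64> test; the small per-element masks should
; again make the unsigned convert identical to the signed one.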
define <4 x float> @mask_uitofp_4i64_4f32(<4 x i64> %a) nounwind {
; X86-SSE-LABEL: mask_uitofp_4i64_4f32:
; X86-SSE:       # %bb.0:
; X86-SSE-NEXT:    shufps {{.*#+}} xmm0 = xmm0[0,2],xmm1[0,2]
; X86-SSE-NEXT:    andps {{\.LCPI.*}}, %xmm0
; X86-SSE-NEXT:    cvtdq2ps %xmm0, %xmm0
; X86-SSE-NEXT:    retl
;
; X86-AVX-LABEL: mask_uitofp_4i64_4f32:
; X86-AVX:       # %bb.0:
; X86-AVX-NEXT:    vextractf128 $1, %ymm0, %xmm1
; X86-AVX-NEXT:    vshufps {{.*#+}} xmm0 = xmm0[0,2],xmm1[0,2]
; X86-AVX-NEXT:    vandps {{\.LCPI.*}}, %xmm0, %xmm0
; X86-AVX-NEXT:    vcvtdq2ps %xmm0, %xmm0
; X86-AVX-NEXT:    vzeroupper
; X86-AVX-NEXT:    retl
;
; X86-AVX512F-LABEL: mask_uitofp_4i64_4f32:
; X86-AVX512F:       # %bb.0:
; X86-AVX512F-NEXT:    vpmovqd %ymm0, %xmm0
; X86-AVX512F-NEXT:    vpand {{\.LCPI.*}}, %xmm0, %xmm0
; X86-AVX512F-NEXT:    vcvtdq2ps %xmm0, %xmm0
; X86-AVX512F-NEXT:    vzeroupper
; X86-AVX512F-NEXT:    retl
;
; X86-AVX512DQ-LABEL: mask_uitofp_4i64_4f32:
; X86-AVX512DQ:       # %bb.0:
; X86-AVX512DQ-NEXT:    vandps {{\.LCPI.*}}, %ymm0, %ymm0
; X86-AVX512DQ-NEXT:    vcvtqq2ps %ymm0, %xmm0
; X86-AVX512DQ-NEXT:    vzeroupper
; X86-AVX512DQ-NEXT:    retl
;
; X64-SSE-LABEL: mask_uitofp_4i64_4f32:
; X64-SSE:       # %bb.0:
; X64-SSE-NEXT:    shufps {{.*#+}} xmm0 = xmm0[0,2],xmm1[0,2]
; X64-SSE-NEXT:    andps {{.*}}(%rip), %xmm0
; X64-SSE-NEXT:    cvtdq2ps %xmm0, %xmm0
; X64-SSE-NEXT:    retq
;
; X64-AVX-LABEL: mask_uitofp_4i64_4f32:
; X64-AVX:       # %bb.0:
; X64-AVX-NEXT:    vextractf128 $1, %ymm0, %xmm1
; X64-AVX-NEXT:    vshufps {{.*#+}} xmm0 = xmm0[0,2],xmm1[0,2]
; X64-AVX-NEXT:    vandps {{.*}}(%rip), %xmm0, %xmm0
; X64-AVX-NEXT:    vcvtdq2ps %xmm0, %xmm0
; X64-AVX-NEXT:    vzeroupper
; X64-AVX-NEXT:    retq
;
; X64-AVX512F-LABEL: mask_uitofp_4i64_4f32:
; X64-AVX512F:       # %bb.0:
; X64-AVX512F-NEXT:    vpmovqd %ymm0, %xmm0
; X64-AVX512F-NEXT:    vpand {{.*}}(%rip), %xmm0, %xmm0
; X64-AVX512F-NEXT:    vcvtdq2ps %xmm0, %xmm0
; X64-AVX512F-NEXT:    vzeroupper
; X64-AVX512F-NEXT:    retq
;
; X64-AVX512DQ-LABEL: mask_uitofp_4i64_4f32:
; X64-AVX512DQ:       # %bb.0:
; X64-AVX512DQ-NEXT:    vandps {{.*}}(%rip), %ymm0, %ymm0
; X64-AVX512DQ-NEXT:    vcvtqq2ps %ymm0, %xmm0
; X64-AVX512DQ-NEXT:    vzeroupper
; X64-AVX512DQ-NEXT:    retq
  %and = and <4 x i64> %a, <i64 127, i64 255, i64 4095, i64 65535>
  %cvt = uitofp <4 x i64> %and to <4 x float>
  ret <4 x float> %cvt
}

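; Here the value range comes from a clamp (icmp+select) rather than a mask:
; with the input clamped to [-255, 255], the convert fits the 32-bit path,
; and AVX512 should match the clamp itself to vpmaxsq/vpminsq.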
define <2 x double> @clamp_sitofp_2i64_2f64(<2 x i64> %a) nounwind {
; X86-SSE-LABEL: clamp_sitofp_2i64_2f64:
; X86-SSE:       # %bb.0:
; X86-SSE-NEXT:    movdqa {{.*#+}} xmm1 = [2147483648,0,2147483648,0]
; X86-SSE-NEXT:    movdqa %xmm0, %xmm2
; X86-SSE-NEXT:    pxor %xmm1, %xmm2
; X86-SSE-NEXT:    movdqa {{.*#+}} xmm3 = [2147483393,4294967295,2147483393,4294967295]
; X86-SSE-NEXT:    movdqa %xmm2, %xmm4
; X86-SSE-NEXT:    pcmpgtd %xmm3, %xmm4
; X86-SSE-NEXT:    pshufd {{.*#+}} xmm5 = xmm4[0,0,2,2]
; X86-SSE-NEXT:    pcmpeqd %xmm3, %xmm2
; X86-SSE-NEXT:    pshufd {{.*#+}} xmm2 = xmm2[1,1,3,3]
; X86-SSE-NEXT:    pand %xmm5, %xmm2
; X86-SSE-NEXT:    pshufd {{.*#+}} xmm3 = xmm4[1,1,3,3]
; X86-SSE-NEXT:    por %xmm2, %xmm3
; X86-SSE-NEXT:    pand %xmm3, %xmm0
; X86-SSE-NEXT:    pandn {{\.LCPI.*}}, %xmm3
; X86-SSE-NEXT:    por %xmm0, %xmm3
; X86-SSE-NEXT:    pxor %xmm3, %xmm1
; X86-SSE-NEXT:    movdqa {{.*#+}} xmm0 = [2147483903,0,2147483903,0]
; X86-SSE-NEXT:    movdqa %xmm0, %xmm2
; X86-SSE-NEXT:    pcmpgtd %xmm1, %xmm2
; X86-SSE-NEXT:    pshufd {{.*#+}} xmm4 = xmm2[0,0,2,2]
; X86-SSE-NEXT:    pcmpeqd %xmm0, %xmm1
; X86-SSE-NEXT:    pshufd {{.*#+}} xmm0 = xmm1[1,1,3,3]
; X86-SSE-NEXT:    pand %xmm4, %xmm0
; X86-SSE-NEXT:    pshufd {{.*#+}} xmm1 = xmm2[1,1,3,3]
; X86-SSE-NEXT:    por %xmm0, %xmm1
; X86-SSE-NEXT:    pand %xmm1, %xmm3
; X86-SSE-NEXT:    pandn {{\.LCPI.*}}, %xmm1
; X86-SSE-NEXT:    por %xmm3, %xmm1
; X86-SSE-NEXT:    pshufd {{.*#+}} xmm0 = xmm1[0,2,2,3]
; X86-SSE-NEXT:    cvtdq2pd %xmm0, %xmm0
; X86-SSE-NEXT:    retl
;
; X86-AVX-LABEL: clamp_sitofp_2i64_2f64:
; X86-AVX:       # %bb.0:
; X86-AVX-NEXT:    vmovddup {{.*#+}} xmm1 = [18446744073709551361,18446744073709551361]
; X86-AVX-NEXT:    # xmm1 = mem[0,0]
; X86-AVX-NEXT:    vpcmpgtq %xmm1, %xmm0, %xmm2
; X86-AVX-NEXT:    vblendvpd %xmm2, %xmm0, %xmm1, %xmm0
; X86-AVX-NEXT:    vmovddup {{.*#+}} xmm1 = [255,255]
; X86-AVX-NEXT:    # xmm1 = mem[0,0]
; X86-AVX-NEXT:    vpcmpgtq %xmm0, %xmm1, %xmm2
; X86-AVX-NEXT:    vblendvpd %xmm2, %xmm0, %xmm1, %xmm0
; X86-AVX-NEXT:    vpermilps {{.*#+}} xmm0 = xmm0[0,2,2,3]
; X86-AVX-NEXT:    vcvtdq2pd %xmm0, %xmm0
; X86-AVX-NEXT:    retl
;
; X86-AVX512F-LABEL: clamp_sitofp_2i64_2f64:
; X86-AVX512F:       # %bb.0:
; X86-AVX512F-NEXT:    vpmaxsq {{\.LCPI.*}}, %xmm0, %xmm0
; X86-AVX512F-NEXT:    vpminsq {{\.LCPI.*}}, %xmm0, %xmm0
; X86-AVX512F-NEXT:    vpshufd {{.*#+}} xmm0 = xmm0[0,2,2,3]
; X86-AVX512F-NEXT:    vcvtdq2pd %xmm0, %xmm0
; X86-AVX512F-NEXT:    retl
;
; X86-AVX512DQ-LABEL: clamp_sitofp_2i64_2f64:
; X86-AVX512DQ:       # %bb.0:
; X86-AVX512DQ-NEXT:    vpmaxsq {{\.LCPI.*}}, %xmm0, %xmm0
; X86-AVX512DQ-NEXT:    vpminsq {{\.LCPI.*}}, %xmm0, %xmm0
; X86-AVX512DQ-NEXT:    vcvtqq2pd %xmm0, %xmm0
; X86-AVX512DQ-NEXT:    retl
;
; X64-SSE-LABEL: clamp_sitofp_2i64_2f64:
; X64-SSE:       # %bb.0:
; X64-SSE-NEXT:    movdqa {{.*#+}} xmm1 = [2147483648,2147483648]
; X64-SSE-NEXT:    movdqa %xmm0, %xmm2
; X64-SSE-NEXT:    pxor %xmm1, %xmm2
; X64-SSE-NEXT:    movdqa {{.*#+}} xmm3 = [18446744071562067713,18446744071562067713]
; X64-SSE-NEXT:    movdqa %xmm2, %xmm4
; X64-SSE-NEXT:    pcmpgtd %xmm3, %xmm4
; X64-SSE-NEXT:    pshufd {{.*#+}} xmm5 = xmm4[0,0,2,2]
; X64-SSE-NEXT:    pcmpeqd %xmm3, %xmm2
; X64-SSE-NEXT:    pshufd {{.*#+}} xmm2 = xmm2[1,1,3,3]
; X64-SSE-NEXT:    pand %xmm5, %xmm2
; X64-SSE-NEXT:    pshufd {{.*#+}} xmm3 = xmm4[1,1,3,3]
; X64-SSE-NEXT:    por %xmm2, %xmm3
; X64-SSE-NEXT:    pand %xmm3, %xmm0
; X64-SSE-NEXT:    pandn {{.*}}(%rip), %xmm3
; X64-SSE-NEXT:    por %xmm0, %xmm3
; X64-SSE-NEXT:    pxor %xmm3, %xmm1
; X64-SSE-NEXT:    movdqa {{.*#+}} xmm0 = [2147483903,2147483903]
; X64-SSE-NEXT:    movdqa %xmm0, %xmm2
; X64-SSE-NEXT:    pcmpgtd %xmm1, %xmm2
; X64-SSE-NEXT:    pshufd {{.*#+}} xmm4 = xmm2[0,0,2,2]
; X64-SSE-NEXT:    pcmpeqd %xmm0, %xmm1
; X64-SSE-NEXT:    pshufd {{.*#+}} xmm0 = xmm1[1,1,3,3]
; X64-SSE-NEXT:    pand %xmm4, %xmm0
; X64-SSE-NEXT:    pshufd {{.*#+}} xmm1 = xmm2[1,1,3,3]
; X64-SSE-NEXT:    por %xmm0, %xmm1
; X64-SSE-NEXT:    pand %xmm1, %xmm3
; X64-SSE-NEXT:    pandn {{.*}}(%rip), %xmm1
; X64-SSE-NEXT:    por %xmm3, %xmm1
; X64-SSE-NEXT:    pshufd {{.*#+}} xmm0 = xmm1[0,2,2,3]
; X64-SSE-NEXT:    cvtdq2pd %xmm0, %xmm0
; X64-SSE-NEXT:    retq
;
; X64-AVX-LABEL: clamp_sitofp_2i64_2f64:
; X64-AVX:       # %bb.0:
; X64-AVX-NEXT:    vmovdqa {{.*#+}} xmm1 = [18446744073709551361,18446744073709551361]
; X64-AVX-NEXT:    vpcmpgtq %xmm1, %xmm0, %xmm2
; X64-AVX-NEXT:    vblendvpd %xmm2, %xmm0, %xmm1, %xmm0
; X64-AVX-NEXT:    vmovdqa {{.*#+}} xmm1 = [255,255]
; X64-AVX-NEXT:    vpcmpgtq %xmm0, %xmm1, %xmm2
; X64-AVX-NEXT:    vblendvpd %xmm2, %xmm0, %xmm1, %xmm0
; X64-AVX-NEXT:    vpermilps {{.*#+}} xmm0 = xmm0[0,2,2,3]
; X64-AVX-NEXT:    vcvtdq2pd %xmm0, %xmm0
; X64-AVX-NEXT:    retq
;
; X64-AVX512F-LABEL: clamp_sitofp_2i64_2f64:
; X64-AVX512F:       # %bb.0:
; X64-AVX512F-NEXT:    vpmaxsq {{.*}}(%rip), %xmm0, %xmm0
; X64-AVX512F-NEXT:    vpminsq {{.*}}(%rip), %xmm0, %xmm0
; X64-AVX512F-NEXT:    vpshufd {{.*#+}} xmm0 = xmm0[0,2,2,3]
; X64-AVX512F-NEXT:    vcvtdq2pd %xmm0, %xmm0
; X64-AVX512F-NEXT:    retq
;
; X64-AVX512DQ-LABEL: clamp_sitofp_2i64_2f64:
; X64-AVX512DQ:       # %bb.0:
; X64-AVX512DQ-NEXT:    vpmaxsq {{.*}}(%rip), %xmm0, %xmm0
; X64-AVX512DQ-NEXT:    vpminsq {{.*}}(%rip), %xmm0, %xmm0
; X64-AVX512DQ-NEXT:    vcvtqq2pd %xmm0, %xmm0
; X64-AVX512DQ-NEXT:    retq
  %clo = icmp slt <2 x i64> %a, <i64 -255, i64 -255>
  %lo = select <2 x i1> %clo, <2 x i64> <i64 -255, i64 -255>, <2 x i64> %a
  %chi = icmp sgt <2 x i64> %lo, <i64 255, i64 255>
  %hi = select <2 x i1> %chi, <2 x i64> <i64 255, i64 255>, <2 x i64> %lo
  %cvt = sitofp <2 x i64> %hi to <2 x double>
  ret <2 x double> %cvt
}