//===- TableGen'erated file -------------------------------------*- C++ -*-===//
//
// "Fast" Instruction Selector for the X86 target
//
// Automatically generated file, do not edit!
//
//===----------------------------------------------------------------------===//


// FastEmit Immediate Predicate functions.
static bool Predicate_i64immSExt32(int64_t Imm) {
  return Imm == (int32_t)Imm;
}
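
// Illustrative note (editor's sketch, not part of the generated output): the
// predicate accepts exactly those 64-bit immediates that round-trip through a
// sign-extended 32-bit value, e.g.
//
//   Predicate_i64immSExt32(-1)          // true:  -1 survives the int32_t cast
//   Predicate_i64immSExt32(0x7FFFFFFF)  // true:  INT32_MAX
//   Predicate_i64immSExt32(0x80000000)  // false: 2^31 does not fit in int32_t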


// FastEmit functions for ISD::ANY_EXTEND.

unsigned FastEmit_ISD_ANY_EXTEND_MVT_i8_MVT_i32_r(unsigned Op0, bool Op0IsKill) {
  return FastEmitInst_r(X86::MOVZX32rr8, X86::GR32RegisterClass, Op0, Op0IsKill);
}

unsigned FastEmit_ISD_ANY_EXTEND_MVT_i8_MVT_i64_r(unsigned Op0, bool Op0IsKill) {
  return FastEmitInst_r(X86::MOVZX64rr8, X86::GR64RegisterClass, Op0, Op0IsKill);
}

unsigned FastEmit_ISD_ANY_EXTEND_MVT_i8_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
  switch (RetVT.SimpleTy) {
  case MVT::i32: return FastEmit_ISD_ANY_EXTEND_MVT_i8_MVT_i32_r(Op0, Op0IsKill);
  case MVT::i64: return FastEmit_ISD_ANY_EXTEND_MVT_i8_MVT_i64_r(Op0, Op0IsKill);
  default: return 0;
  }
}

unsigned FastEmit_ISD_ANY_EXTEND_MVT_i16_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
  if (RetVT.SimpleTy != MVT::i64)
    return 0;
  return FastEmitInst_r(X86::MOVZX64rr16, X86::GR64RegisterClass, Op0, Op0IsKill);
}

unsigned FastEmit_ISD_ANY_EXTEND_r(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill) {
  switch (VT.SimpleTy) {
  case MVT::i8: return FastEmit_ISD_ANY_EXTEND_MVT_i8_r(RetVT, Op0, Op0IsKill);
  case MVT::i16: return FastEmit_ISD_ANY_EXTEND_MVT_i16_r(RetVT, Op0, Op0IsKill);
  default: return 0;
  }
}
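
// Illustrative note (editor's sketch, not part of the generated output): each
// opcode is selected through a two-level dispatch.  FastEmit_<opcode>_r switches
// on the source type VT, and the per-VT helper switches on (or checks) the
// result type RetVT before emitting the matching machine instruction.  For
// example, an any-extend of an i8 value to i64 resolves as:
//
//   FastEmit_r(MVT::i8, MVT::i64, ISD::ANY_EXTEND, Reg, Kill)
//     -> FastEmit_ISD_ANY_EXTEND_r(MVT::i8, MVT::i64, Reg, Kill)
//     -> FastEmit_ISD_ANY_EXTEND_MVT_i8_r(MVT::i64, Reg, Kill)
//     -> FastEmit_ISD_ANY_EXTEND_MVT_i8_MVT_i64_r(Reg, Kill)
//     -> FastEmitInst_r(X86::MOVZX64rr8, X86::GR64RegisterClass, Reg, Kill)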

// FastEmit functions for ISD::BITCAST.

unsigned FastEmit_ISD_BITCAST_MVT_i32_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
  if (RetVT.SimpleTy != MVT::f32)
    return 0;
  if ((Subtarget->hasAVX())) {
    return FastEmitInst_r(X86::VMOVDI2SSrr, X86::FR32RegisterClass, Op0, Op0IsKill);
  }
  if ((Subtarget->hasSSE2())) {
    return FastEmitInst_r(X86::MOVDI2SSrr, X86::FR32RegisterClass, Op0, Op0IsKill);
  }
  return 0;
}

unsigned FastEmit_ISD_BITCAST_MVT_i64_MVT_f64_r(unsigned Op0, bool Op0IsKill) {
  if ((Subtarget->hasAVX())) {
    return FastEmitInst_r(X86::VMOV64toSDrr, X86::FR64RegisterClass, Op0, Op0IsKill);
  }
  if ((Subtarget->hasSSE2())) {
    return FastEmitInst_r(X86::MOV64toSDrr, X86::FR64RegisterClass, Op0, Op0IsKill);
  }
  return 0;
}

unsigned FastEmit_ISD_BITCAST_MVT_i64_MVT_x86mmx_r(unsigned Op0, bool Op0IsKill) {
  if ((Subtarget->hasMMX())) {
    return FastEmitInst_r(X86::MMX_MOVD64to64rr, X86::VR64RegisterClass, Op0, Op0IsKill);
  }
  return 0;
}

unsigned FastEmit_ISD_BITCAST_MVT_i64_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
  switch (RetVT.SimpleTy) {
  case MVT::f64: return FastEmit_ISD_BITCAST_MVT_i64_MVT_f64_r(Op0, Op0IsKill);
  case MVT::x86mmx: return FastEmit_ISD_BITCAST_MVT_i64_MVT_x86mmx_r(Op0, Op0IsKill);
  default: return 0;
  }
}

unsigned FastEmit_ISD_BITCAST_MVT_f32_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
  if (RetVT.SimpleTy != MVT::i32)
    return 0;
  if ((Subtarget->hasAVX())) {
    return FastEmitInst_r(X86::VMOVSS2DIrr, X86::GR32RegisterClass, Op0, Op0IsKill);
  }
  if ((Subtarget->hasSSE2())) {
    return FastEmitInst_r(X86::MOVSS2DIrr, X86::GR32RegisterClass, Op0, Op0IsKill);
  }
  return 0;
}

unsigned FastEmit_ISD_BITCAST_MVT_f64_MVT_i64_r(unsigned Op0, bool Op0IsKill) {
  if ((Subtarget->hasAVX())) {
    return FastEmitInst_r(X86::VMOVSDto64rr, X86::GR64RegisterClass, Op0, Op0IsKill);
  }
  if ((Subtarget->hasSSE2())) {
    return FastEmitInst_r(X86::MOVSDto64rr, X86::GR64RegisterClass, Op0, Op0IsKill);
  }
  return 0;
}

unsigned FastEmit_ISD_BITCAST_MVT_f64_MVT_x86mmx_r(unsigned Op0, bool Op0IsKill) {
  return FastEmitInst_r(X86::MMX_MOVFR642Qrr, X86::VR64RegisterClass, Op0, Op0IsKill);
}

unsigned FastEmit_ISD_BITCAST_MVT_f64_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
  switch (RetVT.SimpleTy) {
  case MVT::i64: return FastEmit_ISD_BITCAST_MVT_f64_MVT_i64_r(Op0, Op0IsKill);
  case MVT::x86mmx: return FastEmit_ISD_BITCAST_MVT_f64_MVT_x86mmx_r(Op0, Op0IsKill);
  default: return 0;
  }
}

unsigned FastEmit_ISD_BITCAST_MVT_x86mmx_MVT_i64_r(unsigned Op0, bool Op0IsKill) {
  if ((Subtarget->hasMMX())) {
    return FastEmitInst_r(X86::MMX_MOVD64from64rr, X86::GR64RegisterClass, Op0, Op0IsKill);
  }
  return 0;
}

unsigned FastEmit_ISD_BITCAST_MVT_x86mmx_MVT_f64_r(unsigned Op0, bool Op0IsKill) {
  return FastEmitInst_r(X86::MMX_MOVQ2FR64rr, X86::FR64RegisterClass, Op0, Op0IsKill);
}

unsigned FastEmit_ISD_BITCAST_MVT_x86mmx_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
  switch (RetVT.SimpleTy) {
  case MVT::i64: return FastEmit_ISD_BITCAST_MVT_x86mmx_MVT_i64_r(Op0, Op0IsKill);
  case MVT::f64: return FastEmit_ISD_BITCAST_MVT_x86mmx_MVT_f64_r(Op0, Op0IsKill);
  default: return 0;
  }
}

unsigned FastEmit_ISD_BITCAST_r(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill) {
  switch (VT.SimpleTy) {
  case MVT::i32: return FastEmit_ISD_BITCAST_MVT_i32_r(RetVT, Op0, Op0IsKill);
  case MVT::i64: return FastEmit_ISD_BITCAST_MVT_i64_r(RetVT, Op0, Op0IsKill);
  case MVT::f32: return FastEmit_ISD_BITCAST_MVT_f32_r(RetVT, Op0, Op0IsKill);
  case MVT::f64: return FastEmit_ISD_BITCAST_MVT_f64_r(RetVT, Op0, Op0IsKill);
  case MVT::x86mmx: return FastEmit_ISD_BITCAST_MVT_x86mmx_r(RetVT, Op0, Op0IsKill);
  default: return 0;
  }
}

// FastEmit functions for ISD::BRIND.

unsigned FastEmit_ISD_BRIND_MVT_i32_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
  if (RetVT.SimpleTy != MVT::isVoid)
    return 0;
  if ((!Subtarget->is64Bit())) {
    return FastEmitInst_r(X86::JMP32r, X86::GR32RegisterClass, Op0, Op0IsKill);
  }
  return 0;
}

unsigned FastEmit_ISD_BRIND_MVT_i64_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
  if (RetVT.SimpleTy != MVT::isVoid)
    return 0;
  if ((Subtarget->is64Bit())) {
    return FastEmitInst_r(X86::JMP64r, X86::GR64RegisterClass, Op0, Op0IsKill);
  }
  return 0;
}

unsigned FastEmit_ISD_BRIND_r(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill) {
  switch (VT.SimpleTy) {
  case MVT::i32: return FastEmit_ISD_BRIND_MVT_i32_r(RetVT, Op0, Op0IsKill);
  case MVT::i64: return FastEmit_ISD_BRIND_MVT_i64_r(RetVT, Op0, Op0IsKill);
  default: return 0;
  }
}

// FastEmit functions for ISD::BSWAP.

unsigned FastEmit_ISD_BSWAP_MVT_i32_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
  if (RetVT.SimpleTy != MVT::i32)
    return 0;
  return FastEmitInst_r(X86::BSWAP32r, X86::GR32RegisterClass, Op0, Op0IsKill);
}

unsigned FastEmit_ISD_BSWAP_MVT_i64_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
  if (RetVT.SimpleTy != MVT::i64)
    return 0;
  return FastEmitInst_r(X86::BSWAP64r, X86::GR64RegisterClass, Op0, Op0IsKill);
}

unsigned FastEmit_ISD_BSWAP_r(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill) {
  switch (VT.SimpleTy) {
  case MVT::i32: return FastEmit_ISD_BSWAP_MVT_i32_r(RetVT, Op0, Op0IsKill);
  case MVT::i64: return FastEmit_ISD_BSWAP_MVT_i64_r(RetVT, Op0, Op0IsKill);
  default: return 0;
  }
}

// FastEmit functions for ISD::CTLZ.

unsigned FastEmit_ISD_CTLZ_MVT_i16_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
  if (RetVT.SimpleTy != MVT::i16)
    return 0;
  if ((Subtarget->hasLZCNT())) {
    return FastEmitInst_r(X86::LZCNT16rr, X86::GR16RegisterClass, Op0, Op0IsKill);
  }
  return 0;
}

unsigned FastEmit_ISD_CTLZ_MVT_i32_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
  if (RetVT.SimpleTy != MVT::i32)
    return 0;
  if ((Subtarget->hasLZCNT())) {
    return FastEmitInst_r(X86::LZCNT32rr, X86::GR32RegisterClass, Op0, Op0IsKill);
  }
  return 0;
}

unsigned FastEmit_ISD_CTLZ_MVT_i64_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
  if (RetVT.SimpleTy != MVT::i64)
    return 0;
  if ((Subtarget->hasLZCNT())) {
    return FastEmitInst_r(X86::LZCNT64rr, X86::GR64RegisterClass, Op0, Op0IsKill);
  }
  return 0;
}

unsigned FastEmit_ISD_CTLZ_r(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill) {
  switch (VT.SimpleTy) {
  case MVT::i16: return FastEmit_ISD_CTLZ_MVT_i16_r(RetVT, Op0, Op0IsKill);
  case MVT::i32: return FastEmit_ISD_CTLZ_MVT_i32_r(RetVT, Op0, Op0IsKill);
  case MVT::i64: return FastEmit_ISD_CTLZ_MVT_i64_r(RetVT, Op0, Op0IsKill);
  default: return 0;
  }
}

// FastEmit functions for ISD::CTPOP.

unsigned FastEmit_ISD_CTPOP_MVT_i16_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
  if (RetVT.SimpleTy != MVT::i16)
    return 0;
  if ((Subtarget->hasPOPCNT())) {
    return FastEmitInst_r(X86::POPCNT16rr, X86::GR16RegisterClass, Op0, Op0IsKill);
  }
  return 0;
}

unsigned FastEmit_ISD_CTPOP_MVT_i32_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
  if (RetVT.SimpleTy != MVT::i32)
    return 0;
  if ((Subtarget->hasPOPCNT())) {
    return FastEmitInst_r(X86::POPCNT32rr, X86::GR32RegisterClass, Op0, Op0IsKill);
  }
  return 0;
}

unsigned FastEmit_ISD_CTPOP_MVT_i64_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
  if (RetVT.SimpleTy != MVT::i64)
    return 0;
  if ((Subtarget->hasPOPCNT())) {
    return FastEmitInst_r(X86::POPCNT64rr, X86::GR64RegisterClass, Op0, Op0IsKill);
  }
  return 0;
}

unsigned FastEmit_ISD_CTPOP_r(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill) {
  switch (VT.SimpleTy) {
  case MVT::i16: return FastEmit_ISD_CTPOP_MVT_i16_r(RetVT, Op0, Op0IsKill);
  case MVT::i32: return FastEmit_ISD_CTPOP_MVT_i32_r(RetVT, Op0, Op0IsKill);
  case MVT::i64: return FastEmit_ISD_CTPOP_MVT_i64_r(RetVT, Op0, Op0IsKill);
  default: return 0;
  }
}

// FastEmit functions for ISD::CTTZ.

unsigned FastEmit_ISD_CTTZ_MVT_i16_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
  if (RetVT.SimpleTy != MVT::i16)
    return 0;
  if ((Subtarget->hasBMI())) {
    return FastEmitInst_r(X86::TZCNT16rr, X86::GR16RegisterClass, Op0, Op0IsKill);
  }
  return 0;
}

unsigned FastEmit_ISD_CTTZ_MVT_i32_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
  if (RetVT.SimpleTy != MVT::i32)
    return 0;
  if ((Subtarget->hasBMI())) {
    return FastEmitInst_r(X86::TZCNT32rr, X86::GR32RegisterClass, Op0, Op0IsKill);
  }
  return 0;
}

unsigned FastEmit_ISD_CTTZ_MVT_i64_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
  if (RetVT.SimpleTy != MVT::i64)
    return 0;
  if ((Subtarget->hasBMI())) {
    return FastEmitInst_r(X86::TZCNT64rr, X86::GR64RegisterClass, Op0, Op0IsKill);
  }
  return 0;
}

unsigned FastEmit_ISD_CTTZ_r(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill) {
  switch (VT.SimpleTy) {
  case MVT::i16: return FastEmit_ISD_CTTZ_MVT_i16_r(RetVT, Op0, Op0IsKill);
  case MVT::i32: return FastEmit_ISD_CTTZ_MVT_i32_r(RetVT, Op0, Op0IsKill);
  case MVT::i64: return FastEmit_ISD_CTTZ_MVT_i64_r(RetVT, Op0, Op0IsKill);
  default: return 0;
  }
}

// FastEmit functions for ISD::FABS.

unsigned FastEmit_ISD_FABS_MVT_f32_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
  if (RetVT.SimpleTy != MVT::f32)
    return 0;
  if ((!Subtarget->hasXMM())) {
    return FastEmitInst_r(X86::ABS_Fp32, X86::RFP32RegisterClass, Op0, Op0IsKill);
  }
  return 0;
}

unsigned FastEmit_ISD_FABS_MVT_f64_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
  if (RetVT.SimpleTy != MVT::f64)
    return 0;
  if ((!Subtarget->hasXMMInt())) {
    return FastEmitInst_r(X86::ABS_Fp64, X86::RFP64RegisterClass, Op0, Op0IsKill);
  }
  return 0;
}

unsigned FastEmit_ISD_FABS_MVT_f80_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
  if (RetVT.SimpleTy != MVT::f80)
    return 0;
  return FastEmitInst_r(X86::ABS_Fp80, X86::RFP80RegisterClass, Op0, Op0IsKill);
}

unsigned FastEmit_ISD_FABS_r(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill) {
  switch (VT.SimpleTy) {
  case MVT::f32: return FastEmit_ISD_FABS_MVT_f32_r(RetVT, Op0, Op0IsKill);
  case MVT::f64: return FastEmit_ISD_FABS_MVT_f64_r(RetVT, Op0, Op0IsKill);
  case MVT::f80: return FastEmit_ISD_FABS_MVT_f80_r(RetVT, Op0, Op0IsKill);
  default: return 0;
  }
}

// FastEmit functions for ISD::FCOS.

unsigned FastEmit_ISD_FCOS_MVT_f32_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
  if (RetVT.SimpleTy != MVT::f32)
    return 0;
  if ((!Subtarget->hasXMM())) {
    return FastEmitInst_r(X86::COS_Fp32, X86::RFP32RegisterClass, Op0, Op0IsKill);
  }
  return 0;
}

unsigned FastEmit_ISD_FCOS_MVT_f64_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
  if (RetVT.SimpleTy != MVT::f64)
    return 0;
  if ((!Subtarget->hasXMMInt())) {
    return FastEmitInst_r(X86::COS_Fp64, X86::RFP64RegisterClass, Op0, Op0IsKill);
  }
  return 0;
}

unsigned FastEmit_ISD_FCOS_MVT_f80_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
  if (RetVT.SimpleTy != MVT::f80)
    return 0;
  return FastEmitInst_r(X86::COS_Fp80, X86::RFP80RegisterClass, Op0, Op0IsKill);
}

unsigned FastEmit_ISD_FCOS_r(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill) {
  switch (VT.SimpleTy) {
  case MVT::f32: return FastEmit_ISD_FCOS_MVT_f32_r(RetVT, Op0, Op0IsKill);
  case MVT::f64: return FastEmit_ISD_FCOS_MVT_f64_r(RetVT, Op0, Op0IsKill);
  case MVT::f80: return FastEmit_ISD_FCOS_MVT_f80_r(RetVT, Op0, Op0IsKill);
  default: return 0;
  }
}

// FastEmit functions for ISD::FNEG.

unsigned FastEmit_ISD_FNEG_MVT_f32_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
  if (RetVT.SimpleTy != MVT::f32)
    return 0;
  if ((!Subtarget->hasXMM())) {
    return FastEmitInst_r(X86::CHS_Fp32, X86::RFP32RegisterClass, Op0, Op0IsKill);
  }
  return 0;
}

unsigned FastEmit_ISD_FNEG_MVT_f64_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
  if (RetVT.SimpleTy != MVT::f64)
    return 0;
  if ((!Subtarget->hasXMMInt())) {
    return FastEmitInst_r(X86::CHS_Fp64, X86::RFP64RegisterClass, Op0, Op0IsKill);
  }
  return 0;
}

unsigned FastEmit_ISD_FNEG_MVT_f80_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
  if (RetVT.SimpleTy != MVT::f80)
    return 0;
  return FastEmitInst_r(X86::CHS_Fp80, X86::RFP80RegisterClass, Op0, Op0IsKill);
}

unsigned FastEmit_ISD_FNEG_r(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill) {
  switch (VT.SimpleTy) {
  case MVT::f32: return FastEmit_ISD_FNEG_MVT_f32_r(RetVT, Op0, Op0IsKill);
  case MVT::f64: return FastEmit_ISD_FNEG_MVT_f64_r(RetVT, Op0, Op0IsKill);
  case MVT::f80: return FastEmit_ISD_FNEG_MVT_f80_r(RetVT, Op0, Op0IsKill);
  default: return 0;
  }
}

// FastEmit functions for ISD::FP_EXTEND.

unsigned FastEmit_ISD_FP_EXTEND_MVT_f32_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
  if (RetVT.SimpleTy != MVT::f64)
    return 0;
  if ((Subtarget->hasSSE2())) {
    return FastEmitInst_r(X86::CVTSS2SDrr, X86::FR64RegisterClass, Op0, Op0IsKill);
  }
  return 0;
}

unsigned FastEmit_ISD_FP_EXTEND_MVT_v4f32_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
  if (RetVT.SimpleTy != MVT::v4f64)
    return 0;
  return FastEmitInst_r(X86::VCVTPS2PDYrr, X86::VR256RegisterClass, Op0, Op0IsKill);
}

unsigned FastEmit_ISD_FP_EXTEND_r(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill) {
  switch (VT.SimpleTy) {
  case MVT::f32: return FastEmit_ISD_FP_EXTEND_MVT_f32_r(RetVT, Op0, Op0IsKill);
  case MVT::v4f32: return FastEmit_ISD_FP_EXTEND_MVT_v4f32_r(RetVT, Op0, Op0IsKill);
  default: return 0;
  }
}

// FastEmit functions for ISD::FP_ROUND.

unsigned FastEmit_ISD_FP_ROUND_MVT_f64_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
  if (RetVT.SimpleTy != MVT::f32)
    return 0;
  if ((Subtarget->hasSSE2())) {
    return FastEmitInst_r(X86::CVTSD2SSrr, X86::FR32RegisterClass, Op0, Op0IsKill);
  }
  return 0;
}

unsigned FastEmit_ISD_FP_ROUND_MVT_v4f64_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
  if (RetVT.SimpleTy != MVT::v4f32)
    return 0;
  return FastEmitInst_r(X86::VCVTPD2PSYrr, X86::VR128RegisterClass, Op0, Op0IsKill);
}

unsigned FastEmit_ISD_FP_ROUND_r(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill) {
  switch (VT.SimpleTy) {
  case MVT::f64: return FastEmit_ISD_FP_ROUND_MVT_f64_r(RetVT, Op0, Op0IsKill);
  case MVT::v4f64: return FastEmit_ISD_FP_ROUND_MVT_v4f64_r(RetVT, Op0, Op0IsKill);
  default: return 0;
  }
}

// FastEmit functions for ISD::FP_TO_SINT.

unsigned FastEmit_ISD_FP_TO_SINT_MVT_f32_MVT_i32_r(unsigned Op0, bool Op0IsKill) {
  if ((Subtarget->hasAVX())) {
    return FastEmitInst_r(X86::VCVTTSS2SIrr, X86::GR32RegisterClass, Op0, Op0IsKill);
  }
  if ((Subtarget->hasSSE1())) {
    return FastEmitInst_r(X86::CVTTSS2SIrr, X86::GR32RegisterClass, Op0, Op0IsKill);
  }
  return 0;
}

unsigned FastEmit_ISD_FP_TO_SINT_MVT_f32_MVT_i64_r(unsigned Op0, bool Op0IsKill) {
  if ((Subtarget->hasAVX())) {
    return FastEmitInst_r(X86::VCVTTSS2SI64rr, X86::GR64RegisterClass, Op0, Op0IsKill);
  }
  if ((Subtarget->hasSSE1())) {
    return FastEmitInst_r(X86::CVTTSS2SI64rr, X86::GR64RegisterClass, Op0, Op0IsKill);
  }
  return 0;
}

unsigned FastEmit_ISD_FP_TO_SINT_MVT_f32_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
  switch (RetVT.SimpleTy) {
  case MVT::i32: return FastEmit_ISD_FP_TO_SINT_MVT_f32_MVT_i32_r(Op0, Op0IsKill);
  case MVT::i64: return FastEmit_ISD_FP_TO_SINT_MVT_f32_MVT_i64_r(Op0, Op0IsKill);
  default: return 0;
  }
}

unsigned FastEmit_ISD_FP_TO_SINT_MVT_f64_MVT_i32_r(unsigned Op0, bool Op0IsKill) {
  if ((Subtarget->hasAVX())) {
    return FastEmitInst_r(X86::VCVTTSD2SIrr, X86::GR32RegisterClass, Op0, Op0IsKill);
  }
  if ((Subtarget->hasSSE2())) {
    return FastEmitInst_r(X86::CVTTSD2SIrr, X86::GR32RegisterClass, Op0, Op0IsKill);
  }
  return 0;
}

unsigned FastEmit_ISD_FP_TO_SINT_MVT_f64_MVT_i64_r(unsigned Op0, bool Op0IsKill) {
  if ((Subtarget->hasAVX())) {
    return FastEmitInst_r(X86::VCVTTSD2SI64rr, X86::GR64RegisterClass, Op0, Op0IsKill);
  }
  if ((Subtarget->hasSSE2())) {
    return FastEmitInst_r(X86::CVTTSD2SI64rr, X86::GR64RegisterClass, Op0, Op0IsKill);
  }
  return 0;
}

unsigned FastEmit_ISD_FP_TO_SINT_MVT_f64_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
  switch (RetVT.SimpleTy) {
  case MVT::i32: return FastEmit_ISD_FP_TO_SINT_MVT_f64_MVT_i32_r(Op0, Op0IsKill);
  case MVT::i64: return FastEmit_ISD_FP_TO_SINT_MVT_f64_MVT_i64_r(Op0, Op0IsKill);
  default: return 0;
  }
}

unsigned FastEmit_ISD_FP_TO_SINT_MVT_v4f32_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
  if (RetVT.SimpleTy != MVT::v4i32)
    return 0;
  if ((Subtarget->hasAVX())) {
    return FastEmitInst_r(X86::VCVTTPS2DQrr, X86::VR128RegisterClass, Op0, Op0IsKill);
  }
  if ((Subtarget->hasSSE2())) {
    return FastEmitInst_r(X86::CVTTPS2DQrr, X86::VR128RegisterClass, Op0, Op0IsKill);
  }
  return 0;
}

unsigned FastEmit_ISD_FP_TO_SINT_MVT_v8f32_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
  if (RetVT.SimpleTy != MVT::v8i32)
    return 0;
  if ((Subtarget->hasAVX())) {
    return FastEmitInst_r(X86::VCVTTPS2DQYrr, X86::VR256RegisterClass, Op0, Op0IsKill);
  }
  return 0;
}

unsigned FastEmit_ISD_FP_TO_SINT_MVT_v4f64_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
  if (RetVT.SimpleTy != MVT::v4i32)
    return 0;
  return FastEmitInst_r(X86::VCVTPD2DQYrr, X86::VR128RegisterClass, Op0, Op0IsKill);
}

unsigned FastEmit_ISD_FP_TO_SINT_r(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill) {
  switch (VT.SimpleTy) {
  case MVT::f32: return FastEmit_ISD_FP_TO_SINT_MVT_f32_r(RetVT, Op0, Op0IsKill);
  case MVT::f64: return FastEmit_ISD_FP_TO_SINT_MVT_f64_r(RetVT, Op0, Op0IsKill);
  case MVT::v4f32: return FastEmit_ISD_FP_TO_SINT_MVT_v4f32_r(RetVT, Op0, Op0IsKill);
  case MVT::v8f32: return FastEmit_ISD_FP_TO_SINT_MVT_v8f32_r(RetVT, Op0, Op0IsKill);
  case MVT::v4f64: return FastEmit_ISD_FP_TO_SINT_MVT_v4f64_r(RetVT, Op0, Op0IsKill);
  default: return 0;
  }
}

// FastEmit functions for ISD::FSIN.

unsigned FastEmit_ISD_FSIN_MVT_f32_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
  if (RetVT.SimpleTy != MVT::f32)
    return 0;
  if ((!Subtarget->hasXMM())) {
    return FastEmitInst_r(X86::SIN_Fp32, X86::RFP32RegisterClass, Op0, Op0IsKill);
  }
  return 0;
}

unsigned FastEmit_ISD_FSIN_MVT_f64_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
  if (RetVT.SimpleTy != MVT::f64)
    return 0;
  if ((!Subtarget->hasXMMInt())) {
    return FastEmitInst_r(X86::SIN_Fp64, X86::RFP64RegisterClass, Op0, Op0IsKill);
  }
  return 0;
}

unsigned FastEmit_ISD_FSIN_MVT_f80_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
  if (RetVT.SimpleTy != MVT::f80)
    return 0;
  return FastEmitInst_r(X86::SIN_Fp80, X86::RFP80RegisterClass, Op0, Op0IsKill);
}

unsigned FastEmit_ISD_FSIN_r(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill) {
  switch (VT.SimpleTy) {
  case MVT::f32: return FastEmit_ISD_FSIN_MVT_f32_r(RetVT, Op0, Op0IsKill);
  case MVT::f64: return FastEmit_ISD_FSIN_MVT_f64_r(RetVT, Op0, Op0IsKill);
  case MVT::f80: return FastEmit_ISD_FSIN_MVT_f80_r(RetVT, Op0, Op0IsKill);
  default: return 0;
  }
}

// FastEmit functions for ISD::FSQRT.

unsigned FastEmit_ISD_FSQRT_MVT_f32_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
  if (RetVT.SimpleTy != MVT::f32)
    return 0;
  if ((!Subtarget->hasXMM())) {
    return FastEmitInst_r(X86::SQRT_Fp32, X86::RFP32RegisterClass, Op0, Op0IsKill);
  }
  if ((Subtarget->hasSSE1())) {
    return FastEmitInst_r(X86::SQRTSSr, X86::FR32RegisterClass, Op0, Op0IsKill);
  }
  return 0;
}

unsigned FastEmit_ISD_FSQRT_MVT_f64_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
  if (RetVT.SimpleTy != MVT::f64)
    return 0;
  if ((!Subtarget->hasXMMInt())) {
    return FastEmitInst_r(X86::SQRT_Fp64, X86::RFP64RegisterClass, Op0, Op0IsKill);
  }
  if ((Subtarget->hasSSE2())) {
    return FastEmitInst_r(X86::SQRTSDr, X86::FR64RegisterClass, Op0, Op0IsKill);
  }
  return 0;
}

unsigned FastEmit_ISD_FSQRT_MVT_f80_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
  if (RetVT.SimpleTy != MVT::f80)
    return 0;
  return FastEmitInst_r(X86::SQRT_Fp80, X86::RFP80RegisterClass, Op0, Op0IsKill);
}

unsigned FastEmit_ISD_FSQRT_MVT_v4f32_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
  if (RetVT.SimpleTy != MVT::v4f32)
    return 0;
  if ((Subtarget->hasAVX())) {
    return FastEmitInst_r(X86::VSQRTPSr, X86::VR128RegisterClass, Op0, Op0IsKill);
  }
  if ((Subtarget->hasSSE1())) {
    return FastEmitInst_r(X86::SQRTPSr, X86::VR128RegisterClass, Op0, Op0IsKill);
  }
  return 0;
}

unsigned FastEmit_ISD_FSQRT_MVT_v8f32_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
  if (RetVT.SimpleTy != MVT::v8f32)
    return 0;
  if ((Subtarget->hasAVX())) {
    return FastEmitInst_r(X86::VSQRTPSYr, X86::VR256RegisterClass, Op0, Op0IsKill);
  }
  return 0;
}

unsigned FastEmit_ISD_FSQRT_MVT_v2f64_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
  if (RetVT.SimpleTy != MVT::v2f64)
    return 0;
  if ((Subtarget->hasAVX())) {
    return FastEmitInst_r(X86::VSQRTPDr, X86::VR128RegisterClass, Op0, Op0IsKill);
  }
  if ((Subtarget->hasSSE2())) {
    return FastEmitInst_r(X86::SQRTPDr, X86::VR128RegisterClass, Op0, Op0IsKill);
  }
  return 0;
}

unsigned FastEmit_ISD_FSQRT_MVT_v4f64_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
  if (RetVT.SimpleTy != MVT::v4f64)
    return 0;
  if ((Subtarget->hasAVX())) {
    return FastEmitInst_r(X86::VSQRTPDYr, X86::VR256RegisterClass, Op0, Op0IsKill);
  }
  return 0;
}

unsigned FastEmit_ISD_FSQRT_r(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill) {
  switch (VT.SimpleTy) {
  case MVT::f32: return FastEmit_ISD_FSQRT_MVT_f32_r(RetVT, Op0, Op0IsKill);
  case MVT::f64: return FastEmit_ISD_FSQRT_MVT_f64_r(RetVT, Op0, Op0IsKill);
  case MVT::f80: return FastEmit_ISD_FSQRT_MVT_f80_r(RetVT, Op0, Op0IsKill);
  case MVT::v4f32: return FastEmit_ISD_FSQRT_MVT_v4f32_r(RetVT, Op0, Op0IsKill);
  case MVT::v8f32: return FastEmit_ISD_FSQRT_MVT_v8f32_r(RetVT, Op0, Op0IsKill);
  case MVT::v2f64: return FastEmit_ISD_FSQRT_MVT_v2f64_r(RetVT, Op0, Op0IsKill);
  case MVT::v4f64: return FastEmit_ISD_FSQRT_MVT_v4f64_r(RetVT, Op0, Op0IsKill);
  default: return 0;
  }
}

// FastEmit functions for ISD::SCALAR_TO_VECTOR.

unsigned FastEmit_ISD_SCALAR_TO_VECTOR_MVT_i32_MVT_v4i32_r(unsigned Op0, bool Op0IsKill) {
  if ((Subtarget->hasAVX())) {
    return FastEmitInst_r(X86::VMOVDI2PDIrr, X86::VR128RegisterClass, Op0, Op0IsKill);
  }
  if ((Subtarget->hasSSE2())) {
    return FastEmitInst_r(X86::MOVDI2PDIrr, X86::VR128RegisterClass, Op0, Op0IsKill);
  }
  return 0;
}

unsigned FastEmit_ISD_SCALAR_TO_VECTOR_MVT_i32_MVT_x86mmx_r(unsigned Op0, bool Op0IsKill) {
  if ((Subtarget->hasMMX())) {
    return FastEmitInst_r(X86::MMX_MOVD64rr, X86::VR64RegisterClass, Op0, Op0IsKill);
  }
  return 0;
}

unsigned FastEmit_ISD_SCALAR_TO_VECTOR_MVT_i32_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
  switch (RetVT.SimpleTy) {
  case MVT::v4i32: return FastEmit_ISD_SCALAR_TO_VECTOR_MVT_i32_MVT_v4i32_r(Op0, Op0IsKill);
  case MVT::x86mmx: return FastEmit_ISD_SCALAR_TO_VECTOR_MVT_i32_MVT_x86mmx_r(Op0, Op0IsKill);
  default: return 0;
  }
}

unsigned FastEmit_ISD_SCALAR_TO_VECTOR_MVT_i64_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
  if (RetVT.SimpleTy != MVT::v2i64)
    return 0;
  if ((Subtarget->hasAVX())) {
    return FastEmitInst_r(X86::VMOV64toPQIrr, X86::VR128RegisterClass, Op0, Op0IsKill);
  }
  if ((Subtarget->hasSSE2())) {
    return FastEmitInst_r(X86::MOV64toPQIrr, X86::VR128RegisterClass, Op0, Op0IsKill);
  }
  return 0;
}

unsigned FastEmit_ISD_SCALAR_TO_VECTOR_r(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill) {
  switch (VT.SimpleTy) {
  case MVT::i32: return FastEmit_ISD_SCALAR_TO_VECTOR_MVT_i32_r(RetVT, Op0, Op0IsKill);
  case MVT::i64: return FastEmit_ISD_SCALAR_TO_VECTOR_MVT_i64_r(RetVT, Op0, Op0IsKill);
  default: return 0;
  }
}

// FastEmit functions for ISD::SIGN_EXTEND.

unsigned FastEmit_ISD_SIGN_EXTEND_MVT_i8_MVT_i32_r(unsigned Op0, bool Op0IsKill) {
  return FastEmitInst_r(X86::MOVSX32rr8, X86::GR32RegisterClass, Op0, Op0IsKill);
}

unsigned FastEmit_ISD_SIGN_EXTEND_MVT_i8_MVT_i64_r(unsigned Op0, bool Op0IsKill) {
  return FastEmitInst_r(X86::MOVSX64rr8, X86::GR64RegisterClass, Op0, Op0IsKill);
}

unsigned FastEmit_ISD_SIGN_EXTEND_MVT_i8_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
  switch (RetVT.SimpleTy) {
  case MVT::i32: return FastEmit_ISD_SIGN_EXTEND_MVT_i8_MVT_i32_r(Op0, Op0IsKill);
  case MVT::i64: return FastEmit_ISD_SIGN_EXTEND_MVT_i8_MVT_i64_r(Op0, Op0IsKill);
  default: return 0;
  }
}

unsigned FastEmit_ISD_SIGN_EXTEND_MVT_i16_MVT_i32_r(unsigned Op0, bool Op0IsKill) {
  return FastEmitInst_r(X86::MOVSX32rr16, X86::GR32RegisterClass, Op0, Op0IsKill);
}

unsigned FastEmit_ISD_SIGN_EXTEND_MVT_i16_MVT_i64_r(unsigned Op0, bool Op0IsKill) {
  return FastEmitInst_r(X86::MOVSX64rr16, X86::GR64RegisterClass, Op0, Op0IsKill);
}

unsigned FastEmit_ISD_SIGN_EXTEND_MVT_i16_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
  switch (RetVT.SimpleTy) {
  case MVT::i32: return FastEmit_ISD_SIGN_EXTEND_MVT_i16_MVT_i32_r(Op0, Op0IsKill);
  case MVT::i64: return FastEmit_ISD_SIGN_EXTEND_MVT_i16_MVT_i64_r(Op0, Op0IsKill);
  default: return 0;
  }
}

unsigned FastEmit_ISD_SIGN_EXTEND_MVT_i32_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
  if (RetVT.SimpleTy != MVT::i64)
    return 0;
  return FastEmitInst_r(X86::MOVSX64rr32, X86::GR64RegisterClass, Op0, Op0IsKill);
}

unsigned FastEmit_ISD_SIGN_EXTEND_r(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill) {
  switch (VT.SimpleTy) {
  case MVT::i8: return FastEmit_ISD_SIGN_EXTEND_MVT_i8_r(RetVT, Op0, Op0IsKill);
  case MVT::i16: return FastEmit_ISD_SIGN_EXTEND_MVT_i16_r(RetVT, Op0, Op0IsKill);
  case MVT::i32: return FastEmit_ISD_SIGN_EXTEND_MVT_i32_r(RetVT, Op0, Op0IsKill);
  default: return 0;
  }
}

// FastEmit functions for ISD::SINT_TO_FP.

unsigned FastEmit_ISD_SINT_TO_FP_MVT_i32_MVT_f32_r(unsigned Op0, bool Op0IsKill) {
  if ((Subtarget->hasSSE1())) {
    return FastEmitInst_r(X86::CVTSI2SSrr, X86::FR32RegisterClass, Op0, Op0IsKill);
  }
  return 0;
}

unsigned FastEmit_ISD_SINT_TO_FP_MVT_i32_MVT_f64_r(unsigned Op0, bool Op0IsKill) {
  if ((Subtarget->hasSSE2())) {
    return FastEmitInst_r(X86::CVTSI2SDrr, X86::FR64RegisterClass, Op0, Op0IsKill);
  }
  return 0;
}

unsigned FastEmit_ISD_SINT_TO_FP_MVT_i32_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
  switch (RetVT.SimpleTy) {
  case MVT::f32: return FastEmit_ISD_SINT_TO_FP_MVT_i32_MVT_f32_r(Op0, Op0IsKill);
  case MVT::f64: return FastEmit_ISD_SINT_TO_FP_MVT_i32_MVT_f64_r(Op0, Op0IsKill);
  default: return 0;
  }
}

unsigned FastEmit_ISD_SINT_TO_FP_MVT_i64_MVT_f32_r(unsigned Op0, bool Op0IsKill) {
  if ((Subtarget->hasSSE1())) {
    return FastEmitInst_r(X86::CVTSI2SS64rr, X86::FR32RegisterClass, Op0, Op0IsKill);
  }
  return 0;
}

unsigned FastEmit_ISD_SINT_TO_FP_MVT_i64_MVT_f64_r(unsigned Op0, bool Op0IsKill) {
  if ((Subtarget->hasSSE2())) {
    return FastEmitInst_r(X86::CVTSI2SD64rr, X86::FR64RegisterClass, Op0, Op0IsKill);
  }
  return 0;
}

unsigned FastEmit_ISD_SINT_TO_FP_MVT_i64_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
  switch (RetVT.SimpleTy) {
  case MVT::f32: return FastEmit_ISD_SINT_TO_FP_MVT_i64_MVT_f32_r(Op0, Op0IsKill);
  case MVT::f64: return FastEmit_ISD_SINT_TO_FP_MVT_i64_MVT_f64_r(Op0, Op0IsKill);
  default: return 0;
  }
}

unsigned FastEmit_ISD_SINT_TO_FP_MVT_v4i32_MVT_v4f32_r(unsigned Op0, bool Op0IsKill) {
  if ((Subtarget->hasAVX())) {
    return FastEmitInst_r(X86::Int_VCVTDQ2PSrr, X86::VR128RegisterClass, Op0, Op0IsKill);
  }
  if ((Subtarget->hasSSE2())) {
    return FastEmitInst_r(X86::Int_CVTDQ2PSrr, X86::VR128RegisterClass, Op0, Op0IsKill);
  }
  return 0;
}

unsigned FastEmit_ISD_SINT_TO_FP_MVT_v4i32_MVT_v4f64_r(unsigned Op0, bool Op0IsKill) {
  return FastEmitInst_r(X86::VCVTDQ2PDYrr, X86::VR256RegisterClass, Op0, Op0IsKill);
}

unsigned FastEmit_ISD_SINT_TO_FP_MVT_v4i32_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
  switch (RetVT.SimpleTy) {
  case MVT::v4f32: return FastEmit_ISD_SINT_TO_FP_MVT_v4i32_MVT_v4f32_r(Op0, Op0IsKill);
  case MVT::v4f64: return FastEmit_ISD_SINT_TO_FP_MVT_v4i32_MVT_v4f64_r(Op0, Op0IsKill);
  default: return 0;
  }
}

unsigned FastEmit_ISD_SINT_TO_FP_MVT_v8i32_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
  if (RetVT.SimpleTy != MVT::v8f32)
    return 0;
  if ((Subtarget->hasAVX())) {
    return FastEmitInst_r(X86::VCVTDQ2PSYrr, X86::VR256RegisterClass, Op0, Op0IsKill);
  }
  return 0;
}

unsigned FastEmit_ISD_SINT_TO_FP_r(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill) {
  switch (VT.SimpleTy) {
  case MVT::i32: return FastEmit_ISD_SINT_TO_FP_MVT_i32_r(RetVT, Op0, Op0IsKill);
  case MVT::i64: return FastEmit_ISD_SINT_TO_FP_MVT_i64_r(RetVT, Op0, Op0IsKill);
  case MVT::v4i32: return FastEmit_ISD_SINT_TO_FP_MVT_v4i32_r(RetVT, Op0, Op0IsKill);
  case MVT::v8i32: return FastEmit_ISD_SINT_TO_FP_MVT_v8i32_r(RetVT, Op0, Op0IsKill);
  default: return 0;
  }
}

// FastEmit functions for ISD::TRUNCATE.

unsigned FastEmit_ISD_TRUNCATE_MVT_i16_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
  if (RetVT.SimpleTy != MVT::i8)
    return 0;
  if ((Subtarget->is64Bit())) {
    return FastEmitInst_extractsubreg(RetVT, Op0, Op0IsKill, X86::sub_8bit);
  }
  return 0;
}

unsigned FastEmit_ISD_TRUNCATE_MVT_i32_MVT_i8_r(unsigned Op0, bool Op0IsKill) {
  if ((Subtarget->is64Bit())) {
    return FastEmitInst_extractsubreg(MVT::i8, Op0, Op0IsKill, X86::sub_8bit);
  }
  return 0;
}

unsigned FastEmit_ISD_TRUNCATE_MVT_i32_MVT_i16_r(unsigned Op0, bool Op0IsKill) {
  return FastEmitInst_extractsubreg(MVT::i16, Op0, Op0IsKill, X86::sub_16bit);
}

unsigned FastEmit_ISD_TRUNCATE_MVT_i32_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
  switch (RetVT.SimpleTy) {
  case MVT::i8: return FastEmit_ISD_TRUNCATE_MVT_i32_MVT_i8_r(Op0, Op0IsKill);
  case MVT::i16: return FastEmit_ISD_TRUNCATE_MVT_i32_MVT_i16_r(Op0, Op0IsKill);
  default: return 0;
  }
}

unsigned FastEmit_ISD_TRUNCATE_MVT_i64_MVT_i8_r(unsigned Op0, bool Op0IsKill) {
  return FastEmitInst_extractsubreg(MVT::i8, Op0, Op0IsKill, X86::sub_8bit);
}

unsigned FastEmit_ISD_TRUNCATE_MVT_i64_MVT_i16_r(unsigned Op0, bool Op0IsKill) {
  return FastEmitInst_extractsubreg(MVT::i16, Op0, Op0IsKill, X86::sub_16bit);
}

unsigned FastEmit_ISD_TRUNCATE_MVT_i64_MVT_i32_r(unsigned Op0, bool Op0IsKill) {
  return FastEmitInst_extractsubreg(MVT::i32, Op0, Op0IsKill, X86::sub_32bit);
}

unsigned FastEmit_ISD_TRUNCATE_MVT_i64_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
  switch (RetVT.SimpleTy) {
  case MVT::i8: return FastEmit_ISD_TRUNCATE_MVT_i64_MVT_i8_r(Op0, Op0IsKill);
  case MVT::i16: return FastEmit_ISD_TRUNCATE_MVT_i64_MVT_i16_r(Op0, Op0IsKill);
  case MVT::i32: return FastEmit_ISD_TRUNCATE_MVT_i64_MVT_i32_r(Op0, Op0IsKill);
  default: return 0;
  }
}

unsigned FastEmit_ISD_TRUNCATE_r(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill) {
  switch (VT.SimpleTy) {
  case MVT::i16: return FastEmit_ISD_TRUNCATE_MVT_i16_r(RetVT, Op0, Op0IsKill);
  case MVT::i32: return FastEmit_ISD_TRUNCATE_MVT_i32_r(RetVT, Op0, Op0IsKill);
  case MVT::i64: return FastEmit_ISD_TRUNCATE_MVT_i64_r(RetVT, Op0, Op0IsKill);
  default: return 0;
  }
}
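
// Illustrative note (editor's sketch, not part of the generated output): integer
// truncation needs no instruction at all; it is emitted as a subregister
// extraction of the source register, e.g. an i64 -> i32 truncate becomes
//
//   FastEmitInst_extractsubreg(MVT::i32, Op0, Op0IsKill, X86::sub_32bit)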

// FastEmit functions for ISD::ZERO_EXTEND.

unsigned FastEmit_ISD_ZERO_EXTEND_MVT_i8_MVT_i32_r(unsigned Op0, bool Op0IsKill) {
  return FastEmitInst_r(X86::MOVZX32rr8, X86::GR32RegisterClass, Op0, Op0IsKill);
}

unsigned FastEmit_ISD_ZERO_EXTEND_MVT_i8_MVT_i64_r(unsigned Op0, bool Op0IsKill) {
  return FastEmitInst_r(X86::MOVZX64rr8, X86::GR64RegisterClass, Op0, Op0IsKill);
}

unsigned FastEmit_ISD_ZERO_EXTEND_MVT_i8_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
  switch (RetVT.SimpleTy) {
  case MVT::i32: return FastEmit_ISD_ZERO_EXTEND_MVT_i8_MVT_i32_r(Op0, Op0IsKill);
  case MVT::i64: return FastEmit_ISD_ZERO_EXTEND_MVT_i8_MVT_i64_r(Op0, Op0IsKill);
  default: return 0;
  }
}

unsigned FastEmit_ISD_ZERO_EXTEND_MVT_i16_MVT_i32_r(unsigned Op0, bool Op0IsKill) {
  return FastEmitInst_r(X86::MOVZX32rr16, X86::GR32RegisterClass, Op0, Op0IsKill);
}

unsigned FastEmit_ISD_ZERO_EXTEND_MVT_i16_MVT_i64_r(unsigned Op0, bool Op0IsKill) {
  return FastEmitInst_r(X86::MOVZX64rr16, X86::GR64RegisterClass, Op0, Op0IsKill);
}

unsigned FastEmit_ISD_ZERO_EXTEND_MVT_i16_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
  switch (RetVT.SimpleTy) {
  case MVT::i32: return FastEmit_ISD_ZERO_EXTEND_MVT_i16_MVT_i32_r(Op0, Op0IsKill);
  case MVT::i64: return FastEmit_ISD_ZERO_EXTEND_MVT_i16_MVT_i64_r(Op0, Op0IsKill);
  default: return 0;
  }
}

unsigned FastEmit_ISD_ZERO_EXTEND_MVT_i32_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
  if (RetVT.SimpleTy != MVT::i64)
    return 0;
  return FastEmitInst_r(X86::MOVZX64rr32, X86::GR64RegisterClass, Op0, Op0IsKill);
}

unsigned FastEmit_ISD_ZERO_EXTEND_r(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill) {
  switch (VT.SimpleTy) {
  case MVT::i8: return FastEmit_ISD_ZERO_EXTEND_MVT_i8_r(RetVT, Op0, Op0IsKill);
  case MVT::i16: return FastEmit_ISD_ZERO_EXTEND_MVT_i16_r(RetVT, Op0, Op0IsKill);
  case MVT::i32: return FastEmit_ISD_ZERO_EXTEND_MVT_i32_r(RetVT, Op0, Op0IsKill);
  default: return 0;
  }
}

// FastEmit functions for X86ISD::CALL.

unsigned FastEmit_X86ISD_CALL_MVT_i32_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
  if (RetVT.SimpleTy != MVT::isVoid)
    return 0;
  if ((!Subtarget->is64Bit())) {
    return FastEmitInst_r(X86::CALL32r, X86::GR32RegisterClass, Op0, Op0IsKill);
  }
  return 0;
}

unsigned FastEmit_X86ISD_CALL_MVT_i64_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
  if (RetVT.SimpleTy != MVT::isVoid)
    return 0;
  if ((Subtarget->is64Bit()) && (!Subtarget->isTargetWin64())) {
    return FastEmitInst_r(X86::CALL64r, X86::GR64RegisterClass, Op0, Op0IsKill);
  }
  if ((Subtarget->isTargetWin64())) {
    return FastEmitInst_r(X86::WINCALL64r, X86::GR64RegisterClass, Op0, Op0IsKill);
  }
  return 0;
}

unsigned FastEmit_X86ISD_CALL_r(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill) {
  switch (VT.SimpleTy) {
  case MVT::i32: return FastEmit_X86ISD_CALL_MVT_i32_r(RetVT, Op0, Op0IsKill);
  case MVT::i64: return FastEmit_X86ISD_CALL_MVT_i64_r(RetVT, Op0, Op0IsKill);
  default: return 0;
  }
}

// FastEmit functions for X86ISD::EH_RETURN.

unsigned FastEmit_X86ISD_EH_RETURN_MVT_i32_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
  if (RetVT.SimpleTy != MVT::isVoid)
    return 0;
  return FastEmitInst_r(X86::EH_RETURN, X86::GR32RegisterClass, Op0, Op0IsKill);
}

unsigned FastEmit_X86ISD_EH_RETURN_MVT_i64_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
  if (RetVT.SimpleTy != MVT::isVoid)
    return 0;
  return FastEmitInst_r(X86::EH_RETURN64, X86::GR64RegisterClass, Op0, Op0IsKill);
}

unsigned FastEmit_X86ISD_EH_RETURN_r(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill) {
  switch (VT.SimpleTy) {
  case MVT::i32: return FastEmit_X86ISD_EH_RETURN_MVT_i32_r(RetVT, Op0, Op0IsKill);
  case MVT::i64: return FastEmit_X86ISD_EH_RETURN_MVT_i64_r(RetVT, Op0, Op0IsKill);
  default: return 0;
  }
}

// FastEmit functions for X86ISD::FRCP.

unsigned FastEmit_X86ISD_FRCP_MVT_f32_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
  if (RetVT.SimpleTy != MVT::f32)
    return 0;
  if ((Subtarget->hasSSE1())) {
    return FastEmitInst_r(X86::RCPSSr, X86::FR32RegisterClass, Op0, Op0IsKill);
  }
  return 0;
}

unsigned FastEmit_X86ISD_FRCP_MVT_v4f32_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
  if (RetVT.SimpleTy != MVT::v4f32)
    return 0;
  if ((Subtarget->hasAVX())) {
    return FastEmitInst_r(X86::VRCPPSr, X86::VR128RegisterClass, Op0, Op0IsKill);
  }
  if ((Subtarget->hasSSE1())) {
    return FastEmitInst_r(X86::RCPPSr, X86::VR128RegisterClass, Op0, Op0IsKill);
  }
  return 0;
}

unsigned FastEmit_X86ISD_FRCP_MVT_v8f32_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
  if (RetVT.SimpleTy != MVT::v8f32)
    return 0;
  if ((Subtarget->hasAVX())) {
    return FastEmitInst_r(X86::VRCPPSYr, X86::VR256RegisterClass, Op0, Op0IsKill);
  }
  return 0;
}

unsigned FastEmit_X86ISD_FRCP_r(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill) {
  switch (VT.SimpleTy) {
  case MVT::f32: return FastEmit_X86ISD_FRCP_MVT_f32_r(RetVT, Op0, Op0IsKill);
  case MVT::v4f32: return FastEmit_X86ISD_FRCP_MVT_v4f32_r(RetVT, Op0, Op0IsKill);
  case MVT::v8f32: return FastEmit_X86ISD_FRCP_MVT_v8f32_r(RetVT, Op0, Op0IsKill);
  default: return 0;
  }
}

// FastEmit functions for X86ISD::FRSQRT.

unsigned FastEmit_X86ISD_FRSQRT_MVT_f32_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
  if (RetVT.SimpleTy != MVT::f32)
    return 0;
  if ((Subtarget->hasSSE1())) {
    return FastEmitInst_r(X86::RSQRTSSr, X86::FR32RegisterClass, Op0, Op0IsKill);
  }
  return 0;
}

unsigned FastEmit_X86ISD_FRSQRT_MVT_v4f32_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
  if (RetVT.SimpleTy != MVT::v4f32)
    return 0;
  if ((Subtarget->hasAVX())) {
    return FastEmitInst_r(X86::VRSQRTPSr, X86::VR128RegisterClass, Op0, Op0IsKill);
  }
  if ((Subtarget->hasSSE1())) {
    return FastEmitInst_r(X86::RSQRTPSr, X86::VR128RegisterClass, Op0, Op0IsKill);
  }
  return 0;
}

unsigned FastEmit_X86ISD_FRSQRT_MVT_v8f32_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
  if (RetVT.SimpleTy != MVT::v8f32)
    return 0;
  if ((Subtarget->hasAVX())) {
    return FastEmitInst_r(X86::VRSQRTPSYr, X86::VR256RegisterClass, Op0, Op0IsKill);
  }
  return 0;
}

unsigned FastEmit_X86ISD_FRSQRT_r(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill) {
  switch (VT.SimpleTy) {
  case MVT::f32: return FastEmit_X86ISD_FRSQRT_MVT_f32_r(RetVT, Op0, Op0IsKill);
  case MVT::v4f32: return FastEmit_X86ISD_FRSQRT_MVT_v4f32_r(RetVT, Op0, Op0IsKill);
  case MVT::v8f32: return FastEmit_X86ISD_FRSQRT_MVT_v8f32_r(RetVT, Op0, Op0IsKill);
  default: return 0;
  }
}

// FastEmit functions for X86ISD::MEMBARRIER.

unsigned FastEmit_X86ISD_MEMBARRIER_MVT_i64_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
  if (RetVT.SimpleTy != MVT::isVoid)
    return 0;
  if ((Subtarget->is64Bit())) {
    return FastEmitInst_r(X86::Int_MemBarrierNoSSE64, X86::GR64RegisterClass, Op0, Op0IsKill);
  }
  return 0;
}

unsigned FastEmit_X86ISD_MEMBARRIER_r(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill) {
  switch (VT.SimpleTy) {
  case MVT::i64: return FastEmit_X86ISD_MEMBARRIER_MVT_i64_r(RetVT, Op0, Op0IsKill);
  default: return 0;
  }
}

// FastEmit functions for X86ISD::MMX_MOVD2W.

unsigned FastEmit_X86ISD_MMX_MOVD2W_MVT_x86mmx_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
  if (RetVT.SimpleTy != MVT::i32)
    return 0;
  if ((Subtarget->hasMMX())) {
    return FastEmitInst_r(X86::MMX_MOVD64grr, X86::GR32RegisterClass, Op0, Op0IsKill);
  }
  return 0;
}

unsigned FastEmit_X86ISD_MMX_MOVD2W_r(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill) {
  switch (VT.SimpleTy) {
  case MVT::x86mmx: return FastEmit_X86ISD_MMX_MOVD2W_MVT_x86mmx_r(RetVT, Op0, Op0IsKill);
  default: return 0;
  }
}

// FastEmit functions for X86ISD::MMX_MOVW2D.

unsigned FastEmit_X86ISD_MMX_MOVW2D_MVT_i32_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
  if (RetVT.SimpleTy != MVT::x86mmx)
    return 0;
  if ((Subtarget->hasMMX())) {
    return FastEmitInst_r(X86::MMX_MOVD64rr, X86::VR64RegisterClass, Op0, Op0IsKill);
  }
  return 0;
}

unsigned FastEmit_X86ISD_MMX_MOVW2D_r(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill) {
  switch (VT.SimpleTy) {
  case MVT::i32: return FastEmit_X86ISD_MMX_MOVW2D_MVT_i32_r(RetVT, Op0, Op0IsKill);
  default: return 0;
  }
}

// FastEmit functions for X86ISD::MOVDDUP.

unsigned FastEmit_X86ISD_MOVDDUP_MVT_v4i64_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
  if (RetVT.SimpleTy != MVT::v4i64)
    return 0;
  if ((Subtarget->hasAVX())) {
    return FastEmitInst_r(X86::VMOVDDUPYrr, X86::VR256RegisterClass, Op0, Op0IsKill);
  }
  return 0;
}

unsigned FastEmit_X86ISD_MOVDDUP_MVT_v4f64_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
  if (RetVT.SimpleTy != MVT::v4f64)
    return 0;
  if ((Subtarget->hasAVX())) {
    return FastEmitInst_r(X86::VMOVDDUPYrr, X86::VR256RegisterClass, Op0, Op0IsKill);
  }
  return 0;
}

unsigned FastEmit_X86ISD_MOVDDUP_r(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill) {
  switch (VT.SimpleTy) {
  case MVT::v4i64: return FastEmit_X86ISD_MOVDDUP_MVT_v4i64_r(RetVT, Op0, Op0IsKill);
  case MVT::v4f64: return FastEmit_X86ISD_MOVDDUP_MVT_v4f64_r(RetVT, Op0, Op0IsKill);
  default: return 0;
  }
}

// FastEmit functions for X86ISD::MOVDQ2Q.

unsigned FastEmit_X86ISD_MOVDQ2Q_MVT_v2i64_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
  if (RetVT.SimpleTy != MVT::x86mmx)
    return 0;
  return FastEmitInst_r(X86::MMX_MOVDQ2Qrr, X86::VR64RegisterClass, Op0, Op0IsKill);
}

unsigned FastEmit_X86ISD_MOVDQ2Q_r(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill) {
  switch (VT.SimpleTy) {
  case MVT::v2i64: return FastEmit_X86ISD_MOVDQ2Q_MVT_v2i64_r(RetVT, Op0, Op0IsKill);
  default: return 0;
  }
}

// FastEmit functions for X86ISD::MOVQ2DQ.

unsigned FastEmit_X86ISD_MOVQ2DQ_MVT_x86mmx_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
  if (RetVT.SimpleTy != MVT::v2i64)
    return 0;
  return FastEmitInst_r(X86::MMX_MOVQ2DQrr, X86::VR128RegisterClass, Op0, Op0IsKill);
}

unsigned FastEmit_X86ISD_MOVQ2DQ_r(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill) {
  switch (VT.SimpleTy) {
  case MVT::x86mmx: return FastEmit_X86ISD_MOVQ2DQ_MVT_x86mmx_r(RetVT, Op0, Op0IsKill);
  default: return 0;
  }
}

// FastEmit functions for X86ISD::MOVSHDUP.

unsigned FastEmit_X86ISD_MOVSHDUP_MVT_v4i32_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
  if (RetVT.SimpleTy != MVT::v4i32)
    return 0;
  if ((Subtarget->hasAVX())) {
    return FastEmitInst_r(X86::VMOVSHDUPrr, X86::VR128RegisterClass, Op0, Op0IsKill);
  }
  if ((Subtarget->hasSSE3())) {
    return FastEmitInst_r(X86::MOVSHDUPrr, X86::VR128RegisterClass, Op0, Op0IsKill);
  }
  return 0;
}

unsigned FastEmit_X86ISD_MOVSHDUP_MVT_v8i32_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
  if (RetVT.SimpleTy != MVT::v8i32)
    return 0;
  if ((Subtarget->hasAVX())) {
    return FastEmitInst_r(X86::VMOVSHDUPYrr, X86::VR256RegisterClass, Op0, Op0IsKill);
  }
  return 0;
}

unsigned FastEmit_X86ISD_MOVSHDUP_MVT_v4f32_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
  if (RetVT.SimpleTy != MVT::v4f32)
    return 0;
  if ((Subtarget->hasAVX())) {
    return FastEmitInst_r(X86::VMOVSHDUPrr, X86::VR128RegisterClass, Op0, Op0IsKill);
  }
  if ((Subtarget->hasSSE3())) {
    return FastEmitInst_r(X86::MOVSHDUPrr, X86::VR128RegisterClass, Op0, Op0IsKill);
  }
  return 0;
}

unsigned FastEmit_X86ISD_MOVSHDUP_MVT_v8f32_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
  if (RetVT.SimpleTy != MVT::v8f32)
    return 0;
  if ((Subtarget->hasAVX())) {
    return FastEmitInst_r(X86::VMOVSHDUPYrr, X86::VR256RegisterClass, Op0, Op0IsKill);
  }
  return 0;
}

unsigned FastEmit_X86ISD_MOVSHDUP_r(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill) {
  switch (VT.SimpleTy) {
  case MVT::v4i32: return FastEmit_X86ISD_MOVSHDUP_MVT_v4i32_r(RetVT, Op0, Op0IsKill);
  case MVT::v8i32: return FastEmit_X86ISD_MOVSHDUP_MVT_v8i32_r(RetVT, Op0, Op0IsKill);
  case MVT::v4f32: return FastEmit_X86ISD_MOVSHDUP_MVT_v4f32_r(RetVT, Op0, Op0IsKill);
  case MVT::v8f32: return FastEmit_X86ISD_MOVSHDUP_MVT_v8f32_r(RetVT, Op0, Op0IsKill);
  default: return 0;
  }
}

// FastEmit functions for X86ISD::MOVSLDUP.

unsigned FastEmit_X86ISD_MOVSLDUP_MVT_v4i32_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
  if (RetVT.SimpleTy != MVT::v4i32)
    return 0;
  if ((Subtarget->hasAVX())) {
    return FastEmitInst_r(X86::VMOVSLDUPrr, X86::VR128RegisterClass, Op0, Op0IsKill);
  }
  if ((Subtarget->hasSSE3())) {
    return FastEmitInst_r(X86::MOVSLDUPrr, X86::VR128RegisterClass, Op0, Op0IsKill);
  }
  return 0;
}

unsigned FastEmit_X86ISD_MOVSLDUP_MVT_v8i32_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
  if (RetVT.SimpleTy != MVT::v8i32)
    return 0;
  if ((Subtarget->hasAVX())) {
    return FastEmitInst_r(X86::VMOVSLDUPYrr, X86::VR256RegisterClass, Op0, Op0IsKill);
  }
  return 0;
}

unsigned FastEmit_X86ISD_MOVSLDUP_MVT_v4f32_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
  if (RetVT.SimpleTy != MVT::v4f32)
    return 0;
  if ((Subtarget->hasAVX())) {
    return FastEmitInst_r(X86::VMOVSLDUPrr, X86::VR128RegisterClass, Op0, Op0IsKill);
  }
  if ((Subtarget->hasSSE3())) {
    return FastEmitInst_r(X86::MOVSLDUPrr, X86::VR128RegisterClass, Op0, Op0IsKill);
  }
  return 0;
}

unsigned FastEmit_X86ISD_MOVSLDUP_MVT_v8f32_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
  if (RetVT.SimpleTy != MVT::v8f32)
    return 0;
  if ((Subtarget->hasAVX())) {
    return FastEmitInst_r(X86::VMOVSLDUPYrr, X86::VR256RegisterClass, Op0, Op0IsKill);
  }
  return 0;
}

unsigned FastEmit_X86ISD_MOVSLDUP_r(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill) {
  switch (VT.SimpleTy) {
  case MVT::v4i32: return FastEmit_X86ISD_MOVSLDUP_MVT_v4i32_r(RetVT, Op0, Op0IsKill);
  case MVT::v8i32: return FastEmit_X86ISD_MOVSLDUP_MVT_v8i32_r(RetVT, Op0, Op0IsKill);
  case MVT::v4f32: return FastEmit_X86ISD_MOVSLDUP_MVT_v4f32_r(RetVT, Op0, Op0IsKill);
  case MVT::v8f32: return FastEmit_X86ISD_MOVSLDUP_MVT_v8f32_r(RetVT, Op0, Op0IsKill);
  default: return 0;
  }
}

// FastEmit functions for X86ISD::SEG_ALLOCA.

unsigned FastEmit_X86ISD_SEG_ALLOCA_MVT_i32_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
  if (RetVT.SimpleTy != MVT::i32)
    return 0;
  if ((!Subtarget->is64Bit())) {
    return FastEmitInst_r(X86::SEG_ALLOCA_32, X86::GR32RegisterClass, Op0, Op0IsKill);
  }
  return 0;
}

unsigned FastEmit_X86ISD_SEG_ALLOCA_MVT_i64_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
  if (RetVT.SimpleTy != MVT::i64)
    return 0;
  if ((Subtarget->is64Bit())) {
    return FastEmitInst_r(X86::SEG_ALLOCA_64, X86::GR64RegisterClass, Op0, Op0IsKill);
  }
  return 0;
}

unsigned FastEmit_X86ISD_SEG_ALLOCA_r(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill) {
  switch (VT.SimpleTy) {
  case MVT::i32: return FastEmit_X86ISD_SEG_ALLOCA_MVT_i32_r(RetVT, Op0, Op0IsKill);
  case MVT::i64: return FastEmit_X86ISD_SEG_ALLOCA_MVT_i64_r(RetVT, Op0, Op0IsKill);
  default: return 0;
  }
}

// FastEmit functions for X86ISD::VZEXT_MOVL.

unsigned FastEmit_X86ISD_VZEXT_MOVL_MVT_v2i64_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
  if (RetVT.SimpleTy != MVT::v2i64)
    return 0;
  if ((Subtarget->hasAVX())) {
    return FastEmitInst_r(X86::VMOVZPQILo2PQIrr, X86::VR128RegisterClass, Op0, Op0IsKill);
  }
  if ((Subtarget->hasSSE2())) {
    return FastEmitInst_r(X86::MOVZPQILo2PQIrr, X86::VR128RegisterClass, Op0, Op0IsKill);
  }
  return 0;
}

unsigned FastEmit_X86ISD_VZEXT_MOVL_MVT_v2f64_r(MVT RetVT, unsigned Op0, bool Op0IsKill) {
  if (RetVT.SimpleTy != MVT::v2f64)
    return 0;
  if ((Subtarget->hasAVX())) {
    return FastEmitInst_r(X86::VMOVZPQILo2PQIrr, X86::VR128RegisterClass, Op0, Op0IsKill);
  }
  if ((Subtarget->hasSSE2())) {
    return FastEmitInst_r(X86::MOVZPQILo2PQIrr, X86::VR128RegisterClass, Op0, Op0IsKill);
  }
  return 0;
}

unsigned FastEmit_X86ISD_VZEXT_MOVL_r(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill) {
  switch (VT.SimpleTy) {
  case MVT::v2i64: return FastEmit_X86ISD_VZEXT_MOVL_MVT_v2i64_r(RetVT, Op0, Op0IsKill);
  case MVT::v2f64: return FastEmit_X86ISD_VZEXT_MOVL_MVT_v2f64_r(RetVT, Op0, Op0IsKill);
  default: return 0;
  }
}

// Top-level FastEmit function.

unsigned FastEmit_r(MVT VT, MVT RetVT, unsigned Opcode, unsigned Op0, bool Op0IsKill) {
  switch (Opcode) {
  case ISD::ANY_EXTEND: return FastEmit_ISD_ANY_EXTEND_r(VT, RetVT, Op0, Op0IsKill);
  case ISD::BITCAST: return FastEmit_ISD_BITCAST_r(VT, RetVT, Op0, Op0IsKill);
  case ISD::BRIND: return FastEmit_ISD_BRIND_r(VT, RetVT, Op0, Op0IsKill);
  case ISD::BSWAP: return FastEmit_ISD_BSWAP_r(VT, RetVT, Op0, Op0IsKill);
  case ISD::CTLZ: return FastEmit_ISD_CTLZ_r(VT, RetVT, Op0, Op0IsKill);
  case ISD::CTPOP: return FastEmit_ISD_CTPOP_r(VT, RetVT, Op0, Op0IsKill);
  case ISD::CTTZ: return FastEmit_ISD_CTTZ_r(VT, RetVT, Op0, Op0IsKill);
  case ISD::FABS: return FastEmit_ISD_FABS_r(VT, RetVT, Op0, Op0IsKill);
  case ISD::FCOS: return FastEmit_ISD_FCOS_r(VT, RetVT, Op0, Op0IsKill);
  case ISD::FNEG: return FastEmit_ISD_FNEG_r(VT, RetVT, Op0, Op0IsKill);
  case ISD::FP_EXTEND: return FastEmit_ISD_FP_EXTEND_r(VT, RetVT, Op0, Op0IsKill);
  case ISD::FP_ROUND: return FastEmit_ISD_FP_ROUND_r(VT, RetVT, Op0, Op0IsKill);
  case ISD::FP_TO_SINT: return FastEmit_ISD_FP_TO_SINT_r(VT, RetVT, Op0, Op0IsKill);
  case ISD::FSIN: return FastEmit_ISD_FSIN_r(VT, RetVT, Op0, Op0IsKill);
  case ISD::FSQRT: return FastEmit_ISD_FSQRT_r(VT, RetVT, Op0, Op0IsKill);
  case ISD::SCALAR_TO_VECTOR: return FastEmit_ISD_SCALAR_TO_VECTOR_r(VT, RetVT, Op0, Op0IsKill);
  case ISD::SIGN_EXTEND: return FastEmit_ISD_SIGN_EXTEND_r(VT, RetVT, Op0, Op0IsKill);
  case ISD::SINT_TO_FP: return FastEmit_ISD_SINT_TO_FP_r(VT, RetVT, Op0, Op0IsKill);
  case ISD::TRUNCATE: return FastEmit_ISD_TRUNCATE_r(VT, RetVT, Op0, Op0IsKill);
  case ISD::ZERO_EXTEND: return FastEmit_ISD_ZERO_EXTEND_r(VT, RetVT, Op0, Op0IsKill);
  case X86ISD::CALL: return FastEmit_X86ISD_CALL_r(VT, RetVT, Op0, Op0IsKill);
  case X86ISD::EH_RETURN: return FastEmit_X86ISD_EH_RETURN_r(VT, RetVT, Op0, Op0IsKill);
  case X86ISD::FRCP: return FastEmit_X86ISD_FRCP_r(VT, RetVT, Op0, Op0IsKill);
  case X86ISD::FRSQRT: return FastEmit_X86ISD_FRSQRT_r(VT, RetVT, Op0, Op0IsKill);
  case X86ISD::MEMBARRIER: return FastEmit_X86ISD_MEMBARRIER_r(VT, RetVT, Op0, Op0IsKill);
  case X86ISD::MMX_MOVD2W: return FastEmit_X86ISD_MMX_MOVD2W_r(VT, RetVT, Op0, Op0IsKill);
  case X86ISD::MMX_MOVW2D: return FastEmit_X86ISD_MMX_MOVW2D_r(VT, RetVT, Op0, Op0IsKill);
  case X86ISD::MOVDDUP: return FastEmit_X86ISD_MOVDDUP_r(VT, RetVT, Op0, Op0IsKill);
  case X86ISD::MOVDQ2Q: return FastEmit_X86ISD_MOVDQ2Q_r(VT, RetVT, Op0, Op0IsKill);
  case X86ISD::MOVQ2DQ: return FastEmit_X86ISD_MOVQ2DQ_r(VT, RetVT, Op0, Op0IsKill);
  case X86ISD::MOVSHDUP: return FastEmit_X86ISD_MOVSHDUP_r(VT, RetVT, Op0, Op0IsKill);
  case X86ISD::MOVSLDUP: return FastEmit_X86ISD_MOVSLDUP_r(VT, RetVT, Op0, Op0IsKill);
  case X86ISD::SEG_ALLOCA: return FastEmit_X86ISD_SEG_ALLOCA_r(VT, RetVT, Op0, Op0IsKill);
  case X86ISD::VZEXT_MOVL: return FastEmit_X86ISD_VZEXT_MOVL_r(VT, RetVT, Op0, Op0IsKill);
  default: return 0;
  }
}
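
// Illustrative usage (editor's sketch, not part of the generated output): a
// FastISel client asks this dispatcher for a single-register-operand pattern
// and treats a return value of 0 as "no pattern matched or the required
// subtarget feature is missing", falling back to the DAG selector, e.g.
//
//   unsigned ResultReg =
//       FastEmit_r(MVT::i32, MVT::i64, ISD::ZERO_EXTEND, SrcReg,
//                  /*Op0IsKill=*/true);   // SrcReg is a hypothetical input vreg
//   if (ResultReg == 0) {
//     // No FastISel pattern; defer this instruction to SelectionDAG.
//   }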
1440
1441// FastEmit functions for ISD::ADD.
1442
1443unsigned FastEmit_ISD_ADD_MVT_i8_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
1444  if (RetVT.SimpleTy != MVT::i8)
1445    return 0;
1446  return FastEmitInst_rr(X86::ADD8rr, X86::GR8RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1447}
1448
1449unsigned FastEmit_ISD_ADD_MVT_i16_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
1450  if (RetVT.SimpleTy != MVT::i16)
1451    return 0;
1452  return FastEmitInst_rr(X86::ADD16rr, X86::GR16RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1453}
1454
1455unsigned FastEmit_ISD_ADD_MVT_i32_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
1456  if (RetVT.SimpleTy != MVT::i32)
1457    return 0;
1458  return FastEmitInst_rr(X86::ADD32rr, X86::GR32RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1459}
1460
1461unsigned FastEmit_ISD_ADD_MVT_i64_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
1462  if (RetVT.SimpleTy != MVT::i64)
1463    return 0;
1464  return FastEmitInst_rr(X86::ADD64rr, X86::GR64RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1465}
1466
1467unsigned FastEmit_ISD_ADD_MVT_v16i8_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
1468  if (RetVT.SimpleTy != MVT::v16i8)
1469    return 0;
1470  if ((Subtarget->hasAVX())) {
1471    return FastEmitInst_rr(X86::VPADDBrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1472  }
1473  if ((Subtarget->hasSSE2())) {
1474    return FastEmitInst_rr(X86::PADDBrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1475  }
1476  return 0;
1477}
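
// Predicate order matters: the TableGen'erated checks try the AVX
// (VEX-encoded) form before the legacy SSE form, so AVX-capable subtargets
// get VEX encodings and avoid SSE/AVX transition penalties.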
1478
1479unsigned FastEmit_ISD_ADD_MVT_v8i16_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
1480  if (RetVT.SimpleTy != MVT::v8i16)
1481    return 0;
1482  if ((Subtarget->hasAVX())) {
1483    return FastEmitInst_rr(X86::VPADDWrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1484  }
1485  if ((Subtarget->hasSSE2())) {
1486    return FastEmitInst_rr(X86::PADDWrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1487  }
1488  return 0;
1489}
1490
1491unsigned FastEmit_ISD_ADD_MVT_v4i32_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
1492  if (RetVT.SimpleTy != MVT::v4i32)
1493    return 0;
1494  if ((Subtarget->hasAVX())) {
1495    return FastEmitInst_rr(X86::VPADDDrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1496  }
1497  if ((Subtarget->hasSSE2())) {
1498    return FastEmitInst_rr(X86::PADDDrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1499  }
1500  return 0;
1501}
1502
1503unsigned FastEmit_ISD_ADD_MVT_v2i64_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
1504  if (RetVT.SimpleTy != MVT::v2i64)
1505    return 0;
1506  if ((Subtarget->hasAVX())) {
1507    return FastEmitInst_rr(X86::VPADDQrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1508  }
1509  if ((Subtarget->hasSSE2())) {
1510    return FastEmitInst_rr(X86::PADDQrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1511  }
1512  return 0;
1513}
1514
1515unsigned FastEmit_ISD_ADD_rr(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
1516  switch (VT.SimpleTy) {
1517  case MVT::i8: return FastEmit_ISD_ADD_MVT_i8_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
1518  case MVT::i16: return FastEmit_ISD_ADD_MVT_i16_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
1519  case MVT::i32: return FastEmit_ISD_ADD_MVT_i32_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
1520  case MVT::i64: return FastEmit_ISD_ADD_MVT_i64_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
1521  case MVT::v16i8: return FastEmit_ISD_ADD_MVT_v16i8_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
1522  case MVT::v8i16: return FastEmit_ISD_ADD_MVT_v8i16_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
1523  case MVT::v4i32: return FastEmit_ISD_ADD_MVT_v4i32_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
1524  case MVT::v2i64: return FastEmit_ISD_ADD_MVT_v2i64_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
1525  default: return 0;
1526  }
1527}
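
// Illustrative call, as a sketch only (the real caller is the generated
// FastEmit_rr opcode switch driven by the target-independent FastISel; the
// register names below are made up for the example):
//   unsigned SumReg = FastEmit_ISD_ADD_rr(MVT::i32, MVT::i32,
//                                         LHSReg, /*Op0IsKill=*/true,
//                                         RHSReg, /*Op1IsKill=*/true);
//   // SumReg == 0 means no pattern matched; FastISel falls back to
//   // SelectionDAG for this instruction.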
1528
1529// FastEmit functions for ISD::AND.
1530
1531unsigned FastEmit_ISD_AND_MVT_i8_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
1532  if (RetVT.SimpleTy != MVT::i8)
1533    return 0;
1534  return FastEmitInst_rr(X86::AND8rr, X86::GR8RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1535}
1536
1537unsigned FastEmit_ISD_AND_MVT_i16_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
1538  if (RetVT.SimpleTy != MVT::i16)
1539    return 0;
1540  return FastEmitInst_rr(X86::AND16rr, X86::GR16RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1541}
1542
1543unsigned FastEmit_ISD_AND_MVT_i32_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
1544  if (RetVT.SimpleTy != MVT::i32)
1545    return 0;
1546  return FastEmitInst_rr(X86::AND32rr, X86::GR32RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1547}
1548
1549unsigned FastEmit_ISD_AND_MVT_i64_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
1550  if (RetVT.SimpleTy != MVT::i64)
1551    return 0;
1552  return FastEmitInst_rr(X86::AND64rr, X86::GR64RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1553}
1554
1555unsigned FastEmit_ISD_AND_MVT_v2i64_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
1556  if (RetVT.SimpleTy != MVT::v2i64)
1557    return 0;
1558  if ((Subtarget->hasAVX())) {
1559    return FastEmitInst_rr(X86::VPANDrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1560  }
1561  if ((Subtarget->hasSSE1())) {
1562    return FastEmitInst_rr(X86::ANDPSrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1563  }
1564  if ((Subtarget->hasSSE2())) {
1565    return FastEmitInst_rr(X86::PANDrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1566  }
1567  return 0;
1568}
1569
1570unsigned FastEmit_ISD_AND_MVT_v4i64_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
1571  if (RetVT.SimpleTy != MVT::v4i64)
1572    return 0;
1573  if ((Subtarget->hasAVX())) {
1574    return FastEmitInst_rr(X86::VANDPSYrr, X86::VR256RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1575  }
1576  return 0;
1577}
1578
1579unsigned FastEmit_ISD_AND_rr(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
1580  switch (VT.SimpleTy) {
1581  case MVT::i8: return FastEmit_ISD_AND_MVT_i8_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
1582  case MVT::i16: return FastEmit_ISD_AND_MVT_i16_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
1583  case MVT::i32: return FastEmit_ISD_AND_MVT_i32_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
1584  case MVT::i64: return FastEmit_ISD_AND_MVT_i64_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
1585  case MVT::v2i64: return FastEmit_ISD_AND_MVT_v2i64_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
1586  case MVT::v4i64: return FastEmit_ISD_AND_MVT_v4i64_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
1587  default: return 0;
1588  }
1589}
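
// Only v2i64 and v4i64 are handled here because those are the types the X86
// bitwise-logic instruction patterns are written on; any other vector type
// falls out through the default case and is left to SelectionDAG.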
1590
1591// FastEmit functions for ISD::FADD.
1592
1593unsigned FastEmit_ISD_FADD_MVT_f32_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
1594  if (RetVT.SimpleTy != MVT::f32)
1595    return 0;
1596  if ((!Subtarget->hasXMM())) {
1597    return FastEmitInst_rr(X86::ADD_Fp32, X86::RFP32RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1598  }
1599  if ((Subtarget->hasAVX())) {
1600    return FastEmitInst_rr(X86::VADDSSrr, X86::FR32RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1601  }
1602  if ((Subtarget->hasSSE1())) {
1603    return FastEmitInst_rr(X86::ADDSSrr, X86::FR32RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1604  }
1605  return 0;
1606}
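
// On subtargets without XMM support (hasXMM()/hasXMMInt() are essentially the
// SSE1/SSE2 tests in this LLVM vintage) scalar FP math is emitted as the x87
// pseudos (ADD_Fp32/ADD_Fp64 on the RFP register classes), and f80 always
// takes the x87 path.  FSUB, FMUL and FDIV below follow the same scheme.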
1607
1608unsigned FastEmit_ISD_FADD_MVT_f64_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
1609  if (RetVT.SimpleTy != MVT::f64)
1610    return 0;
1611  if ((!Subtarget->hasXMMInt())) {
1612    return FastEmitInst_rr(X86::ADD_Fp64, X86::RFP64RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1613  }
1614  if ((Subtarget->hasAVX())) {
1615    return FastEmitInst_rr(X86::VADDSDrr, X86::FR64RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1616  }
1617  if ((Subtarget->hasSSE2())) {
1618    return FastEmitInst_rr(X86::ADDSDrr, X86::FR64RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1619  }
1620  return 0;
1621}
1622
1623unsigned FastEmit_ISD_FADD_MVT_f80_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
1624  if (RetVT.SimpleTy != MVT::f80)
1625    return 0;
1626  return FastEmitInst_rr(X86::ADD_Fp80, X86::RFP80RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1627}
1628
1629unsigned FastEmit_ISD_FADD_MVT_v4f32_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
1630  if (RetVT.SimpleTy != MVT::v4f32)
1631    return 0;
1632  if ((Subtarget->hasAVX())) {
1633    return FastEmitInst_rr(X86::VADDPSrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1634  }
1635  if ((Subtarget->hasSSE1())) {
1636    return FastEmitInst_rr(X86::ADDPSrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1637  }
1638  return 0;
1639}
1640
1641unsigned FastEmit_ISD_FADD_MVT_v8f32_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
1642  if (RetVT.SimpleTy != MVT::v8f32)
1643    return 0;
1644  if ((Subtarget->hasAVX())) {
1645    return FastEmitInst_rr(X86::VADDPSYrr, X86::VR256RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1646  }
1647  return 0;
1648}
1649
1650unsigned FastEmit_ISD_FADD_MVT_v2f64_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
1651  if (RetVT.SimpleTy != MVT::v2f64)
1652    return 0;
1653  if ((Subtarget->hasAVX())) {
1654    return FastEmitInst_rr(X86::VADDPDrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1655  }
1656  if ((Subtarget->hasSSE2())) {
1657    return FastEmitInst_rr(X86::ADDPDrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1658  }
1659  return 0;
1660}
1661
1662unsigned FastEmit_ISD_FADD_MVT_v4f64_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
1663  if (RetVT.SimpleTy != MVT::v4f64)
1664    return 0;
1665  if ((Subtarget->hasAVX())) {
1666    return FastEmitInst_rr(X86::VADDPDYrr, X86::VR256RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1667  }
1668  return 0;
1669}
1670
1671unsigned FastEmit_ISD_FADD_rr(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
1672  switch (VT.SimpleTy) {
1673  case MVT::f32: return FastEmit_ISD_FADD_MVT_f32_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
1674  case MVT::f64: return FastEmit_ISD_FADD_MVT_f64_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
1675  case MVT::f80: return FastEmit_ISD_FADD_MVT_f80_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
1676  case MVT::v4f32: return FastEmit_ISD_FADD_MVT_v4f32_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
1677  case MVT::v8f32: return FastEmit_ISD_FADD_MVT_v8f32_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
1678  case MVT::v2f64: return FastEmit_ISD_FADD_MVT_v2f64_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
1679  case MVT::v4f64: return FastEmit_ISD_FADD_MVT_v4f64_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
1680  default: return 0;
1681  }
1682}
1683
1684// FastEmit functions for ISD::FDIV.
1685
1686unsigned FastEmit_ISD_FDIV_MVT_f32_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
1687  if (RetVT.SimpleTy != MVT::f32)
1688    return 0;
1689  if ((!Subtarget->hasXMM())) {
1690    return FastEmitInst_rr(X86::DIV_Fp32, X86::RFP32RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1691  }
1692  if ((Subtarget->hasAVX())) {
1693    return FastEmitInst_rr(X86::VDIVSSrr, X86::FR32RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1694  }
1695  if ((Subtarget->hasSSE1())) {
1696    return FastEmitInst_rr(X86::DIVSSrr, X86::FR32RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1697  }
1698  return 0;
1699}
1700
1701unsigned FastEmit_ISD_FDIV_MVT_f64_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
1702  if (RetVT.SimpleTy != MVT::f64)
1703    return 0;
1704  if ((!Subtarget->hasXMMInt())) {
1705    return FastEmitInst_rr(X86::DIV_Fp64, X86::RFP64RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1706  }
1707  if ((Subtarget->hasAVX())) {
1708    return FastEmitInst_rr(X86::VDIVSDrr, X86::FR64RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1709  }
1710  if ((Subtarget->hasSSE2())) {
1711    return FastEmitInst_rr(X86::DIVSDrr, X86::FR64RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1712  }
1713  return 0;
1714}
1715
1716unsigned FastEmit_ISD_FDIV_MVT_f80_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
1717  if (RetVT.SimpleTy != MVT::f80)
1718    return 0;
1719  return FastEmitInst_rr(X86::DIV_Fp80, X86::RFP80RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1720}
1721
1722unsigned FastEmit_ISD_FDIV_MVT_v4f32_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
1723  if (RetVT.SimpleTy != MVT::v4f32)
1724    return 0;
1725  if ((Subtarget->hasAVX())) {
1726    return FastEmitInst_rr(X86::VDIVPSrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1727  }
1728  if ((Subtarget->hasSSE1())) {
1729    return FastEmitInst_rr(X86::DIVPSrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1730  }
1731  return 0;
1732}
1733
1734unsigned FastEmit_ISD_FDIV_MVT_v8f32_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
1735  if (RetVT.SimpleTy != MVT::v8f32)
1736    return 0;
1737  if ((Subtarget->hasAVX())) {
1738    return FastEmitInst_rr(X86::VDIVPSYrr, X86::VR256RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1739  }
1740  return 0;
1741}
1742
1743unsigned FastEmit_ISD_FDIV_MVT_v2f64_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
1744  if (RetVT.SimpleTy != MVT::v2f64)
1745    return 0;
1746  if ((Subtarget->hasAVX())) {
1747    return FastEmitInst_rr(X86::VDIVPDrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1748  }
1749  if ((Subtarget->hasSSE2())) {
1750    return FastEmitInst_rr(X86::DIVPDrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1751  }
1752  return 0;
1753}
1754
1755unsigned FastEmit_ISD_FDIV_MVT_v4f64_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
1756  if (RetVT.SimpleTy != MVT::v4f64)
1757    return 0;
1758  if ((Subtarget->hasAVX())) {
1759    return FastEmitInst_rr(X86::VDIVPDYrr, X86::VR256RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1760  }
1761  return 0;
1762}
1763
1764unsigned FastEmit_ISD_FDIV_rr(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
1765  switch (VT.SimpleTy) {
1766  case MVT::f32: return FastEmit_ISD_FDIV_MVT_f32_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
1767  case MVT::f64: return FastEmit_ISD_FDIV_MVT_f64_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
1768  case MVT::f80: return FastEmit_ISD_FDIV_MVT_f80_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
1769  case MVT::v4f32: return FastEmit_ISD_FDIV_MVT_v4f32_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
1770  case MVT::v8f32: return FastEmit_ISD_FDIV_MVT_v8f32_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
1771  case MVT::v2f64: return FastEmit_ISD_FDIV_MVT_v2f64_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
1772  case MVT::v4f64: return FastEmit_ISD_FDIV_MVT_v4f64_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
1773  default: return 0;
1774  }
1775}
1776
1777// FastEmit functions for ISD::FMUL.
1778
1779unsigned FastEmit_ISD_FMUL_MVT_f32_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
1780  if (RetVT.SimpleTy != MVT::f32)
1781    return 0;
1782  if ((!Subtarget->hasXMM())) {
1783    return FastEmitInst_rr(X86::MUL_Fp32, X86::RFP32RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1784  }
1785  if ((Subtarget->hasAVX())) {
1786    return FastEmitInst_rr(X86::VMULSSrr, X86::FR32RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1787  }
1788  if ((Subtarget->hasSSE1())) {
1789    return FastEmitInst_rr(X86::MULSSrr, X86::FR32RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1790  }
1791  return 0;
1792}
1793
1794unsigned FastEmit_ISD_FMUL_MVT_f64_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
1795  if (RetVT.SimpleTy != MVT::f64)
1796    return 0;
1797  if ((!Subtarget->hasXMMInt())) {
1798    return FastEmitInst_rr(X86::MUL_Fp64, X86::RFP64RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1799  }
1800  if ((Subtarget->hasAVX())) {
1801    return FastEmitInst_rr(X86::VMULSDrr, X86::FR64RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1802  }
1803  if ((Subtarget->hasSSE2())) {
1804    return FastEmitInst_rr(X86::MULSDrr, X86::FR64RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1805  }
1806  return 0;
1807}
1808
1809unsigned FastEmit_ISD_FMUL_MVT_f80_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
1810  if (RetVT.SimpleTy != MVT::f80)
1811    return 0;
1812  return FastEmitInst_rr(X86::MUL_Fp80, X86::RFP80RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1813}
1814
1815unsigned FastEmit_ISD_FMUL_MVT_v4f32_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
1816  if (RetVT.SimpleTy != MVT::v4f32)
1817    return 0;
1818  if ((Subtarget->hasAVX())) {
1819    return FastEmitInst_rr(X86::VMULPSrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1820  }
1821  if ((Subtarget->hasSSE1())) {
1822    return FastEmitInst_rr(X86::MULPSrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1823  }
1824  return 0;
1825}
1826
1827unsigned FastEmit_ISD_FMUL_MVT_v8f32_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
1828  if (RetVT.SimpleTy != MVT::v8f32)
1829    return 0;
1830  if ((Subtarget->hasAVX())) {
1831    return FastEmitInst_rr(X86::VMULPSYrr, X86::VR256RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1832  }
1833  return 0;
1834}
1835
1836unsigned FastEmit_ISD_FMUL_MVT_v2f64_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
1837  if (RetVT.SimpleTy != MVT::v2f64)
1838    return 0;
1839  if ((Subtarget->hasAVX())) {
1840    return FastEmitInst_rr(X86::VMULPDrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1841  }
1842  if ((Subtarget->hasSSE2())) {
1843    return FastEmitInst_rr(X86::MULPDrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1844  }
1845  return 0;
1846}
1847
1848unsigned FastEmit_ISD_FMUL_MVT_v4f64_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
1849  if (RetVT.SimpleTy != MVT::v4f64)
1850    return 0;
1851  if ((Subtarget->hasAVX())) {
1852    return FastEmitInst_rr(X86::VMULPDYrr, X86::VR256RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1853  }
1854  return 0;
1855}
1856
1857unsigned FastEmit_ISD_FMUL_rr(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
1858  switch (VT.SimpleTy) {
1859  case MVT::f32: return FastEmit_ISD_FMUL_MVT_f32_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
1860  case MVT::f64: return FastEmit_ISD_FMUL_MVT_f64_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
1861  case MVT::f80: return FastEmit_ISD_FMUL_MVT_f80_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
1862  case MVT::v4f32: return FastEmit_ISD_FMUL_MVT_v4f32_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
1863  case MVT::v8f32: return FastEmit_ISD_FMUL_MVT_v8f32_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
1864  case MVT::v2f64: return FastEmit_ISD_FMUL_MVT_v2f64_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
1865  case MVT::v4f64: return FastEmit_ISD_FMUL_MVT_v4f64_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
1866  default: return 0;
1867  }
1868}
1869
1870// FastEmit functions for ISD::FSUB.
1871
1872unsigned FastEmit_ISD_FSUB_MVT_f32_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
1873  if (RetVT.SimpleTy != MVT::f32)
1874    return 0;
1875  if ((!Subtarget->hasXMM())) {
1876    return FastEmitInst_rr(X86::SUB_Fp32, X86::RFP32RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1877  }
1878  if ((Subtarget->hasAVX())) {
1879    return FastEmitInst_rr(X86::VSUBSSrr, X86::FR32RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1880  }
1881  if ((Subtarget->hasSSE1())) {
1882    return FastEmitInst_rr(X86::SUBSSrr, X86::FR32RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1883  }
1884  return 0;
1885}
1886
1887unsigned FastEmit_ISD_FSUB_MVT_f64_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
1888  if (RetVT.SimpleTy != MVT::f64)
1889    return 0;
1890  if ((!Subtarget->hasXMMInt())) {
1891    return FastEmitInst_rr(X86::SUB_Fp64, X86::RFP64RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1892  }
1893  if ((Subtarget->hasAVX())) {
1894    return FastEmitInst_rr(X86::VSUBSDrr, X86::FR64RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1895  }
1896  if ((Subtarget->hasSSE2())) {
1897    return FastEmitInst_rr(X86::SUBSDrr, X86::FR64RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1898  }
1899  return 0;
1900}
1901
1902unsigned FastEmit_ISD_FSUB_MVT_f80_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
1903  if (RetVT.SimpleTy != MVT::f80)
1904    return 0;
1905  return FastEmitInst_rr(X86::SUB_Fp80, X86::RFP80RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1906}
1907
1908unsigned FastEmit_ISD_FSUB_MVT_v4f32_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
1909  if (RetVT.SimpleTy != MVT::v4f32)
1910    return 0;
1911  if ((Subtarget->hasAVX())) {
1912    return FastEmitInst_rr(X86::VSUBPSrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1913  }
1914  if ((Subtarget->hasSSE1())) {
1915    return FastEmitInst_rr(X86::SUBPSrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1916  }
1917  return 0;
1918}
1919
1920unsigned FastEmit_ISD_FSUB_MVT_v8f32_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
1921  if (RetVT.SimpleTy != MVT::v8f32)
1922    return 0;
1923  if ((Subtarget->hasAVX())) {
1924    return FastEmitInst_rr(X86::VSUBPSYrr, X86::VR256RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1925  }
1926  return 0;
1927}
1928
1929unsigned FastEmit_ISD_FSUB_MVT_v2f64_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
1930  if (RetVT.SimpleTy != MVT::v2f64)
1931    return 0;
1932  if ((Subtarget->hasAVX())) {
1933    return FastEmitInst_rr(X86::VSUBPDrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1934  }
1935  if ((Subtarget->hasSSE2())) {
1936    return FastEmitInst_rr(X86::SUBPDrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1937  }
1938  return 0;
1939}
1940
1941unsigned FastEmit_ISD_FSUB_MVT_v4f64_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
1942  if (RetVT.SimpleTy != MVT::v4f64)
1943    return 0;
1944  if ((Subtarget->hasAVX())) {
1945    return FastEmitInst_rr(X86::VSUBPDYrr, X86::VR256RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1946  }
1947  return 0;
1948}
1949
1950unsigned FastEmit_ISD_FSUB_rr(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
1951  switch (VT.SimpleTy) {
1952  case MVT::f32: return FastEmit_ISD_FSUB_MVT_f32_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
1953  case MVT::f64: return FastEmit_ISD_FSUB_MVT_f64_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
1954  case MVT::f80: return FastEmit_ISD_FSUB_MVT_f80_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
1955  case MVT::v4f32: return FastEmit_ISD_FSUB_MVT_v4f32_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
1956  case MVT::v8f32: return FastEmit_ISD_FSUB_MVT_v8f32_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
1957  case MVT::v2f64: return FastEmit_ISD_FSUB_MVT_v2f64_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
1958  case MVT::v4f64: return FastEmit_ISD_FSUB_MVT_v4f64_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
1959  default: return 0;
1960  }
1961}
1962
1963// FastEmit functions for ISD::MUL.
1964
1965unsigned FastEmit_ISD_MUL_MVT_i8_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
1966  if (RetVT.SimpleTy != MVT::i8)
1967    return 0;
1968  BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, DL, TII.get(TargetOpcode::COPY), X86::AL).addReg(Op0);
1969  return FastEmitInst_r(X86::MUL8r, X86::GR8RegisterClass, Op1, Op1IsKill);
1970}
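
// MUL8r implicitly multiplies by AL and writes the 16-bit product to AX, so
// the first operand is copied into X86::AL up front and only the second
// operand is passed to the emitted instruction.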
1971
1972unsigned FastEmit_ISD_MUL_MVT_i16_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
1973  if (RetVT.SimpleTy != MVT::i16)
1974    return 0;
1975  return FastEmitInst_rr(X86::IMUL16rr, X86::GR16RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1976}
1977
1978unsigned FastEmit_ISD_MUL_MVT_i32_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
1979  if (RetVT.SimpleTy != MVT::i32)
1980    return 0;
1981  return FastEmitInst_rr(X86::IMUL32rr, X86::GR32RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1982}
1983
1984unsigned FastEmit_ISD_MUL_MVT_i64_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
1985  if (RetVT.SimpleTy != MVT::i64)
1986    return 0;
1987  return FastEmitInst_rr(X86::IMUL64rr, X86::GR64RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1988}
1989
1990unsigned FastEmit_ISD_MUL_MVT_v8i16_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
1991  if (RetVT.SimpleTy != MVT::v8i16)
1992    return 0;
1993  if ((Subtarget->hasAVX())) {
1994    return FastEmitInst_rr(X86::VPMULLWrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1995  }
1996  if ((Subtarget->hasSSE2())) {
1997    return FastEmitInst_rr(X86::PMULLWrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
1998  }
1999  return 0;
2000}
2001
2002unsigned FastEmit_ISD_MUL_MVT_v4i32_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2003  if (RetVT.SimpleTy != MVT::v4i32)
2004    return 0;
2005  if ((Subtarget->hasAVX())) {
2006    return FastEmitInst_rr(X86::VPMULLDrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2007  }
2008  if ((Subtarget->hasSSE41())) {
2009    return FastEmitInst_rr(X86::PMULLDrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2010  }
2011  return 0;
2012}
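
// Note the stricter predicate: a full 32-bit element-wise multiply needs
// PMULLD, which first appeared in SSE4.1, whereas the v8i16 case above only
// needs the SSE2 PMULLW.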
2013
2014unsigned FastEmit_ISD_MUL_rr(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2015  switch (VT.SimpleTy) {
2016  case MVT::i8: return FastEmit_ISD_MUL_MVT_i8_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2017  case MVT::i16: return FastEmit_ISD_MUL_MVT_i16_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2018  case MVT::i32: return FastEmit_ISD_MUL_MVT_i32_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2019  case MVT::i64: return FastEmit_ISD_MUL_MVT_i64_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2020  case MVT::v8i16: return FastEmit_ISD_MUL_MVT_v8i16_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2021  case MVT::v4i32: return FastEmit_ISD_MUL_MVT_v4i32_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2022  default: return 0;
2023  }
2024}
2025
2026// FastEmit functions for ISD::OR.
2027
2028unsigned FastEmit_ISD_OR_MVT_i8_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2029  if (RetVT.SimpleTy != MVT::i8)
2030    return 0;
2031  return FastEmitInst_rr(X86::OR8rr, X86::GR8RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2032}
2033
2034unsigned FastEmit_ISD_OR_MVT_i16_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2035  if (RetVT.SimpleTy != MVT::i16)
2036    return 0;
2037  return FastEmitInst_rr(X86::OR16rr, X86::GR16RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2038}
2039
2040unsigned FastEmit_ISD_OR_MVT_i32_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2041  if (RetVT.SimpleTy != MVT::i32)
2042    return 0;
2043  return FastEmitInst_rr(X86::OR32rr, X86::GR32RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2044}
2045
2046unsigned FastEmit_ISD_OR_MVT_i64_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2047  if (RetVT.SimpleTy != MVT::i64)
2048    return 0;
2049  return FastEmitInst_rr(X86::OR64rr, X86::GR64RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2050}
2051
2052unsigned FastEmit_ISD_OR_MVT_v2i64_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2053  if (RetVT.SimpleTy != MVT::v2i64)
2054    return 0;
2055  if ((Subtarget->hasAVX())) {
2056    return FastEmitInst_rr(X86::VPORrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2057  }
2058  if ((Subtarget->hasSSE1())) {
2059    return FastEmitInst_rr(X86::ORPSrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2060  }
2061  if ((Subtarget->hasSSE2())) {
2062    return FastEmitInst_rr(X86::PORrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2063  }
2064  return 0;
2065}
2066
2067unsigned FastEmit_ISD_OR_MVT_v4i64_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2068  if (RetVT.SimpleTy != MVT::v4i64)
2069    return 0;
2070  if ((Subtarget->hasAVX())) {
2071    return FastEmitInst_rr(X86::VORPSYrr, X86::VR256RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2072  }
2073  return 0;
2074}
2075
2076unsigned FastEmit_ISD_OR_rr(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2077  switch (VT.SimpleTy) {
2078  case MVT::i8: return FastEmit_ISD_OR_MVT_i8_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2079  case MVT::i16: return FastEmit_ISD_OR_MVT_i16_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2080  case MVT::i32: return FastEmit_ISD_OR_MVT_i32_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2081  case MVT::i64: return FastEmit_ISD_OR_MVT_i64_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2082  case MVT::v2i64: return FastEmit_ISD_OR_MVT_v2i64_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2083  case MVT::v4i64: return FastEmit_ISD_OR_MVT_v4i64_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2084  default: return 0;
2085  }
2086}
2087
2088// FastEmit functions for ISD::ROTL.
2089
2090unsigned FastEmit_ISD_ROTL_MVT_i8_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2091  if (RetVT.SimpleTy != MVT::i8)
2092    return 0;
2093  BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, DL, TII.get(TargetOpcode::COPY), X86::CL).addReg(Op1);
2094  return FastEmitInst_r(X86::ROL8rCL, X86::GR8RegisterClass, Op0, Op0IsKill);
2095}
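
// Variable rotates and shifts on x86 take their count implicitly in CL, so
// the count operand Op1 is copied into X86::CL before emitting the *rCL form.
// The ROTR, SHL, SRA and SRL selectors below use the same recipe.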
2096
2097unsigned FastEmit_ISD_ROTL_rr(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2098  switch (VT.SimpleTy) {
2099  case MVT::i8: return FastEmit_ISD_ROTL_MVT_i8_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2100  default: return 0;
2101  }
2102}
2103
2104// FastEmit functions for ISD::ROTR.
2105
2106unsigned FastEmit_ISD_ROTR_MVT_i8_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2107  if (RetVT.SimpleTy != MVT::i8)
2108    return 0;
2109  BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, DL, TII.get(TargetOpcode::COPY), X86::CL).addReg(Op1);
2110  return FastEmitInst_r(X86::ROR8rCL, X86::GR8RegisterClass, Op0, Op0IsKill);
2111}
2112
2113unsigned FastEmit_ISD_ROTR_rr(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2114  switch (VT.SimpleTy) {
2115  case MVT::i8: return FastEmit_ISD_ROTR_MVT_i8_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2116  default: return 0;
2117  }
2118}
2119
2120// FastEmit functions for ISD::SHL.
2121
2122unsigned FastEmit_ISD_SHL_MVT_i8_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2123  if (RetVT.SimpleTy != MVT::i8)
2124    return 0;
2125  BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, DL, TII.get(TargetOpcode::COPY), X86::CL).addReg(Op1);
2126  return FastEmitInst_r(X86::SHL8rCL, X86::GR8RegisterClass, Op0, Op0IsKill);
2127}
2128
2129unsigned FastEmit_ISD_SHL_rr(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2130  switch (VT.SimpleTy) {
2131  case MVT::i8: return FastEmit_ISD_SHL_MVT_i8_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2132  default: return 0;
2133  }
2134}
2135
2136// FastEmit functions for ISD::SRA.
2137
2138unsigned FastEmit_ISD_SRA_MVT_i8_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2139  if (RetVT.SimpleTy != MVT::i8)
2140    return 0;
2141  BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, DL, TII.get(TargetOpcode::COPY), X86::CL).addReg(Op1);
2142  return FastEmitInst_r(X86::SAR8rCL, X86::GR8RegisterClass, Op0, Op0IsKill);
2143}
2144
2145unsigned FastEmit_ISD_SRA_rr(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2146  switch (VT.SimpleTy) {
2147  case MVT::i8: return FastEmit_ISD_SRA_MVT_i8_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2148  default: return 0;
2149  }
2150}
2151
2152// FastEmit functions for ISD::SRL.
2153
2154unsigned FastEmit_ISD_SRL_MVT_i8_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2155  if (RetVT.SimpleTy != MVT::i8)
2156    return 0;
2157  BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, DL, TII.get(TargetOpcode::COPY), X86::CL).addReg(Op1);
2158  return FastEmitInst_r(X86::SHR8rCL, X86::GR8RegisterClass, Op0, Op0IsKill);
2159}
2160
2161unsigned FastEmit_ISD_SRL_rr(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2162  switch (VT.SimpleTy) {
2163  case MVT::i8: return FastEmit_ISD_SRL_MVT_i8_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2164  default: return 0;
2165  }
2166}
2167
2168// FastEmit functions for ISD::SUB.
2169
2170unsigned FastEmit_ISD_SUB_MVT_i8_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2171  if (RetVT.SimpleTy != MVT::i8)
2172    return 0;
2173  return FastEmitInst_rr(X86::SUB8rr, X86::GR8RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2174}
2175
2176unsigned FastEmit_ISD_SUB_MVT_i16_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2177  if (RetVT.SimpleTy != MVT::i16)
2178    return 0;
2179  return FastEmitInst_rr(X86::SUB16rr, X86::GR16RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2180}
2181
2182unsigned FastEmit_ISD_SUB_MVT_i32_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2183  if (RetVT.SimpleTy != MVT::i32)
2184    return 0;
2185  return FastEmitInst_rr(X86::SUB32rr, X86::GR32RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2186}
2187
2188unsigned FastEmit_ISD_SUB_MVT_i64_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2189  if (RetVT.SimpleTy != MVT::i64)
2190    return 0;
2191  return FastEmitInst_rr(X86::SUB64rr, X86::GR64RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2192}
2193
2194unsigned FastEmit_ISD_SUB_MVT_v16i8_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2195  if (RetVT.SimpleTy != MVT::v16i8)
2196    return 0;
2197  if ((Subtarget->hasAVX())) {
2198    return FastEmitInst_rr(X86::VPSUBBrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2199  }
2200  if ((Subtarget->hasSSE2())) {
2201    return FastEmitInst_rr(X86::PSUBBrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2202  }
2203  return 0;
2204}
2205
2206unsigned FastEmit_ISD_SUB_MVT_v8i16_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2207  if (RetVT.SimpleTy != MVT::v8i16)
2208    return 0;
2209  if ((Subtarget->hasAVX())) {
2210    return FastEmitInst_rr(X86::VPSUBWrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2211  }
2212  if ((Subtarget->hasSSE2())) {
2213    return FastEmitInst_rr(X86::PSUBWrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2214  }
2215  return 0;
2216}
2217
2218unsigned FastEmit_ISD_SUB_MVT_v4i32_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2219  if (RetVT.SimpleTy != MVT::v4i32)
2220    return 0;
2221  if ((Subtarget->hasAVX())) {
2222    return FastEmitInst_rr(X86::VPSUBDrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2223  }
2224  if ((Subtarget->hasSSE2())) {
2225    return FastEmitInst_rr(X86::PSUBDrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2226  }
2227  return 0;
2228}
2229
2230unsigned FastEmit_ISD_SUB_MVT_v2i64_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2231  if (RetVT.SimpleTy != MVT::v2i64)
2232    return 0;
2233  if ((Subtarget->hasAVX())) {
2234    return FastEmitInst_rr(X86::VPSUBQrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2235  }
2236  if ((Subtarget->hasSSE2())) {
2237    return FastEmitInst_rr(X86::PSUBQrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2238  }
2239  return 0;
2240}
2241
2242unsigned FastEmit_ISD_SUB_rr(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2243  switch (VT.SimpleTy) {
2244  case MVT::i8: return FastEmit_ISD_SUB_MVT_i8_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2245  case MVT::i16: return FastEmit_ISD_SUB_MVT_i16_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2246  case MVT::i32: return FastEmit_ISD_SUB_MVT_i32_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2247  case MVT::i64: return FastEmit_ISD_SUB_MVT_i64_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2248  case MVT::v16i8: return FastEmit_ISD_SUB_MVT_v16i8_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2249  case MVT::v8i16: return FastEmit_ISD_SUB_MVT_v8i16_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2250  case MVT::v4i32: return FastEmit_ISD_SUB_MVT_v4i32_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2251  case MVT::v2i64: return FastEmit_ISD_SUB_MVT_v2i64_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2252  default: return 0;
2253  }
2254}
2255
2256// FastEmit functions for ISD::XOR.
2257
2258unsigned FastEmit_ISD_XOR_MVT_i8_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2259  if (RetVT.SimpleTy != MVT::i8)
2260    return 0;
2261  return FastEmitInst_rr(X86::XOR8rr, X86::GR8RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2262}
2263
2264unsigned FastEmit_ISD_XOR_MVT_i16_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2265  if (RetVT.SimpleTy != MVT::i16)
2266    return 0;
2267  return FastEmitInst_rr(X86::XOR16rr, X86::GR16RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2268}
2269
2270unsigned FastEmit_ISD_XOR_MVT_i32_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2271  if (RetVT.SimpleTy != MVT::i32)
2272    return 0;
2273  return FastEmitInst_rr(X86::XOR32rr, X86::GR32RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2274}
2275
2276unsigned FastEmit_ISD_XOR_MVT_i64_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2277  if (RetVT.SimpleTy != MVT::i64)
2278    return 0;
2279  return FastEmitInst_rr(X86::XOR64rr, X86::GR64RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2280}
2281
2282unsigned FastEmit_ISD_XOR_MVT_v2i64_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2283  if (RetVT.SimpleTy != MVT::v2i64)
2284    return 0;
2285  if ((Subtarget->hasAVX())) {
2286    return FastEmitInst_rr(X86::VPXORrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2287  }
2288  if ((Subtarget->hasSSE1())) {
2289    return FastEmitInst_rr(X86::XORPSrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2290  }
2291  if ((Subtarget->hasSSE2())) {
2292    return FastEmitInst_rr(X86::PXORrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2293  }
2294  return 0;
2295}
2296
2297unsigned FastEmit_ISD_XOR_MVT_v4i64_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2298  if (RetVT.SimpleTy != MVT::v4i64)
2299    return 0;
2300  if ((Subtarget->hasAVX())) {
2301    return FastEmitInst_rr(X86::VXORPSYrr, X86::VR256RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2302  }
2303  return 0;
2304}
2305
2306unsigned FastEmit_ISD_XOR_rr(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2307  switch (VT.SimpleTy) {
2308  case MVT::i8: return FastEmit_ISD_XOR_MVT_i8_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2309  case MVT::i16: return FastEmit_ISD_XOR_MVT_i16_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2310  case MVT::i32: return FastEmit_ISD_XOR_MVT_i32_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2311  case MVT::i64: return FastEmit_ISD_XOR_MVT_i64_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2312  case MVT::v2i64: return FastEmit_ISD_XOR_MVT_v2i64_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2313  case MVT::v4i64: return FastEmit_ISD_XOR_MVT_v4i64_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2314  default: return 0;
2315  }
2316}
2317
2318// FastEmit functions for X86ISD::ANDNP.
2319
2320unsigned FastEmit_X86ISD_ANDNP_MVT_v2i64_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2321  if (RetVT.SimpleTy != MVT::v2i64)
2322    return 0;
2323  if ((Subtarget->hasAVX())) {
2324    return FastEmitInst_rr(X86::VPANDNrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2325  }
2326  if ((Subtarget->hasSSE1())) {
2327    return FastEmitInst_rr(X86::ANDNPSrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2328  }
2329  return 0;
2330}
2331
2332unsigned FastEmit_X86ISD_ANDNP_MVT_v4i64_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2333  if (RetVT.SimpleTy != MVT::v4i64)
2334    return 0;
2335  if ((Subtarget->hasAVX())) {
2336    return FastEmitInst_rr(X86::VANDNPSYrr, X86::VR256RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2337  }
2338  return 0;
2339}
2340
2341unsigned FastEmit_X86ISD_ANDNP_rr(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2342  switch (VT.SimpleTy) {
2343  case MVT::v2i64: return FastEmit_X86ISD_ANDNP_MVT_v2i64_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2344  case MVT::v4i64: return FastEmit_X86ISD_ANDNP_MVT_v4i64_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2345  default: return 0;
2346  }
2347}
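
// X86ISD::ANDNP computes (~Op0) & Op1, matching the PANDN/ANDNPS semantics in
// which it is the destination operand that gets complemented.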
2348
2349// FastEmit functions for X86ISD::BT.
2350
2351unsigned FastEmit_X86ISD_BT_MVT_i16_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2352  if (RetVT.SimpleTy != MVT::i32)
2353    return 0;
2354  return FastEmitInst_rr(X86::BT16rr, X86::GR16RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2355}
2356
2357unsigned FastEmit_X86ISD_BT_MVT_i32_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2358  if (RetVT.SimpleTy != MVT::i32)
2359    return 0;
2360  return FastEmitInst_rr(X86::BT32rr, X86::GR32RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2361}
2362
2363unsigned FastEmit_X86ISD_BT_MVT_i64_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2364  if (RetVT.SimpleTy != MVT::i32)
2365    return 0;
2366  return FastEmitInst_rr(X86::BT64rr, X86::GR64RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2367}
2368
2369unsigned FastEmit_X86ISD_BT_rr(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2370  switch (VT.SimpleTy) {
2371  case MVT::i16: return FastEmit_X86ISD_BT_MVT_i16_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2372  case MVT::i32: return FastEmit_X86ISD_BT_MVT_i32_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2373  case MVT::i64: return FastEmit_X86ISD_BT_MVT_i64_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2374  default: return 0;
2375  }
2376}
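
// X86ISD::BT, like X86ISD::CMP below, produces EFLAGS rather than a normal
// value; the flags result is modeled as MVT::i32, which is why every leaf
// above insists on RetVT == MVT::i32.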
2377
2378// FastEmit functions for X86ISD::CMP.
2379
2380unsigned FastEmit_X86ISD_CMP_MVT_i8_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2381  if (RetVT.SimpleTy != MVT::i32)
2382    return 0;
2383  return FastEmitInst_rr(X86::CMP8rr, X86::GR8RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2384}
2385
2386unsigned FastEmit_X86ISD_CMP_MVT_i16_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2387  if (RetVT.SimpleTy != MVT::i32)
2388    return 0;
2389  return FastEmitInst_rr(X86::CMP16rr, X86::GR16RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2390}
2391
2392unsigned FastEmit_X86ISD_CMP_MVT_i32_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2393  if (RetVT.SimpleTy != MVT::i32)
2394    return 0;
2395  return FastEmitInst_rr(X86::CMP32rr, X86::GR32RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2396}
2397
2398unsigned FastEmit_X86ISD_CMP_MVT_i64_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2399  if (RetVT.SimpleTy != MVT::i32)
2400    return 0;
2401  return FastEmitInst_rr(X86::CMP64rr, X86::GR64RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2402}
2403
2404unsigned FastEmit_X86ISD_CMP_MVT_f32_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2405  if (RetVT.SimpleTy != MVT::i32)
2406    return 0;
2407  if ((!Subtarget->hasXMM())) {
2408    return FastEmitInst_rr(X86::UCOM_FpIr32, X86::RFP32RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2409  }
2410  if ((Subtarget->hasAVX())) {
2411    return FastEmitInst_rr(X86::VUCOMISSrr, X86::FR32RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2412  }
2413  if ((Subtarget->hasSSE1())) {
2414    return FastEmitInst_rr(X86::UCOMISSrr, X86::FR32RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2415  }
2416  return 0;
2417}
2418
2419unsigned FastEmit_X86ISD_CMP_MVT_f64_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2420  if (RetVT.SimpleTy != MVT::i32)
2421    return 0;
2422  if ((!Subtarget->hasXMMInt())) {
2423    return FastEmitInst_rr(X86::UCOM_FpIr64, X86::RFP64RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2424  }
2425  if ((Subtarget->hasAVX())) {
2426    return FastEmitInst_rr(X86::VUCOMISDrr, X86::FR64RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2427  }
2428  if ((Subtarget->hasSSE2())) {
2429    return FastEmitInst_rr(X86::UCOMISDrr, X86::FR64RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2430  }
2431  return 0;
2432}
2433
2434unsigned FastEmit_X86ISD_CMP_MVT_f80_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2435  if (RetVT.SimpleTy != MVT::i32)
2436    return 0;
2437  return FastEmitInst_rr(X86::UCOM_FpIr80, X86::RFP80RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2438}
2439
2440unsigned FastEmit_X86ISD_CMP_rr(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2441  switch (VT.SimpleTy) {
2442  case MVT::i8: return FastEmit_X86ISD_CMP_MVT_i8_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2443  case MVT::i16: return FastEmit_X86ISD_CMP_MVT_i16_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2444  case MVT::i32: return FastEmit_X86ISD_CMP_MVT_i32_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2445  case MVT::i64: return FastEmit_X86ISD_CMP_MVT_i64_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2446  case MVT::f32: return FastEmit_X86ISD_CMP_MVT_f32_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2447  case MVT::f64: return FastEmit_X86ISD_CMP_MVT_f64_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2448  case MVT::f80: return FastEmit_X86ISD_CMP_MVT_f80_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2449  default: return 0;
2450  }
2451}
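
// Without SSE, the scalar FP compares fall back to the x87 UCOM_FpIr pseudos,
// which are later expanded to FUCOMI-family compares that set EFLAGS directly,
// so the flag consumers look the same as on the UCOMISS/UCOMISD paths.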
2452
2453// FastEmit functions for X86ISD::COMI.
2454
2455unsigned FastEmit_X86ISD_COMI_MVT_v4f32_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2456  if (RetVT.SimpleTy != MVT::i32)
2457    return 0;
2458  if ((Subtarget->hasAVX())) {
2459    return FastEmitInst_rr(X86::Int_VCOMISSrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2460  }
2461  if ((Subtarget->hasSSE1())) {
2462    return FastEmitInst_rr(X86::Int_COMISSrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2463  }
2464  return 0;
2465}
2466
2467unsigned FastEmit_X86ISD_COMI_MVT_v2f64_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2468  if (RetVT.SimpleTy != MVT::i32)
2469    return 0;
2470  if ((Subtarget->hasAVX())) {
2471    return FastEmitInst_rr(X86::Int_VCOMISDrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2472  }
2473  if ((Subtarget->hasSSE2())) {
2474    return FastEmitInst_rr(X86::Int_COMISDrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2475  }
2476  return 0;
2477}
2478
2479unsigned FastEmit_X86ISD_COMI_rr(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2480  switch (VT.SimpleTy) {
2481  case MVT::v4f32: return FastEmit_X86ISD_COMI_MVT_v4f32_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2482  case MVT::v2f64: return FastEmit_X86ISD_COMI_MVT_v2f64_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2483  default: return 0;
2484  }
2485}
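
// X86ISD::COMI is produced from the (V)COMISS/(V)COMISD intrinsics: it
// compares only the low scalar element of its v4f32/v2f64 operands and, like
// CMP above, yields EFLAGS modeled as i32.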
2486
2487// FastEmit functions for X86ISD::FAND.
2488
2489unsigned FastEmit_X86ISD_FAND_MVT_f32_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2490  if (RetVT.SimpleTy != MVT::f32)
2491    return 0;
2492  if ((Subtarget->hasAVX())) {
2493    return FastEmitInst_rr(X86::VFsANDPSrr, X86::FR32RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2494  }
2495  if ((Subtarget->hasSSE1())) {
2496    return FastEmitInst_rr(X86::FsANDPSrr, X86::FR32RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2497  }
2498  return 0;
2499}
2500
2501unsigned FastEmit_X86ISD_FAND_MVT_f64_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2502  if (RetVT.SimpleTy != MVT::f64)
2503    return 0;
2504  if ((Subtarget->hasAVX())) {
2505    return FastEmitInst_rr(X86::VFsANDPDrr, X86::FR64RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2506  }
2507  if ((Subtarget->hasSSE2())) {
2508    return FastEmitInst_rr(X86::FsANDPDrr, X86::FR64RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2509  }
2510  return 0;
2511}
2512
2513unsigned FastEmit_X86ISD_FAND_rr(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2514  switch (VT.SimpleTy) {
2515  case MVT::f32: return FastEmit_X86ISD_FAND_MVT_f32_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2516  case MVT::f64: return FastEmit_X86ISD_FAND_MVT_f64_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2517  default: return 0;
2518  }
2519}
2520
2521// FastEmit functions for X86ISD::FHADD.
2522
2523unsigned FastEmit_X86ISD_FHADD_MVT_v4f32_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2524  if (RetVT.SimpleTy != MVT::v4f32)
2525    return 0;
2526  if ((Subtarget->hasAVX())) {
2527    return FastEmitInst_rr(X86::VHADDPSrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2528  }
2529  if ((Subtarget->hasSSE3())) {
2530    return FastEmitInst_rr(X86::HADDPSrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2531  }
2532  return 0;
2533}
2534
2535unsigned FastEmit_X86ISD_FHADD_MVT_v8f32_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2536  if (RetVT.SimpleTy != MVT::v8f32)
2537    return 0;
2538  if ((Subtarget->hasAVX())) {
2539    return FastEmitInst_rr(X86::VHADDPSYrr, X86::VR256RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2540  }
2541  return 0;
2542}
2543
2544unsigned FastEmit_X86ISD_FHADD_MVT_v2f64_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2545  if (RetVT.SimpleTy != MVT::v2f64)
2546    return 0;
2547  if ((Subtarget->hasAVX())) {
2548    return FastEmitInst_rr(X86::VHADDPDrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2549  }
2550  if ((Subtarget->hasSSE3())) {
2551    return FastEmitInst_rr(X86::HADDPDrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2552  }
2553  return 0;
2554}
2555
2556unsigned FastEmit_X86ISD_FHADD_MVT_v4f64_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2557  if (RetVT.SimpleTy != MVT::v4f64)
2558    return 0;
2559  if ((Subtarget->hasAVX())) {
2560    return FastEmitInst_rr(X86::VHADDPDYrr, X86::VR256RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2561  }
2562  return 0;
2563}
2564
2565unsigned FastEmit_X86ISD_FHADD_rr(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2566  switch (VT.SimpleTy) {
2567  case MVT::v4f32: return FastEmit_X86ISD_FHADD_MVT_v4f32_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2568  case MVT::v8f32: return FastEmit_X86ISD_FHADD_MVT_v8f32_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2569  case MVT::v2f64: return FastEmit_X86ISD_FHADD_MVT_v2f64_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2570  case MVT::v4f64: return FastEmit_X86ISD_FHADD_MVT_v4f64_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2571  default: return 0;
2572  }
2573}
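
// FHADD/FHSUB map to the SSE3/AVX horizontal instructions (HADDPS, HADDPD,
// and friends), which combine adjacent element pairs within each source
// operand rather than operating element-wise across the two operands.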
2574
2575// FastEmit functions for X86ISD::FHSUB.
2576
2577unsigned FastEmit_X86ISD_FHSUB_MVT_v4f32_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2578  if (RetVT.SimpleTy != MVT::v4f32)
2579    return 0;
2580  if ((Subtarget->hasAVX())) {
2581    return FastEmitInst_rr(X86::VHSUBPSrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2582  }
2583  if ((Subtarget->hasSSE3())) {
2584    return FastEmitInst_rr(X86::HSUBPSrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2585  }
2586  return 0;
2587}
2588
2589unsigned FastEmit_X86ISD_FHSUB_MVT_v8f32_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2590  if (RetVT.SimpleTy != MVT::v8f32)
2591    return 0;
2592  if ((Subtarget->hasAVX())) {
2593    return FastEmitInst_rr(X86::VHSUBPSYrr, X86::VR256RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2594  }
2595  return 0;
2596}
2597
2598unsigned FastEmit_X86ISD_FHSUB_MVT_v2f64_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2599  if (RetVT.SimpleTy != MVT::v2f64)
2600    return 0;
2601  if ((Subtarget->hasAVX())) {
2602    return FastEmitInst_rr(X86::VHSUBPDrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2603  }
2604  if ((Subtarget->hasSSE3())) {
2605    return FastEmitInst_rr(X86::HSUBPDrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2606  }
2607  return 0;
2608}
2609
2610unsigned FastEmit_X86ISD_FHSUB_MVT_v4f64_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2611  if (RetVT.SimpleTy != MVT::v4f64)
2612    return 0;
2613  if ((Subtarget->hasAVX())) {
2614    return FastEmitInst_rr(X86::VHSUBPDYrr, X86::VR256RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2615  }
2616  return 0;
2617}
2618
2619unsigned FastEmit_X86ISD_FHSUB_rr(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2620  switch (VT.SimpleTy) {
2621  case MVT::v4f32: return FastEmit_X86ISD_FHSUB_MVT_v4f32_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2622  case MVT::v8f32: return FastEmit_X86ISD_FHSUB_MVT_v8f32_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2623  case MVT::v2f64: return FastEmit_X86ISD_FHSUB_MVT_v2f64_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2624  case MVT::v4f64: return FastEmit_X86ISD_FHSUB_MVT_v4f64_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2625  default: return 0;
2626  }
2627}
2628
2629// FastEmit functions for X86ISD::FMAX.
2630
2631unsigned FastEmit_X86ISD_FMAX_MVT_f32_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2632  if (RetVT.SimpleTy != MVT::f32)
2633    return 0;
2634  if ((Subtarget->hasAVX())) {
2635    return FastEmitInst_rr(X86::VMAXSSrr, X86::FR32RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2636  }
2637  if ((Subtarget->hasSSE1())) {
2638    return FastEmitInst_rr(X86::MAXSSrr, X86::FR32RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2639  }
2640  return 0;
2641}
2642
2643unsigned FastEmit_X86ISD_FMAX_MVT_f64_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2644  if (RetVT.SimpleTy != MVT::f64)
2645    return 0;
2646  if ((Subtarget->hasAVX())) {
2647    return FastEmitInst_rr(X86::VMAXSDrr, X86::FR64RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2648  }
2649  if ((Subtarget->hasSSE2())) {
2650    return FastEmitInst_rr(X86::MAXSDrr, X86::FR64RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2651  }
2652  return 0;
2653}
2654
2655unsigned FastEmit_X86ISD_FMAX_MVT_v4f32_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2656  if (RetVT.SimpleTy != MVT::v4f32)
2657    return 0;
2658  if ((Subtarget->hasAVX())) {
2659    return FastEmitInst_rr(X86::VMAXPSrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2660  }
2661  if ((Subtarget->hasSSE1())) {
2662    return FastEmitInst_rr(X86::MAXPSrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2663  }
2664  return 0;
2665}
2666
2667unsigned FastEmit_X86ISD_FMAX_MVT_v8f32_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2668  if (RetVT.SimpleTy != MVT::v8f32)
2669    return 0;
2670  if ((Subtarget->hasAVX())) {
2671    return FastEmitInst_rr(X86::VMAXPSYrr, X86::VR256RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2672  }
2673  return 0;
2674}
2675
2676unsigned FastEmit_X86ISD_FMAX_MVT_v2f64_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2677  if (RetVT.SimpleTy != MVT::v2f64)
2678    return 0;
2679  if ((Subtarget->hasAVX())) {
2680    return FastEmitInst_rr(X86::VMAXPDrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2681  }
2682  if ((Subtarget->hasSSE2())) {
2683    return FastEmitInst_rr(X86::MAXPDrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2684  }
2685  return 0;
2686}
2687
2688unsigned FastEmit_X86ISD_FMAX_MVT_v4f64_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2689  if (RetVT.SimpleTy != MVT::v4f64)
2690    return 0;
2691  if ((Subtarget->hasAVX())) {
2692    return FastEmitInst_rr(X86::VMAXPDYrr, X86::VR256RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2693  }
2694  return 0;
2695}
2696
2697unsigned FastEmit_X86ISD_FMAX_rr(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2698  switch (VT.SimpleTy) {
2699  case MVT::f32: return FastEmit_X86ISD_FMAX_MVT_f32_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2700  case MVT::f64: return FastEmit_X86ISD_FMAX_MVT_f64_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2701  case MVT::v4f32: return FastEmit_X86ISD_FMAX_MVT_v4f32_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2702  case MVT::v8f32: return FastEmit_X86ISD_FMAX_MVT_v8f32_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2703  case MVT::v2f64: return FastEmit_X86ISD_FMAX_MVT_v2f64_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2704  case MVT::v4f64: return FastEmit_X86ISD_FMAX_MVT_v4f64_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2705  default: return 0;
2706  }
2707}
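
// MAXSS/MAXPS and the MIN forms below implement x86 semantics rather than
// IEEE maxNum/minNum: when an operand is NaN (and for +0/-0 ties) the second
// source operand is returned, so the DAG only forms X86ISD::FMAX/FMIN when
// those corner cases are known not to matter.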
2708
2709// FastEmit functions for X86ISD::FMIN.
2710
2711unsigned FastEmit_X86ISD_FMIN_MVT_f32_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2712  if (RetVT.SimpleTy != MVT::f32)
2713    return 0;
2714  if ((Subtarget->hasAVX())) {
2715    return FastEmitInst_rr(X86::VMINSSrr, X86::FR32RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2716  }
2717  if ((Subtarget->hasSSE1())) {
2718    return FastEmitInst_rr(X86::MINSSrr, X86::FR32RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2719  }
2720  return 0;
2721}
2722
2723unsigned FastEmit_X86ISD_FMIN_MVT_f64_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2724  if (RetVT.SimpleTy != MVT::f64)
2725    return 0;
2726  if ((Subtarget->hasAVX())) {
2727    return FastEmitInst_rr(X86::VMINSDrr, X86::FR64RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2728  }
2729  if ((Subtarget->hasSSE2())) {
2730    return FastEmitInst_rr(X86::MINSDrr, X86::FR64RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2731  }
2732  return 0;
2733}
2734
2735unsigned FastEmit_X86ISD_FMIN_MVT_v4f32_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2736  if (RetVT.SimpleTy != MVT::v4f32)
2737    return 0;
2738  if ((Subtarget->hasAVX())) {
2739    return FastEmitInst_rr(X86::VMINPSrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2740  }
2741  if ((Subtarget->hasSSE1())) {
2742    return FastEmitInst_rr(X86::MINPSrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2743  }
2744  return 0;
2745}
2746
2747unsigned FastEmit_X86ISD_FMIN_MVT_v8f32_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2748  if (RetVT.SimpleTy != MVT::v8f32)
2749    return 0;
2750  if ((Subtarget->hasAVX())) {
2751    return FastEmitInst_rr(X86::VMINPSYrr, X86::VR256RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2752  }
2753  return 0;
2754}
2755
2756unsigned FastEmit_X86ISD_FMIN_MVT_v2f64_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2757  if (RetVT.SimpleTy != MVT::v2f64)
2758    return 0;
2759  if ((Subtarget->hasAVX())) {
2760    return FastEmitInst_rr(X86::VMINPDrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2761  }
2762  if ((Subtarget->hasSSE2())) {
2763    return FastEmitInst_rr(X86::MINPDrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2764  }
2765  return 0;
2766}
2767
2768unsigned FastEmit_X86ISD_FMIN_MVT_v4f64_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2769  if (RetVT.SimpleTy != MVT::v4f64)
2770    return 0;
2771  if ((Subtarget->hasAVX())) {
2772    return FastEmitInst_rr(X86::VMINPDYrr, X86::VR256RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2773  }
2774  return 0;
2775}
2776
2777unsigned FastEmit_X86ISD_FMIN_rr(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2778  switch (VT.SimpleTy) {
2779  case MVT::f32: return FastEmit_X86ISD_FMIN_MVT_f32_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2780  case MVT::f64: return FastEmit_X86ISD_FMIN_MVT_f64_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2781  case MVT::v4f32: return FastEmit_X86ISD_FMIN_MVT_v4f32_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2782  case MVT::v8f32: return FastEmit_X86ISD_FMIN_MVT_v8f32_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2783  case MVT::v2f64: return FastEmit_X86ISD_FMIN_MVT_v2f64_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2784  case MVT::v4f64: return FastEmit_X86ISD_FMIN_MVT_v4f64_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2785  default: return 0;
2786  }
2787}
2788
2789// FastEmit functions for X86ISD::FOR.
2790
2791unsigned FastEmit_X86ISD_FOR_MVT_f32_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2792  if (RetVT.SimpleTy != MVT::f32)
2793    return 0;
2794  if ((Subtarget->hasAVX())) {
2795    return FastEmitInst_rr(X86::VFsORPSrr, X86::FR32RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2796  }
2797  if ((Subtarget->hasSSE1())) {
2798    return FastEmitInst_rr(X86::FsORPSrr, X86::FR32RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2799  }
2800  return 0;
2801}
2802
2803unsigned FastEmit_X86ISD_FOR_MVT_f64_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2804  if (RetVT.SimpleTy != MVT::f64)
2805    return 0;
2806  if ((Subtarget->hasAVX())) {
2807    return FastEmitInst_rr(X86::VFsORPDrr, X86::FR64RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2808  }
2809  if ((Subtarget->hasSSE2())) {
2810    return FastEmitInst_rr(X86::FsORPDrr, X86::FR64RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2811  }
2812  return 0;
2813}
2814
2815unsigned FastEmit_X86ISD_FOR_rr(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2816  switch (VT.SimpleTy) {
2817  case MVT::f32: return FastEmit_X86ISD_FOR_MVT_f32_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2818  case MVT::f64: return FastEmit_X86ISD_FOR_MVT_f64_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2819  default: return 0;
2820  }
2821}
2822
2823// FastEmit functions for X86ISD::FXOR.
2824
2825unsigned FastEmit_X86ISD_FXOR_MVT_f32_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2826  if (RetVT.SimpleTy != MVT::f32)
2827    return 0;
2828  if ((Subtarget->hasAVX())) {
2829    return FastEmitInst_rr(X86::VFsXORPSrr, X86::FR32RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2830  }
2831  if ((Subtarget->hasSSE1())) {
2832    return FastEmitInst_rr(X86::FsXORPSrr, X86::FR32RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2833  }
2834  return 0;
2835}
2836
2837unsigned FastEmit_X86ISD_FXOR_MVT_f64_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2838  if (RetVT.SimpleTy != MVT::f64)
2839    return 0;
2840  if ((Subtarget->hasAVX())) {
2841    return FastEmitInst_rr(X86::VFsXORPDrr, X86::FR64RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2842  }
2843  if ((Subtarget->hasSSE2())) {
2844    return FastEmitInst_rr(X86::FsXORPDrr, X86::FR64RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2845  }
2846  return 0;
2847}
2848
2849unsigned FastEmit_X86ISD_FXOR_rr(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2850  switch (VT.SimpleTy) {
2851  case MVT::f32: return FastEmit_X86ISD_FXOR_MVT_f32_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2852  case MVT::f64: return FastEmit_X86ISD_FXOR_MVT_f64_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2853  default: return 0;
2854  }
2855}
2856
2857// FastEmit functions for X86ISD::MOVHLPS.
2858
2859unsigned FastEmit_X86ISD_MOVHLPS_MVT_v4i32_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2860  if (RetVT.SimpleTy != MVT::v4i32)
2861    return 0;
2862  if ((Subtarget->hasAVX())) {
2863    return FastEmitInst_rr(X86::VMOVHLPSrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2864  }
2865  if ((Subtarget->hasSSE1())) {
2866    return FastEmitInst_rr(X86::MOVHLPSrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2867  }
2868  return 0;
2869}
2870
2871unsigned FastEmit_X86ISD_MOVHLPS_MVT_v4f32_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2872  if (RetVT.SimpleTy != MVT::v4f32)
2873    return 0;
2874  if ((Subtarget->hasAVX())) {
2875    return FastEmitInst_rr(X86::VMOVHLPSrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2876  }
2877  if ((Subtarget->hasSSE1())) {
2878    return FastEmitInst_rr(X86::MOVHLPSrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2879  }
2880  return 0;
2881}
2882
2883unsigned FastEmit_X86ISD_MOVHLPS_rr(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2884  switch (VT.SimpleTy) {
2885  case MVT::v4i32: return FastEmit_X86ISD_MOVHLPS_MVT_v4i32_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2886  case MVT::v4f32: return FastEmit_X86ISD_MOVHLPS_MVT_v4f32_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2887  default: return 0;
2888  }
2889}
2890
2891// FastEmit functions for X86ISD::MOVLHPS.
2892
2893unsigned FastEmit_X86ISD_MOVLHPS_MVT_v4i32_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2894  if (RetVT.SimpleTy != MVT::v4i32)
2895    return 0;
2896  if ((Subtarget->hasAVX())) {
2897    return FastEmitInst_rr(X86::VMOVLHPSrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2898  }
2899  if ((Subtarget->hasSSE1())) {
2900    return FastEmitInst_rr(X86::MOVLHPSrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2901  }
2902  return 0;
2903}
2904
2905unsigned FastEmit_X86ISD_MOVLHPS_MVT_v2i64_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2906  if (RetVT.SimpleTy != MVT::v2i64)
2907    return 0;
2908  if ((Subtarget->hasAVX())) {
2909    return FastEmitInst_rr(X86::VMOVLHPSrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2910  }
2911  if ((Subtarget->hasSSE1())) {
2912    return FastEmitInst_rr(X86::MOVLHPSrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2913  }
2914  return 0;
2915}
2916
2917unsigned FastEmit_X86ISD_MOVLHPS_MVT_v4f32_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2918  if (RetVT.SimpleTy != MVT::v4f32)
2919    return 0;
2920  if ((Subtarget->hasAVX())) {
2921    return FastEmitInst_rr(X86::VMOVLHPSrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2922  }
2923  if ((Subtarget->hasSSE1())) {
2924    return FastEmitInst_rr(X86::MOVLHPSrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2925  }
2926  return 0;
2927}
2928
2929unsigned FastEmit_X86ISD_MOVLHPS_rr(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2930  switch (VT.SimpleTy) {
2931  case MVT::v4i32: return FastEmit_X86ISD_MOVLHPS_MVT_v4i32_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2932  case MVT::v2i64: return FastEmit_X86ISD_MOVLHPS_MVT_v2i64_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2933  case MVT::v4f32: return FastEmit_X86ISD_MOVLHPS_MVT_v4f32_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2934  default: return 0;
2935  }
2936}
2937
2938// FastEmit functions for X86ISD::PCMPEQB.
2939
2940unsigned FastEmit_X86ISD_PCMPEQB_MVT_v16i8_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2941  if (RetVT.SimpleTy != MVT::v16i8)
2942    return 0;
2943  if ((Subtarget->hasAVX())) {
2944    return FastEmitInst_rr(X86::VPCMPEQBrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2945  }
2946  if ((Subtarget->hasSSE2())) {
2947    return FastEmitInst_rr(X86::PCMPEQBrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2948  }
2949  return 0;
2950}
2951
2952unsigned FastEmit_X86ISD_PCMPEQB_rr(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2953  switch (VT.SimpleTy) {
2954  case MVT::v16i8: return FastEmit_X86ISD_PCMPEQB_MVT_v16i8_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2955  default: return 0;
2956  }
2957}
2958
2959// FastEmit functions for X86ISD::PCMPEQD.
2960
2961unsigned FastEmit_X86ISD_PCMPEQD_MVT_v4i32_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2962  if (RetVT.SimpleTy != MVT::v4i32)
2963    return 0;
2964  if ((Subtarget->hasAVX())) {
2965    return FastEmitInst_rr(X86::VPCMPEQDrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2966  }
2967  if ((Subtarget->hasSSE2())) {
2968    return FastEmitInst_rr(X86::PCMPEQDrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2969  }
2970  return 0;
2971}
2972
2973unsigned FastEmit_X86ISD_PCMPEQD_rr(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2974  switch (VT.SimpleTy) {
2975  case MVT::v4i32: return FastEmit_X86ISD_PCMPEQD_MVT_v4i32_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2976  default: return 0;
2977  }
2978}
2979
2980// FastEmit functions for X86ISD::PCMPEQQ.
2981
2982unsigned FastEmit_X86ISD_PCMPEQQ_MVT_v2i64_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2983  if (RetVT.SimpleTy != MVT::v2i64)
2984    return 0;
2985  if ((Subtarget->hasAVX()))
2986    return FastEmitInst_rr(X86::VPCMPEQQrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2987  if ((Subtarget->hasSSE41()))
2988    return FastEmitInst_rr(X86::PCMPEQQrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
2989  return 0;
2990}
2991
2992unsigned FastEmit_X86ISD_PCMPEQQ_rr(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
2993  switch (VT.SimpleTy) {
2994  case MVT::v2i64: return FastEmit_X86ISD_PCMPEQQ_MVT_v2i64_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
2995  default: return 0;
2996  }
2997}
2998
2999// FastEmit functions for X86ISD::PCMPEQW.
3000
3001unsigned FastEmit_X86ISD_PCMPEQW_MVT_v8i16_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3002  if (RetVT.SimpleTy != MVT::v8i16)
3003    return 0;
3004  if ((Subtarget->hasAVX())) {
3005    return FastEmitInst_rr(X86::VPCMPEQWrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3006  }
3007  if ((Subtarget->hasSSE2())) {
3008    return FastEmitInst_rr(X86::PCMPEQWrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3009  }
3010  return 0;
3011}
3012
3013unsigned FastEmit_X86ISD_PCMPEQW_rr(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3014  switch (VT.SimpleTy) {
3015  case MVT::v8i16: return FastEmit_X86ISD_PCMPEQW_MVT_v8i16_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3016  default: return 0;
3017  }
3018}
3019
3020// FastEmit functions for X86ISD::PCMPGTB.
3021
3022unsigned FastEmit_X86ISD_PCMPGTB_MVT_v16i8_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3023  if (RetVT.SimpleTy != MVT::v16i8)
3024    return 0;
3025  if ((Subtarget->hasAVX())) {
3026    return FastEmitInst_rr(X86::VPCMPGTBrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3027  }
3028  if ((Subtarget->hasSSE2())) {
3029    return FastEmitInst_rr(X86::PCMPGTBrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3030  }
3031  return 0;
3032}
3033
3034unsigned FastEmit_X86ISD_PCMPGTB_rr(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3035  switch (VT.SimpleTy) {
3036  case MVT::v16i8: return FastEmit_X86ISD_PCMPGTB_MVT_v16i8_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3037  default: return 0;
3038  }
3039}
3040
3041// FastEmit functions for X86ISD::PCMPGTD.
3042
3043unsigned FastEmit_X86ISD_PCMPGTD_MVT_v4i32_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3044  if (RetVT.SimpleTy != MVT::v4i32)
3045    return 0;
3046  if ((Subtarget->hasAVX())) {
3047    return FastEmitInst_rr(X86::VPCMPGTDrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3048  }
3049  if ((Subtarget->hasSSE2())) {
3050    return FastEmitInst_rr(X86::PCMPGTDrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3051  }
3052  return 0;
3053}
3054
3055unsigned FastEmit_X86ISD_PCMPGTD_rr(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3056  switch (VT.SimpleTy) {
3057  case MVT::v4i32: return FastEmit_X86ISD_PCMPGTD_MVT_v4i32_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3058  default: return 0;
3059  }
3060}
3061
3062// FastEmit functions for X86ISD::PCMPGTQ.
3063
3064unsigned FastEmit_X86ISD_PCMPGTQ_MVT_v2i64_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3065  if (RetVT.SimpleTy != MVT::v2i64)
3066    return 0;
3067  if ((Subtarget->hasAVX()))
3068    return FastEmitInst_rr(X86::VPCMPGTQrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3069  if ((Subtarget->hasSSE42()))
3070    return FastEmitInst_rr(X86::PCMPGTQrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3071  return 0;
3072}
3073
3074unsigned FastEmit_X86ISD_PCMPGTQ_rr(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3075  switch (VT.SimpleTy) {
3076  case MVT::v2i64: return FastEmit_X86ISD_PCMPGTQ_MVT_v2i64_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3077  default: return 0;
3078  }
3079}
3080
3081// FastEmit functions for X86ISD::PCMPGTW.
3082
3083unsigned FastEmit_X86ISD_PCMPGTW_MVT_v8i16_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3084  if (RetVT.SimpleTy != MVT::v8i16)
3085    return 0;
3086  if ((Subtarget->hasAVX())) {
3087    return FastEmitInst_rr(X86::VPCMPGTWrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3088  }
3089  if ((Subtarget->hasSSE2())) {
3090    return FastEmitInst_rr(X86::PCMPGTWrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3091  }
3092  return 0;
3093}
3094
3095unsigned FastEmit_X86ISD_PCMPGTW_rr(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3096  switch (VT.SimpleTy) {
3097  case MVT::v8i16: return FastEmit_X86ISD_PCMPGTW_MVT_v8i16_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3098  default: return 0;
3099  }
3100}
3101
3102// FastEmit functions for X86ISD::PSHUFB.
3103
3104unsigned FastEmit_X86ISD_PSHUFB_MVT_v16i8_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3105  if (RetVT.SimpleTy != MVT::v16i8)
3106    return 0;
3107  if ((Subtarget->hasAVX())) {
3108    return FastEmitInst_rr(X86::VPSHUFBrr128, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3109  }
3110  if ((Subtarget->hasSSSE3())) {
3111    return FastEmitInst_rr(X86::PSHUFBrr128, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3112  }
3113  return 0;
3114}
3115
3116unsigned FastEmit_X86ISD_PSHUFB_rr(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3117  switch (VT.SimpleTy) {
3118  case MVT::v16i8: return FastEmit_X86ISD_PSHUFB_MVT_v16i8_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3119  default: return 0;
3120  }
3121}
3122
3123// FastEmit functions for X86ISD::PSIGNB.
3124
3125unsigned FastEmit_X86ISD_PSIGNB_MVT_v16i8_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3126  if (RetVT.SimpleTy != MVT::v16i8)
3127    return 0;
3128  if ((Subtarget->hasAVX())) {
3129    return FastEmitInst_rr(X86::VPSIGNBrr128, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3130  }
3131  if ((Subtarget->hasSSSE3())) {
3132    return FastEmitInst_rr(X86::PSIGNBrr128, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3133  }
3134  return 0;
3135}
3136
3137unsigned FastEmit_X86ISD_PSIGNB_rr(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3138  switch (VT.SimpleTy) {
3139  case MVT::v16i8: return FastEmit_X86ISD_PSIGNB_MVT_v16i8_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3140  default: return 0;
3141  }
3142}
3143
3144// FastEmit functions for X86ISD::PSIGND.
3145
3146unsigned FastEmit_X86ISD_PSIGND_MVT_v4i32_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3147  if (RetVT.SimpleTy != MVT::v4i32)
3148    return 0;
3149  if ((Subtarget->hasAVX())) {
3150    return FastEmitInst_rr(X86::VPSIGNDrr128, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3151  }
3152  if ((Subtarget->hasSSSE3())) {
3153    return FastEmitInst_rr(X86::PSIGNDrr128, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3154  }
3155  return 0;
3156}
3157
3158unsigned FastEmit_X86ISD_PSIGND_rr(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3159  switch (VT.SimpleTy) {
3160  case MVT::v4i32: return FastEmit_X86ISD_PSIGND_MVT_v4i32_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3161  default: return 0;
3162  }
3163}
3164
3165// FastEmit functions for X86ISD::PSIGNW.
3166
3167unsigned FastEmit_X86ISD_PSIGNW_MVT_v8i16_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3168  if (RetVT.SimpleTy != MVT::v8i16)
3169    return 0;
3170  if ((Subtarget->hasAVX())) {
3171    return FastEmitInst_rr(X86::VPSIGNWrr128, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3172  }
3173  if ((Subtarget->hasSSSE3())) {
3174    return FastEmitInst_rr(X86::PSIGNWrr128, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3175  }
3176  return 0;
3177}
3178
3179unsigned FastEmit_X86ISD_PSIGNW_rr(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3180  switch (VT.SimpleTy) {
3181  case MVT::v8i16: return FastEmit_X86ISD_PSIGNW_MVT_v8i16_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3182  default: return 0;
3183  }
3184}
3185
3186// FastEmit functions for X86ISD::PTEST.
3187
3188unsigned FastEmit_X86ISD_PTEST_MVT_v4i64_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3189  if (RetVT.SimpleTy != MVT::i32)
3190    return 0;
3191  if ((Subtarget->hasAVX())) {
3192    return FastEmitInst_rr(X86::VPTESTYrr, X86::VR256RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3193  }
3194  return 0;
3195}
3196
3197unsigned FastEmit_X86ISD_PTEST_MVT_v4f32_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3198  if (RetVT.SimpleTy != MVT::i32)
3199    return 0;
3200  if ((Subtarget->hasAVX())) {
3201    return FastEmitInst_rr(X86::VPTESTrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3202  }
3203  if ((Subtarget->hasSSE41())) {
3204    return FastEmitInst_rr(X86::PTESTrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3205  }
3206  return 0;
3207}
3208
3209unsigned FastEmit_X86ISD_PTEST_rr(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3210  switch (VT.SimpleTy) {
3211  case MVT::v4i64: return FastEmit_X86ISD_PTEST_MVT_v4i64_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3212  case MVT::v4f32: return FastEmit_X86ISD_PTEST_MVT_v4f32_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3213  default: return 0;
3214  }
3215}
3216
3217// FastEmit functions for X86ISD::PUNPCKHBW.
3218
3219unsigned FastEmit_X86ISD_PUNPCKHBW_MVT_v16i8_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3220  if (RetVT.SimpleTy != MVT::v16i8)
3221    return 0;
3222  if ((Subtarget->hasAVX())) {
3223    return FastEmitInst_rr(X86::VPUNPCKHBWrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3224  }
3225  if ((Subtarget->hasSSE2())) {
3226    return FastEmitInst_rr(X86::PUNPCKHBWrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3227  }
3228  return 0;
3229}
3230
3231unsigned FastEmit_X86ISD_PUNPCKHBW_rr(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3232  switch (VT.SimpleTy) {
3233  case MVT::v16i8: return FastEmit_X86ISD_PUNPCKHBW_MVT_v16i8_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3234  default: return 0;
3235  }
3236}
3237
3238// FastEmit functions for X86ISD::PUNPCKHDQ.
3239
3240unsigned FastEmit_X86ISD_PUNPCKHDQ_MVT_v4i32_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3241  if (RetVT.SimpleTy != MVT::v4i32)
3242    return 0;
3243  if ((Subtarget->hasAVX())) {
3244    return FastEmitInst_rr(X86::VPUNPCKHDQrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3245  }
3246  if ((Subtarget->hasSSE2())) {
3247    return FastEmitInst_rr(X86::PUNPCKHDQrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3248  }
3249  return 0;
3250}
3251
3252unsigned FastEmit_X86ISD_PUNPCKHDQ_rr(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3253  switch (VT.SimpleTy) {
3254  case MVT::v4i32: return FastEmit_X86ISD_PUNPCKHDQ_MVT_v4i32_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3255  default: return 0;
3256  }
3257}
3258
3259// FastEmit functions for X86ISD::PUNPCKHQDQ.
3260
3261unsigned FastEmit_X86ISD_PUNPCKHQDQ_MVT_v2i64_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3262  if (RetVT.SimpleTy != MVT::v2i64)
3263    return 0;
3264  if ((Subtarget->hasAVX())) {
3265    return FastEmitInst_rr(X86::VPUNPCKHQDQrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3266  }
3267  if ((Subtarget->hasSSE2())) {
3268    return FastEmitInst_rr(X86::PUNPCKHQDQrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3269  }
3270  return 0;
3271}
3272
3273unsigned FastEmit_X86ISD_PUNPCKHQDQ_rr(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3274  switch (VT.SimpleTy) {
3275  case MVT::v2i64: return FastEmit_X86ISD_PUNPCKHQDQ_MVT_v2i64_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3276  default: return 0;
3277  }
3278}
3279
3280// FastEmit functions for X86ISD::PUNPCKHWD.
3281
3282unsigned FastEmit_X86ISD_PUNPCKHWD_MVT_v8i16_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3283  if (RetVT.SimpleTy != MVT::v8i16)
3284    return 0;
3285  if ((Subtarget->hasAVX())) {
3286    return FastEmitInst_rr(X86::VPUNPCKHWDrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3287  }
3288  if ((Subtarget->hasSSE2())) {
3289    return FastEmitInst_rr(X86::PUNPCKHWDrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3290  }
3291  return 0;
3292}
3293
3294unsigned FastEmit_X86ISD_PUNPCKHWD_rr(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3295  switch (VT.SimpleTy) {
3296  case MVT::v8i16: return FastEmit_X86ISD_PUNPCKHWD_MVT_v8i16_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3297  default: return 0;
3298  }
3299}
3300
3301// FastEmit functions for X86ISD::PUNPCKLBW.
3302
3303unsigned FastEmit_X86ISD_PUNPCKLBW_MVT_v16i8_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3304  if (RetVT.SimpleTy != MVT::v16i8)
3305    return 0;
3306  if ((Subtarget->hasAVX())) {
3307    return FastEmitInst_rr(X86::VPUNPCKLBWrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3308  }
3309  if ((Subtarget->hasSSE2())) {
3310    return FastEmitInst_rr(X86::PUNPCKLBWrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3311  }
3312  return 0;
3313}
3314
3315unsigned FastEmit_X86ISD_PUNPCKLBW_rr(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3316  switch (VT.SimpleTy) {
3317  case MVT::v16i8: return FastEmit_X86ISD_PUNPCKLBW_MVT_v16i8_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3318  default: return 0;
3319  }
3320}
3321
3322// FastEmit functions for X86ISD::PUNPCKLDQ.
3323
3324unsigned FastEmit_X86ISD_PUNPCKLDQ_MVT_v4i32_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3325  if (RetVT.SimpleTy != MVT::v4i32)
3326    return 0;
3327  if ((Subtarget->hasAVX())) {
3328    return FastEmitInst_rr(X86::VPUNPCKLDQrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3329  }
3330  if ((Subtarget->hasSSE2())) {
3331    return FastEmitInst_rr(X86::PUNPCKLDQrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3332  }
3333  return 0;
3334}
3335
3336unsigned FastEmit_X86ISD_PUNPCKLDQ_rr(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3337  switch (VT.SimpleTy) {
3338  case MVT::v4i32: return FastEmit_X86ISD_PUNPCKLDQ_MVT_v4i32_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3339  default: return 0;
3340  }
3341}
3342
3343// FastEmit functions for X86ISD::PUNPCKLQDQ.
3344
3345unsigned FastEmit_X86ISD_PUNPCKLQDQ_MVT_v2i64_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3346  if (RetVT.SimpleTy != MVT::v2i64)
3347    return 0;
3348  if ((Subtarget->hasAVX())) {
3349    return FastEmitInst_rr(X86::VPUNPCKLQDQrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3350  }
3351  if ((Subtarget->hasSSE2())) {
3352    return FastEmitInst_rr(X86::PUNPCKLQDQrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3353  }
3354  return 0;
3355}
3356
3357unsigned FastEmit_X86ISD_PUNPCKLQDQ_rr(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3358  switch (VT.SimpleTy) {
3359  case MVT::v2i64: return FastEmit_X86ISD_PUNPCKLQDQ_MVT_v2i64_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3360  default: return 0;
3361  }
3362}
3363
3364// FastEmit functions for X86ISD::PUNPCKLWD.
3365
3366unsigned FastEmit_X86ISD_PUNPCKLWD_MVT_v8i16_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3367  if (RetVT.SimpleTy != MVT::v8i16)
3368    return 0;
3369  if ((Subtarget->hasAVX())) {
3370    return FastEmitInst_rr(X86::VPUNPCKLWDrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3371  }
3372  if ((Subtarget->hasSSE2())) {
3373    return FastEmitInst_rr(X86::PUNPCKLWDrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3374  }
3375  return 0;
3376}
3377
3378unsigned FastEmit_X86ISD_PUNPCKLWD_rr(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3379  switch (VT.SimpleTy) {
3380  case MVT::v8i16: return FastEmit_X86ISD_PUNPCKLWD_MVT_v8i16_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3381  default: return 0;
3382  }
3383}
3384
3385// FastEmit functions for X86ISD::TESTP.
3386
3387unsigned FastEmit_X86ISD_TESTP_MVT_v4f32_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3388  if (RetVT.SimpleTy != MVT::i32)
3389    return 0;
3390  if ((Subtarget->hasAVX())) {
3391    return FastEmitInst_rr(X86::VTESTPSrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3392  }
3393  return 0;
3394}
3395
3396unsigned FastEmit_X86ISD_TESTP_MVT_v8f32_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3397  if (RetVT.SimpleTy != MVT::i32)
3398    return 0;
3399  if ((Subtarget->hasAVX())) {
3400    return FastEmitInst_rr(X86::VTESTPSYrr, X86::VR256RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3401  }
3402  return 0;
3403}
3404
3405unsigned FastEmit_X86ISD_TESTP_MVT_v2f64_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3406  if (RetVT.SimpleTy != MVT::i32)
3407    return 0;
3408  if ((Subtarget->hasAVX())) {
3409    return FastEmitInst_rr(X86::VTESTPDrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3410  }
3411  return 0;
3412}
3413
3414unsigned FastEmit_X86ISD_TESTP_MVT_v4f64_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3415  if (RetVT.SimpleTy != MVT::i32)
3416    return 0;
3417  if ((Subtarget->hasAVX())) {
3418    return FastEmitInst_rr(X86::VTESTPDYrr, X86::VR256RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3419  }
3420  return 0;
3421}
3422
3423unsigned FastEmit_X86ISD_TESTP_rr(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3424  switch (VT.SimpleTy) {
3425  case MVT::v4f32: return FastEmit_X86ISD_TESTP_MVT_v4f32_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3426  case MVT::v8f32: return FastEmit_X86ISD_TESTP_MVT_v8f32_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3427  case MVT::v2f64: return FastEmit_X86ISD_TESTP_MVT_v2f64_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3428  case MVT::v4f64: return FastEmit_X86ISD_TESTP_MVT_v4f64_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3429  default: return 0;
3430  }
3431}
3432
3433// FastEmit functions for X86ISD::UCOMI.
3434
3435unsigned FastEmit_X86ISD_UCOMI_MVT_v4f32_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3436  if (RetVT.SimpleTy != MVT::i32)
3437    return 0;
3438  if ((Subtarget->hasAVX())) {
3439    return FastEmitInst_rr(X86::Int_VUCOMISSrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3440  }
3441  if ((Subtarget->hasSSE1())) {
3442    return FastEmitInst_rr(X86::Int_UCOMISSrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3443  }
3444  return 0;
3445}
3446
3447unsigned FastEmit_X86ISD_UCOMI_MVT_v2f64_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3448  if (RetVT.SimpleTy != MVT::i32)
3449    return 0;
3450  if ((Subtarget->hasAVX())) {
3451    return FastEmitInst_rr(X86::Int_VUCOMISDrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3452  }
3453  if ((Subtarget->hasSSE2())) {
3454    return FastEmitInst_rr(X86::Int_UCOMISDrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3455  }
3456  return 0;
3457}
3458
3459unsigned FastEmit_X86ISD_UCOMI_rr(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3460  switch (VT.SimpleTy) {
3461  case MVT::v4f32: return FastEmit_X86ISD_UCOMI_MVT_v4f32_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3462  case MVT::v2f64: return FastEmit_X86ISD_UCOMI_MVT_v2f64_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3463  default: return 0;
3464  }
3465}
3466
3467// FastEmit functions for X86ISD::UNPCKHPD.
3468
3469unsigned FastEmit_X86ISD_UNPCKHPD_MVT_v2f64_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3470  if (RetVT.SimpleTy != MVT::v2f64)
3471    return 0;
3472  if ((Subtarget->hasAVX())) {
3473    return FastEmitInst_rr(X86::VUNPCKHPDrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3474  }
3475  if ((Subtarget->hasSSE2())) {
3476    return FastEmitInst_rr(X86::UNPCKHPDrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3477  }
3478  return 0;
3479}
3480
3481unsigned FastEmit_X86ISD_UNPCKHPD_rr(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3482  switch (VT.SimpleTy) {
3483  case MVT::v2f64: return FastEmit_X86ISD_UNPCKHPD_MVT_v2f64_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3484  default: return 0;
3485  }
3486}
3487
3488// FastEmit functions for X86ISD::UNPCKHPS.
3489
3490unsigned FastEmit_X86ISD_UNPCKHPS_MVT_v4f32_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3491  if (RetVT.SimpleTy != MVT::v4f32)
3492    return 0;
3493  if ((Subtarget->hasAVX())) {
3494    return FastEmitInst_rr(X86::VUNPCKHPSrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3495  }
3496  if ((Subtarget->hasSSE1())) {
3497    return FastEmitInst_rr(X86::UNPCKHPSrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3498  }
3499  return 0;
3500}
3501
3502unsigned FastEmit_X86ISD_UNPCKHPS_rr(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3503  switch (VT.SimpleTy) {
3504  case MVT::v4f32: return FastEmit_X86ISD_UNPCKHPS_MVT_v4f32_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3505  default: return 0;
3506  }
3507}
3508
3509// FastEmit functions for X86ISD::UNPCKLPD.
3510
3511unsigned FastEmit_X86ISD_UNPCKLPD_MVT_v2f64_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3512  if (RetVT.SimpleTy != MVT::v2f64)
3513    return 0;
3514  if ((Subtarget->hasAVX())) {
3515    return FastEmitInst_rr(X86::VUNPCKLPDrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3516  }
3517  if ((Subtarget->hasSSE2())) {
3518    return FastEmitInst_rr(X86::UNPCKLPDrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3519  }
3520  return 0;
3521}
3522
3523unsigned FastEmit_X86ISD_UNPCKLPD_rr(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3524  switch (VT.SimpleTy) {
3525  case MVT::v2f64: return FastEmit_X86ISD_UNPCKLPD_MVT_v2f64_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3526  default: return 0;
3527  }
3528}
3529
3530// FastEmit functions for X86ISD::UNPCKLPS.
3531
3532unsigned FastEmit_X86ISD_UNPCKLPS_MVT_v4f32_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3533  if (RetVT.SimpleTy != MVT::v4f32)
3534    return 0;
3535  if ((Subtarget->hasAVX())) {
3536    return FastEmitInst_rr(X86::VUNPCKLPSrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3537  }
3538  if ((Subtarget->hasSSE1())) {
3539    return FastEmitInst_rr(X86::UNPCKLPSrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3540  }
3541  return 0;
3542}
3543
3544unsigned FastEmit_X86ISD_UNPCKLPS_rr(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3545  switch (VT.SimpleTy) {
3546  case MVT::v4f32: return FastEmit_X86ISD_UNPCKLPS_MVT_v4f32_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3547  default: return 0;
3548  }
3549}
3550
3551// FastEmit functions for X86ISD::VUNPCKHPDY.
3552
3553unsigned FastEmit_X86ISD_VUNPCKHPDY_MVT_v4i64_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3554  if (RetVT.SimpleTy != MVT::v4i64)
3555    return 0;
3556  if ((Subtarget->hasAVX())) {
3557    return FastEmitInst_rr(X86::VUNPCKHPDYrr, X86::VR256RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3558  }
3559  return 0;
3560}
3561
3562unsigned FastEmit_X86ISD_VUNPCKHPDY_MVT_v4f64_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3563  if (RetVT.SimpleTy != MVT::v4f64)
3564    return 0;
3565  if ((Subtarget->hasAVX())) {
3566    return FastEmitInst_rr(X86::VUNPCKHPDYrr, X86::VR256RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3567  }
3568  return 0;
3569}
3570
3571unsigned FastEmit_X86ISD_VUNPCKHPDY_rr(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3572  switch (VT.SimpleTy) {
3573  case MVT::v4i64: return FastEmit_X86ISD_VUNPCKHPDY_MVT_v4i64_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3574  case MVT::v4f64: return FastEmit_X86ISD_VUNPCKHPDY_MVT_v4f64_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3575  default: return 0;
3576  }
3577}
3578
3579// FastEmit functions for X86ISD::VUNPCKHPSY.
3580
3581unsigned FastEmit_X86ISD_VUNPCKHPSY_MVT_v8i32_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3582  if (RetVT.SimpleTy != MVT::v8i32)
3583    return 0;
3584  if ((Subtarget->hasAVX())) {
3585    return FastEmitInst_rr(X86::VUNPCKHPSYrr, X86::VR256RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3586  }
3587  return 0;
3588}
3589
3590unsigned FastEmit_X86ISD_VUNPCKHPSY_MVT_v8f32_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3591  if (RetVT.SimpleTy != MVT::v8f32)
3592    return 0;
3593  if ((Subtarget->hasAVX())) {
3594    return FastEmitInst_rr(X86::VUNPCKHPSYrr, X86::VR256RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3595  }
3596  return 0;
3597}
3598
3599unsigned FastEmit_X86ISD_VUNPCKHPSY_rr(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3600  switch (VT.SimpleTy) {
3601  case MVT::v8i32: return FastEmit_X86ISD_VUNPCKHPSY_MVT_v8i32_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3602  case MVT::v8f32: return FastEmit_X86ISD_VUNPCKHPSY_MVT_v8f32_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3603  default: return 0;
3604  }
3605}
3606
3607// FastEmit functions for X86ISD::VUNPCKLPDY.
3608
3609unsigned FastEmit_X86ISD_VUNPCKLPDY_MVT_v4i64_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3610  if (RetVT.SimpleTy != MVT::v4i64)
3611    return 0;
3612  if ((Subtarget->hasAVX())) {
3613    return FastEmitInst_rr(X86::VUNPCKLPDYrr, X86::VR256RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3614  }
3615  return 0;
3616}
3617
3618unsigned FastEmit_X86ISD_VUNPCKLPDY_MVT_v4f64_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3619  if (RetVT.SimpleTy != MVT::v4f64)
3620    return 0;
3621  if ((Subtarget->hasAVX())) {
3622    return FastEmitInst_rr(X86::VUNPCKLPDYrr, X86::VR256RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3623  }
3624  return 0;
3625}
3626
3627unsigned FastEmit_X86ISD_VUNPCKLPDY_rr(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3628  switch (VT.SimpleTy) {
3629  case MVT::v4i64: return FastEmit_X86ISD_VUNPCKLPDY_MVT_v4i64_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3630  case MVT::v4f64: return FastEmit_X86ISD_VUNPCKLPDY_MVT_v4f64_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3631  default: return 0;
3632  }
3633}
3634
3635// FastEmit functions for X86ISD::VUNPCKLPSY.
3636
3637unsigned FastEmit_X86ISD_VUNPCKLPSY_MVT_v8i32_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3638  if (RetVT.SimpleTy != MVT::v8i32)
3639    return 0;
3640  if ((Subtarget->hasAVX())) {
3641    return FastEmitInst_rr(X86::VUNPCKLPSYrr, X86::VR256RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3642  }
3643  return 0;
3644}
3645
3646unsigned FastEmit_X86ISD_VUNPCKLPSY_MVT_v8f32_rr(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3647  if (RetVT.SimpleTy != MVT::v8f32)
3648    return 0;
3649  if ((Subtarget->hasAVX())) {
3650    return FastEmitInst_rr(X86::VUNPCKLPSYrr, X86::VR256RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill);
3651  }
3652  return 0;
3653}
3654
3655unsigned FastEmit_X86ISD_VUNPCKLPSY_rr(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3656  switch (VT.SimpleTy) {
3657  case MVT::v8i32: return FastEmit_X86ISD_VUNPCKLPSY_MVT_v8i32_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3658  case MVT::v8f32: return FastEmit_X86ISD_VUNPCKLPSY_MVT_v8f32_rr(RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3659  default: return 0;
3660  }
3661}
3662
3663// Top-level FastEmit function for the two-register (rr) form.
3664
3665unsigned FastEmit_rr(MVT VT, MVT RetVT, unsigned Opcode, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill) {
3666  switch (Opcode) {
3667  case ISD::ADD: return FastEmit_ISD_ADD_rr(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3668  case ISD::AND: return FastEmit_ISD_AND_rr(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3669  case ISD::FADD: return FastEmit_ISD_FADD_rr(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3670  case ISD::FDIV: return FastEmit_ISD_FDIV_rr(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3671  case ISD::FMUL: return FastEmit_ISD_FMUL_rr(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3672  case ISD::FSUB: return FastEmit_ISD_FSUB_rr(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3673  case ISD::MUL: return FastEmit_ISD_MUL_rr(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3674  case ISD::OR: return FastEmit_ISD_OR_rr(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3675  case ISD::ROTL: return FastEmit_ISD_ROTL_rr(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3676  case ISD::ROTR: return FastEmit_ISD_ROTR_rr(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3677  case ISD::SHL: return FastEmit_ISD_SHL_rr(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3678  case ISD::SRA: return FastEmit_ISD_SRA_rr(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3679  case ISD::SRL: return FastEmit_ISD_SRL_rr(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3680  case ISD::SUB: return FastEmit_ISD_SUB_rr(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3681  case ISD::XOR: return FastEmit_ISD_XOR_rr(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3682  case X86ISD::ANDNP: return FastEmit_X86ISD_ANDNP_rr(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3683  case X86ISD::BT: return FastEmit_X86ISD_BT_rr(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3684  case X86ISD::CMP: return FastEmit_X86ISD_CMP_rr(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3685  case X86ISD::COMI: return FastEmit_X86ISD_COMI_rr(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3686  case X86ISD::FAND: return FastEmit_X86ISD_FAND_rr(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3687  case X86ISD::FHADD: return FastEmit_X86ISD_FHADD_rr(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3688  case X86ISD::FHSUB: return FastEmit_X86ISD_FHSUB_rr(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3689  case X86ISD::FMAX: return FastEmit_X86ISD_FMAX_rr(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3690  case X86ISD::FMIN: return FastEmit_X86ISD_FMIN_rr(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3691  case X86ISD::FOR: return FastEmit_X86ISD_FOR_rr(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3692  case X86ISD::FXOR: return FastEmit_X86ISD_FXOR_rr(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3693  case X86ISD::MOVHLPS: return FastEmit_X86ISD_MOVHLPS_rr(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3694  case X86ISD::MOVLHPS: return FastEmit_X86ISD_MOVLHPS_rr(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3695  case X86ISD::PCMPEQB: return FastEmit_X86ISD_PCMPEQB_rr(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3696  case X86ISD::PCMPEQD: return FastEmit_X86ISD_PCMPEQD_rr(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3697  case X86ISD::PCMPEQQ: return FastEmit_X86ISD_PCMPEQQ_rr(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3698  case X86ISD::PCMPEQW: return FastEmit_X86ISD_PCMPEQW_rr(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3699  case X86ISD::PCMPGTB: return FastEmit_X86ISD_PCMPGTB_rr(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3700  case X86ISD::PCMPGTD: return FastEmit_X86ISD_PCMPGTD_rr(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3701  case X86ISD::PCMPGTQ: return FastEmit_X86ISD_PCMPGTQ_rr(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3702  case X86ISD::PCMPGTW: return FastEmit_X86ISD_PCMPGTW_rr(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3703  case X86ISD::PSHUFB: return FastEmit_X86ISD_PSHUFB_rr(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3704  case X86ISD::PSIGNB: return FastEmit_X86ISD_PSIGNB_rr(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3705  case X86ISD::PSIGND: return FastEmit_X86ISD_PSIGND_rr(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3706  case X86ISD::PSIGNW: return FastEmit_X86ISD_PSIGNW_rr(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3707  case X86ISD::PTEST: return FastEmit_X86ISD_PTEST_rr(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3708  case X86ISD::PUNPCKHBW: return FastEmit_X86ISD_PUNPCKHBW_rr(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3709  case X86ISD::PUNPCKHDQ: return FastEmit_X86ISD_PUNPCKHDQ_rr(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3710  case X86ISD::PUNPCKHQDQ: return FastEmit_X86ISD_PUNPCKHQDQ_rr(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3711  case X86ISD::PUNPCKHWD: return FastEmit_X86ISD_PUNPCKHWD_rr(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3712  case X86ISD::PUNPCKLBW: return FastEmit_X86ISD_PUNPCKLBW_rr(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3713  case X86ISD::PUNPCKLDQ: return FastEmit_X86ISD_PUNPCKLDQ_rr(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3714  case X86ISD::PUNPCKLQDQ: return FastEmit_X86ISD_PUNPCKLQDQ_rr(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3715  case X86ISD::PUNPCKLWD: return FastEmit_X86ISD_PUNPCKLWD_rr(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3716  case X86ISD::TESTP: return FastEmit_X86ISD_TESTP_rr(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3717  case X86ISD::UCOMI: return FastEmit_X86ISD_UCOMI_rr(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3718  case X86ISD::UNPCKHPD: return FastEmit_X86ISD_UNPCKHPD_rr(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3719  case X86ISD::UNPCKHPS: return FastEmit_X86ISD_UNPCKHPS_rr(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3720  case X86ISD::UNPCKLPD: return FastEmit_X86ISD_UNPCKLPD_rr(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3721  case X86ISD::UNPCKLPS: return FastEmit_X86ISD_UNPCKLPS_rr(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3722  case X86ISD::VUNPCKHPDY: return FastEmit_X86ISD_VUNPCKHPDY_rr(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3723  case X86ISD::VUNPCKHPSY: return FastEmit_X86ISD_VUNPCKHPSY_rr(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3724  case X86ISD::VUNPCKLPDY: return FastEmit_X86ISD_VUNPCKLPDY_rr(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3725  case X86ISD::VUNPCKLPSY: return FastEmit_X86ISD_VUNPCKLPSY_rr(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill);
3726  default: return 0;
3727  }
3728}
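
// Illustrative sketch (not part of the generated table): a caller with two
// already-selected i32 registers can go through the top-level entry point,
// which dispatches on the ISD/X86ISD opcode first and on the value type second.
// The helper name below is hypothetical; only FastEmit_rr itself comes from
// this file.
unsigned FastEmit_ADD_i32_Example(unsigned LHSReg, bool LHSIsKill,
                                  unsigned RHSReg, bool RHSIsKill) {
  // A result of 0 means no opcode/type/subtarget entry matched, which is the
  // caller's cue to fall back to the full SelectionDAG instruction selector.
  return FastEmit_rr(MVT::i32, MVT::i32, ISD::ADD,
                     LHSReg, LHSIsKill, RHSReg, RHSIsKill);
}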
3729
3730// FastEmit functions for X86ISD::CMPPD.
3731
3732unsigned FastEmit_X86ISD_CMPPD_MVT_v2f64_rri(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill, uint64_t imm2) {
3733  if (RetVT.SimpleTy != MVT::v2i64)
3734    return 0;
3735  if ((Subtarget->hasAVX())) {
3736    return FastEmitInst_rri(X86::VCMPPDrri, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill, imm2);
3737  }
3738  if ((Subtarget->hasSSE2())) {
3739    return FastEmitInst_rri(X86::CMPPDrri, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill, imm2);
3740  }
3741  return 0;
3742}
3743
3744unsigned FastEmit_X86ISD_CMPPD_MVT_v4f64_rri(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill, uint64_t imm2) {
3745  if (RetVT.SimpleTy != MVT::v4i64)
3746    return 0;
3747  if ((Subtarget->hasAVX())) {
3748    return FastEmitInst_rri(X86::VCMPPDYrri, X86::VR256RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill, imm2);
3749  }
3750  return 0;
3751}
3752
3753unsigned FastEmit_X86ISD_CMPPD_rri(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill, uint64_t imm2) {
3754  switch (VT.SimpleTy) {
3755  case MVT::v2f64: return FastEmit_X86ISD_CMPPD_MVT_v2f64_rri(RetVT, Op0, Op0IsKill, Op1, Op1IsKill, imm2);
3756  case MVT::v4f64: return FastEmit_X86ISD_CMPPD_MVT_v4f64_rri(RetVT, Op0, Op0IsKill, Op1, Op1IsKill, imm2);
3757  default: return 0;
3758  }
3759}
3760
3761// FastEmit functions for X86ISD::CMPPS.
3762
3763unsigned FastEmit_X86ISD_CMPPS_MVT_v4f32_rri(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill, uint64_t imm2) {
3764  if (RetVT.SimpleTy != MVT::v4i32)
3765    return 0;
3766  if ((Subtarget->hasAVX())) {
3767    return FastEmitInst_rri(X86::VCMPPSrri, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill, imm2);
3768  }
3769  if ((Subtarget->hasSSE1())) {
3770    return FastEmitInst_rri(X86::CMPPSrri, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill, imm2);
3771  }
3772  return 0;
3773}
3774
3775unsigned FastEmit_X86ISD_CMPPS_MVT_v8f32_rri(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill, uint64_t imm2) {
3776  if (RetVT.SimpleTy != MVT::v8i32)
3777    return 0;
3778  if ((Subtarget->hasAVX())) {
3779    return FastEmitInst_rri(X86::VCMPPSYrri, X86::VR256RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill, imm2);
3780  }
3781  return 0;
3782}
3783
3784unsigned FastEmit_X86ISD_CMPPS_rri(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill, uint64_t imm2) {
3785  switch (VT.SimpleTy) {
3786  case MVT::v4f32: return FastEmit_X86ISD_CMPPS_MVT_v4f32_rri(RetVT, Op0, Op0IsKill, Op1, Op1IsKill, imm2);
3787  case MVT::v8f32: return FastEmit_X86ISD_CMPPS_MVT_v8f32_rri(RetVT, Op0, Op0IsKill, Op1, Op1IsKill, imm2);
3788  default: return 0;
3789  }
3790}
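
// Hedged example (illustrative only): for the packed-compare patterns the third
// operand is the SSE/AVX comparison predicate immediate; 0 selects the ordered
// "equal" comparison. The wrapper and the chosen predicate value are assumptions
// for illustration, not part of the generated table.
unsigned FastEmit_CMPEQPS_Example(unsigned LHSReg, bool LHSIsKill,
                                  unsigned RHSReg, bool RHSIsKill) {
  // v4f32 operands compare element-wise into a v4i32 mask register.
  return FastEmit_X86ISD_CMPPS_rri(MVT::v4f32, MVT::v4i32,
                                   LHSReg, LHSIsKill, RHSReg, RHSIsKill,
                                   /*imm2=*/0);
}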
3791
3792// FastEmit functions for X86ISD::FSETCCsd.
3793
3794unsigned FastEmit_X86ISD_FSETCCsd_MVT_f64_rri(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill, uint64_t imm2) {
3795  if (RetVT.SimpleTy != MVT::f64)
3796    return 0;
3797  if ((Subtarget->hasAVX())) {
3798    return FastEmitInst_rri(X86::VCMPSDrr, X86::FR64RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill, imm2);
3799  }
3800  if ((Subtarget->hasSSE2())) {
3801    return FastEmitInst_rri(X86::CMPSDrr, X86::FR64RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill, imm2);
3802  }
3803  return 0;
3804}
3805
3806unsigned FastEmit_X86ISD_FSETCCsd_rri(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill, uint64_t imm2) {
3807  switch (VT.SimpleTy) {
3808  case MVT::f64: return FastEmit_X86ISD_FSETCCsd_MVT_f64_rri(RetVT, Op0, Op0IsKill, Op1, Op1IsKill, imm2);
3809  default: return 0;
3810  }
3811}
3812
3813// FastEmit functions for X86ISD::FSETCCss.
3814
3815unsigned FastEmit_X86ISD_FSETCCss_MVT_f32_rri(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill, uint64_t imm2) {
3816  if (RetVT.SimpleTy != MVT::f32)
3817    return 0;
3818  if ((Subtarget->hasAVX())) {
3819    return FastEmitInst_rri(X86::VCMPSSrr, X86::FR32RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill, imm2);
3820  }
3821  if ((Subtarget->hasSSE1())) {
3822    return FastEmitInst_rri(X86::CMPSSrr, X86::FR32RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill, imm2);
3823  }
3824  return 0;
3825}
3826
3827unsigned FastEmit_X86ISD_FSETCCss_rri(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill, uint64_t imm2) {
3828  switch (VT.SimpleTy) {
3829  case MVT::f32: return FastEmit_X86ISD_FSETCCss_MVT_f32_rri(RetVT, Op0, Op0IsKill, Op1, Op1IsKill, imm2);
3830  default: return 0;
3831  }
3832}
3833
3834// FastEmit functions for X86ISD::INSERTPS.
3835
3836unsigned FastEmit_X86ISD_INSERTPS_MVT_v4f32_rri(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill, uint64_t imm2) {
3837  if (RetVT.SimpleTy != MVT::v4f32)
3838    return 0;
3839  if ((Subtarget->hasAVX())) {
3840    return FastEmitInst_rri(X86::VINSERTPSrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill, imm2);
3841  }
3842  if ((Subtarget->hasSSE41())) {
3843    return FastEmitInst_rri(X86::INSERTPSrr, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill, imm2);
3844  }
3845  return 0;
3846}
3847
3848unsigned FastEmit_X86ISD_INSERTPS_rri(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill, uint64_t imm2) {
3849  switch (VT.SimpleTy) {
3850  case MVT::v4f32: return FastEmit_X86ISD_INSERTPS_MVT_v4f32_rri(RetVT, Op0, Op0IsKill, Op1, Op1IsKill, imm2);
3851  default: return 0;
3852  }
3853}
3854
3855// FastEmit functions for X86ISD::SHLD.
3856
3857unsigned FastEmit_X86ISD_SHLD_MVT_i16_rri(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill, uint64_t imm2) {
3858  if (RetVT.SimpleTy != MVT::i16)
3859    return 0;
3860  return FastEmitInst_rri(X86::SHLD16rri8, X86::GR16RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill, imm2);
3861}
3862
3863unsigned FastEmit_X86ISD_SHLD_MVT_i32_rri(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill, uint64_t imm2) {
3864  if (RetVT.SimpleTy != MVT::i32)
3865    return 0;
3866  return FastEmitInst_rri(X86::SHLD32rri8, X86::GR32RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill, imm2);
3867}
3868
3869unsigned FastEmit_X86ISD_SHLD_MVT_i64_rri(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill, uint64_t imm2) {
3870  if (RetVT.SimpleTy != MVT::i64)
3871    return 0;
3872  return FastEmitInst_rri(X86::SHLD64rri8, X86::GR64RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill, imm2);
3873}
3874
3875unsigned FastEmit_X86ISD_SHLD_rri(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill, uint64_t imm2) {
3876  switch (VT.SimpleTy) {
3877  case MVT::i16: return FastEmit_X86ISD_SHLD_MVT_i16_rri(RetVT, Op0, Op0IsKill, Op1, Op1IsKill, imm2);
3878  case MVT::i32: return FastEmit_X86ISD_SHLD_MVT_i32_rri(RetVT, Op0, Op0IsKill, Op1, Op1IsKill, imm2);
3879  case MVT::i64: return FastEmit_X86ISD_SHLD_MVT_i64_rri(RetVT, Op0, Op0IsKill, Op1, Op1IsKill, imm2);
3880  default: return 0;
3881  }
3882}
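
// Hedged sketch (not generated): X86ISD::SHLD is the double-precision left
// shift, where Op0 is shifted left by the immediate and the vacated low bits
// are filled from the high bits of Op1. The wrapper below is illustrative only
// and relies solely on the dispatcher defined above.
unsigned FastEmit_SHLD_i32_Example(unsigned HiReg, bool HiIsKill,
                                   unsigned LoReg, bool LoIsKill,
                                   uint64_t ShiftAmt) {
  // Emits SHLD32rri8 via the i32 helper; returns 0 only for a non-i32 type.
  return FastEmit_X86ISD_SHLD_rri(MVT::i32, MVT::i32,
                                  HiReg, HiIsKill, LoReg, LoIsKill, ShiftAmt);
}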
3883
3884// FastEmit functions for X86ISD::SHRD.
3885
3886unsigned FastEmit_X86ISD_SHRD_MVT_i16_rri(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill, uint64_t imm2) {
3887  if (RetVT.SimpleTy != MVT::i16)
3888    return 0;
3889  return FastEmitInst_rri(X86::SHRD16rri8, X86::GR16RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill, imm2);
3890}
3891
3892unsigned FastEmit_X86ISD_SHRD_MVT_i32_rri(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill, uint64_t imm2) {
3893  if (RetVT.SimpleTy != MVT::i32)
3894    return 0;
3895  return FastEmitInst_rri(X86::SHRD32rri8, X86::GR32RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill, imm2);
3896}
3897
3898unsigned FastEmit_X86ISD_SHRD_MVT_i64_rri(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill, uint64_t imm2) {
3899  if (RetVT.SimpleTy != MVT::i64)
3900    return 0;
3901  return FastEmitInst_rri(X86::SHRD64rri8, X86::GR64RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill, imm2);
3902}
3903
3904unsigned FastEmit_X86ISD_SHRD_rri(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill, uint64_t imm2) {
3905  switch (VT.SimpleTy) {
3906  case MVT::i16: return FastEmit_X86ISD_SHRD_MVT_i16_rri(RetVT, Op0, Op0IsKill, Op1, Op1IsKill, imm2);
3907  case MVT::i32: return FastEmit_X86ISD_SHRD_MVT_i32_rri(RetVT, Op0, Op0IsKill, Op1, Op1IsKill, imm2);
3908  case MVT::i64: return FastEmit_X86ISD_SHRD_MVT_i64_rri(RetVT, Op0, Op0IsKill, Op1, Op1IsKill, imm2);
3909  default: return 0;
3910  }
3911}
3912
3913// FastEmit functions for X86ISD::SHUFPD.
3914
3915unsigned FastEmit_X86ISD_SHUFPD_MVT_v2i64_rri(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill, uint64_t imm2) {
3916  if (RetVT.SimpleTy != MVT::v2i64)
3917    return 0;
3918  if ((Subtarget->hasAVX())) {
3919    return FastEmitInst_rri(X86::VSHUFPDrri, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill, imm2);
3920  }
3921  if ((Subtarget->hasSSE2())) {
3922    return FastEmitInst_rri(X86::SHUFPDrri, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill, imm2);
3923  }
3924  return 0;
3925}
3926
3927unsigned FastEmit_X86ISD_SHUFPD_MVT_v4i64_rri(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill, uint64_t imm2) {
3928  if (RetVT.SimpleTy != MVT::v4i64)
3929    return 0;
3930  if ((Subtarget->hasAVX())) {
3931    return FastEmitInst_rri(X86::VSHUFPDYrri, X86::VR256RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill, imm2);
3932  }
3933  return 0;
3934}
3935
3936unsigned FastEmit_X86ISD_SHUFPD_MVT_v2f64_rri(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill, uint64_t imm2) {
3937  if (RetVT.SimpleTy != MVT::v2f64)
3938    return 0;
3939  if ((Subtarget->hasAVX())) {
3940    return FastEmitInst_rri(X86::VSHUFPDrri, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill, imm2);
3941  }
3942  if ((Subtarget->hasSSE2())) {
3943    return FastEmitInst_rri(X86::SHUFPDrri, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill, imm2);
3944  }
3945  return 0;
3946}
3947
3948unsigned FastEmit_X86ISD_SHUFPD_MVT_v4f64_rri(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill, uint64_t imm2) {
3949  if (RetVT.SimpleTy != MVT::v4f64)
3950    return 0;
3951  if ((Subtarget->hasAVX())) {
3952    return FastEmitInst_rri(X86::VSHUFPDYrri, X86::VR256RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill, imm2);
3953  }
3954  return 0;
3955}
3956
3957unsigned FastEmit_X86ISD_SHUFPD_rri(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill, uint64_t imm2) {
3958  switch (VT.SimpleTy) {
3959  case MVT::v2i64: return FastEmit_X86ISD_SHUFPD_MVT_v2i64_rri(RetVT, Op0, Op0IsKill, Op1, Op1IsKill, imm2);
3960  case MVT::v4i64: return FastEmit_X86ISD_SHUFPD_MVT_v4i64_rri(RetVT, Op0, Op0IsKill, Op1, Op1IsKill, imm2);
3961  case MVT::v2f64: return FastEmit_X86ISD_SHUFPD_MVT_v2f64_rri(RetVT, Op0, Op0IsKill, Op1, Op1IsKill, imm2);
3962  case MVT::v4f64: return FastEmit_X86ISD_SHUFPD_MVT_v4f64_rri(RetVT, Op0, Op0IsKill, Op1, Op1IsKill, imm2);
3963  default: return 0;
3964  }
3965}
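// Note on the SHUFPD entries above: the 128-bit forms prefer the AVX encoding
// (VSHUFPDrri) when Subtarget->hasAVX() and otherwise fall back to the SSE2
// encoding (SHUFPDrri); the 256-bit v4i64/v4f64 forms exist only under AVX.
// A hypothetical caller shuffling two v2f64 registers could do:
//   unsigned R = FastEmit_X86ISD_SHUFPD_rri(MVT::v2f64, MVT::v2f64,
//                                           LHSReg, true, RHSReg, true,
//                                           /*shuffle control imm*/ 1);
// where LHSReg/RHSReg are illustrative virtual registers, not names defined here.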
3966
3967// FastEmit functions for X86ISD::SHUFPS.
3968
3969unsigned FastEmit_X86ISD_SHUFPS_MVT_v4i32_rri(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill, uint64_t imm2) {
3970  if (RetVT.SimpleTy != MVT::v4i32)
3971    return 0;
3972  if ((Subtarget->hasAVX())) {
3973    return FastEmitInst_rri(X86::VSHUFPSrri, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill, imm2);
3974  }
3975  if ((Subtarget->hasSSE1())) {
3976    return FastEmitInst_rri(X86::SHUFPSrri, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill, imm2);
3977  }
3978  return 0;
3979}
3980
3981unsigned FastEmit_X86ISD_SHUFPS_MVT_v8i32_rri(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill, uint64_t imm2) {
3982  if (RetVT.SimpleTy != MVT::v8i32)
3983    return 0;
3984  if ((Subtarget->hasAVX())) {
3985    return FastEmitInst_rri(X86::VSHUFPSYrri, X86::VR256RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill, imm2);
3986  }
3987  return 0;
3988}
3989
3990unsigned FastEmit_X86ISD_SHUFPS_MVT_v4f32_rri(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill, uint64_t imm2) {
3991  if (RetVT.SimpleTy != MVT::v4f32)
3992    return 0;
3993  if ((Subtarget->hasAVX())) {
3994    return FastEmitInst_rri(X86::VSHUFPSrri, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill, imm2);
3995  }
3996  if ((Subtarget->hasSSE1())) {
3997    return FastEmitInst_rri(X86::SHUFPSrri, X86::VR128RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill, imm2);
3998  }
3999  return 0;
4000}
4001
4002unsigned FastEmit_X86ISD_SHUFPS_MVT_v8f32_rri(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill, uint64_t imm2) {
4003  if (RetVT.SimpleTy != MVT::v8f32)
4004    return 0;
4005  if ((Subtarget->hasAVX())) {
4006    return FastEmitInst_rri(X86::VSHUFPSYrri, X86::VR256RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill, imm2);
4007  }
4008  return 0;
4009}
4010
4011unsigned FastEmit_X86ISD_SHUFPS_rri(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill, uint64_t imm2) {
4012  switch (VT.SimpleTy) {
4013  case MVT::v4i32: return FastEmit_X86ISD_SHUFPS_MVT_v4i32_rri(RetVT, Op0, Op0IsKill, Op1, Op1IsKill, imm2);
4014  case MVT::v8i32: return FastEmit_X86ISD_SHUFPS_MVT_v8i32_rri(RetVT, Op0, Op0IsKill, Op1, Op1IsKill, imm2);
4015  case MVT::v4f32: return FastEmit_X86ISD_SHUFPS_MVT_v4f32_rri(RetVT, Op0, Op0IsKill, Op1, Op1IsKill, imm2);
4016  case MVT::v8f32: return FastEmit_X86ISD_SHUFPS_MVT_v8f32_rri(RetVT, Op0, Op0IsKill, Op1, Op1IsKill, imm2);
4017  default: return 0;
4018  }
4019}
4020
4021// FastEmit functions for X86ISD::VPERM2F128.
4022
4023unsigned FastEmit_X86ISD_VPERM2F128_MVT_v32i8_rri(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill, uint64_t imm2) {
4024  if (RetVT.SimpleTy != MVT::v32i8)
4025    return 0;
4026  return FastEmitInst_rri(X86::VPERM2F128rr, X86::VR256RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill, imm2);
4027}
4028
4029unsigned FastEmit_X86ISD_VPERM2F128_MVT_v16i16_rri(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill, uint64_t imm2) {
4030  if (RetVT.SimpleTy != MVT::v16i16)
4031    return 0;
4032  return FastEmitInst_rri(X86::VPERM2F128rr, X86::VR256RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill, imm2);
4033}
4034
4035unsigned FastEmit_X86ISD_VPERM2F128_MVT_v8i32_rri(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill, uint64_t imm2) {
4036  if (RetVT.SimpleTy != MVT::v8i32)
4037    return 0;
4038  return FastEmitInst_rri(X86::VPERM2F128rr, X86::VR256RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill, imm2);
4039}
4040
4041unsigned FastEmit_X86ISD_VPERM2F128_MVT_v4i64_rri(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill, uint64_t imm2) {
4042  if (RetVT.SimpleTy != MVT::v4i64)
4043    return 0;
4044  return FastEmitInst_rri(X86::VPERM2F128rr, X86::VR256RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill, imm2);
4045}
4046
4047unsigned FastEmit_X86ISD_VPERM2F128_MVT_v8f32_rri(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill, uint64_t imm2) {
4048  if (RetVT.SimpleTy != MVT::v8f32)
4049    return 0;
4050  return FastEmitInst_rri(X86::VPERM2F128rr, X86::VR256RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill, imm2);
4051}
4052
4053unsigned FastEmit_X86ISD_VPERM2F128_MVT_v4f64_rri(MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill, uint64_t imm2) {
4054  if (RetVT.SimpleTy != MVT::v4f64)
4055    return 0;
4056  return FastEmitInst_rri(X86::VPERM2F128rr, X86::VR256RegisterClass, Op0, Op0IsKill, Op1, Op1IsKill, imm2);
4057}
4058
4059unsigned FastEmit_X86ISD_VPERM2F128_rri(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill, uint64_t imm2) {
4060  switch (VT.SimpleTy) {
4061  case MVT::v32i8: return FastEmit_X86ISD_VPERM2F128_MVT_v32i8_rri(RetVT, Op0, Op0IsKill, Op1, Op1IsKill, imm2);
4062  case MVT::v16i16: return FastEmit_X86ISD_VPERM2F128_MVT_v16i16_rri(RetVT, Op0, Op0IsKill, Op1, Op1IsKill, imm2);
4063  case MVT::v8i32: return FastEmit_X86ISD_VPERM2F128_MVT_v8i32_rri(RetVT, Op0, Op0IsKill, Op1, Op1IsKill, imm2);
4064  case MVT::v4i64: return FastEmit_X86ISD_VPERM2F128_MVT_v4i64_rri(RetVT, Op0, Op0IsKill, Op1, Op1IsKill, imm2);
4065  case MVT::v8f32: return FastEmit_X86ISD_VPERM2F128_MVT_v8f32_rri(RetVT, Op0, Op0IsKill, Op1, Op1IsKill, imm2);
4066  case MVT::v4f64: return FastEmit_X86ISD_VPERM2F128_MVT_v4f64_rri(RetVT, Op0, Op0IsKill, Op1, Op1IsKill, imm2);
4067  default: return 0;
4068  }
4069}
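// All VPERM2F128 entries emit the same X86::VPERM2F128rr instruction in the
// VR256 class and differ only in the vector element type; the immediate is the
// 128-bit lane-select control. No Subtarget feature test is generated here,
// presumably because the X86ISD::VPERM2F128 node is only formed on AVX
// subtargets in the first place.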
4070
4071// Top-level FastEmit dispatcher for the rri (register, register, immediate) form.
4072
4073unsigned FastEmit_rri(MVT VT, MVT RetVT, unsigned Opcode, unsigned Op0, bool Op0IsKill, unsigned Op1, bool Op1IsKill, uint64_t imm2) {
4074  switch (Opcode) {
4075  case X86ISD::CMPPD: return FastEmit_X86ISD_CMPPD_rri(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill, imm2);
4076  case X86ISD::CMPPS: return FastEmit_X86ISD_CMPPS_rri(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill, imm2);
4077  case X86ISD::FSETCCsd: return FastEmit_X86ISD_FSETCCsd_rri(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill, imm2);
4078  case X86ISD::FSETCCss: return FastEmit_X86ISD_FSETCCss_rri(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill, imm2);
4079  case X86ISD::INSERTPS: return FastEmit_X86ISD_INSERTPS_rri(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill, imm2);
4080  case X86ISD::SHLD: return FastEmit_X86ISD_SHLD_rri(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill, imm2);
4081  case X86ISD::SHRD: return FastEmit_X86ISD_SHRD_rri(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill, imm2);
4082  case X86ISD::SHUFPD: return FastEmit_X86ISD_SHUFPD_rri(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill, imm2);
4083  case X86ISD::SHUFPS: return FastEmit_X86ISD_SHUFPS_rri(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill, imm2);
4084  case X86ISD::VPERM2F128: return FastEmit_X86ISD_VPERM2F128_rri(VT, RetVT, Op0, Op0IsKill, Op1, Op1IsKill, imm2);
4085  default: return 0;
4086  }
4087}
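// The rri dispatcher above selects on the ISD/X86ISD opcode first, then each
// per-opcode helper selects on the operand type VT and finally checks RetVT.
// A result of 0 at any level means no FastISel pattern exists for that
// combination, and the caller is expected to fall back to its slower
// selection path. Illustrative use (register names are placeholders):
//   unsigned Res = FastEmit_rri(MVT::v4f32, MVT::v4f32, X86ISD::SHUFPS,
//                               AReg, true, BReg, true, /*imm*/ 0x1B);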
4088
4089// FastEmit functions for ISD::ADD.
4090
4091unsigned FastEmit_ISD_ADD_MVT_i8_ri(MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
4092  if (RetVT.SimpleTy != MVT::i8)
4093    return 0;
4094  return FastEmitInst_ri(X86::ADD8ri, X86::GR8RegisterClass, Op0, Op0IsKill, imm1);
4095}
4096
4097unsigned FastEmit_ISD_ADD_MVT_i16_ri(MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
4098  if (RetVT.SimpleTy != MVT::i16)
4099    return 0;
4100  return FastEmitInst_ri(X86::ADD16ri, X86::GR16RegisterClass, Op0, Op0IsKill, imm1);
4101}
4102
4103unsigned FastEmit_ISD_ADD_MVT_i32_ri(MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
4104  if (RetVT.SimpleTy != MVT::i32)
4105    return 0;
4106  return FastEmitInst_ri(X86::ADD32ri, X86::GR32RegisterClass, Op0, Op0IsKill, imm1);
4107}
4108
4109unsigned FastEmit_ISD_ADD_ri(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
4110  switch (VT.SimpleTy) {
4111  case MVT::i8: return FastEmit_ISD_ADD_MVT_i8_ri(RetVT, Op0, Op0IsKill, imm1);
4112  case MVT::i16: return FastEmit_ISD_ADD_MVT_i16_ri(RetVT, Op0, Op0IsKill, imm1);
4113  case MVT::i32: return FastEmit_ISD_ADD_MVT_i32_ri(RetVT, Op0, Op0IsKill, imm1);
4114  default: return 0;
4115  }
4116}
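// Only i8/i16/i32 are handled here; an i64 ADD with an immediate is covered by
// FastEmit_ISD_ADD_ri_Predicate_i64immSExt32 further down, which uses
// ADD64ri32 and is reached through the predicate check in FastEmit_ri.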
4117
4118// FastEmit functions for ISD::AND.
4119
4120unsigned FastEmit_ISD_AND_MVT_i8_ri(MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
4121  if (RetVT.SimpleTy != MVT::i8)
4122    return 0;
4123  return FastEmitInst_ri(X86::AND8ri, X86::GR8RegisterClass, Op0, Op0IsKill, imm1);
4124}
4125
4126unsigned FastEmit_ISD_AND_MVT_i16_ri(MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
4127  if (RetVT.SimpleTy != MVT::i16)
4128    return 0;
4129  return FastEmitInst_ri(X86::AND16ri, X86::GR16RegisterClass, Op0, Op0IsKill, imm1);
4130}
4131
4132unsigned FastEmit_ISD_AND_MVT_i32_ri(MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
4133  if (RetVT.SimpleTy != MVT::i32)
4134    return 0;
4135  return FastEmitInst_ri(X86::AND32ri, X86::GR32RegisterClass, Op0, Op0IsKill, imm1);
4136}
4137
4138unsigned FastEmit_ISD_AND_ri(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
4139  switch (VT.SimpleTy) {
4140  case MVT::i8: return FastEmit_ISD_AND_MVT_i8_ri(RetVT, Op0, Op0IsKill, imm1);
4141  case MVT::i16: return FastEmit_ISD_AND_MVT_i16_ri(RetVT, Op0, Op0IsKill, imm1);
4142  case MVT::i32: return FastEmit_ISD_AND_MVT_i32_ri(RetVT, Op0, Op0IsKill, imm1);
4143  default: return 0;
4144  }
4145}
4146
4147// FastEmit functions for ISD::EXTRACT_VECTOR_ELT.
4148
4149unsigned FastEmit_ISD_EXTRACT_VECTOR_ELT_MVT_v4i32_ri(MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
4150  if (RetVT.SimpleTy != MVT::i32)
4151    return 0;
4152  if ((Subtarget->hasAVX())) {
4153    return FastEmitInst_ri(X86::VPEXTRDrr, X86::GR32RegisterClass, Op0, Op0IsKill, imm1);
4154  }
4155  if ((Subtarget->hasSSE41())) {
4156    return FastEmitInst_ri(X86::PEXTRDrr, X86::GR32RegisterClass, Op0, Op0IsKill, imm1);
4157  }
4158  return 0;
4159}
4160
4161unsigned FastEmit_ISD_EXTRACT_VECTOR_ELT_MVT_v2i64_ri(MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
4162  if (RetVT.SimpleTy != MVT::i64)
4163    return 0;
4164  if ((Subtarget->hasAVX())) {
4165    return FastEmitInst_ri(X86::VPEXTRQrr, X86::GR64RegisterClass, Op0, Op0IsKill, imm1);
4166  }
4167  if ((Subtarget->hasSSE41())) {
4168    return FastEmitInst_ri(X86::PEXTRQrr, X86::GR64RegisterClass, Op0, Op0IsKill, imm1);
4169  }
4170  return 0;
4171}
4172
4173unsigned FastEmit_ISD_EXTRACT_VECTOR_ELT_ri(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
4174  switch (VT.SimpleTy) {
4175  case MVT::v4i32: return FastEmit_ISD_EXTRACT_VECTOR_ELT_MVT_v4i32_ri(RetVT, Op0, Op0IsKill, imm1);
4176  case MVT::v2i64: return FastEmit_ISD_EXTRACT_VECTOR_ELT_MVT_v2i64_ri(RetVT, Op0, Op0IsKill, imm1);
4177  default: return 0;
4178  }
4179}
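// Extracting an i32 from v4i32 or an i64 from v2i64 by constant index maps to
// PEXTRD/PEXTRQ, which require SSE4.1 (or their VEX forms under AVX); any
// other element/vector combination returns 0 and is not handled here.
// Sketch of a hypothetical call extracting element 2 of a v4i32 register:
//   unsigned Elt = FastEmit_ISD_EXTRACT_VECTOR_ELT_ri(MVT::v4i32, MVT::i32,
//                                                     VecReg, true, /*idx*/ 2);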
4180
4181// FastEmit functions for ISD::MUL.
4182
4183unsigned FastEmit_ISD_MUL_MVT_i16_ri(MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
4184  if (RetVT.SimpleTy != MVT::i16)
4185    return 0;
4186  return FastEmitInst_ri(X86::IMUL16rri, X86::GR16RegisterClass, Op0, Op0IsKill, imm1);
4187}
4188
4189unsigned FastEmit_ISD_MUL_MVT_i32_ri(MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
4190  if (RetVT.SimpleTy != MVT::i32)
4191    return 0;
4192  return FastEmitInst_ri(X86::IMUL32rri, X86::GR32RegisterClass, Op0, Op0IsKill, imm1);
4193}
4194
4195unsigned FastEmit_ISD_MUL_ri(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
4196  switch (VT.SimpleTy) {
4197  case MVT::i16: return FastEmit_ISD_MUL_MVT_i16_ri(RetVT, Op0, Op0IsKill, imm1);
4198  case MVT::i32: return FastEmit_ISD_MUL_MVT_i32_ri(RetVT, Op0, Op0IsKill, imm1);
4199  default: return 0;
4200  }
4201}
4202
4203// FastEmit functions for ISD::OR.
4204
4205unsigned FastEmit_ISD_OR_MVT_i8_ri(MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
4206  if (RetVT.SimpleTy != MVT::i8)
4207    return 0;
4208  return FastEmitInst_ri(X86::OR8ri, X86::GR8RegisterClass, Op0, Op0IsKill, imm1);
4209}
4210
4211unsigned FastEmit_ISD_OR_MVT_i16_ri(MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
4212  if (RetVT.SimpleTy != MVT::i16)
4213    return 0;
4214  return FastEmitInst_ri(X86::OR16ri, X86::GR16RegisterClass, Op0, Op0IsKill, imm1);
4215}
4216
4217unsigned FastEmit_ISD_OR_MVT_i32_ri(MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
4218  if (RetVT.SimpleTy != MVT::i32)
4219    return 0;
4220  return FastEmitInst_ri(X86::OR32ri, X86::GR32RegisterClass, Op0, Op0IsKill, imm1);
4221}
4222
4223unsigned FastEmit_ISD_OR_ri(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
4224  switch (VT.SimpleTy) {
4225  case MVT::i8: return FastEmit_ISD_OR_MVT_i8_ri(RetVT, Op0, Op0IsKill, imm1);
4226  case MVT::i16: return FastEmit_ISD_OR_MVT_i16_ri(RetVT, Op0, Op0IsKill, imm1);
4227  case MVT::i32: return FastEmit_ISD_OR_MVT_i32_ri(RetVT, Op0, Op0IsKill, imm1);
4228  default: return 0;
4229  }
4230}
4231
4232// FastEmit functions for ISD::ROTL.
4233
4234unsigned FastEmit_ISD_ROTL_MVT_i8_ri(MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
4235  if (RetVT.SimpleTy != MVT::i8)
4236    return 0;
4237  return FastEmitInst_ri(X86::ROL8ri, X86::GR8RegisterClass, Op0, Op0IsKill, imm1);
4238}
4239
4240unsigned FastEmit_ISD_ROTL_MVT_i16_ri(MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
4241  if (RetVT.SimpleTy != MVT::i16)
4242    return 0;
4243  return FastEmitInst_ri(X86::ROL16ri, X86::GR16RegisterClass, Op0, Op0IsKill, imm1);
4244}
4245
4246unsigned FastEmit_ISD_ROTL_MVT_i32_ri(MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
4247  if (RetVT.SimpleTy != MVT::i32)
4248    return 0;
4249  return FastEmitInst_ri(X86::ROL32ri, X86::GR32RegisterClass, Op0, Op0IsKill, imm1);
4250}
4251
4252unsigned FastEmit_ISD_ROTL_MVT_i64_ri(MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
4253  if (RetVT.SimpleTy != MVT::i64)
4254    return 0;
4255  return FastEmitInst_ri(X86::ROL64ri, X86::GR64RegisterClass, Op0, Op0IsKill, imm1);
4256}
4257
4258unsigned FastEmit_ISD_ROTL_ri(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
4259  switch (VT.SimpleTy) {
4260  case MVT::i8: return FastEmit_ISD_ROTL_MVT_i8_ri(RetVT, Op0, Op0IsKill, imm1);
4261  case MVT::i16: return FastEmit_ISD_ROTL_MVT_i16_ri(RetVT, Op0, Op0IsKill, imm1);
4262  case MVT::i32: return FastEmit_ISD_ROTL_MVT_i32_ri(RetVT, Op0, Op0IsKill, imm1);
4263  case MVT::i64: return FastEmit_ISD_ROTL_MVT_i64_ri(RetVT, Op0, Op0IsKill, imm1);
4264  default: return 0;
4265  }
4266}
4267
4268// FastEmit functions for ISD::ROTR.
4269
4270unsigned FastEmit_ISD_ROTR_MVT_i8_ri(MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
4271  if (RetVT.SimpleTy != MVT::i8)
4272    return 0;
4273  return FastEmitInst_ri(X86::ROR8ri, X86::GR8RegisterClass, Op0, Op0IsKill, imm1);
4274}
4275
4276unsigned FastEmit_ISD_ROTR_MVT_i16_ri(MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
4277  if (RetVT.SimpleTy != MVT::i16)
4278    return 0;
4279  return FastEmitInst_ri(X86::ROR16ri, X86::GR16RegisterClass, Op0, Op0IsKill, imm1);
4280}
4281
4282unsigned FastEmit_ISD_ROTR_MVT_i32_ri(MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
4283  if (RetVT.SimpleTy != MVT::i32)
4284    return 0;
4285  return FastEmitInst_ri(X86::ROR32ri, X86::GR32RegisterClass, Op0, Op0IsKill, imm1);
4286}
4287
4288unsigned FastEmit_ISD_ROTR_MVT_i64_ri(MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
4289  if (RetVT.SimpleTy != MVT::i64)
4290    return 0;
4291  return FastEmitInst_ri(X86::ROR64ri, X86::GR64RegisterClass, Op0, Op0IsKill, imm1);
4292}
4293
4294unsigned FastEmit_ISD_ROTR_ri(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
4295  switch (VT.SimpleTy) {
4296  case MVT::i8: return FastEmit_ISD_ROTR_MVT_i8_ri(RetVT, Op0, Op0IsKill, imm1);
4297  case MVT::i16: return FastEmit_ISD_ROTR_MVT_i16_ri(RetVT, Op0, Op0IsKill, imm1);
4298  case MVT::i32: return FastEmit_ISD_ROTR_MVT_i32_ri(RetVT, Op0, Op0IsKill, imm1);
4299  case MVT::i64: return FastEmit_ISD_ROTR_MVT_i64_ri(RetVT, Op0, Op0IsKill, imm1);
4300  default: return 0;
4301  }
4302}
4303
4304// FastEmit functions for ISD::SHL.
4305
4306unsigned FastEmit_ISD_SHL_MVT_i8_ri(MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
4307  if (RetVT.SimpleTy != MVT::i8)
4308    return 0;
4309  return FastEmitInst_ri(X86::SHL8ri, X86::GR8RegisterClass, Op0, Op0IsKill, imm1);
4310}
4311
4312unsigned FastEmit_ISD_SHL_MVT_i16_ri(MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
4313  if (RetVT.SimpleTy != MVT::i16)
4314    return 0;
4315  return FastEmitInst_ri(X86::SHL16ri, X86::GR16RegisterClass, Op0, Op0IsKill, imm1);
4316}
4317
4318unsigned FastEmit_ISD_SHL_MVT_i32_ri(MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
4319  if (RetVT.SimpleTy != MVT::i32)
4320    return 0;
4321  return FastEmitInst_ri(X86::SHL32ri, X86::GR32RegisterClass, Op0, Op0IsKill, imm1);
4322}
4323
4324unsigned FastEmit_ISD_SHL_MVT_i64_ri(MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
4325  if (RetVT.SimpleTy != MVT::i64)
4326    return 0;
4327  return FastEmitInst_ri(X86::SHL64ri, X86::GR64RegisterClass, Op0, Op0IsKill, imm1);
4328}
4329
4330unsigned FastEmit_ISD_SHL_ri(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
4331  switch (VT.SimpleTy) {
4332  case MVT::i8: return FastEmit_ISD_SHL_MVT_i8_ri(RetVT, Op0, Op0IsKill, imm1);
4333  case MVT::i16: return FastEmit_ISD_SHL_MVT_i16_ri(RetVT, Op0, Op0IsKill, imm1);
4334  case MVT::i32: return FastEmit_ISD_SHL_MVT_i32_ri(RetVT, Op0, Op0IsKill, imm1);
4335  case MVT::i64: return FastEmit_ISD_SHL_MVT_i64_ri(RetVT, Op0, Op0IsKill, imm1);
4336  default: return 0;
4337  }
4338}
4339
4340// FastEmit functions for ISD::SRA.
4341
4342unsigned FastEmit_ISD_SRA_MVT_i8_ri(MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
4343  if (RetVT.SimpleTy != MVT::i8)
4344    return 0;
4345  return FastEmitInst_ri(X86::SAR8ri, X86::GR8RegisterClass, Op0, Op0IsKill, imm1);
4346}
4347
4348unsigned FastEmit_ISD_SRA_MVT_i16_ri(MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
4349  if (RetVT.SimpleTy != MVT::i16)
4350    return 0;
4351  return FastEmitInst_ri(X86::SAR16ri, X86::GR16RegisterClass, Op0, Op0IsKill, imm1);
4352}
4353
4354unsigned FastEmit_ISD_SRA_MVT_i32_ri(MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
4355  if (RetVT.SimpleTy != MVT::i32)
4356    return 0;
4357  return FastEmitInst_ri(X86::SAR32ri, X86::GR32RegisterClass, Op0, Op0IsKill, imm1);
4358}
4359
4360unsigned FastEmit_ISD_SRA_MVT_i64_ri(MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
4361  if (RetVT.SimpleTy != MVT::i64)
4362    return 0;
4363  return FastEmitInst_ri(X86::SAR64ri, X86::GR64RegisterClass, Op0, Op0IsKill, imm1);
4364}
4365
4366unsigned FastEmit_ISD_SRA_ri(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
4367  switch (VT.SimpleTy) {
4368  case MVT::i8: return FastEmit_ISD_SRA_MVT_i8_ri(RetVT, Op0, Op0IsKill, imm1);
4369  case MVT::i16: return FastEmit_ISD_SRA_MVT_i16_ri(RetVT, Op0, Op0IsKill, imm1);
4370  case MVT::i32: return FastEmit_ISD_SRA_MVT_i32_ri(RetVT, Op0, Op0IsKill, imm1);
4371  case MVT::i64: return FastEmit_ISD_SRA_MVT_i64_ri(RetVT, Op0, Op0IsKill, imm1);
4372  default: return 0;
4373  }
4374}
4375
4376// FastEmit functions for ISD::SRL.
4377
4378unsigned FastEmit_ISD_SRL_MVT_i8_ri(MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
4379  if (RetVT.SimpleTy != MVT::i8)
4380    return 0;
4381  return FastEmitInst_ri(X86::SHR8ri, X86::GR8RegisterClass, Op0, Op0IsKill, imm1);
4382}
4383
4384unsigned FastEmit_ISD_SRL_MVT_i16_ri(MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
4385  if (RetVT.SimpleTy != MVT::i16)
4386    return 0;
4387  return FastEmitInst_ri(X86::SHR16ri, X86::GR16RegisterClass, Op0, Op0IsKill, imm1);
4388}
4389
4390unsigned FastEmit_ISD_SRL_MVT_i32_ri(MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
4391  if (RetVT.SimpleTy != MVT::i32)
4392    return 0;
4393  return FastEmitInst_ri(X86::SHR32ri, X86::GR32RegisterClass, Op0, Op0IsKill, imm1);
4394}
4395
4396unsigned FastEmit_ISD_SRL_MVT_i64_ri(MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
4397  if (RetVT.SimpleTy != MVT::i64)
4398    return 0;
4399  return FastEmitInst_ri(X86::SHR64ri, X86::GR64RegisterClass, Op0, Op0IsKill, imm1);
4400}
4401
4402unsigned FastEmit_ISD_SRL_ri(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
4403  switch (VT.SimpleTy) {
4404  case MVT::i8: return FastEmit_ISD_SRL_MVT_i8_ri(RetVT, Op0, Op0IsKill, imm1);
4405  case MVT::i16: return FastEmit_ISD_SRL_MVT_i16_ri(RetVT, Op0, Op0IsKill, imm1);
4406  case MVT::i32: return FastEmit_ISD_SRL_MVT_i32_ri(RetVT, Op0, Op0IsKill, imm1);
4407  case MVT::i64: return FastEmit_ISD_SRL_MVT_i64_ri(RetVT, Op0, Op0IsKill, imm1);
4408  default: return 0;
4409  }
4410}
4411
4412// FastEmit functions for ISD::SUB.
4413
4414unsigned FastEmit_ISD_SUB_MVT_i8_ri(MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
4415  if (RetVT.SimpleTy != MVT::i8)
4416    return 0;
4417  return FastEmitInst_ri(X86::SUB8ri, X86::GR8RegisterClass, Op0, Op0IsKill, imm1);
4418}
4419
4420unsigned FastEmit_ISD_SUB_MVT_i16_ri(MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
4421  if (RetVT.SimpleTy != MVT::i16)
4422    return 0;
4423  return FastEmitInst_ri(X86::SUB16ri, X86::GR16RegisterClass, Op0, Op0IsKill, imm1);
4424}
4425
4426unsigned FastEmit_ISD_SUB_MVT_i32_ri(MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
4427  if (RetVT.SimpleTy != MVT::i32)
4428    return 0;
4429  return FastEmitInst_ri(X86::SUB32ri, X86::GR32RegisterClass, Op0, Op0IsKill, imm1);
4430}
4431
4432unsigned FastEmit_ISD_SUB_ri(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
4433  switch (VT.SimpleTy) {
4434  case MVT::i8: return FastEmit_ISD_SUB_MVT_i8_ri(RetVT, Op0, Op0IsKill, imm1);
4435  case MVT::i16: return FastEmit_ISD_SUB_MVT_i16_ri(RetVT, Op0, Op0IsKill, imm1);
4436  case MVT::i32: return FastEmit_ISD_SUB_MVT_i32_ri(RetVT, Op0, Op0IsKill, imm1);
4437  default: return 0;
4438  }
4439}
4440
4441// FastEmit functions for ISD::XOR.
4442
4443unsigned FastEmit_ISD_XOR_MVT_i8_ri(MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
4444  if (RetVT.SimpleTy != MVT::i8)
4445    return 0;
4446  return FastEmitInst_ri(X86::XOR8ri, X86::GR8RegisterClass, Op0, Op0IsKill, imm1);
4447}
4448
4449unsigned FastEmit_ISD_XOR_MVT_i16_ri(MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
4450  if (RetVT.SimpleTy != MVT::i16)
4451    return 0;
4452  return FastEmitInst_ri(X86::XOR16ri, X86::GR16RegisterClass, Op0, Op0IsKill, imm1);
4453}
4454
4455unsigned FastEmit_ISD_XOR_MVT_i32_ri(MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
4456  if (RetVT.SimpleTy != MVT::i32)
4457    return 0;
4458  return FastEmitInst_ri(X86::XOR32ri, X86::GR32RegisterClass, Op0, Op0IsKill, imm1);
4459}
4460
4461unsigned FastEmit_ISD_XOR_ri(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
4462  switch (VT.SimpleTy) {
4463  case MVT::i8: return FastEmit_ISD_XOR_MVT_i8_ri(RetVT, Op0, Op0IsKill, imm1);
4464  case MVT::i16: return FastEmit_ISD_XOR_MVT_i16_ri(RetVT, Op0, Op0IsKill, imm1);
4465  case MVT::i32: return FastEmit_ISD_XOR_MVT_i32_ri(RetVT, Op0, Op0IsKill, imm1);
4466  default: return 0;
4467  }
4468}
4469
4470// FastEmit functions for X86ISD::CMP.
4471
4472unsigned FastEmit_X86ISD_CMP_MVT_i8_ri(MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
4473  if (RetVT.SimpleTy != MVT::i32)
4474    return 0;
4475  return FastEmitInst_ri(X86::CMP8ri, X86::GR8RegisterClass, Op0, Op0IsKill, imm1);
4476}
4477
4478unsigned FastEmit_X86ISD_CMP_MVT_i16_ri(MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
4479  if (RetVT.SimpleTy != MVT::i32)
4480    return 0;
4481  return FastEmitInst_ri(X86::CMP16ri, X86::GR16RegisterClass, Op0, Op0IsKill, imm1);
4482}
4483
4484unsigned FastEmit_X86ISD_CMP_MVT_i32_ri(MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
4485  if (RetVT.SimpleTy != MVT::i32)
4486    return 0;
4487  return FastEmitInst_ri(X86::CMP32ri, X86::GR32RegisterClass, Op0, Op0IsKill, imm1);
4488}
4489
4490unsigned FastEmit_X86ISD_CMP_ri(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
4491  switch (VT.SimpleTy) {
4492  case MVT::i8: return FastEmit_X86ISD_CMP_MVT_i8_ri(RetVT, Op0, Op0IsKill, imm1);
4493  case MVT::i16: return FastEmit_X86ISD_CMP_MVT_i16_ri(RetVT, Op0, Op0IsKill, imm1);
4494  case MVT::i32: return FastEmit_X86ISD_CMP_MVT_i32_ri(RetVT, Op0, Op0IsKill, imm1);
4495  default: return 0;
4496  }
4497}
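// For X86ISD::CMP the helpers check RetVT against MVT::i32 regardless of the
// operand width, since the node's result is the flags value, which this target
// models as an i32; the operand width selects CMP8ri/CMP16ri/CMP32ri.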
4498
4499// FastEmit functions for X86ISD::PEXTRB.
4500
4501unsigned FastEmit_X86ISD_PEXTRB_MVT_v16i8_ri(MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
4502  if (RetVT.SimpleTy != MVT::i32)
4503    return 0;
4504  if ((Subtarget->hasAVX())) {
4505    return FastEmitInst_ri(X86::VPEXTRBrr, X86::GR32RegisterClass, Op0, Op0IsKill, imm1);
4506  }
4507  if ((Subtarget->hasSSE41())) {
4508    return FastEmitInst_ri(X86::PEXTRBrr, X86::GR32RegisterClass, Op0, Op0IsKill, imm1);
4509  }
4510  return 0;
4511}
4512
4513unsigned FastEmit_X86ISD_PEXTRB_ri(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
4514  switch (VT.SimpleTy) {
4515  case MVT::v16i8: return FastEmit_X86ISD_PEXTRB_MVT_v16i8_ri(RetVT, Op0, Op0IsKill, imm1);
4516  default: return 0;
4517  }
4518}
4519
4520// FastEmit functions for X86ISD::PEXTRW.
4521
4522unsigned FastEmit_X86ISD_PEXTRW_MVT_v8i16_ri(MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
4523  if (RetVT.SimpleTy != MVT::i32)
4524    return 0;
4525  if ((Subtarget->hasAVX())) {
4526    return FastEmitInst_ri(X86::VPEXTRWri, X86::GR32RegisterClass, Op0, Op0IsKill, imm1);
4527  }
4528  if ((Subtarget->hasSSE2())) {
4529    return FastEmitInst_ri(X86::PEXTRWri, X86::GR32RegisterClass, Op0, Op0IsKill, imm1);
4530  }
4531  return 0;
4532}
4533
4534unsigned FastEmit_X86ISD_PEXTRW_ri(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
4535  switch (VT.SimpleTy) {
4536  case MVT::v8i16: return FastEmit_X86ISD_PEXTRW_MVT_v8i16_ri(RetVT, Op0, Op0IsKill, imm1);
4537  default: return 0;
4538  }
4539}
4540
4541// FastEmit functions for X86ISD::PSHUFD.
4542
4543unsigned FastEmit_X86ISD_PSHUFD_MVT_v4i32_ri(MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
4544  if (RetVT.SimpleTy != MVT::v4i32)
4545    return 0;
4546  if ((Subtarget->hasAVX())) {
4547    return FastEmitInst_ri(X86::VPSHUFDri, X86::VR128RegisterClass, Op0, Op0IsKill, imm1);
4548  }
4549  if ((Subtarget->hasSSE2())) {
4550    return FastEmitInst_ri(X86::PSHUFDri, X86::VR128RegisterClass, Op0, Op0IsKill, imm1);
4551  }
4552  return 0;
4553}
4554
4555unsigned FastEmit_X86ISD_PSHUFD_MVT_v4f32_ri(MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
4556  if (RetVT.SimpleTy != MVT::v4f32)
4557    return 0;
4558  if ((Subtarget->hasAVX())) {
4559    return FastEmitInst_ri(X86::VPSHUFDri, X86::VR128RegisterClass, Op0, Op0IsKill, imm1);
4560  }
4561  if ((Subtarget->hasSSE2())) {
4562    return FastEmitInst_ri(X86::PSHUFDri, X86::VR128RegisterClass, Op0, Op0IsKill, imm1);
4563  }
4564  return 0;
4565}
4566
4567unsigned FastEmit_X86ISD_PSHUFD_ri(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
4568  switch (VT.SimpleTy) {
4569  case MVT::v4i32: return FastEmit_X86ISD_PSHUFD_MVT_v4i32_ri(RetVT, Op0, Op0IsKill, imm1);
4570  case MVT::v4f32: return FastEmit_X86ISD_PSHUFD_MVT_v4f32_ri(RetVT, Op0, Op0IsKill, imm1);
4571  default: return 0;
4572  }
4573}
4574
4575// FastEmit functions for X86ISD::PSHUFHW.
4576
4577unsigned FastEmit_X86ISD_PSHUFHW_MVT_v8i16_ri(MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
4578  if (RetVT.SimpleTy != MVT::v8i16)
4579    return 0;
4580  if ((Subtarget->hasAVX())) {
4581    return FastEmitInst_ri(X86::VPSHUFHWri, X86::VR128RegisterClass, Op0, Op0IsKill, imm1);
4582  }
4583  if ((Subtarget->hasSSE2())) {
4584    return FastEmitInst_ri(X86::PSHUFHWri, X86::VR128RegisterClass, Op0, Op0IsKill, imm1);
4585  }
4586  return 0;
4587}
4588
4589unsigned FastEmit_X86ISD_PSHUFHW_ri(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
4590  switch (VT.SimpleTy) {
4591  case MVT::v8i16: return FastEmit_X86ISD_PSHUFHW_MVT_v8i16_ri(RetVT, Op0, Op0IsKill, imm1);
4592  default: return 0;
4593  }
4594}
4595
4596// FastEmit functions for X86ISD::PSHUFLW.
4597
4598unsigned FastEmit_X86ISD_PSHUFLW_MVT_v8i16_ri(MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
4599  if (RetVT.SimpleTy != MVT::v8i16)
4600    return 0;
4601  if ((Subtarget->hasAVX())) {
4602    return FastEmitInst_ri(X86::VPSHUFLWri, X86::VR128RegisterClass, Op0, Op0IsKill, imm1);
4603  }
4604  if ((Subtarget->hasSSE2())) {
4605    return FastEmitInst_ri(X86::PSHUFLWri, X86::VR128RegisterClass, Op0, Op0IsKill, imm1);
4606  }
4607  return 0;
4608}
4609
4610unsigned FastEmit_X86ISD_PSHUFLW_ri(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
4611  switch (VT.SimpleTy) {
4612  case MVT::v8i16: return FastEmit_X86ISD_PSHUFLW_MVT_v8i16_ri(RetVT, Op0, Op0IsKill, imm1);
4613  default: return 0;
4614  }
4615}
4616
4617// FastEmit functions for X86ISD::TC_RETURN.
4618
4619unsigned FastEmit_X86ISD_TC_RETURN_MVT_i32_ri(MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
4620  if (RetVT.SimpleTy != MVT::isVoid)
4621    return 0;
4622  if ((!Subtarget->is64Bit())) {
4623    return FastEmitInst_ri(X86::TCRETURNri, X86::GR32_TCRegisterClass, Op0, Op0IsKill, imm1);
4624  }
4625  return 0;
4626}
4627
4628unsigned FastEmit_X86ISD_TC_RETURN_ri(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
4629  switch (VT.SimpleTy) {
4630  case MVT::i32: return FastEmit_X86ISD_TC_RETURN_MVT_i32_ri(RetVT, Op0, Op0IsKill, imm1);
4631  default: return 0;
4632  }
4633}
4634
4635// FastEmit functions for X86ISD::VPERMILPDY.
4636
4637unsigned FastEmit_X86ISD_VPERMILPDY_MVT_v4i64_ri(MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
4638  if (RetVT.SimpleTy != MVT::v4i64)
4639    return 0;
4640  return FastEmitInst_ri(X86::VPERMILPDYri, X86::VR256RegisterClass, Op0, Op0IsKill, imm1);
4641}
4642
4643unsigned FastEmit_X86ISD_VPERMILPDY_MVT_v4f64_ri(MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
4644  if (RetVT.SimpleTy != MVT::v4f64)
4645    return 0;
4646  return FastEmitInst_ri(X86::VPERMILPDYri, X86::VR256RegisterClass, Op0, Op0IsKill, imm1);
4647}
4648
4649unsigned FastEmit_X86ISD_VPERMILPDY_ri(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
4650  switch (VT.SimpleTy) {
4651  case MVT::v4i64: return FastEmit_X86ISD_VPERMILPDY_MVT_v4i64_ri(RetVT, Op0, Op0IsKill, imm1);
4652  case MVT::v4f64: return FastEmit_X86ISD_VPERMILPDY_MVT_v4f64_ri(RetVT, Op0, Op0IsKill, imm1);
4653  default: return 0;
4654  }
4655}
4656
4657// FastEmit functions for X86ISD::VPERMILPSY.
4658
4659unsigned FastEmit_X86ISD_VPERMILPSY_MVT_v8i32_ri(MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
4660  if (RetVT.SimpleTy != MVT::v8i32)
4661    return 0;
4662  return FastEmitInst_ri(X86::VPERMILPSYri, X86::VR256RegisterClass, Op0, Op0IsKill, imm1);
4663}
4664
4665unsigned FastEmit_X86ISD_VPERMILPSY_MVT_v8f32_ri(MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
4666  if (RetVT.SimpleTy != MVT::v8f32)
4667    return 0;
4668  return FastEmitInst_ri(X86::VPERMILPSYri, X86::VR256RegisterClass, Op0, Op0IsKill, imm1);
4669}
4670
4671unsigned FastEmit_X86ISD_VPERMILPSY_ri(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
4672  switch (VT.SimpleTy) {
4673  case MVT::v8i32: return FastEmit_X86ISD_VPERMILPSY_MVT_v8i32_ri(RetVT, Op0, Op0IsKill, imm1);
4674  case MVT::v8f32: return FastEmit_X86ISD_VPERMILPSY_MVT_v8f32_ri(RetVT, Op0, Op0IsKill, imm1);
4675  default: return 0;
4676  }
4677}
4678
4679// Top-level FastEmit dispatcher for the ri (register, immediate) form.
4680
4681unsigned FastEmit_ri(MVT VT, MVT RetVT, unsigned Opcode, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
4682  if (VT == MVT::i64 && Predicate_i64immSExt32(imm1))
4683    if (unsigned Reg = FastEmit_ri_Predicate_i64immSExt32(VT, RetVT, Opcode, Op0, Op0IsKill, imm1))
4684      return Reg;
4685
4686  switch (Opcode) {
4687  case ISD::ADD: return FastEmit_ISD_ADD_ri(VT, RetVT, Op0, Op0IsKill, imm1);
4688  case ISD::AND: return FastEmit_ISD_AND_ri(VT, RetVT, Op0, Op0IsKill, imm1);
4689  case ISD::EXTRACT_VECTOR_ELT: return FastEmit_ISD_EXTRACT_VECTOR_ELT_ri(VT, RetVT, Op0, Op0IsKill, imm1);
4690  case ISD::MUL: return FastEmit_ISD_MUL_ri(VT, RetVT, Op0, Op0IsKill, imm1);
4691  case ISD::OR: return FastEmit_ISD_OR_ri(VT, RetVT, Op0, Op0IsKill, imm1);
4692  case ISD::ROTL: return FastEmit_ISD_ROTL_ri(VT, RetVT, Op0, Op0IsKill, imm1);
4693  case ISD::ROTR: return FastEmit_ISD_ROTR_ri(VT, RetVT, Op0, Op0IsKill, imm1);
4694  case ISD::SHL: return FastEmit_ISD_SHL_ri(VT, RetVT, Op0, Op0IsKill, imm1);
4695  case ISD::SRA: return FastEmit_ISD_SRA_ri(VT, RetVT, Op0, Op0IsKill, imm1);
4696  case ISD::SRL: return FastEmit_ISD_SRL_ri(VT, RetVT, Op0, Op0IsKill, imm1);
4697  case ISD::SUB: return FastEmit_ISD_SUB_ri(VT, RetVT, Op0, Op0IsKill, imm1);
4698  case ISD::XOR: return FastEmit_ISD_XOR_ri(VT, RetVT, Op0, Op0IsKill, imm1);
4699  case X86ISD::CMP: return FastEmit_X86ISD_CMP_ri(VT, RetVT, Op0, Op0IsKill, imm1);
4700  case X86ISD::PEXTRB: return FastEmit_X86ISD_PEXTRB_ri(VT, RetVT, Op0, Op0IsKill, imm1);
4701  case X86ISD::PEXTRW: return FastEmit_X86ISD_PEXTRW_ri(VT, RetVT, Op0, Op0IsKill, imm1);
4702  case X86ISD::PSHUFD: return FastEmit_X86ISD_PSHUFD_ri(VT, RetVT, Op0, Op0IsKill, imm1);
4703  case X86ISD::PSHUFHW: return FastEmit_X86ISD_PSHUFHW_ri(VT, RetVT, Op0, Op0IsKill, imm1);
4704  case X86ISD::PSHUFLW: return FastEmit_X86ISD_PSHUFLW_ri(VT, RetVT, Op0, Op0IsKill, imm1);
4705  case X86ISD::TC_RETURN: return FastEmit_X86ISD_TC_RETURN_ri(VT, RetVT, Op0, Op0IsKill, imm1);
4706  case X86ISD::VPERMILPDY: return FastEmit_X86ISD_VPERMILPDY_ri(VT, RetVT, Op0, Op0IsKill, imm1);
4707  case X86ISD::VPERMILPSY: return FastEmit_X86ISD_VPERMILPSY_ri(VT, RetVT, Op0, Op0IsKill, imm1);
4708  default: return 0;
4709  }
4710}
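// FastEmit_ri first tries the predicated table when the operand type is i64
// and the immediate is accepted by Predicate_i64immSExt32 (the 64-bit ALU
// forms used there encode only a sign-extended 32-bit immediate field), and
// only then falls through to the generic per-opcode dispatch above.
// Illustrative use with placeholder register names:
//   // x & 0xFF on an i32 value -> AND32ri
//   unsigned R1 = FastEmit_ri(MVT::i32, MVT::i32, ISD::AND, XReg, true, 0xFF);
//   // y + 42 on an i64 value -> ADD64ri32 via the predicated path
//   unsigned R2 = FastEmit_ri(MVT::i64, MVT::i64, ISD::ADD, YReg, true, 42);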
4711
4712// FastEmit functions for X86ISD::VASTART_SAVE_XMM_REGS.
4713
4714unsigned FastEmit_X86ISD_VASTART_SAVE_XMM_REGS_MVT_i8_rii(MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1, uint64_t imm2) {
4715  if (RetVT.SimpleTy != MVT::isVoid)
4716    return 0;
4717  return FastEmitInst_rii(X86::VASTART_SAVE_XMM_REGS, X86::GR8RegisterClass, Op0, Op0IsKill, imm1, imm2);
4718}
4719
4720unsigned FastEmit_X86ISD_VASTART_SAVE_XMM_REGS_rii(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1, uint64_t imm2) {
4721  switch (VT.SimpleTy) {
4722  case MVT::i8: return FastEmit_X86ISD_VASTART_SAVE_XMM_REGS_MVT_i8_rii(RetVT, Op0, Op0IsKill, imm1, imm2);
4723  default: return 0;
4724  }
4725}
4726
4727// Top-level FastEmit dispatcher for the rii (register, immediate, immediate) form.
4728
4729unsigned FastEmit_rii(MVT VT, MVT RetVT, unsigned Opcode, unsigned Op0, bool Op0IsKill, uint64_t imm1, uint64_t imm2) {
4730  switch (Opcode) {
4731  case X86ISD::VASTART_SAVE_XMM_REGS: return FastEmit_X86ISD_VASTART_SAVE_XMM_REGS_rii(VT, RetVT, Op0, Op0IsKill, imm1, imm2);
4732  default: return 0;
4733  }
4734}
4735
4736// FastEmit functions for ISD::ADD.
4737
4738unsigned FastEmit_ISD_ADD_MVT_i64_ri_Predicate_i64immSExt32(MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
4739  if (RetVT.SimpleTy != MVT::i64)
4740    return 0;
4741  return FastEmitInst_ri(X86::ADD64ri32, X86::GR64RegisterClass, Op0, Op0IsKill, imm1);
4742}
4743
4744unsigned FastEmit_ISD_ADD_ri_Predicate_i64immSExt32(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
4745  switch (VT.SimpleTy) {
4746  case MVT::i64: return FastEmit_ISD_ADD_MVT_i64_ri_Predicate_i64immSExt32(RetVT, Op0, Op0IsKill, imm1);
4747  default: return 0;
4748  }
4749}
4750
4751// FastEmit functions for ISD::AND.
4752
4753unsigned FastEmit_ISD_AND_MVT_i64_ri_Predicate_i64immSExt32(MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
4754  if (RetVT.SimpleTy != MVT::i64)
4755    return 0;
4756  return FastEmitInst_ri(X86::AND64ri32, X86::GR64RegisterClass, Op0, Op0IsKill, imm1);
4757}
4758
4759unsigned FastEmit_ISD_AND_ri_Predicate_i64immSExt32(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
4760  switch (VT.SimpleTy) {
4761  case MVT::i64: return FastEmit_ISD_AND_MVT_i64_ri_Predicate_i64immSExt32(RetVT, Op0, Op0IsKill, imm1);
4762  default: return 0;
4763  }
4764}
4765
4766// FastEmit functions for ISD::MUL.
4767
4768unsigned FastEmit_ISD_MUL_MVT_i64_ri_Predicate_i64immSExt32(MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
4769  if (RetVT.SimpleTy != MVT::i64)
4770    return 0;
4771  return FastEmitInst_ri(X86::IMUL64rri32, X86::GR64RegisterClass, Op0, Op0IsKill, imm1);
4772}
4773
4774unsigned FastEmit_ISD_MUL_ri_Predicate_i64immSExt32(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
4775  switch (VT.SimpleTy) {
4776  case MVT::i64: return FastEmit_ISD_MUL_MVT_i64_ri_Predicate_i64immSExt32(RetVT, Op0, Op0IsKill, imm1);
4777  default: return 0;
4778  }
4779}
4780
4781// FastEmit functions for ISD::OR.
4782
4783unsigned FastEmit_ISD_OR_MVT_i64_ri_Predicate_i64immSExt32(MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
4784  if (RetVT.SimpleTy != MVT::i64)
4785    return 0;
4786  return FastEmitInst_ri(X86::OR64ri32, X86::GR64RegisterClass, Op0, Op0IsKill, imm1);
4787}
4788
4789unsigned FastEmit_ISD_OR_ri_Predicate_i64immSExt32(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
4790  switch (VT.SimpleTy) {
4791  case MVT::i64: return FastEmit_ISD_OR_MVT_i64_ri_Predicate_i64immSExt32(RetVT, Op0, Op0IsKill, imm1);
4792  default: return 0;
4793  }
4794}
4795
4796// FastEmit functions for ISD::SUB.
4797
4798unsigned FastEmit_ISD_SUB_MVT_i64_ri_Predicate_i64immSExt32(MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
4799  if (RetVT.SimpleTy != MVT::i64)
4800    return 0;
4801  return FastEmitInst_ri(X86::SUB64ri32, X86::GR64RegisterClass, Op0, Op0IsKill, imm1);
4802}
4803
4804unsigned FastEmit_ISD_SUB_ri_Predicate_i64immSExt32(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
4805  switch (VT.SimpleTy) {
4806  case MVT::i64: return FastEmit_ISD_SUB_MVT_i64_ri_Predicate_i64immSExt32(RetVT, Op0, Op0IsKill, imm1);
4807  default: return 0;
4808  }
4809}
4810
4811// FastEmit functions for ISD::XOR.
4812
4813unsigned FastEmit_ISD_XOR_MVT_i64_ri_Predicate_i64immSExt32(MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
4814  if (RetVT.SimpleTy != MVT::i64)
4815    return 0;
4816  return FastEmitInst_ri(X86::XOR64ri32, X86::GR64RegisterClass, Op0, Op0IsKill, imm1);
4817}
4818
4819unsigned FastEmit_ISD_XOR_ri_Predicate_i64immSExt32(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
4820  switch (VT.SimpleTy) {
4821  case MVT::i64: return FastEmit_ISD_XOR_MVT_i64_ri_Predicate_i64immSExt32(RetVT, Op0, Op0IsKill, imm1);
4822  default: return 0;
4823  }
4824}
4825
4826// FastEmit functions for X86ISD::CMP.
4827
4828unsigned FastEmit_X86ISD_CMP_MVT_i64_ri_Predicate_i64immSExt32(MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
4829  if (RetVT.SimpleTy != MVT::i32)
4830    return 0;
4831  return FastEmitInst_ri(X86::CMP64ri32, X86::GR64RegisterClass, Op0, Op0IsKill, imm1);
4832}
4833
4834unsigned FastEmit_X86ISD_CMP_ri_Predicate_i64immSExt32(MVT VT, MVT RetVT, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
4835  switch (VT.SimpleTy) {
4836  case MVT::i64: return FastEmit_X86ISD_CMP_MVT_i64_ri_Predicate_i64immSExt32(RetVT, Op0, Op0IsKill, imm1);
4837  default: return 0;
4838  }
4839}
4840
4841// Top-level FastEmit dispatcher for the ri form, specialized on Predicate_i64immSExt32.
4842
4843unsigned FastEmit_ri_Predicate_i64immSExt32(MVT VT, MVT RetVT, unsigned Opcode, unsigned Op0, bool Op0IsKill, uint64_t imm1) {
4844  switch (Opcode) {
4845  case ISD::ADD: return FastEmit_ISD_ADD_ri_Predicate_i64immSExt32(VT, RetVT, Op0, Op0IsKill, imm1);
4846  case ISD::AND: return FastEmit_ISD_AND_ri_Predicate_i64immSExt32(VT, RetVT, Op0, Op0IsKill, imm1);
4847  case ISD::MUL: return FastEmit_ISD_MUL_ri_Predicate_i64immSExt32(VT, RetVT, Op0, Op0IsKill, imm1);
4848  case ISD::OR: return FastEmit_ISD_OR_ri_Predicate_i64immSExt32(VT, RetVT, Op0, Op0IsKill, imm1);
4849  case ISD::SUB: return FastEmit_ISD_SUB_ri_Predicate_i64immSExt32(VT, RetVT, Op0, Op0IsKill, imm1);
4850  case ISD::XOR: return FastEmit_ISD_XOR_ri_Predicate_i64immSExt32(VT, RetVT, Op0, Op0IsKill, imm1);
4851  case X86ISD::CMP: return FastEmit_X86ISD_CMP_ri_Predicate_i64immSExt32(VT, RetVT, Op0, Op0IsKill, imm1);
4852  default: return 0;
4853  }
4854}
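// This predicated table covers the 64-bit ALU forms that take a sign-extended
// 32-bit immediate (ADD/AND/IMUL/OR/SUB/XOR plus X86ISD::CMP). The i64 shifts
// and rotates take an 8-bit count instead and are handled by the unpredicated
// entries earlier in this file.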
4855
4856// FastEmit functions for ISD::Constant.
4857
4858unsigned FastEmit_ISD_Constant_MVT_i8_i(MVT RetVT, uint64_t imm0) {
4859  if (RetVT.SimpleTy != MVT::i8)
4860    return 0;
4861  return FastEmitInst_i(X86::MOV8ri, X86::GR8RegisterClass, imm0);
4862}
4863
4864unsigned FastEmit_ISD_Constant_MVT_i16_i(MVT RetVT, uint64_t imm0) {
4865  if (RetVT.SimpleTy != MVT::i16)
4866    return 0;
4867  return FastEmitInst_i(X86::MOV16ri, X86::GR16RegisterClass, imm0);
4868}
4869
4870unsigned FastEmit_ISD_Constant_MVT_i32_i(MVT RetVT, uint64_t imm0) {
4871  if (RetVT.SimpleTy != MVT::i32)
4872    return 0;
4873  return FastEmitInst_i(X86::MOV32ri, X86::GR32RegisterClass, imm0);
4874}
4875
4876unsigned FastEmit_ISD_Constant_MVT_i64_i(MVT RetVT, uint64_t imm0) {
4877  if (RetVT.SimpleTy != MVT::i64)
4878    return 0;
4879  return FastEmitInst_i(X86::MOV64ri, X86::GR64RegisterClass, imm0);
4880}
4881
4882unsigned FastEmit_ISD_Constant_i(MVT VT, MVT RetVT, uint64_t imm0) {
4883  switch (VT.SimpleTy) {
4884  case MVT::i8: return FastEmit_ISD_Constant_MVT_i8_i(RetVT, imm0);
4885  case MVT::i16: return FastEmit_ISD_Constant_MVT_i16_i(RetVT, imm0);
4886  case MVT::i32: return FastEmit_ISD_Constant_MVT_i32_i(RetVT, imm0);
4887  case MVT::i64: return FastEmit_ISD_Constant_MVT_i64_i(RetVT, imm0);
4888  default: return 0;
4889  }
4890}
4891
4892// Top-level FastEmit dispatcher for the i (immediate only) form.
4893
4894unsigned FastEmit_i(MVT VT, MVT RetVT, unsigned Opcode, uint64_t imm0) {
4895  switch (Opcode) {
4896  case ISD::Constant: return FastEmit_ISD_Constant_i(VT, RetVT, imm0);
4897  default: return 0;
4898  }
4899}
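// Constant materialization simply picks MOV8ri/MOV16ri/MOV32ri/MOV64ri by
// type; MOV64ri carries a full 64-bit immediate. A hypothetical use that
// materializes the i32 constant 42 into a fresh virtual register:
//   unsigned C = FastEmit_i(MVT::i32, MVT::i32, ISD::Constant, 42);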
4900
4901