Lines Matching refs:rA
33 SELBInst<(outs R64C:$rT), (ins R64C:$rA, R64C:$rB, VECREG:$rC),
45 Pat<(select (i32 (cond R64C:$rA, R64C:$rB)), R64C:$rTrue, R64C:$rFalse),
50 Pat<(cond R64C:$rA, R64C:$rB),
57 CodeFrag<(CGTIv4i32 (GBv4i32 (CEQv4i32 (COPY_TO_REGCLASS R64C:$rA, VECREG),
62 CodeFrag<(CEQIv4i32 (GBv4i32 (CEQv4i32 VECREG:$rA, VECREG:$rB)), 0xf)>;
82 def : Pat<(seteq R64C:$rA, R64C:$rB), I64EQr64.Fragment>;
83 def : Pat<(seteq (v2i64 VECREG:$rA), (v2i64 VECREG:$rB)), I64EQv2i64.Fragment>;
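
The CEQv4i32 / GBv4i32 fragments just above build 64-bit equality out of 32-bit word compares: compare each word, gather the per-word result bits into a small mask, then test the mask against an immediate. These matched lines appear to come from the CellSPU backend's 64-bit patterns. Below is a rough scalar C sketch of the arithmetic being emulated; the function name and the scalar form are mine, not the backend's.

    #include <stdint.h>

    /* Scalar sketch of the equality idiom above: compare the two 32-bit
     * halves, gather the per-word results into a bit mask (the role of
     * GBv4i32 for the whole quadword), and test the mask. */
    static int eq64_via_words(uint64_t a, uint64_t b)
    {
        unsigned hi_eq = ((uint32_t)(a >> 32) == (uint32_t)(b >> 32));
        unsigned lo_eq = ((uint32_t)a == (uint32_t)b);

        unsigned mask = (hi_eq << 1) | lo_eq;   /* one bit per 32-bit word */

        return mask == 0x3;                     /* both words equal        */
    }
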
94 CodeFrag<(CLGTv4i32 (COPY_TO_REGCLASS R64C:$rA, VECREG),
98 CodeFrag<(CEQv4i32 (COPY_TO_REGCLASS R64C:$rA, VECREG),
107 CodeFrag<(CLGTv4i32 VECREG:$rA, VECREG:$rB)>;
110 CodeFrag<(CEQv4i32 VECREG:$rA, VECREG:$rB)>;
131 def : Pat<(setugt R64C:$rA, R64C:$rB), I64LGTr64.Fragment>;
132 //def : Pat<(setugt (v2i64 VECREG:$rA), (v2i64 VECREG:$rB)),
165 def : Pat<(setuge R64C:$rA, R64C:$rB), I64LGEr64.Fragment>;
166 def : Pat<(v2i64 (setuge (v2i64 VECREG:$rA), (v2i64 VECREG:$rB))),
179 CodeFrag<(CGTv4i32 (COPY_TO_REGCLASS R64C:$rA, VECREG),
183 CodeFrag<(CEQv4i32 (COPY_TO_REGCLASS R64C:$rA, VECREG),
192 CodeFrag<(CGTv4i32 VECREG:$rA, VECREG:$rB)>;
195 CodeFrag<(CEQv4i32 VECREG:$rA, VECREG:$rB)>;
216 def : Pat<(setgt R64C:$rA, R64C:$rB), I64GTr64.Fragment>;
217 //def : Pat<(setgt (v2i64 VECREG:$rA), (v2i64 VECREG:$rB)),
248 def : Pat<(setge R64C:$rA, R64C:$rB), I64GEr64.Fragment>;
249 def : Pat<(v2i64 (setge (v2i64 VECREG:$rA), (v2i64 VECREG:$rB))),
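
The CLGTv4i32/CEQv4i32 and CGTv4i32/CEQv4i32 fragments above combine 32-bit word compares into the 64-bit orderings: greater on the high word, or equal high words and an unsigned greater on the low word, with only the high-word compare differing between the unsigned (CLGT) and signed (CGT) cases. A hedged scalar C sketch follows; the function names are mine, not the backend's.

    #include <stdint.h>

    /* setugt: unsigned 64-bit greater-than from 32-bit word compares. */
    static int u64_gt_via_words(uint64_t a, uint64_t b)
    {
        uint32_t ahi = (uint32_t)(a >> 32), bhi = (uint32_t)(b >> 32);
        uint32_t alo = (uint32_t)a,         blo = (uint32_t)b;
        return (ahi > bhi) || (ahi == bhi && alo > blo);
    }

    /* setgt: the signed variant only changes the high-word compare;
     * the low word is still compared as unsigned. */
    static int s64_gt_via_words(int64_t a, int64_t b)
    {
        int32_t  ahi = (int32_t)((uint64_t)a >> 32);
        int32_t  bhi = (int32_t)((uint64_t)b >> 32);
        uint32_t alo = (uint32_t)a, blo = (uint32_t)b;
        return (ahi > bhi) || (ahi == bhi && alo > blo);
    }

    /* setuge / setge fall out as "greater or equal", i.e. gt || eq. */
    static int u64_ge_via_words(uint64_t a, uint64_t b)
    {
        return u64_gt_via_words(a, b) || a == b;
    }
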
269 def : Pat<(SPUadd64 R64C:$rA, R64C:$rB, (v4i32 VECREG:$rCGmask)),
270 (COPY_TO_REGCLASS v2i64_add<(COPY_TO_REGCLASS R64C:$rA, VECREG),
274 def : Pat<(SPUadd64 (v2i64 VECREG:$rA), (v2i64 VECREG:$rB),
276 v2i64_add<(v2i64 VECREG:$rA),
289 def : Pat<(SPUsub64 R64C:$rA, R64C:$rB, (v4i32 VECREG:$rCGmask)),
291 v2i64_sub<(COPY_TO_REGCLASS R64C:$rA, VECREG),
293 v2i64_sub_bg<(COPY_TO_REGCLASS R64C:$rA, VECREG),
297 def : Pat<(SPUsub64 (v2i64 VECREG:$rA), (v2i64 VECREG:$rB),
299 v2i64_sub<(v2i64 VECREG:$rA),
301 v2i64_sub_bg<(v2i64 VECREG:$rA),
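
The SPUadd64 / SPUsub64 patterns above stitch a 64-bit add or subtract together from 32-bit word operations with an explicit carry/borrow step; the $rCGmask operand appears to be the shuffle mask that moves the per-word carry (or borrow, in the v2i64_sub_bg fragment) into the high-word slot before an add- or subtract-extended, though that detail is not visible in the matched lines. A scalar C sketch of the arithmetic being emulated, with names of my own choosing:

    #include <stdint.h>

    static uint64_t add64_via_words(uint64_t a, uint64_t b)
    {
        uint32_t alo = (uint32_t)a, ahi = (uint32_t)(a >> 32);
        uint32_t blo = (uint32_t)b, bhi = (uint32_t)(b >> 32);

        uint32_t lo    = alo + blo;
        uint32_t carry = (lo < alo) ? 1u : 0u;   /* carry out of the low word     */
        uint32_t hi    = ahi + bhi + carry;      /* add-extended on the high word */

        return ((uint64_t)hi << 32) | lo;
    }

    static uint64_t sub64_via_words(uint64_t a, uint64_t b)
    {
        uint32_t alo = (uint32_t)a, ahi = (uint32_t)(a >> 32);
        uint32_t blo = (uint32_t)b, bhi = (uint32_t)(b >> 32);

        uint32_t lo     = alo - blo;
        uint32_t borrow = (alo < blo) ? 1u : 0u; /* borrow out of the low word         */
        uint32_t hi     = ahi - bhi - borrow;    /* subtract-extended on the high word */

        return ((uint64_t)hi << 32) | lo;
    }
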
313 class v2i64_mul_ahi64<dag rA> :
314 CodeFrag<(SELBv4i32 rA, (ILv4i32 0), (FSMBIv4i32 0x0f0f))>;
325 class v2i64_mul_ashlq2<dag rA>:
326 CodeFrag<(SHLQBYIv4i32 rA, 0x2)>;
328 class v2i64_mul_ashlq4<dag rA>:
329 CodeFrag<(SHLQBYIv4i32 rA, 0x4)>;
337 class v2i64_highprod<dag rA, dag rB>:
341 v2i64_mul_ahi64<rA>.Fragment),
342 (MPYHv4i32 v2i64_mul_ahi64<rA>.Fragment, // a0 x b3
346 v2i64_mul_ashlq4<rA>.Fragment),
348 (MPYHv4i32 v2i64_mul_ashlq4<rA>.Fragment,
351 (MPYUv4i32 v2i64_mul_ashlq4<rA>.Fragment,
354 (MPYHv4i32 v2i64_mul_ashlq2<rA>.Fragment,
356 (MPYUv4i32 v2i64_mul_ashlq2<rA>.Fragment,
359 class v2i64_mul_a3_b3<dag rA, dag rB>:
360 CodeFrag<(MPYUv4i32 v2i64_mul_alo64<rA>.Fragment,
363 class v2i64_mul_a2_b3<dag rA, dag rB>:
365 (MPYHHUv4i32 v2i64_mul_alo64<rA>.Fragment,
370 class v2i64_mul_a3_b2<dag rA, dag rB>:
373 v2i64_mul_ashlq2<rA>.Fragment), 0x2),
377 class v2i64_lowsum<dag rA, dag rB, dag rCGmask>:
378 v2i64_add<v2i64_add<v2i64_mul_a3_b3<rA, rB>.Fragment,
379 v2i64_mul_a2_b3<rA, rB>.Fragment, rCGmask>.Fragment,
380 v2i64_mul_a3_b2<rA, rB>.Fragment, rCGmask>;
382 class v2i64_mul<dag rA, dag rB, dag rCGmask>:
383 v2i64_add<v2i64_lowsum<rA, rB, rCGmask>.Fragment,
384 (SELBv4i32 v2i64_highprod<rA, rB>.Fragment,
389 def : Pat<(SPUmul64 R64C:$rA, R64C:$rB, (v4i32 VECREG:$rCGmask)),
390 (COPY_TO_REGCLASS v2i64_mul<(COPY_TO_REGCLASS R64C:$rA, VECREG),
394 def : Pat<(SPUmul64 (v2i64 VECREG:$rA), (v2i64 VECREG:$rB),
396 v2i64_mul<(v2i64 VECREG:$rA), (v2i64 VECREG:$rB),
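
The v2i64_mul machinery above (v2i64_highprod, v2i64_lowsum, the a0 x b3 style partial products) assembles the low 64 bits of a 64 x 64 multiply from smaller pieces: the MPYU/MPYH/MPYHHU forms appear to multiply 16-bit halves, with SHLQBYI shifts aligning the partial products before the v2i64_add sums. The scalar C sketch below shows the same decomposition using 32-bit halves; it is my simplification of the idea, not a rendering of the pattern itself.

    #include <stdint.h>

    /* Low 64 bits of a 64 x 64 product assembled from partial products. */
    static uint64_t mul64_lo_via_parts(uint64_t a, uint64_t b)
    {
        uint64_t alo = (uint32_t)a, ahi = a >> 32;
        uint64_t blo = (uint32_t)b, bhi = b >> 32;

        uint64_t low   = alo * blo;                      /* low x low, full 64 bits           */
        uint64_t cross = (alo * bhi + ahi * blo) << 32;  /* cross terms land in the high word */

        return low + cross;   /* hi x hi only affects bits >= 64 and is dropped */
    }
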
406 SELBInst<(outs R64FP:$rT), (ins R64FP:$rA, R64FP:$rB, R32C:$rC),
408 (select R32C:$rC, R64FP:$rB, R64FP:$rA))]>;
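
Both the i64 SELBInst near the top of this listing and the f64 one directly above describe a bitwise select: each result bit comes from $rB where the control mask bit is set and from $rA otherwise, with the i32 condition expanded to a full mask. A minimal sketch of that semantics, assuming the usual SELB definition rt = (rc & rb) | (~rc & ra); the helper name is mine.

    #include <stdint.h>

    static uint64_t selb64(uint64_t ra, uint64_t rb, uint64_t mask)
    {
        return (rb & mask) | (ra & ~mask);
    }
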