
Lines Matching refs:AVX

2 …known-unknown -mcpu=x86-64 -mattr=avx -enable-unsafe-fp-math < %s | FileCheck %s --check-prefix=AVX
15 ; AVX-LABEL: reassociate_adds1:
16 ; AVX: # BB#0:
17 ; AVX-NEXT: vaddss %xmm1, %xmm0, %xmm0
18 ; AVX-NEXT: vaddss %xmm3, %xmm2, %xmm1
19 ; AVX-NEXT: vaddss %xmm1, %xmm0, %xmm0
20 ; AVX-NEXT: retq
35 ; AVX-LABEL: reassociate_adds2:
36 ; AVX: # BB#0:
37 ; AVX-NEXT: vaddss %xmm1, %xmm0, %xmm0
38 ; AVX-NEXT: vaddss %xmm3, %xmm2, %xmm1
39 ; AVX-NEXT: vaddss %xmm1, %xmm0, %xmm0
40 ; AVX-NEXT: retq
55 ; AVX-LABEL: reassociate_adds3:
56 ; AVX: # BB#0:
57 ; AVX-NEXT: vaddss %xmm1, %xmm0, %xmm0
58 ; AVX-NEXT: vaddss %xmm3, %xmm2, %xmm1
59 ; AVX-NEXT: vaddss %xmm1, %xmm0, %xmm0
60 ; AVX-NEXT: retq
75 ; AVX-LABEL: reassociate_adds4:
76 ; AVX: # BB#0:
77 ; AVX-NEXT: vaddss %xmm1, %xmm0, %xmm0
78 ; AVX-NEXT: vaddss %xmm3, %xmm2, %xmm1
79 ; AVX-NEXT: vaddss %xmm1, %xmm0, %xmm0
80 ; AVX-NEXT: retq
102 ; AVX-LABEL: reassociate_adds5:
103 ; AVX: # BB#0:
104 ; AVX-NEXT: vaddss %xmm1, %xmm0, %xmm0
105 ; AVX-NEXT: vaddss %xmm3, %xmm2, %xmm1
106 ; AVX-NEXT: vaddss %xmm1, %xmm0, %xmm0
107 ; AVX-NEXT: vaddss %xmm5, %xmm4, %xmm1
108 ; AVX-NEXT: vaddss %xmm6, %xmm1, %xmm1
109 ; AVX-NEXT: vaddss %xmm1, %xmm0, %xmm0
110 ; AVX-NEXT: vaddss %xmm7, %xmm0, %xmm0
111 ; AVX-NEXT: retq
135 ; AVX-LABEL: reassociate_adds6:
136 ; AVX: # BB#0:
137 ; AVX-NEXT: vdivss %xmm1, %xmm0, %xmm0
138 ; AVX-NEXT: vaddss %xmm3, %xmm2, %xmm1
139 ; AVX-NEXT: vaddss %xmm1, %xmm0, %xmm0
140 ; AVX-NEXT: retq
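; The filtered listing omits the IR bodies behind these checks. As an illustration only (function signature and value names are assumed, not taken from the listing), the reassociate_adds6 checks are consistent with a serial chain where both fadds hang off a slow fdiv, which reassociation can rebalance into (x0/x1) + (x2+x3) so the adds no longer form one long dependency chain:
define float @reassociate_adds6(float %x0, float %x1, float %x2, float %x3) {
  %t0 = fdiv float %x0, %x1      ; long-latency op at the head of the chain
  %t1 = fadd float %x2, %t0      ; depends on the fdiv result
  %t2 = fadd float %x3, %t1      ; depends on the previous fadd
  ret float %t2
}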
147 ; Verify that SSE and AVX scalar single-precision multiplies are reassociated.
157 ; AVX-LABEL: reassociate_muls1:
158 ; AVX: # BB#0:
159 ; AVX-NEXT: vdivss %xmm1, %xmm0, %xmm0
160 ; AVX-NEXT: vmulss %xmm3, %xmm2, %xmm1
161 ; AVX-NEXT: vmulss %xmm1, %xmm0, %xmm0
162 ; AVX-NEXT: retq
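; A hedged sketch of IR matching the reassociate_muls1 checks (names assumed): a serial fmul chain fed by an fdiv, split by reassociation into the independent x2*x3 multiply seen above.
define float @reassociate_muls1(float %x0, float %x1, float %x2, float %x3) {
  %t0 = fdiv float %x0, %x1
  %t1 = fmul float %x2, %t0
  %t2 = fmul float %x3, %t1      ; (x2 * (x0/x1)) * x3 before reassociation
  ret float %t2
}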
169 ; Verify that SSE and AVX scalar double-precision adds are reassociated.
179 ; AVX-LABEL: reassociate_adds_double:
180 ; AVX: # BB#0:
181 ; AVX-NEXT: vdivsd %xmm1, %xmm0, %xmm0
182 ; AVX-NEXT: vaddsd %xmm3, %xmm2, %xmm1
183 ; AVX-NEXT: vaddsd %xmm1, %xmm0, %xmm0
184 ; AVX-NEXT: retq
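; The double-precision add checks fit the same shape at f64 (illustrative sketch, names assumed):
define double @reassociate_adds_double(double %x0, double %x1, double %x2, double %x3) {
  %t0 = fdiv double %x0, %x1
  %t1 = fadd double %x2, %t0
  %t2 = fadd double %x3, %t1
  ret double %t2
}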
191 ; Verify that SSE and AVX scalar double-precision multiplies are reassociated.
201 ; AVX-LABEL: reassociate_muls_double:
202 ; AVX: # BB#0:
203 ; AVX-NEXT: vdivsd %xmm1, %xmm0, %xmm0
204 ; AVX-NEXT: vmulsd %xmm3, %xmm2, %xmm1
205 ; AVX-NEXT: vmulsd %xmm1, %xmm0, %xmm0
206 ; AVX-NEXT: retq
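; Likewise for the double-precision multiplies (illustrative IR, not taken from the listing):
define double @reassociate_muls_double(double %x0, double %x1, double %x2, double %x3) {
  %t0 = fdiv double %x0, %x1
  %t1 = fmul double %x2, %t0
  %t2 = fmul double %x3, %t1
  ret double %t2
}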
213 ; Verify that SSE and AVX 128-bit vector single-precision adds are reassociated.
223 ; AVX-LABEL: reassociate_adds_v4f32:
224 ; AVX: # BB#0:
225 ; AVX-NEXT: vmulps %xmm1, %xmm0, %xmm0
226 ; AVX-NEXT: vaddps %xmm3, %xmm2, %xmm1
227 ; AVX-NEXT: vaddps %xmm1, %xmm0, %xmm0
228 ; AVX-NEXT: retq
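; For the 128-bit vector add checks, a plausible (assumed) IR body is a vector fmul feeding two chained vector fadds, reassociated into vmulps plus an independent vaddps:
define <4 x float> @reassociate_adds_v4f32(<4 x float> %x0, <4 x float> %x1, <4 x float> %x2, <4 x float> %x3) {
  %t0 = fmul <4 x float> %x0, %x1
  %t1 = fadd <4 x float> %x2, %t0
  %t2 = fadd <4 x float> %x3, %t1
  ret <4 x float> %t2
}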
235 ; Verify that SSE and AVX 128-bit vector double-precision adds are reassociated.
245 ; AVX-LABEL: reassociate_adds_v2f64:
246 ; AVX: # BB#0:
247 ; AVX-NEXT: vmulpd %xmm1, %xmm0, %xmm0
248 ; AVX-NEXT: vaddpd %xmm3, %xmm2, %xmm1
249 ; AVX-NEXT: vaddpd %xmm1, %xmm0, %xmm0
250 ; AVX-NEXT: retq
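; Same shape at <2 x double> for the vmulpd/vaddpd checks (sketch, names assumed):
define <2 x double> @reassociate_adds_v2f64(<2 x double> %x0, <2 x double> %x1, <2 x double> %x2, <2 x double> %x3) {
  %t0 = fmul <2 x double> %x0, %x1
  %t1 = fadd <2 x double> %x2, %t0
  %t2 = fadd <2 x double> %x3, %t1
  ret <2 x double> %t2
}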
257 ; Verify that SSE and AVX 128-bit vector single-precision multiplies are reassociated.
267 ; AVX-LABEL: reassociate_muls_v4f32:
268 ; AVX: # BB#0:
269 ; AVX-NEXT: vaddps %xmm1, %xmm0, %xmm0
270 ; AVX-NEXT: vmulps %xmm3, %xmm2, %xmm1
271 ; AVX-NEXT: vmulps %xmm1, %xmm0, %xmm0
272 ; AVX-NEXT: retq
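; The 128-bit multiply checks swap the roles: an fadd feeds two chained fmuls (illustrative IR only):
define <4 x float> @reassociate_muls_v4f32(<4 x float> %x0, <4 x float> %x1, <4 x float> %x2, <4 x float> %x3) {
  %t0 = fadd <4 x float> %x0, %x1
  %t1 = fmul <4 x float> %x2, %t0
  %t2 = fmul <4 x float> %x3, %t1
  ret <4 x float> %t2
}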
279 ; Verify that SSE and AVX 128-bit vector double-precision multiplies are reassociated.
289 ; AVX-LABEL: reassociate_muls_v2f64:
290 ; AVX: # BB#0:
291 ; AVX-NEXT: vaddpd %xmm1, %xmm0, %xmm0
292 ; AVX-NEXT: vmulpd %xmm3, %xmm2, %xmm1
293 ; AVX-NEXT: vmulpd %xmm1, %xmm0, %xmm0
294 ; AVX-NEXT: retq
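; And the <2 x double> multiply variant (assumed sketch):
define <2 x double> @reassociate_muls_v2f64(<2 x double> %x0, <2 x double> %x1, <2 x double> %x2, <2 x double> %x3) {
  %t0 = fadd <2 x double> %x0, %x1
  %t1 = fmul <2 x double> %x2, %t0
  %t2 = fmul <2 x double> %x3, %t1
  ret <2 x double> %t2
}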
301 ; Verify that AVX 256-bit vector single-precision adds are reassociated.
304 ; AVX-LABEL: reassociate_adds_v8f32:
305 ; AVX: # BB#0:
306 ; AVX-NEXT: vmulps %ymm1, %ymm0, %ymm0
307 ; AVX-NEXT: vaddps %ymm3, %ymm2, %ymm1
308 ; AVX-NEXT: vaddps %ymm1, %ymm0, %ymm0
309 ; AVX-NEXT: retq
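; The 256-bit checks (ymm registers) imply the same chain at <8 x float>; a hedged sketch:
define <8 x float> @reassociate_adds_v8f32(<8 x float> %x0, <8 x float> %x1, <8 x float> %x2, <8 x float> %x3) {
  %t0 = fmul <8 x float> %x0, %x1
  %t1 = fadd <8 x float> %x2, %t0
  %t2 = fadd <8 x float> %x3, %t1
  ret <8 x float> %t2
}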
316 ; Verify that AVX 256-bit vector double-precision adds are reassociated.
319 ; AVX-LABEL: reassociate_adds_v4f64:
320 ; AVX: # BB#0:
321 ; AVX-NEXT: vmulpd %ymm1, %ymm0, %ymm0
322 ; AVX-NEXT: vaddpd %ymm3, %ymm2, %ymm1
323 ; AVX-NEXT: vaddpd %ymm1, %ymm0, %ymm0
324 ; AVX-NEXT: retq
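; And at <4 x double> (illustrative only):
define <4 x double> @reassociate_adds_v4f64(<4 x double> %x0, <4 x double> %x1, <4 x double> %x2, <4 x double> %x3) {
  %t0 = fmul <4 x double> %x0, %x1
  %t1 = fadd <4 x double> %x2, %t0
  %t2 = fadd <4 x double> %x3, %t1
  ret <4 x double> %t2
}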
331 ; Verify that AVX 256-bit vector single-precision multiplies are reassociated.
334 ; AVX-LABEL: reassociate_muls_v8f32:
335 ; AVX: # BB#0:
336 ; AVX-NEXT: vaddps %ymm1, %ymm0, %ymm0
337 ; AVX-NEXT: vmulps %ymm3, %ymm2, %ymm1
338 ; AVX-NEXT: vmulps %ymm1, %ymm0, %ymm0
339 ; AVX-NEXT: retq
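; 256-bit single-precision multiplies, sketched with assumed names:
define <8 x float> @reassociate_muls_v8f32(<8 x float> %x0, <8 x float> %x1, <8 x float> %x2, <8 x float> %x3) {
  %t0 = fadd <8 x float> %x0, %x1
  %t1 = fmul <8 x float> %x2, %t0
  %t2 = fmul <8 x float> %x3, %t1
  ret <8 x float> %t2
}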
346 ; Verify that AVX 256-bit vector double-precision multiplies are reassociated.
349 ; AVX-LABEL: reassociate_muls_v4f64:
350 ; AVX: # BB#0:
351 ; AVX-NEXT: vaddpd %ymm1, %ymm0, %ymm0
352 ; AVX-NEXT: vmulpd %ymm3, %ymm2, %ymm1
353 ; AVX-NEXT: vmulpd %ymm1, %ymm0, %ymm0
354 ; AVX-NEXT: retq
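; 256-bit double-precision multiplies, same pattern (assumed sketch):
define <4 x double> @reassociate_muls_v4f64(<4 x double> %x0, <4 x double> %x1, <4 x double> %x2, <4 x double> %x3) {
  %t0 = fadd <4 x double> %x0, %x1
  %t1 = fmul <4 x double> %x2, %t0
  %t2 = fmul <4 x double> %x3, %t1
  ret <4 x double> %t2
}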
361 ; Verify that SSE and AVX scalar single-precision minimum ops are reassociated.
371 ; AVX-LABEL: reassociate_mins_single:
372 ; AVX: # BB#0:
373 ; AVX-NEXT: vdivss %xmm1, %xmm0, %xmm0
374 ; AVX-NEXT: vminss %xmm3, %xmm2, %xmm1
375 ; AVX-NEXT: vminss %xmm1, %xmm0, %xmm0
376 ; AVX-NEXT: retq
385 ; Verify that SSE and AVX scalar single-precision maximum ops are reassociated.
395 ; AVX-LABEL: reassociate_maxs_single:
396 ; AVX: # BB#0:
397 ; AVX-NEXT: vdivss %xmm1, %xmm0, %xmm0
398 ; AVX-NEXT: vmaxss %xmm3, %xmm2, %xmm1
399 ; AVX-NEXT: vmaxss %xmm1, %xmm0, %xmm0
400 ; AVX-NEXT: retq
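; One way to express the scalar min/max chains without intrinsics is an fcmp+select pattern that lowers to vminss/vmaxss. Sketch for the single-precision minimum (names and predicate choice are assumptions); the maximum variant would use fcmp ogt instead of olt:
define float @reassociate_mins_single(float %x0, float %x1, float %x2, float %x3) {
  %t0 = fdiv float %x0, %x1
  %cmp1 = fcmp olt float %x2, %t0
  %sel1 = select i1 %cmp1, float %x2, float %t0
  %cmp2 = fcmp olt float %x3, %sel1
  %sel2 = select i1 %cmp2, float %x3, float %sel1
  ret float %sel2
}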
409 ; Verify that SSE and AVX scalar double-precision minimum ops are reassociated.
419 ; AVX-LABEL: reassociate_mins_double:
420 ; AVX: # BB#0:
421 ; AVX-NEXT: vdivsd %xmm1, %xmm0, %xmm0
422 ; AVX-NEXT: vminsd %xmm3, %xmm2, %xmm1
423 ; AVX-NEXT: vminsd %xmm1, %xmm0, %xmm0
424 ; AVX-NEXT: retq
433 ; Verify that SSE and AVX scalar double-precision maximum ops are reassociated.
443 ; AVX-LABEL: reassociate_maxs_double:
444 ; AVX: # BB#0:
445 ; AVX-NEXT: vdivsd %xmm1, %xmm0, %xmm0
446 ; AVX-NEXT: vmaxsd %xmm3, %xmm2, %xmm1
447 ; AVX-NEXT: vmaxsd %xmm1, %xmm0, %xmm0
448 ; AVX-NEXT: retq
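; The double-precision min/max checks fit the same fcmp+select shape at f64 (illustrative sketch for the minimum; ogt would give the maximum):
define double @reassociate_mins_double(double %x0, double %x1, double %x2, double %x3) {
  %t0 = fdiv double %x0, %x1
  %cmp1 = fcmp olt double %x2, %t0
  %sel1 = select i1 %cmp1, double %x2, double %t0
  %cmp2 = fcmp olt double %x3, %sel1
  %sel2 = select i1 %cmp2, double %x3, double %sel1
  ret double %sel2
}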
457 ; Verify that SSE and AVX 128-bit vector single-precision minimum ops are reassociated.
467 ; AVX-LABEL: reassociate_mins_v4f32:
468 ; AVX: # BB#0:
469 ; AVX-NEXT: vaddps %xmm1, %xmm0, %xmm0
470 ; AVX-NEXT: vminps %xmm3, %xmm2, %xmm1
471 ; AVX-NEXT: vminps %xmm1, %xmm0, %xmm0
472 ; AVX-NEXT: retq
481 ; Verify that SSE and AVX 128-bit vector single-precision maximum ops are reassociated.
491 ; AVX-LABEL: reassociate_maxs_v4f32:
492 ; AVX: # BB#0:
493 ; AVX-NEXT: vaddps %xmm1, %xmm0, %xmm0
494 ; AVX-NEXT: vmaxps %xmm3, %xmm2, %xmm1
495 ; AVX-NEXT: vmaxps %xmm1, %xmm0, %xmm0
496 ; AVX-NEXT: retq
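; For the 128-bit vector min/max checks, a vector fcmp with a vector select lowers to vminps/vmaxps; sketch for the <4 x float> minimum (assumed, the maximum flips the predicate to ogt):
define <4 x float> @reassociate_mins_v4f32(<4 x float> %x0, <4 x float> %x1, <4 x float> %x2, <4 x float> %x3) {
  %t0 = fadd <4 x float> %x0, %x1
  %cmp1 = fcmp olt <4 x float> %x2, %t0
  %sel1 = select <4 x i1> %cmp1, <4 x float> %x2, <4 x float> %t0
  %cmp2 = fcmp olt <4 x float> %x3, %sel1
  %sel2 = select <4 x i1> %cmp2, <4 x float> %x3, <4 x float> %sel1
  ret <4 x float> %sel2
}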
505 ; Verify that SSE and AVX 128-bit vector double-precision minimum ops are reassociated.
515 ; AVX-LABEL: reassociate_mins_v2f64:
516 ; AVX: # BB#0:
517 ; AVX-NEXT: vaddpd %xmm1, %xmm0, %xmm0
518 ; AVX-NEXT: vminpd %xmm3, %xmm2, %xmm1
519 ; AVX-NEXT: vminpd %xmm1, %xmm0, %xmm0
520 ; AVX-NEXT: retq
529 ; Verify that SSE and AVX 128-bit vector double-precision maximum ops are reassociated.
539 ; AVX-LABEL: reassociate_maxs_v2f64:
540 ; AVX: # BB#0:
541 ; AVX-NEXT: vaddpd %xmm1, %xmm0, %xmm0
542 ; AVX-NEXT: vmaxpd %xmm3, %xmm2, %xmm1
543 ; AVX-NEXT: vmaxpd %xmm1, %xmm0, %xmm0
544 ; AVX-NEXT: retq
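; Same idea at <2 x double> for the vminpd/vmaxpd checks (illustrative only):
define <2 x double> @reassociate_mins_v2f64(<2 x double> %x0, <2 x double> %x1, <2 x double> %x2, <2 x double> %x3) {
  %t0 = fadd <2 x double> %x0, %x1
  %cmp1 = fcmp olt <2 x double> %x2, %t0
  %sel1 = select <2 x i1> %cmp1, <2 x double> %x2, <2 x double> %t0
  %cmp2 = fcmp olt <2 x double> %x3, %sel1
  %sel2 = select <2 x i1> %cmp2, <2 x double> %x3, <2 x double> %sel1
  ret <2 x double> %sel2
}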
553 ; Verify that AVX 256-bit vector single-precision minimum ops are reassociated.
556 ; AVX-LABEL: reassociate_mins_v8f32:
557 ; AVX: # BB#0:
558 ; AVX-NEXT: vaddps %ymm1, %ymm0, %ymm0
559 ; AVX-NEXT: vminps %ymm3, %ymm2, %ymm1
560 ; AVX-NEXT: vminps %ymm1, %ymm0, %ymm0
561 ; AVX-NEXT: retq
570 ; Verify that AVX 256-bit vector single-precision maximum ops are reassociated.
573 ; AVX-LABEL: reassociate_maxs_v8f32:
574 ; AVX: # BB#0:
575 ; AVX-NEXT: vaddps %ymm1, %ymm0, %ymm0
576 ; AVX-NEXT: vmaxps %ymm3, %ymm2, %ymm1
577 ; AVX-NEXT: vmaxps %ymm1, %ymm0, %ymm0
578 ; AVX-NEXT: retq
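; The 256-bit single-precision min/max checks extend the pattern to <8 x float> (assumed sketch for the minimum):
define <8 x float> @reassociate_mins_v8f32(<8 x float> %x0, <8 x float> %x1, <8 x float> %x2, <8 x float> %x3) {
  %t0 = fadd <8 x float> %x0, %x1
  %cmp1 = fcmp olt <8 x float> %x2, %t0
  %sel1 = select <8 x i1> %cmp1, <8 x float> %x2, <8 x float> %t0
  %cmp2 = fcmp olt <8 x float> %x3, %sel1
  %sel2 = select <8 x i1> %cmp2, <8 x float> %x3, <8 x float> %sel1
  ret <8 x float> %sel2
}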
587 ; Verify that AVX 256-bit vector double-precision minimum ops are reassociated.
590 ; AVX-LABEL: reassociate_mins_v4f64:
591 ; AVX: # BB#0:
592 ; AVX-NEXT: vaddpd %ymm1, %ymm0, %ymm0
593 ; AVX-NEXT: vminpd %ymm3, %ymm2, %ymm1
594 ; AVX-NEXT: vminpd %ymm1, %ymm0, %ymm0
595 ; AVX-NEXT: retq
604 ; Verify that AVX 256-bit vector double-precision maximum ops are reassociated.
607 ; AVX-LABEL: reassociate_maxs_v4f64:
608 ; AVX: # BB#0:
609 ; AVX-NEXT: vaddpd %ymm1, %ymm0, %ymm0
610 ; AVX-NEXT: vmaxpd %ymm3, %ymm2, %ymm1
611 ; AVX-NEXT: vmaxpd %ymm1, %ymm0, %ymm0
612 ; AVX-NEXT: retq
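; And at <4 x double> for the ymm vminpd/vmaxpd checks (illustrative only):
define <4 x double> @reassociate_mins_v4f64(<4 x double> %x0, <4 x double> %x1, <4 x double> %x2, <4 x double> %x3) {
  %t0 = fadd <4 x double> %x0, %x1
  %cmp1 = fcmp olt <4 x double> %x2, %t0
  %sel1 = select <4 x i1> %cmp1, <4 x double> %x2, <4 x double> %t0
  %cmp2 = fcmp olt <4 x double> %x3, %sel1
  %sel2 = select <4 x i1> %cmp2, <4 x double> %x3, <4 x double> %sel1
  ret <4 x double> %sel2
}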
627 ; AVX-LABEL: reassociate_adds_from_calls:
628 ; AVX: callq bar
629 ; AVX-NEXT: vmovsd %xmm0, 16(%rsp)
630 ; AVX-NEXT: callq bar
631 ; AVX-NEXT: vmovsd %xmm0, 8(%rsp)
632 ; AVX-NEXT: callq bar
633 ; AVX-NEXT: vmovsd %xmm0, (%rsp)
634 ; AVX-NEXT: callq bar
635 ; AVX-NEXT: vmovsd 8(%rsp), %xmm1
636 ; AVX: vaddsd 16(%rsp), %xmm1, %xmm1
637 ; AVX-NEXT: vaddsd (%rsp), %xmm0, %xmm0
638 ; AVX-NEXT: vaddsd %xmm0, %xmm1, %xmm0
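; The spill/reload checks above are consistent with four independent call results summed in a serial chain; reassociation lets the reloaded values be added in parallel with the live result. A hedged sketch (declaration and names assumed):
declare double @bar()

define double @reassociate_adds_from_calls() {
  %x0 = call double @bar()
  %x1 = call double @bar()
  %x2 = call double @bar()
  %x3 = call double @bar()
  %t0 = fadd double %x0, %x1
  %t1 = fadd double %t0, %x2     ; serial chain in the source
  %t2 = fadd double %t1, %x3
  ret double %t2
}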
651 ; AVX-LABEL: already_reassociated:
652 ; AVX: callq bar
653 ; AVX-NEXT: vmovsd %xmm0, 16(%rsp)
654 ; AVX-NEXT: callq bar
655 ; AVX-NEXT: vmovsd %xmm0, 8(%rsp)
656 ; AVX-NEXT: callq bar
657 ; AVX-NEXT: vmovsd %xmm0, (%rsp)
658 ; AVX-NEXT: callq bar
659 ; AVX-NEXT: vmovsd 8(%rsp), %xmm1
660 ; AVX: vaddsd 16(%rsp), %xmm1, %xmm1
661 ; AVX-NEXT: vaddsd (%rsp), %xmm0, %xmm0
662 ; AVX-NEXT: vaddsd %xmm0, %xmm1, %xmm0
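; already_reassociated expects the same final code as the previous test, which is consistent with source IR whose adds are already balanced and need no rewriting. A sketch under that assumption, with @bar declared as in the sketch above:
define double @already_reassociated() {
  %x0 = call double @bar()
  %x1 = call double @bar()
  %x2 = call double @bar()
  %x3 = call double @bar()
  %t0 = fadd double %x0, %x1
  %t1 = fadd double %x2, %x3     ; the two pairs are independent
  %t2 = fadd double %t0, %t1
  ret double %t2
}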