; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc < %s -mtriple=x86_64-windows-msvc -mattr=avx2 | FileCheck %s

; Make sure we don't try to copy elide these arguments since they will be
; passed indirectly.
;
; Under the Windows x64 calling convention the two <16 x double> arguments are
; not passed in registers: the CHECK lines show they arrive as pointers — the
; first four in %rcx/%rdx/%r8/%r9 and the remaining four in the caller's stack
; slots at 288..312(%rbp) — each dereferenced into a 256-bit %ymm load.  The
; stores into the allocas below must therefore materialize real copies from
; that pointed-to memory; eliding them would be incorrect.  The %xmm6/%xmm7
; spill/reload pairs are expected as well, since xmm6-xmm15 are callee-saved
; in this ABI.
define void @baz(<16 x double> %arg, <16 x double> %arg1) #0 {
; CHECK-LABEL: baz:
; CHECK:       # %bb.0: # %bb
; CHECK-NEXT:    pushq %rbp
; CHECK-NEXT:    subq $368, %rsp # imm = 0x170
; CHECK-NEXT:    leaq {{[0-9]+}}(%rsp), %rbp
; CHECK-NEXT:    vmovaps %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; CHECK-NEXT:    vmovaps %xmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; CHECK-NEXT:    andq $-128, %rsp
; CHECK-NEXT:    movq 288(%rbp), %rax
; CHECK-NEXT:    vmovaps (%rax), %ymm0
; CHECK-NEXT:    movq 296(%rbp), %rax
; CHECK-NEXT:    vmovaps (%rax), %ymm1
; CHECK-NEXT:    movq 304(%rbp), %rax
; CHECK-NEXT:    vmovaps (%rax), %ymm2
; CHECK-NEXT:    movq 312(%rbp), %rax
; CHECK-NEXT:    vmovaps (%rax), %ymm3
; CHECK-NEXT:    vmovaps (%rcx), %ymm4
; CHECK-NEXT:    vmovaps (%rdx), %ymm5
; CHECK-NEXT:    vmovaps (%r8), %ymm6
; CHECK-NEXT:    vmovaps (%r9), %ymm7
; CHECK-NEXT:    vmovaps %ymm7, {{[0-9]+}}(%rsp)
; CHECK-NEXT:    vmovaps %ymm6, {{[0-9]+}}(%rsp)
; CHECK-NEXT:    vmovaps %ymm5, {{[0-9]+}}(%rsp)
; CHECK-NEXT:    vmovaps %ymm4, {{[0-9]+}}(%rsp)
; CHECK-NEXT:    vmovaps %ymm3, {{[0-9]+}}(%rsp)
; CHECK-NEXT:    vmovaps %ymm2, {{[0-9]+}}(%rsp)
; CHECK-NEXT:    vmovaps %ymm1, {{[0-9]+}}(%rsp)
; CHECK-NEXT:    vmovaps %ymm0, (%rsp)
; CHECK-NEXT:    vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
; CHECK-NEXT:    vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm7 # 16-byte Reload
; CHECK-NEXT:    leaq 240(%rbp), %rsp
; CHECK-NEXT:    popq %rbp
; CHECK-NEXT:    vzeroupper
; CHECK-NEXT:    retq
bb:
  ; Storing the indirectly-passed vector arguments to locals forces full
  ; in-memory copies (no copy elision).
  %tmp = alloca <16 x double>
  %tmp2 = alloca <16 x double>
  store <16 x double> %arg, <16 x double>* %tmp
  store <16 x double> %arg1, <16 x double>* %tmp2
  ret void
}

attributes #0 = { nounwind }