; llvm/test/CodeGen/X86/arg-copy-elide-win64.ll

; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc < %s -mtriple=x86_64-windows-msvc -mattr=avx2 | FileCheck %s

; Make sure we don't try to copy elide these arguments since they will be
; passed indirectly.
define void @baz(<16 x double> %arg, <16 x double> %arg1) #0 {
; CHECK-LABEL: baz:
; CHECK:       # %bb.0: # %bb
; CHECK-NEXT:    pushq %rbp
; CHECK-NEXT:    subq $368, %rsp # imm = 0x170
; CHECK-NEXT:    leaq {{[0-9]+}}(%rsp), %rbp
; CHECK-NEXT:    vmovaps %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; CHECK-NEXT:    vmovaps %xmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
; CHECK-NEXT:    andq $-128, %rsp
; CHECK-NEXT:    movq 288(%rbp), %rax
; CHECK-NEXT:    vmovaps (%rax), %ymm0
; CHECK-NEXT:    movq 296(%rbp), %rax
; CHECK-NEXT:    vmovaps (%rax), %ymm1
; CHECK-NEXT:    movq 304(%rbp), %rax
; CHECK-NEXT:    vmovaps (%rax), %ymm2
; CHECK-NEXT:    movq 312(%rbp), %rax
; CHECK-NEXT:    vmovaps (%rax), %ymm3
; CHECK-NEXT:    vmovaps (%rcx), %ymm4
; CHECK-NEXT:    vmovaps (%rdx), %ymm5
; CHECK-NEXT:    vmovaps (%r8), %ymm6
; CHECK-NEXT:    vmovaps (%r9), %ymm7
; CHECK-NEXT:    vmovaps %ymm7, {{[0-9]+}}(%rsp)
; CHECK-NEXT:    vmovaps %ymm6, {{[0-9]+}}(%rsp)
; CHECK-NEXT:    vmovaps %ymm5, {{[0-9]+}}(%rsp)
; CHECK-NEXT:    vmovaps %ymm4, {{[0-9]+}}(%rsp)
; CHECK-NEXT:    vmovaps %ymm3, {{[0-9]+}}(%rsp)
; CHECK-NEXT:    vmovaps %ymm2, {{[0-9]+}}(%rsp)
; CHECK-NEXT:    vmovaps %ymm1, {{[0-9]+}}(%rsp)
; CHECK-NEXT:    vmovaps %ymm0, (%rsp)
; CHECK-NEXT:    vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
; CHECK-NEXT:    vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm7 # 16-byte Reload
; CHECK-NEXT:    leaq 240(%rbp), %rsp
; CHECK-NEXT:    popq %rbp
; CHECK-NEXT:    vzeroupper
; CHECK-NEXT:    retq
bb:
  ; A <16 x double> is 128 bytes, too large for Win64 register/slot passing;
  ; the CHECK lines show the arguments arriving as pointers (loads through
  ; %rcx/%rdx/%r8/%r9 and through stack slots 288..312(%rbp)), confirming
  ; indirect passing. Because each argument is copied into a local alloca, the
  ; stores below force the backend to materialize real 128-byte-aligned stack
  ; objects (hence the `andq $-128, %rsp` realignment) rather than eliding the
  ; copy by reusing the caller's memory.
  ; NOTE(review): the vmovaps spills/reloads of %xmm6/%xmm7 match the Win64
  ; rule that xmm6-xmm15 are callee-saved.
  %tmp = alloca <16 x double>
  %tmp2 = alloca <16 x double>
  store <16 x double> %arg, ptr %tmp
  store <16 x double> %arg1, ptr %tmp2
  ret void
}

; nounwind: the function cannot unwind, so no SEH unwind directives are
; required in the emitted Win64 code.
attributes #0 = { nounwind }