llvm/test/CodeGen/X86/ipra-local-linkage-2.ll

; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py UTC_ARGS: --version 5
; RUN: llc --mtriple=x86_64-- < %s | FileCheck --check-prefix=X64 %s
; RUN: llc --mtriple=i386-- < %s | FileCheck --check-prefix=X86 %s

; This test ensures that rbp/rbx (x86-64) and ebp/esi (i386) are correctly
; saved and restored before being clobbered when IPRA is enabled.

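; %rbp/%ebp may be live in the caller as the frame pointer (see
; @caller_use_rbp below), so the callee must save and restore it around the
; inline asm that clobbers it rather than rely on IPRA's clobber information.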
define internal void @callee_clobber_rbp() nounwind norecurse {
; X64-LABEL: callee_clobber_rbp:
; X64:       # %bb.0:
; X64-NEXT:    pushq %rbp
; X64-NEXT:    #APP
; X64-NEXT:    xorl %ebp, %ebp
; X64-NEXT:    #NO_APP
; X64-NEXT:    popq %rbp
; X64-NEXT:    retq
;
; X86-LABEL: callee_clobber_rbp:
; X86:       # %bb.0:
; X86-NEXT:    pushl %ebp
; X86-NEXT:    #APP
; X86-NEXT:    xorl %ebp, %ebp
; X86-NEXT:    #NO_APP
; X86-NEXT:    popl %ebp
; X86-NEXT:    retl
  call void asm sideeffect "xor %ebp, %ebp", "~{ebp}"()
  ret void
}

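; %rbx may be live in the caller as the base pointer of a realigned stack
; frame (see @caller_use_rbx below), so it must likewise be preserved here.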
define internal void @callee_clobber_rbx(ptr %addr) nounwind norecurse {
; X64-LABEL: callee_clobber_rbx:
; X64:       # %bb.0:
; X64-NEXT:    pushq %rbx
; X64-NEXT:    #APP
; X64-NEXT:    xorl %ebx, %ebx
; X64-NEXT:    #NO_APP
; X64-NEXT:    popq %rbx
; X64-NEXT:    retq
  call void asm sideeffect "xor %ebx, %ebx", "~{ebx}"()
  ret void
}

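; The i386 counterpart of @callee_clobber_rbx: %esi serves as the base
; pointer in @caller_use_esi's realigned frame and must be preserved.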
define internal void @callee_clobber_esi(ptr %addr) nounwind norecurse {
; X86-LABEL: callee_clobber_esi:
; X86:       # %bb.0:
; X86-NEXT:    pushl %esi
; X86-NEXT:    #APP
; X86-NEXT:    xorl %esi, %esi
; X86-NEXT:    #NO_APP
; X86-NEXT:    popl %esi
; X86-NEXT:    retl
  call void asm sideeffect "xor %esi, %esi", "~{esi}"()
  ret void
}

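; "frame-pointer"="all" forces %rbp/%ebp to be kept as the frame pointer;
; the store to the local at -4(%rbp) after the call would write through a
; corrupted pointer if the callee failed to restore %rbp.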
define void @caller_use_rbp() "frame-pointer"="all" nounwind {
; X64-LABEL: caller_use_rbp:
; X64:       # %bb.0:
; X64-NEXT:    pushq %rbp
; X64-NEXT:    movq %rsp, %rbp
; X64-NEXT:    subq $16, %rsp
; X64-NEXT:    callq callee_clobber_rbp
; X64-NEXT:    movl $5, -4(%rbp)
; X64-NEXT:    addq $16, %rsp
; X64-NEXT:    popq %rbp
; X64-NEXT:    retq
;
; X86-LABEL: caller_use_rbp:
; X86:       # %bb.0:
; X86-NEXT:    pushl %ebp
; X86-NEXT:    movl %esp, %ebp
; X86-NEXT:    pushl %eax
; X86-NEXT:    calll callee_clobber_rbp
; X86-NEXT:    movl $5, -4(%ebp)
; X86-NEXT:    addl $4, %esp
; X86-NEXT:    popl %ebp
; X86-NEXT:    retl
  call void @callee_clobber_rbp()
  %addr = alloca i32, align 4
  store i32 5, ptr %addr, align 4
  ret void
}

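; An overaligned alloca plus a variable-sized alloca forces stack
; realignment, so %rbx is picked as the base pointer (movq %rsp, %rbx) and
; the stack protector slot at 32(%rbx) is re-read through it after the call,
; which only works if @callee_clobber_rbx preserves %rbx.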
define void @caller_use_rbx(i32 %X) nounwind ssp {
; X64-LABEL: caller_use_rbx:
; X64:       # %bb.0:
; X64-NEXT:    pushq %rbp
; X64-NEXT:    movq %rsp, %rbp
; X64-NEXT:    pushq %rbx
; X64-NEXT:    andq $-32, %rsp
; X64-NEXT:    subq $64, %rsp
; X64-NEXT:    movq %rsp, %rbx
; X64-NEXT:    movq __stack_chk_guard(%rip), %rax
; X64-NEXT:    movq %rax, 32(%rbx)
; X64-NEXT:    movq %rsp, %rax
; X64-NEXT:    movl %edi, %ecx
; X64-NEXT:    leaq 15(,%rcx,4), %rcx
; X64-NEXT:    andq $-16, %rcx
; X64-NEXT:    subq %rcx, %rax
; X64-NEXT:    movq %rax, %rsp
; X64-NEXT:    movq %rbx, %rdi
; X64-NEXT:    callq callee_clobber_rbx
; X64-NEXT:    movq __stack_chk_guard(%rip), %rax
; X64-NEXT:    cmpq 32(%rbx), %rax
; X64-NEXT:    jne .LBB4_2
; X64-NEXT:  # %bb.1:
; X64-NEXT:    leaq -8(%rbp), %rsp
; X64-NEXT:    popq %rbx
; X64-NEXT:    popq %rbp
; X64-NEXT:    retq
; X64-NEXT:  .LBB4_2:
; X64-NEXT:    callq __stack_chk_fail@PLT
  %realign = alloca i32, align 32
  %addr = alloca i32, i32 %X
  call void @callee_clobber_rbx(ptr %realign)
  ret void
}

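; The i386 version of @caller_use_rbx: %esi becomes the base pointer
; (movl %esp, %esi) and the guard slot at 16(%esi) is compared through it
; after the call, so @callee_clobber_esi must not leave %esi clobbered.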
define void @caller_use_esi(i32 %X) nounwind ssp {
; X86-LABEL: caller_use_esi:
; X86:       # %bb.0:
; X86-NEXT:    pushl %ebp
; X86-NEXT:    movl %esp, %ebp
; X86-NEXT:    pushl %esi
; X86-NEXT:    andl $-32, %esp
; X86-NEXT:    subl $32, %esp
; X86-NEXT:    movl %esp, %esi
; X86-NEXT:    movl 8(%ebp), %eax
; X86-NEXT:    movl __stack_chk_guard, %ecx
; X86-NEXT:    movl %ecx, 16(%esi)
; X86-NEXT:    movl %esp, %ecx
; X86-NEXT:    shll $2, %eax
; X86-NEXT:    subl %eax, %ecx
; X86-NEXT:    movl %ecx, %esp
; X86-NEXT:    movl %esi, %eax
; X86-NEXT:    pushl %eax
; X86-NEXT:    calll callee_clobber_esi
; X86-NEXT:    addl $4, %esp
; X86-NEXT:    movl __stack_chk_guard, %eax
; X86-NEXT:    cmpl 16(%esi), %eax
; X86-NEXT:    jne .LBB5_2
; X86-NEXT:  # %bb.1:
; X86-NEXT:    leal -4(%ebp), %esp
; X86-NEXT:    popl %esi
; X86-NEXT:    popl %ebp
; X86-NEXT:    retl
; X86-NEXT:  .LBB5_2:
; X86-NEXT:    calll __stack_chk_fail
  %realign = alloca i32, align 32
  %addr = alloca i32, i32 %X
  call void @callee_clobber_esi(ptr %realign)
  ret void
}