llvm/test/CodeGen/AMDGPU/remat-dead-subreg.mir

# NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
# RUN: llc -mtriple=amdgcn -start-before=greedy -stop-after=virtregrewriter -stress-regalloc=3 -verify-machineinstrs -o - %s | FileCheck -check-prefix=GCN %s

# Check that the subreg used by the defining instruction is live at the point of
# rematerialization, not just the register's main live range. Do not rematerialize
# if the used subreg is dead at the new index.

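# dead_subreg: %0.sub1 dies at the S_MUL_I32 (only %0.sub0 reaches the final S_NOP),
# so the multiply cannot be rematerialized below the first V_MOV_B32_e32 and its
# result is spilled and reloaded instead.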
---
name:            dead_subreg
tracksRegLiveness: true
body:             |
  bb.0.entry:
    ; GCN-LABEL: name: dead_subreg
    ; GCN: $m0 = IMPLICIT_DEF
    ; GCN-NEXT: renamable $sgpr0_sgpr1 = S_MOV_B64 1, implicit $m0
    ; GCN-NEXT: renamable $sgpr1 = S_MUL_I32 renamable $sgpr1, 3
    ; GCN-NEXT: SI_SPILL_S32_SAVE killed renamable $sgpr1, %stack.0, implicit $exec, implicit $sp_reg :: (store (s32) into %stack.0, addrspace 5)
    ; GCN-NEXT: renamable $sgpr2 = S_MOV_B32 2, implicit $m0
    ; GCN-NEXT: renamable $sgpr1 = S_MOV_B32 3, implicit $m0
    ; GCN-NEXT: dead [[V_MOV_B32_e32_:%[0-9]+]]:vgpr_32 = V_MOV_B32_e32 $sgpr0, implicit $exec, implicit killed $sgpr2, implicit killed $sgpr1
    ; GCN-NEXT: renamable $sgpr1 = SI_SPILL_S32_RESTORE %stack.0, implicit $exec, implicit $sp_reg :: (load (s32) from %stack.0, addrspace 5)
    ; GCN-NEXT: dead [[V_MOV_B32_e32_1:%[0-9]+]]:vgpr_32 = V_MOV_B32_e32 killed $sgpr1, implicit $exec
    ; GCN-NEXT: S_NOP 0, implicit killed renamable $sgpr0
    $m0 = IMPLICIT_DEF
    %0:sreg_64_xexec = S_MOV_B64 1, implicit $m0
    %1:sreg_32 = S_MUL_I32 %0.sub1:sreg_64_xexec, 3
    %2:sreg_32 = S_MOV_B32 2, implicit $m0
    %3:sreg_32 = S_MOV_B32 3, implicit $m0
    %4:vgpr_32 = V_MOV_B32_e32 %0.sub0:sreg_64_xexec, implicit $exec, implicit %2, implicit %3
    %5:vgpr_32 = V_MOV_B32_e32 %1:sreg_32, implicit $exec
    S_NOP 0, implicit %0.sub0
...
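# live_subreg: the final S_NOP keeps %0.sub1 alive, so the S_MUL_I32 can be
# rematerialized right before its use instead of having its result spilled.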
---
name:            live_subreg
tracksRegLiveness: true
body:             |
  bb.0.entry:
    ; GCN-LABEL: name: live_subreg
    ; GCN: $m0 = IMPLICIT_DEF
    ; GCN-NEXT: renamable $sgpr4_sgpr5 = S_MOV_B64 1, implicit $m0
    ; GCN-NEXT: renamable $sgpr2 = S_MOV_B32 2, implicit $m0
    ; GCN-NEXT: SI_SPILL_S32_SAVE killed renamable $sgpr2, %stack.0, implicit $exec, implicit $sp_reg :: (store (s32) into %stack.0, addrspace 5)
    ; GCN-NEXT: renamable $sgpr2 = S_MOV_B32 3, implicit $m0
    ; GCN-NEXT: renamable $sgpr0 = SI_SPILL_S32_RESTORE %stack.0, implicit $exec, implicit $sp_reg :: (load (s32) from %stack.0, addrspace 5)
    ; GCN-NEXT: dead [[V_MOV_B32_e32_:%[0-9]+]]:vgpr_32 = V_MOV_B32_e32 $sgpr4, implicit $exec, implicit killed $sgpr0, implicit killed $sgpr2
    ; GCN-NEXT: renamable $sgpr0 = S_MUL_I32 renamable $sgpr5, 3
    ; GCN-NEXT: dead [[V_MOV_B32_e32_1:%[0-9]+]]:vgpr_32 = V_MOV_B32_e32 killed $sgpr0, implicit $exec
    ; GCN-NEXT: S_NOP 0, implicit killed renamable $sgpr5
    $m0 = IMPLICIT_DEF
    %0:sreg_64_xexec = S_MOV_B64 1, implicit $m0
    %1:sreg_32 = S_MUL_I32 %0.sub1:sreg_64_xexec, 3
    %2:sreg_32 = S_MOV_B32 2, implicit $m0
    %3:sreg_32 = S_MOV_B32 3, implicit $m0
    %4:vgpr_32 = V_MOV_B32_e32 %0.sub0:sreg_64_xexec, implicit $exec, implicit %2, implicit %3
    %5:vgpr_32 = V_MOV_B32_e32 %1:sreg_32, implicit $exec
    S_NOP 0, implicit %0.sub1
...
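# partially_live_wide_subreg: the S_MOV_B64 reads %1.sub2_sub3, but only %1.sub3
# stays live up to the final S_NOP; with the source subreg only partially live at
# the later index, %2 is spilled rather than rematerialized.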
---
name:            partially_live_wide_subreg
tracksRegLiveness: true
body:             |
  bb.0.entry:
    ; GCN-LABEL: name: partially_live_wide_subreg
    ; GCN: renamable $sgpr0_sgpr1 = IMPLICIT_DEF
    ; GCN-NEXT: renamable $sgpr8_sgpr9_sgpr10_sgpr11 = S_LOAD_DWORDX4_IMM killed renamable $sgpr0_sgpr1, 1, 0
    ; GCN-NEXT: renamable $sgpr4_sgpr5 = S_MOV_B64 renamable $sgpr10_sgpr11
    ; GCN-NEXT: SI_SPILL_S64_SAVE killed renamable $sgpr4_sgpr5, %stack.0, implicit $exec, implicit $sp_reg :: (store (s64) into %stack.0, align 4, addrspace 5)
    ; GCN-NEXT: renamable $sgpr4_sgpr5 = S_MOV_B64 3, implicit $m0
    ; GCN-NEXT: renamable $sgpr0_sgpr1 = S_MOV_B64 2, implicit $m0
    ; GCN-NEXT: dead [[V_MOV_B32_e32_:%[0-9]+]]:vgpr_32 = V_MOV_B32_e32 $sgpr8, implicit $exec, implicit killed $sgpr0_sgpr1, implicit killed $sgpr4_sgpr5
    ; GCN-NEXT: renamable $sgpr4_sgpr5 = SI_SPILL_S64_RESTORE %stack.0, implicit $exec, implicit $sp_reg :: (load (s64) from %stack.0, align 4, addrspace 5)
    ; GCN-NEXT: dead [[V_MOV_B:%[0-9]+]]:vreg_64 = V_MOV_B64_PSEUDO killed $sgpr4_sgpr5, implicit $exec
    ; GCN-NEXT: S_NOP 0, implicit killed renamable $sgpr8, implicit renamable $sgpr11
    %0:sreg_64 = IMPLICIT_DEF
    %1:sgpr_128 = S_LOAD_DWORDX4_IMM %0, 1, 0
    %2:sreg_64 = S_MOV_B64 %1.sub2_sub3
    %3:sreg_64 = S_MOV_B64 2, implicit $m0
    %4:sreg_64 = S_MOV_B64 3, implicit $m0
    %5:vgpr_32 = V_MOV_B32_e32 %1.sub0, implicit $exec, implicit %3, implicit %4
    %6:vreg_64 = V_MOV_B64_PSEUDO %2, implicit $exec
    S_NOP 0, implicit %1.sub0, implicit %1.sub3
...
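# dead_subreg_whole_reg: the copy reads the whole %0, but %0.sub1 is dead past the
# copy (only %0.sub0 reaches the final S_NOP), so %1 is spilled rather than
# rematerialized.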
---
name:            dead_subreg_whole_reg
tracksRegLiveness: true
body:             |
  bb.0.entry:
    ; GCN-LABEL: name: dead_subreg_whole_reg
    ; GCN: $m0 = IMPLICIT_DEF
    ; GCN-NEXT: renamable $sgpr0_sgpr1 = S_MOV_B64 1, implicit $m0
    ; GCN-NEXT: renamable $sgpr2_sgpr3 = S_MOV_B64 renamable $sgpr0_sgpr1
    ; GCN-NEXT: SI_SPILL_S64_SAVE killed renamable $sgpr2_sgpr3, %stack.0, implicit $exec, implicit $sp_reg :: (store (s64) into %stack.0, align 4, addrspace 5)
    ; GCN-NEXT: renamable $sgpr4_sgpr5 = S_MOV_B64 2, implicit $m0
    ; GCN-NEXT: renamable $sgpr2_sgpr3 = S_MOV_B64 3, implicit $m0
    ; GCN-NEXT: dead [[V_MOV_B32_e32_:%[0-9]+]]:vgpr_32 = V_MOV_B32_e32 $sgpr0, implicit $exec, implicit killed $sgpr4_sgpr5, implicit killed $sgpr2_sgpr3
    ; GCN-NEXT: renamable $sgpr2_sgpr3 = SI_SPILL_S64_RESTORE %stack.0, implicit $exec, implicit $sp_reg :: (load (s64) from %stack.0, align 4, addrspace 5)
    ; GCN-NEXT: dead [[V_MOV_B:%[0-9]+]]:vreg_64 = V_MOV_B64_PSEUDO killed $sgpr2_sgpr3, implicit $exec
    ; GCN-NEXT: S_NOP 0, implicit killed renamable $sgpr0
    $m0 = IMPLICIT_DEF
    %0:sreg_64_xexec = S_MOV_B64 1, implicit $m0
    %1:sreg_64 = S_MOV_B64 %0:sreg_64_xexec
    %2:sreg_64 = S_MOV_B64 2, implicit $m0
    %3:sreg_64 = S_MOV_B64 3, implicit $m0
    %4:vgpr_32 = V_MOV_B32_e32 %0.sub0:sreg_64_xexec, implicit $exec, implicit %2, implicit %3
    %5:vreg_64 = V_MOV_B64_PSEUDO %1, implicit $exec
    S_NOP 0, implicit %0.sub0
...