# NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
# RUN: llc -mtriple=riscv64 -run-pass=instruction-select -simplify-mir -verify-machineinstrs %s -o - \
# RUN: | FileCheck -check-prefix=RV64I %s
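#
# This test exercises instruction selection of G_BRCOND fed by G_ICMP with each
# integer predicate. The direct predicates (eq, ne, slt, sge, ult, uge) map to
# BEQ/BNE/BLT/BGE/BLTU/BGEU, while sgt, sle, ugt, and ule are selected by
# swapping the branch operands. The trailing blocks cover branching on a plain
# boolean value and on compares against the constants -1 and 1, both of which
# fold into branches against $x0. Each block reloads the compared value with a
# volatile load so the loads are not optimized away.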
---
name: brcond
legalized: true
regBankSelected: true
tracksRegLiveness: true
body: |
; RV64I-LABEL: name: brcond
; RV64I: bb.0:
; RV64I-NEXT: liveins: $x10, $x11, $x12
; RV64I-NEXT: {{ $}}
; RV64I-NEXT: [[COPY:%[0-9]+]]:gpr = COPY $x10
; RV64I-NEXT: [[COPY1:%[0-9]+]]:gpr = COPY $x11
; RV64I-NEXT: [[COPY2:%[0-9]+]]:gpr = COPY $x12
; RV64I-NEXT: [[LD:%[0-9]+]]:gpr = LD [[COPY1]], 0 :: (volatile load (s64))
; RV64I-NEXT: BEQ [[LD]], [[COPY]], %bb.14
; RV64I-NEXT: PseudoBR %bb.1
; RV64I-NEXT: {{ $}}
; RV64I-NEXT: bb.1:
; RV64I-NEXT: [[LD1:%[0-9]+]]:gpr = LD [[COPY1]], 0 :: (volatile load (s64))
; RV64I-NEXT: BNE [[LD1]], [[COPY]], %bb.14
; RV64I-NEXT: PseudoBR %bb.2
; RV64I-NEXT: {{ $}}
; RV64I-NEXT: bb.2:
; RV64I-NEXT: [[LD2:%[0-9]+]]:gpr = LD [[COPY1]], 0 :: (volatile load (s64))
; RV64I-NEXT: BLT [[LD2]], [[COPY]], %bb.14
; RV64I-NEXT: PseudoBR %bb.3
; RV64I-NEXT: {{ $}}
; RV64I-NEXT: bb.3:
; RV64I-NEXT: [[LD3:%[0-9]+]]:gpr = LD [[COPY1]], 0 :: (volatile load (s64))
; RV64I-NEXT: BGE [[LD3]], [[COPY]], %bb.14
; RV64I-NEXT: PseudoBR %bb.4
; RV64I-NEXT: {{ $}}
; RV64I-NEXT: bb.4:
; RV64I-NEXT: [[LD4:%[0-9]+]]:gpr = LD [[COPY1]], 0 :: (volatile load (s64))
; RV64I-NEXT: BLTU [[LD4]], [[COPY]], %bb.14
; RV64I-NEXT: PseudoBR %bb.5
; RV64I-NEXT: {{ $}}
; RV64I-NEXT: bb.5:
; RV64I-NEXT: [[LD5:%[0-9]+]]:gpr = LD [[COPY1]], 0 :: (volatile load (s64))
; RV64I-NEXT: BGEU [[LD5]], [[COPY]], %bb.14
; RV64I-NEXT: PseudoBR %bb.6
; RV64I-NEXT: {{ $}}
; RV64I-NEXT: bb.6:
; RV64I-NEXT: [[LD6:%[0-9]+]]:gpr = LD [[COPY1]], 0 :: (volatile load (s64))
; RV64I-NEXT: BLT [[COPY]], [[LD6]], %bb.14
; RV64I-NEXT: PseudoBR %bb.7
; RV64I-NEXT: {{ $}}
; RV64I-NEXT: bb.7:
; RV64I-NEXT: [[LD7:%[0-9]+]]:gpr = LD [[COPY1]], 0 :: (volatile load (s64))
; RV64I-NEXT: BGE [[COPY]], [[LD7]], %bb.14
; RV64I-NEXT: PseudoBR %bb.8
; RV64I-NEXT: {{ $}}
; RV64I-NEXT: bb.8:
; RV64I-NEXT: [[LD8:%[0-9]+]]:gpr = LD [[COPY1]], 0 :: (volatile load (s64))
; RV64I-NEXT: BLTU [[COPY]], [[LD8]], %bb.14
; RV64I-NEXT: PseudoBR %bb.9
; RV64I-NEXT: {{ $}}
; RV64I-NEXT: bb.9:
; RV64I-NEXT: [[LD9:%[0-9]+]]:gpr = LD [[COPY1]], 0 :: (volatile load (s64))
; RV64I-NEXT: BGEU [[COPY]], [[LD9]], %bb.14
; RV64I-NEXT: PseudoBR %bb.10
; RV64I-NEXT: {{ $}}
; RV64I-NEXT: bb.10:
; RV64I-NEXT: [[LD10:%[0-9]+]]:gpr = LD [[COPY1]], 0 :: (volatile load (s64))
; RV64I-NEXT: [[ANDI:%[0-9]+]]:gpr = ANDI [[COPY2]], 1
; RV64I-NEXT: BNE [[ANDI]], $x0, %bb.14
; RV64I-NEXT: PseudoBR %bb.11
; RV64I-NEXT: {{ $}}
; RV64I-NEXT: bb.11:
; RV64I-NEXT: successors: %bb.14(0x50000000), %bb.12(0x30000000)
; RV64I-NEXT: {{ $}}
; RV64I-NEXT: [[LD11:%[0-9]+]]:gpr = LD [[COPY1]], 0 :: (volatile load (s64))
; RV64I-NEXT: BGE [[LD11]], $x0, %bb.14
; RV64I-NEXT: PseudoBR %bb.12
; RV64I-NEXT: {{ $}}
; RV64I-NEXT: bb.12:
; RV64I-NEXT: successors: %bb.14(0x30000000), %bb.13(0x50000000)
; RV64I-NEXT: {{ $}}
; RV64I-NEXT: [[LD12:%[0-9]+]]:gpr = LD [[COPY1]], 0 :: (volatile load (s64))
; RV64I-NEXT: BGE $x0, [[LD12]], %bb.14
; RV64I-NEXT: PseudoBR %bb.13
; RV64I-NEXT: {{ $}}
; RV64I-NEXT: bb.13:
; RV64I-NEXT: [[LD13:%[0-9]+]]:gpr = LD [[COPY1]], 0 :: (volatile load (s64))
; RV64I-NEXT: {{ $}}
; RV64I-NEXT: bb.14:
; RV64I-NEXT: PseudoRET
bb.1:
liveins: $x10, $x11, $x12
%0:gprb(s64) = COPY $x10
%1:gprb(p0) = COPY $x11
%3:gprb(s64) = COPY $x12
%26:gprb(s64) = G_CONSTANT i64 -1
%29:gprb(s64) = G_CONSTANT i64 1
%4:gprb(s64) = G_LOAD %1(p0) :: (volatile load (s64))
%56:gprb(s64) = G_ICMP intpred(eq), %4(s64), %0
G_BRCOND %56(s64), %bb.15
G_BR %bb.2
bb.2:
%6:gprb(s64) = G_LOAD %1(p0) :: (volatile load (s64))
%54:gprb(s64) = G_ICMP intpred(ne), %6(s64), %0
G_BRCOND %54(s64), %bb.15
G_BR %bb.3
bb.3:
%8:gprb(s64) = G_LOAD %1(p0) :: (volatile load (s64))
%52:gprb(s64) = G_ICMP intpred(slt), %8(s64), %0
G_BRCOND %52(s64), %bb.15
G_BR %bb.4
bb.4:
%10:gprb(s64) = G_LOAD %1(p0) :: (volatile load (s64))
%50:gprb(s64) = G_ICMP intpred(sge), %10(s64), %0
G_BRCOND %50(s64), %bb.15
G_BR %bb.5
bb.5:
%12:gprb(s64) = G_LOAD %1(p0) :: (volatile load (s64))
%48:gprb(s64) = G_ICMP intpred(ult), %12(s64), %0
G_BRCOND %48(s64), %bb.15
G_BR %bb.6
bb.6:
%14:gprb(s64) = G_LOAD %1(p0) :: (volatile load (s64))
%46:gprb(s64) = G_ICMP intpred(uge), %14(s64), %0
G_BRCOND %46(s64), %bb.15
G_BR %bb.7
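; sgt, sle, ugt, and ule have no dedicated branch instruction; they are
; selected by swapping the operands of BLT, BGE, BLTU, and BGEU.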
bb.7:
%16:gprb(s64) = G_LOAD %1(p0) :: (volatile load (s64))
%44:gprb(s64) = G_ICMP intpred(sgt), %16(s64), %0
G_BRCOND %44(s64), %bb.15
G_BR %bb.8
bb.8:
%18:gprb(s64) = G_LOAD %1(p0) :: (volatile load (s64))
%42:gprb(s64) = G_ICMP intpred(sle), %18(s64), %0
G_BRCOND %42(s64), %bb.15
G_BR %bb.9
bb.9:
%20:gprb(s64) = G_LOAD %1(p0) :: (volatile load (s64))
%40:gprb(s64) = G_ICMP intpred(ugt), %20(s64), %0
G_BRCOND %40(s64), %bb.15
G_BR %bb.10
bb.10:
%22:gprb(s64) = G_LOAD %1(p0) :: (volatile load (s64))
%38:gprb(s64) = G_ICMP intpred(ule), %22(s64), %0
G_BRCOND %38(s64), %bb.15
G_BR %bb.11
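; Branching on a boolean produced by G_AND: the condition is masked to one bit
; with ANDI and branched on with BNE against $x0.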
bb.11:
%24:gprb(s64) = G_LOAD %1(p0) :: (volatile load (s64))
%57:gprb(s64) = G_CONSTANT i64 1
%36:gprb(s64) = G_AND %3, %57
G_BRCOND %36(s64), %bb.15
G_BR %bb.12
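; icmp sgt x, -1 folds the constant and is selected as BGE x, $x0.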
bb.12:
successors: %bb.15(0x50000000), %bb.13(0x30000000)
%25:gprb(s64) = G_LOAD %1(p0) :: (volatile load (s64))
%35:gprb(s64) = G_ICMP intpred(sgt), %25(s64), %26
G_BRCOND %35(s64), %bb.15
G_BR %bb.13
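; icmp slt x, 1 folds the constant and is selected as BGE $x0, x.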
bb.13:
successors: %bb.15(0x30000000), %bb.14(0x50000000)
%28:gprb(s64) = G_LOAD %1(p0) :: (volatile load (s64))
%33:gprb(s64) = G_ICMP intpred(slt), %28(s64), %29
G_BRCOND %33(s64), %bb.15
G_BR %bb.14
bb.14:
%31:gprb(s64) = G_LOAD %1(p0) :: (volatile load (s64))
bb.15:
PseudoRET
...