llvm/test/CodeGen/RISCV/GlobalISel/legalizer/legalize-abs-rv32.mir

# NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
# RUN: llc -mtriple=riscv32 -run-pass=legalizer %s -o - \
# RUN:   | FileCheck %s --check-prefixes=CHECK,RV32I
# RUN: llc -mtriple=riscv32 -mattr=+zbb -run-pass=legalizer %s -o - \
# RUN:   | FileCheck %s --check-prefixes=CHECK,RV32ZBB

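# Test legalization of G_ABS for s8, s16, s32 and s64 on RV32, with and without
# the Zbb extension.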
---
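# s8: RV32I sign-extends the value in-register (shl/ashr by 24) to compute the sign
# (arithmetic shift right by 7), then expands abs to (x + sign) ^ sign; RV32ZBB instead
# emits G_SEXT_INREG followed by G_SMAX(x, 0 - x). In both cases the result is masked
# with 255 to zero-extend the 8-bit result for the return.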
name:            abs_i8
body:             |
  bb.0.entry:
    ; RV32I-LABEL: name: abs_i8
    ; RV32I: [[COPY:%[0-9]+]]:_(s32) = COPY $x10
    ; RV32I-NEXT: [[ASSERT_ZEXT:%[0-9]+]]:_(s32) = G_ASSERT_ZEXT [[COPY]], 8
    ; RV32I-NEXT: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 7
    ; RV32I-NEXT: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 24
    ; RV32I-NEXT: [[SHL:%[0-9]+]]:_(s32) = G_SHL [[ASSERT_ZEXT]], [[C1]](s32)
    ; RV32I-NEXT: [[ASHR:%[0-9]+]]:_(s32) = G_ASHR [[SHL]], [[C1]](s32)
    ; RV32I-NEXT: [[ASHR1:%[0-9]+]]:_(s32) = G_ASHR [[ASHR]], [[C]](s32)
    ; RV32I-NEXT: [[ADD:%[0-9]+]]:_(s32) = G_ADD [[ASSERT_ZEXT]], [[ASHR1]]
    ; RV32I-NEXT: [[XOR:%[0-9]+]]:_(s32) = G_XOR [[ADD]], [[ASHR1]]
    ; RV32I-NEXT: [[C2:%[0-9]+]]:_(s32) = G_CONSTANT i32 255
    ; RV32I-NEXT: [[AND:%[0-9]+]]:_(s32) = G_AND [[XOR]], [[C2]]
    ; RV32I-NEXT: $x10 = COPY [[AND]](s32)
    ; RV32I-NEXT: PseudoRET implicit $x10
    ;
    ; RV32ZBB-LABEL: name: abs_i8
    ; RV32ZBB: [[COPY:%[0-9]+]]:_(s32) = COPY $x10
    ; RV32ZBB-NEXT: [[ASSERT_ZEXT:%[0-9]+]]:_(s32) = G_ASSERT_ZEXT [[COPY]], 8
    ; RV32ZBB-NEXT: [[SEXT_INREG:%[0-9]+]]:_(s32) = G_SEXT_INREG [[ASSERT_ZEXT]], 8
    ; RV32ZBB-NEXT: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 0
    ; RV32ZBB-NEXT: [[SUB:%[0-9]+]]:_(s32) = G_SUB [[C]], [[SEXT_INREG]]
    ; RV32ZBB-NEXT: [[SMAX:%[0-9]+]]:_(s32) = G_SMAX [[SEXT_INREG]], [[SUB]]
    ; RV32ZBB-NEXT: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 255
    ; RV32ZBB-NEXT: [[AND:%[0-9]+]]:_(s32) = G_AND [[SMAX]], [[C1]]
    ; RV32ZBB-NEXT: $x10 = COPY [[AND]](s32)
    ; RV32ZBB-NEXT: PseudoRET implicit $x10
    %1:_(s32) = COPY $x10
    %2:_(s32) = G_ASSERT_ZEXT %1, 8
    %0:_(s8) = G_TRUNC %2(s32)
    %3:_(s8) = G_ABS %0
    %4:_(s32) = G_ZEXT %3(s8)
    $x10 = COPY %4(s32)
    PseudoRET implicit $x10
...
---
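# s16: the input is already sign-extended (G_ASSERT_SEXT 16), so RV32I expands abs to
# (x + (x >>s 15)) ^ (x >>s 15) and sign-extends the result back to 16 bits (shl/ashr
# by 16); RV32ZBB uses G_SMAX(x, 0 - x) followed by G_SEXT_INREG.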
name:            abs_i16
body:             |
  bb.0.entry:
    ; RV32I-LABEL: name: abs_i16
    ; RV32I: [[COPY:%[0-9]+]]:_(s32) = COPY $x10
    ; RV32I-NEXT: [[ASSERT_SEXT:%[0-9]+]]:_(s32) = G_ASSERT_SEXT [[COPY]], 16
    ; RV32I-NEXT: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 15
    ; RV32I-NEXT: [[ASHR:%[0-9]+]]:_(s32) = G_ASHR [[ASSERT_SEXT]], [[C]](s32)
    ; RV32I-NEXT: [[ADD:%[0-9]+]]:_(s32) = G_ADD [[ASSERT_SEXT]], [[ASHR]]
    ; RV32I-NEXT: [[XOR:%[0-9]+]]:_(s32) = G_XOR [[ADD]], [[ASHR]]
    ; RV32I-NEXT: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 16
    ; RV32I-NEXT: [[SHL:%[0-9]+]]:_(s32) = G_SHL [[XOR]], [[C1]](s32)
    ; RV32I-NEXT: [[ASHR1:%[0-9]+]]:_(s32) = G_ASHR [[SHL]], [[C1]](s32)
    ; RV32I-NEXT: $x10 = COPY [[ASHR1]](s32)
    ; RV32I-NEXT: PseudoRET implicit $x10
    ;
    ; RV32ZBB-LABEL: name: abs_i16
    ; RV32ZBB: [[COPY:%[0-9]+]]:_(s32) = COPY $x10
    ; RV32ZBB-NEXT: [[ASSERT_SEXT:%[0-9]+]]:_(s32) = G_ASSERT_SEXT [[COPY]], 16
    ; RV32ZBB-NEXT: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 0
    ; RV32ZBB-NEXT: [[SUB:%[0-9]+]]:_(s32) = G_SUB [[C]], [[ASSERT_SEXT]]
    ; RV32ZBB-NEXT: [[SMAX:%[0-9]+]]:_(s32) = G_SMAX [[ASSERT_SEXT]], [[SUB]]
    ; RV32ZBB-NEXT: [[SEXT_INREG:%[0-9]+]]:_(s32) = G_SEXT_INREG [[SMAX]], 16
    ; RV32ZBB-NEXT: $x10 = COPY [[SEXT_INREG]](s32)
    ; RV32ZBB-NEXT: PseudoRET implicit $x10
    %1:_(s32) = COPY $x10
    %2:_(s32) = G_ASSERT_SEXT %1, 16
    %0:_(s16) = G_TRUNC %2(s32)
    %3:_(s16) = G_ABS %0
    %4:_(s32) = G_SEXT %3(s16)
    $x10 = COPY %4(s32)
    PseudoRET implicit $x10
...
---
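# s32: native width, so RV32I expands abs to (x + (x >>s 31)) ^ (x >>s 31) and RV32ZBB
# selects G_SMAX(x, 0 - x) directly; no extension of the result is needed.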
name:            abs_i32
body:             |
  bb.0.entry:
    ; RV32I-LABEL: name: abs_i32
    ; RV32I: [[COPY:%[0-9]+]]:_(s32) = COPY $x10
    ; RV32I-NEXT: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 31
    ; RV32I-NEXT: [[ASHR:%[0-9]+]]:_(s32) = G_ASHR [[COPY]], [[C]](s32)
    ; RV32I-NEXT: [[ADD:%[0-9]+]]:_(s32) = G_ADD [[COPY]], [[ASHR]]
    ; RV32I-NEXT: [[XOR:%[0-9]+]]:_(s32) = G_XOR [[ADD]], [[ASHR]]
    ; RV32I-NEXT: $x10 = COPY [[XOR]](s32)
    ; RV32I-NEXT: PseudoRET implicit $x10
    ;
    ; RV32ZBB-LABEL: name: abs_i32
    ; RV32ZBB: [[COPY:%[0-9]+]]:_(s32) = COPY $x10
    ; RV32ZBB-NEXT: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 0
    ; RV32ZBB-NEXT: [[SUB:%[0-9]+]]:_(s32) = G_SUB [[C]], [[COPY]]
    ; RV32ZBB-NEXT: [[SMAX:%[0-9]+]]:_(s32) = G_SMAX [[COPY]], [[SUB]]
    ; RV32ZBB-NEXT: $x10 = COPY [[SMAX]](s32)
    ; RV32ZBB-NEXT: PseudoRET implicit $x10
    %0:_(s32) = COPY $x10
    %1:_(s32) = G_ABS %0
    $x10 = COPY %1(s32)
    PseudoRET implicit $x10
...
---
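# s64: narrowed to two s32 halves. The sign is broadcast from the high half (ashr by 31),
# added to both halves with the carry computed via G_ICMP ult, and both halves are then
# XORed with the sign. The expansion is identical with and without Zbb (common CHECK prefix).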
name:            abs_i64
body:             |
  bb.0.entry:
    ; CHECK-LABEL: name: abs_i64
    ; CHECK: [[COPY:%[0-9]+]]:_(s32) = COPY $x10
    ; CHECK-NEXT: [[COPY1:%[0-9]+]]:_(s32) = COPY $x11
    ; CHECK-NEXT: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 31
    ; CHECK-NEXT: [[ASHR:%[0-9]+]]:_(s32) = G_ASHR [[COPY1]], [[C]](s32)
    ; CHECK-NEXT: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 31
    ; CHECK-NEXT: [[ASHR1:%[0-9]+]]:_(s32) = G_ASHR [[COPY1]], [[C1]](s32)
    ; CHECK-NEXT: [[ADD:%[0-9]+]]:_(s32) = G_ADD [[COPY]], [[ASHR]]
    ; CHECK-NEXT: [[ICMP:%[0-9]+]]:_(s32) = G_ICMP intpred(ult), [[ADD]](s32), [[ASHR]]
    ; CHECK-NEXT: [[COPY2:%[0-9]+]]:_(s32) = COPY [[ADD]](s32)
    ; CHECK-NEXT: [[ADD1:%[0-9]+]]:_(s32) = G_ADD [[COPY1]], [[ASHR1]]
    ; CHECK-NEXT: [[ADD2:%[0-9]+]]:_(s32) = G_ADD [[ADD1]], [[ICMP]]
    ; CHECK-NEXT: [[COPY3:%[0-9]+]]:_(s32) = COPY [[ADD2]](s32)
    ; CHECK-NEXT: [[XOR:%[0-9]+]]:_(s32) = G_XOR [[COPY2]], [[ASHR]]
    ; CHECK-NEXT: [[XOR1:%[0-9]+]]:_(s32) = G_XOR [[COPY3]], [[ASHR1]]
    ; CHECK-NEXT: $x10 = COPY [[XOR]](s32)
    ; CHECK-NEXT: $x11 = COPY [[XOR1]](s32)
    ; CHECK-NEXT: PseudoRET implicit $x10, implicit $x11
    %1:_(s32) = COPY $x10
    %2:_(s32) = COPY $x11
    %0:_(s64) = G_MERGE_VALUES %1(s32), %2(s32)
    %3:_(s64) = G_ABS %0
    %4:_(s32), %5:_(s32) = G_UNMERGE_VALUES %3(s64)
    $x10 = COPY %4(s32)
    $x11 = COPY %5(s32)
    PseudoRET implicit $x10, implicit $x11
...