# NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
# RUN: llc -mtriple=riscv64 -run-pass=legalizer %s -o - \
# RUN: | FileCheck %s --check-prefix=RV64I
# RUN: llc -mtriple=riscv64 -mattr=+zbb -run-pass=legalizer %s -o - \
# RUN: | FileCheck %s --check-prefix=RV64ZBB
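
# Test legalization of G_ABS for s8, s16, s32 and s64 on RV64. Without Zbb the
# legalizer lowers G_ABS to a shift/add/xor sequence; with Zbb, G_SMAX is legal
# and abs is lowered to smax(x, 0 - x).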
---
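# i8: the argument arrives zero-extended (G_ASSERT_ZEXT), so RV64I first
# sign-extends it within s32 (shl/ashr by 24) before the generic lowering
# (x + mask) ^ mask, where mask = x >>s 7. RV64ZBB instead uses G_SEXT_INREG
# and smax(x, 0 - x). Both mask the result back to 8 bits for the
# zero-extended return value.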
name: abs_i8
body: |
bb.0.entry:
; RV64I-LABEL: name: abs_i8
; RV64I: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
; RV64I-NEXT: [[ASSERT_ZEXT:%[0-9]+]]:_(s64) = G_ASSERT_ZEXT [[COPY]], 8
; RV64I-NEXT: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 7
; RV64I-NEXT: [[TRUNC:%[0-9]+]]:_(s32) = G_TRUNC [[ASSERT_ZEXT]](s64)
; RV64I-NEXT: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 24
; RV64I-NEXT: [[SHL:%[0-9]+]]:_(s32) = G_SHL [[TRUNC]], [[C1]](s32)
; RV64I-NEXT: [[ASHR:%[0-9]+]]:_(s32) = G_ASHR [[SHL]], [[C1]](s32)
; RV64I-NEXT: [[ASHR1:%[0-9]+]]:_(s32) = G_ASHR [[ASHR]], [[C]](s32)
; RV64I-NEXT: [[COPY1:%[0-9]+]]:_(s32) = COPY [[TRUNC]](s32)
; RV64I-NEXT: [[ADD:%[0-9]+]]:_(s32) = G_ADD [[COPY1]], [[ASHR1]]
; RV64I-NEXT: [[XOR:%[0-9]+]]:_(s32) = G_XOR [[ADD]], [[ASHR1]]
; RV64I-NEXT: [[ANYEXT:%[0-9]+]]:_(s64) = G_ANYEXT [[XOR]](s32)
; RV64I-NEXT: [[C2:%[0-9]+]]:_(s64) = G_CONSTANT i64 255
; RV64I-NEXT: [[AND:%[0-9]+]]:_(s64) = G_AND [[ANYEXT]], [[C2]]
; RV64I-NEXT: $x10 = COPY [[AND]](s64)
; RV64I-NEXT: PseudoRET implicit $x10
;
; RV64ZBB-LABEL: name: abs_i8
; RV64ZBB: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
; RV64ZBB-NEXT: [[ASSERT_ZEXT:%[0-9]+]]:_(s64) = G_ASSERT_ZEXT [[COPY]], 8
; RV64ZBB-NEXT: [[SEXT_INREG:%[0-9]+]]:_(s64) = G_SEXT_INREG [[ASSERT_ZEXT]], 8
; RV64ZBB-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 0
; RV64ZBB-NEXT: [[SUB:%[0-9]+]]:_(s64) = G_SUB [[C]], [[SEXT_INREG]]
; RV64ZBB-NEXT: [[SMAX:%[0-9]+]]:_(s64) = G_SMAX [[SEXT_INREG]], [[SUB]]
; RV64ZBB-NEXT: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 255
; RV64ZBB-NEXT: [[AND:%[0-9]+]]:_(s64) = G_AND [[SMAX]], [[C1]]
; RV64ZBB-NEXT: $x10 = COPY [[AND]](s64)
; RV64ZBB-NEXT: PseudoRET implicit $x10
%1:_(s64) = COPY $x10
%2:_(s64) = G_ASSERT_ZEXT %1, 8
%0:_(s8) = G_TRUNC %2(s64)
%3:_(s8) = G_ABS %0
%4:_(s64) = G_ZEXT %3(s8)
$x10 = COPY %4(s64)
PseudoRET implicit $x10
...
---
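# i16: the argument arrives sign-extended (G_ASSERT_SEXT), so RV64I applies the
# add/xor lowering directly with mask = x >>s 15, then re-sign-extends the
# result to s64 (shl/ashr by 48). RV64ZBB takes smax(x, 0 - x) on the full s64
# value and sign-extends the result with G_SEXT_INREG.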
name: abs_i16
body: |
bb.0.entry:
; RV64I-LABEL: name: abs_i16
; RV64I: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
; RV64I-NEXT: [[ASSERT_SEXT:%[0-9]+]]:_(s64) = G_ASSERT_SEXT [[COPY]], 16
; RV64I-NEXT: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 15
; RV64I-NEXT: [[TRUNC:%[0-9]+]]:_(s32) = G_TRUNC [[ASSERT_SEXT]](s64)
; RV64I-NEXT: [[ASHR:%[0-9]+]]:_(s32) = G_ASHR [[TRUNC]], [[C]](s32)
; RV64I-NEXT: [[COPY1:%[0-9]+]]:_(s32) = COPY [[TRUNC]](s32)
; RV64I-NEXT: [[ADD:%[0-9]+]]:_(s32) = G_ADD [[COPY1]], [[ASHR]]
; RV64I-NEXT: [[XOR:%[0-9]+]]:_(s32) = G_XOR [[ADD]], [[ASHR]]
; RV64I-NEXT: [[ANYEXT:%[0-9]+]]:_(s64) = G_ANYEXT [[XOR]](s32)
; RV64I-NEXT: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 48
; RV64I-NEXT: [[SHL:%[0-9]+]]:_(s64) = G_SHL [[ANYEXT]], [[C1]](s64)
; RV64I-NEXT: [[ASHR1:%[0-9]+]]:_(s64) = G_ASHR [[SHL]], [[C1]](s64)
; RV64I-NEXT: $x10 = COPY [[ASHR1]](s64)
; RV64I-NEXT: PseudoRET implicit $x10
;
; RV64ZBB-LABEL: name: abs_i16
; RV64ZBB: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
; RV64ZBB-NEXT: [[ASSERT_SEXT:%[0-9]+]]:_(s64) = G_ASSERT_SEXT [[COPY]], 16
; RV64ZBB-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 0
; RV64ZBB-NEXT: [[SUB:%[0-9]+]]:_(s64) = G_SUB [[C]], [[ASSERT_SEXT]]
; RV64ZBB-NEXT: [[SMAX:%[0-9]+]]:_(s64) = G_SMAX [[ASSERT_SEXT]], [[SUB]]
; RV64ZBB-NEXT: [[SEXT_INREG:%[0-9]+]]:_(s64) = G_SEXT_INREG [[SMAX]], 16
; RV64ZBB-NEXT: $x10 = COPY [[SEXT_INREG]](s64)
; RV64ZBB-NEXT: PseudoRET implicit $x10
%1:_(s64) = COPY $x10
%2:_(s64) = G_ASSERT_SEXT %1, 16
%0:_(s16) = G_TRUNC %2(s64)
%3:_(s16) = G_ABS %0
%4:_(s64) = G_SEXT %3(s16)
$x10 = COPY %4(s64)
PseudoRET implicit $x10
...
---
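# i32: RV64I performs the add/xor lowering in s32 with mask = x >>s 31 and
# sign-extends the result to s64. RV64ZBB negates in s32, widens with G_SEXT,
# and takes G_SMAX against the sign-extended input.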
name: abs_i32
body: |
bb.0.entry:
; RV64I-LABEL: name: abs_i32
; RV64I: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
; RV64I-NEXT: [[ASSERT_SEXT:%[0-9]+]]:_(s64) = G_ASSERT_SEXT [[COPY]], 32
; RV64I-NEXT: [[TRUNC:%[0-9]+]]:_(s32) = G_TRUNC [[ASSERT_SEXT]](s64)
; RV64I-NEXT: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 31
; RV64I-NEXT: [[ASHR:%[0-9]+]]:_(s32) = G_ASHR [[TRUNC]], [[C]](s32)
; RV64I-NEXT: [[ADD:%[0-9]+]]:_(s32) = G_ADD [[TRUNC]], [[ASHR]]
; RV64I-NEXT: [[XOR:%[0-9]+]]:_(s32) = G_XOR [[ADD]], [[ASHR]]
; RV64I-NEXT: [[SEXT:%[0-9]+]]:_(s64) = G_SEXT [[XOR]](s32)
; RV64I-NEXT: $x10 = COPY [[SEXT]](s64)
; RV64I-NEXT: PseudoRET implicit $x10
;
; RV64ZBB-LABEL: name: abs_i32
; RV64ZBB: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
; RV64ZBB-NEXT: [[ASSERT_SEXT:%[0-9]+]]:_(s64) = G_ASSERT_SEXT [[COPY]], 32
; RV64ZBB-NEXT: [[TRUNC:%[0-9]+]]:_(s32) = G_TRUNC [[ASSERT_SEXT]](s64)
; RV64ZBB-NEXT: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 0
; RV64ZBB-NEXT: [[SUB:%[0-9]+]]:_(s32) = G_SUB [[C]], [[TRUNC]]
; RV64ZBB-NEXT: [[SEXT:%[0-9]+]]:_(s64) = G_SEXT [[SUB]](s32)
; RV64ZBB-NEXT: [[SMAX:%[0-9]+]]:_(s64) = G_SMAX [[ASSERT_SEXT]], [[SEXT]]
; RV64ZBB-NEXT: [[SEXT_INREG:%[0-9]+]]:_(s64) = G_SEXT_INREG [[SMAX]], 32
; RV64ZBB-NEXT: $x10 = COPY [[SEXT_INREG]](s64)
; RV64ZBB-NEXT: PseudoRET implicit $x10
%1:_(s64) = COPY $x10
%2:_(s64) = G_ASSERT_SEXT %1, 32
%0:_(s32) = G_TRUNC %2(s64)
%3:_(s32) = G_ABS %0
%4:_(s64) = G_SEXT %3(s32)
$x10 = COPY %4(s64)
PseudoRET implicit $x10
...
---
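# i64 is the native register width: RV64I lowers to (x + mask) ^ mask with
# mask = x >>s 63, while RV64ZBB emits smax(x, 0 - x) with no extension code.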
name: abs_i64
body: |
bb.0.entry:
; RV64I-LABEL: name: abs_i64
; RV64I: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
; RV64I-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 63
; RV64I-NEXT: [[ASHR:%[0-9]+]]:_(s64) = G_ASHR [[COPY]], [[C]](s64)
; RV64I-NEXT: [[ADD:%[0-9]+]]:_(s64) = G_ADD [[COPY]], [[ASHR]]
; RV64I-NEXT: [[XOR:%[0-9]+]]:_(s64) = G_XOR [[ADD]], [[ASHR]]
; RV64I-NEXT: $x10 = COPY [[XOR]](s64)
; RV64I-NEXT: PseudoRET implicit $x10
;
; RV64ZBB-LABEL: name: abs_i64
; RV64ZBB: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
; RV64ZBB-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 0
; RV64ZBB-NEXT: [[SUB:%[0-9]+]]:_(s64) = G_SUB [[C]], [[COPY]]
; RV64ZBB-NEXT: [[SMAX:%[0-9]+]]:_(s64) = G_SMAX [[COPY]], [[SUB]]
; RV64ZBB-NEXT: $x10 = COPY [[SMAX]](s64)
; RV64ZBB-NEXT: PseudoRET implicit $x10
%0:_(s64) = COPY $x10
%1:_(s64) = G_ABS %0
$x10 = COPY %1(s64)
PseudoRET implicit $x10
...