# NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
# RUN: llc -mtriple=riscv64 -run-pass=legalizer %s -o - \
# RUN: | FileCheck %s --check-prefixes=RV64I
# RUN: llc -mtriple=riscv64 -mattr=+zbb -run-pass=legalizer %s -o - \
# RUN: | FileCheck %s --check-prefixes=RV64ZBB
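# Check legalization of G_UMAX for s8, s16, s32, and s64 operands on RV64,
# both with the base ISA (RV64I) and with the Zbb extension (RV64ZBB).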
---
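# Without Zbb, the s8 G_UMAX is widened: the comparison is done on the operands
# masked with 255, a select picks between the truncated inputs, and the result
# is masked again for the zero-extended return value. With Zbb, the masked
# operands feed a single s64 G_UMAX.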
name: umax_i8
body: |
  bb.0.entry:
    ; RV64I-LABEL: name: umax_i8
    ; RV64I: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
    ; RV64I-NEXT: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
    ; RV64I-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 255
    ; RV64I-NEXT: [[AND:%[0-9]+]]:_(s64) = G_AND [[COPY]], [[C]]
    ; RV64I-NEXT: [[AND1:%[0-9]+]]:_(s64) = G_AND [[COPY1]], [[C]]
    ; RV64I-NEXT: [[ICMP:%[0-9]+]]:_(s64) = G_ICMP intpred(ugt), [[AND]](s64), [[AND1]]
    ; RV64I-NEXT: [[TRUNC:%[0-9]+]]:_(s32) = G_TRUNC [[COPY]](s64)
    ; RV64I-NEXT: [[TRUNC1:%[0-9]+]]:_(s32) = G_TRUNC [[COPY1]](s64)
    ; RV64I-NEXT: [[SELECT:%[0-9]+]]:_(s32) = G_SELECT [[ICMP]](s64), [[TRUNC]], [[TRUNC1]]
    ; RV64I-NEXT: [[ANYEXT:%[0-9]+]]:_(s64) = G_ANYEXT [[SELECT]](s32)
    ; RV64I-NEXT: [[AND2:%[0-9]+]]:_(s64) = G_AND [[ANYEXT]], [[C]]
    ; RV64I-NEXT: $x10 = COPY [[AND2]](s64)
    ; RV64I-NEXT: PseudoRET implicit $x10
    ;
    ; RV64ZBB-LABEL: name: umax_i8
    ; RV64ZBB: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
    ; RV64ZBB-NEXT: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
    ; RV64ZBB-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 255
    ; RV64ZBB-NEXT: [[AND:%[0-9]+]]:_(s64) = G_AND [[COPY]], [[C]]
    ; RV64ZBB-NEXT: [[AND1:%[0-9]+]]:_(s64) = G_AND [[COPY1]], [[C]]
    ; RV64ZBB-NEXT: [[UMAX:%[0-9]+]]:_(s64) = G_UMAX [[AND]], [[AND1]]
    ; RV64ZBB-NEXT: $x10 = COPY [[UMAX]](s64)
    ; RV64ZBB-NEXT: PseudoRET implicit $x10
    %0:_(s64) = COPY $x10
    %1:_(s64) = COPY $x11
    %2:_(s8) = G_TRUNC %0(s64)
    %3:_(s8) = G_TRUNC %1(s64)
    %4:_(s8) = G_UMAX %2, %3
    %5:_(s64) = G_ZEXT %4(s8)
    $x10 = COPY %5(s64)
    PseudoRET implicit $x10
...
---
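# Same widening as the s8 case, but the s16 operands are masked with 65535.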
name: umax_i16
body: |
  bb.0.entry:
    ; RV64I-LABEL: name: umax_i16
    ; RV64I: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
    ; RV64I-NEXT: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
    ; RV64I-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 65535
    ; RV64I-NEXT: [[AND:%[0-9]+]]:_(s64) = G_AND [[COPY]], [[C]]
    ; RV64I-NEXT: [[AND1:%[0-9]+]]:_(s64) = G_AND [[COPY1]], [[C]]
    ; RV64I-NEXT: [[ICMP:%[0-9]+]]:_(s64) = G_ICMP intpred(ugt), [[AND]](s64), [[AND1]]
    ; RV64I-NEXT: [[TRUNC:%[0-9]+]]:_(s32) = G_TRUNC [[COPY]](s64)
    ; RV64I-NEXT: [[TRUNC1:%[0-9]+]]:_(s32) = G_TRUNC [[COPY1]](s64)
    ; RV64I-NEXT: [[SELECT:%[0-9]+]]:_(s32) = G_SELECT [[ICMP]](s64), [[TRUNC]], [[TRUNC1]]
    ; RV64I-NEXT: [[ANYEXT:%[0-9]+]]:_(s64) = G_ANYEXT [[SELECT]](s32)
    ; RV64I-NEXT: [[AND2:%[0-9]+]]:_(s64) = G_AND [[ANYEXT]], [[C]]
    ; RV64I-NEXT: $x10 = COPY [[AND2]](s64)
    ; RV64I-NEXT: PseudoRET implicit $x10
    ;
    ; RV64ZBB-LABEL: name: umax_i16
    ; RV64ZBB: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
    ; RV64ZBB-NEXT: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
    ; RV64ZBB-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 65535
    ; RV64ZBB-NEXT: [[AND:%[0-9]+]]:_(s64) = G_AND [[COPY]], [[C]]
    ; RV64ZBB-NEXT: [[AND1:%[0-9]+]]:_(s64) = G_AND [[COPY1]], [[C]]
    ; RV64ZBB-NEXT: [[UMAX:%[0-9]+]]:_(s64) = G_UMAX [[AND]], [[AND1]]
    ; RV64ZBB-NEXT: $x10 = COPY [[UMAX]](s64)
    ; RV64ZBB-NEXT: PseudoRET implicit $x10
    %0:_(s64) = COPY $x10
    %1:_(s64) = COPY $x11
    %2:_(s16) = G_TRUNC %0(s64)
    %3:_(s16) = G_TRUNC %1(s64)
    %4:_(s16) = G_UMAX %2, %3
    %5:_(s64) = G_ZEXT %4(s16)
    $x10 = COPY %5(s64)
    PseudoRET implicit $x10
...
---
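# For s32, the operands are masked with 0xffffffff for the unsigned compare;
# without Zbb the select operates on the s32 truncated values and the result is
# zero-extended, while Zbb again emits a single s64 G_UMAX on the masked inputs.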
name: umax_i32
body: |
  bb.0.entry:
    ; RV64I-LABEL: name: umax_i32
    ; RV64I: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
    ; RV64I-NEXT: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
    ; RV64I-NEXT: [[TRUNC:%[0-9]+]]:_(s32) = G_TRUNC [[COPY]](s64)
    ; RV64I-NEXT: [[TRUNC1:%[0-9]+]]:_(s32) = G_TRUNC [[COPY1]](s64)
    ; RV64I-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 4294967295
    ; RV64I-NEXT: [[AND:%[0-9]+]]:_(s64) = G_AND [[COPY]], [[C]]
    ; RV64I-NEXT: [[AND1:%[0-9]+]]:_(s64) = G_AND [[COPY1]], [[C]]
    ; RV64I-NEXT: [[ICMP:%[0-9]+]]:_(s64) = G_ICMP intpred(ugt), [[AND]](s64), [[AND1]]
    ; RV64I-NEXT: [[SELECT:%[0-9]+]]:_(s32) = G_SELECT [[ICMP]](s64), [[TRUNC]], [[TRUNC1]]
    ; RV64I-NEXT: [[ZEXT:%[0-9]+]]:_(s64) = G_ZEXT [[SELECT]](s32)
    ; RV64I-NEXT: $x10 = COPY [[ZEXT]](s64)
    ; RV64I-NEXT: PseudoRET implicit $x10
    ;
    ; RV64ZBB-LABEL: name: umax_i32
    ; RV64ZBB: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
    ; RV64ZBB-NEXT: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
    ; RV64ZBB-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 4294967295
    ; RV64ZBB-NEXT: [[AND:%[0-9]+]]:_(s64) = G_AND [[COPY]], [[C]]
    ; RV64ZBB-NEXT: [[AND1:%[0-9]+]]:_(s64) = G_AND [[COPY1]], [[C]]
    ; RV64ZBB-NEXT: [[UMAX:%[0-9]+]]:_(s64) = G_UMAX [[AND]], [[AND1]]
    ; RV64ZBB-NEXT: $x10 = COPY [[UMAX]](s64)
    ; RV64ZBB-NEXT: PseudoRET implicit $x10
    %0:_(s64) = COPY $x10
    %1:_(s64) = COPY $x11
    %2:_(s32) = G_TRUNC %0(s64)
    %3:_(s32) = G_TRUNC %1(s64)
    %4:_(s32) = G_UMAX %2, %3
    %5:_(s64) = G_ZEXT %4(s32)
    $x10 = COPY %5(s64)
    PseudoRET implicit $x10
...
---
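# Without Zbb, the s64 G_UMAX is lowered to an unsigned compare plus select;
# with Zbb it remains a single G_UMAX.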
name: umax_i64
body: |
  bb.0.entry:
    ; RV64I-LABEL: name: umax_i64
    ; RV64I: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
    ; RV64I-NEXT: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
    ; RV64I-NEXT: [[ICMP:%[0-9]+]]:_(s64) = G_ICMP intpred(ugt), [[COPY]](s64), [[COPY1]]
    ; RV64I-NEXT: [[SELECT:%[0-9]+]]:_(s64) = G_SELECT [[ICMP]](s64), [[COPY]], [[COPY1]]
    ; RV64I-NEXT: $x10 = COPY [[SELECT]](s64)
    ; RV64I-NEXT: PseudoRET implicit $x10
    ;
    ; RV64ZBB-LABEL: name: umax_i64
    ; RV64ZBB: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
    ; RV64ZBB-NEXT: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
    ; RV64ZBB-NEXT: [[UMAX:%[0-9]+]]:_(s64) = G_UMAX [[COPY]], [[COPY1]]
    ; RV64ZBB-NEXT: $x10 = COPY [[UMAX]](s64)
    ; RV64ZBB-NEXT: PseudoRET implicit $x10
    %0:_(s64) = COPY $x10
    %1:_(s64) = COPY $x11
    %2:_(s64) = G_UMAX %0, %1
    $x10 = COPY %2(s64)
    PseudoRET implicit $x10
...