# NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
# RUN: llc -mtriple=riscv64 -run-pass=legalizer %s -o - \
# RUN: | FileCheck %s --check-prefix=RV64I
# RUN: llc -mtriple=riscv64 -mattr=+zbb -run-pass=legalizer %s -o - \
# RUN: | FileCheck %s --check-prefix=RV64ZBB_OR_RV64ZBKB
# RUN: llc -mtriple=riscv64 -mattr=+zbkb -run-pass=legalizer %s -o - \
# RUN: | FileCheck %s --check-prefix=RV64ZBB_OR_RV64ZBKB
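
# Legalization of G_BSWAP on RV64. Without Zbb/Zbkb, G_BSWAP is lowered to a
# shift/and/or byte-swap sequence. Both Zbb and Zbkb make G_BSWAP on s64 legal,
# which is why the two configurations share the RV64ZBB_OR_RV64ZBKB prefix.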
---
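# s16: the RV64I lowering is (x << 8 | x >> 8) masked to 16 bits; with
# Zbb/Zbkb a G_BSWAP of the full s64 is emitted, followed by a logical
# shift right by 48.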
name: bswap_i16
body: |
  bb.0:
    liveins: $x10

    ; RV64I-LABEL: name: bswap_i16
    ; RV64I: liveins: $x10
    ; RV64I-NEXT: {{ $}}
    ; RV64I-NEXT: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
    ; RV64I-NEXT: [[ASSERT_ZEXT:%[0-9]+]]:_(s64) = G_ASSERT_ZEXT [[COPY]], 16
    ; RV64I-NEXT: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 8
    ; RV64I-NEXT: [[TRUNC:%[0-9]+]]:_(s32) = G_TRUNC [[ASSERT_ZEXT]](s64)
    ; RV64I-NEXT: [[SHL:%[0-9]+]]:_(s32) = G_SHL [[TRUNC]], [[C]](s32)
    ; RV64I-NEXT: [[LSHR:%[0-9]+]]:_(s32) = G_LSHR [[TRUNC]], [[C]](s32)
    ; RV64I-NEXT: [[OR:%[0-9]+]]:_(s32) = G_OR [[LSHR]], [[SHL]]
    ; RV64I-NEXT: [[ANYEXT:%[0-9]+]]:_(s64) = G_ANYEXT [[OR]](s32)
    ; RV64I-NEXT: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 65535
    ; RV64I-NEXT: [[AND:%[0-9]+]]:_(s64) = G_AND [[ANYEXT]], [[C1]]
    ; RV64I-NEXT: $x10 = COPY [[AND]](s64)
    ; RV64I-NEXT: PseudoRET implicit $x10
    ;
    ; RV64ZBB_OR_RV64ZBKB-LABEL: name: bswap_i16
    ; RV64ZBB_OR_RV64ZBKB: liveins: $x10
    ; RV64ZBB_OR_RV64ZBKB-NEXT: {{ $}}
    ; RV64ZBB_OR_RV64ZBKB-NEXT: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
    ; RV64ZBB_OR_RV64ZBKB-NEXT: [[ASSERT_ZEXT:%[0-9]+]]:_(s64) = G_ASSERT_ZEXT [[COPY]], 16
    ; RV64ZBB_OR_RV64ZBKB-NEXT: [[BSWAP:%[0-9]+]]:_(s64) = G_BSWAP [[ASSERT_ZEXT]]
    ; RV64ZBB_OR_RV64ZBKB-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 48
    ; RV64ZBB_OR_RV64ZBKB-NEXT: [[LSHR:%[0-9]+]]:_(s64) = G_LSHR [[BSWAP]], [[C]](s64)
    ; RV64ZBB_OR_RV64ZBKB-NEXT: $x10 = COPY [[LSHR]](s64)
    ; RV64ZBB_OR_RV64ZBKB-NEXT: PseudoRET implicit $x10
    %0:_(s64) = COPY $x10
    %1:_(s64) = G_ASSERT_ZEXT %0, 16
    %2:_(s16) = G_TRUNC %1(s64)
    %3:_(s16) = G_BSWAP %2
    %4:_(s64) = G_ZEXT %3(s16)
    $x10 = COPY %4(s64)
    PseudoRET implicit $x10
...
---
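# s32: the RV64I lowering byte-swaps the truncated s32 value with shifts and
# masks and zero-extends the result; with Zbb/Zbkb a G_BSWAP of the full s64
# is emitted, followed by a logical shift right by 32.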
name: bswap_i32
body: |
  bb.0:
    liveins: $x10

    ; RV64I-LABEL: name: bswap_i32
    ; RV64I: liveins: $x10
    ; RV64I-NEXT: {{ $}}
    ; RV64I-NEXT: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
    ; RV64I-NEXT: [[ASSERT_ZEXT:%[0-9]+]]:_(s64) = G_ASSERT_ZEXT [[COPY]], 32
    ; RV64I-NEXT: [[TRUNC:%[0-9]+]]:_(s32) = G_TRUNC [[ASSERT_ZEXT]](s64)
    ; RV64I-NEXT: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 24
    ; RV64I-NEXT: [[SHL:%[0-9]+]]:_(s32) = G_SHL [[TRUNC]], [[C]](s32)
    ; RV64I-NEXT: [[LSHR:%[0-9]+]]:_(s32) = G_LSHR [[TRUNC]], [[C]](s32)
    ; RV64I-NEXT: [[OR:%[0-9]+]]:_(s32) = G_OR [[LSHR]], [[SHL]]
    ; RV64I-NEXT: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 65280
    ; RV64I-NEXT: [[C2:%[0-9]+]]:_(s32) = G_CONSTANT i32 8
    ; RV64I-NEXT: [[AND:%[0-9]+]]:_(s32) = G_AND [[TRUNC]], [[C1]]
    ; RV64I-NEXT: [[SHL1:%[0-9]+]]:_(s32) = G_SHL [[AND]], [[C2]](s32)
    ; RV64I-NEXT: [[OR1:%[0-9]+]]:_(s32) = G_OR [[OR]], [[SHL1]]
    ; RV64I-NEXT: [[LSHR1:%[0-9]+]]:_(s32) = G_LSHR [[TRUNC]], [[C2]](s32)
    ; RV64I-NEXT: [[AND1:%[0-9]+]]:_(s32) = G_AND [[LSHR1]], [[C1]]
    ; RV64I-NEXT: [[OR2:%[0-9]+]]:_(s32) = G_OR [[OR1]], [[AND1]]
    ; RV64I-NEXT: [[ZEXT:%[0-9]+]]:_(s64) = G_ZEXT [[OR2]](s32)
    ; RV64I-NEXT: $x10 = COPY [[ZEXT]](s64)
    ; RV64I-NEXT: PseudoRET implicit $x10
    ;
    ; RV64ZBB_OR_RV64ZBKB-LABEL: name: bswap_i32
    ; RV64ZBB_OR_RV64ZBKB: liveins: $x10
    ; RV64ZBB_OR_RV64ZBKB-NEXT: {{ $}}
    ; RV64ZBB_OR_RV64ZBKB-NEXT: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
    ; RV64ZBB_OR_RV64ZBKB-NEXT: [[ASSERT_ZEXT:%[0-9]+]]:_(s64) = G_ASSERT_ZEXT [[COPY]], 32
    ; RV64ZBB_OR_RV64ZBKB-NEXT: [[BSWAP:%[0-9]+]]:_(s64) = G_BSWAP [[ASSERT_ZEXT]]
    ; RV64ZBB_OR_RV64ZBKB-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 32
    ; RV64ZBB_OR_RV64ZBKB-NEXT: [[LSHR:%[0-9]+]]:_(s64) = G_LSHR [[BSWAP]], [[C]](s64)
    ; RV64ZBB_OR_RV64ZBKB-NEXT: $x10 = COPY [[LSHR]](s64)
    ; RV64ZBB_OR_RV64ZBKB-NEXT: PseudoRET implicit $x10
    %0:_(s64) = COPY $x10
    %1:_(s64) = G_ASSERT_ZEXT %0, 32
    %2:_(s32) = G_TRUNC %1(s64)
    %3:_(s32) = G_BSWAP %2
    %4:_(s64) = G_ZEXT %3(s32)
    $x10 = COPY %4(s64)
    PseudoRET implicit $x10
...
---
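# s64: the RV64I lowering is the full eight-byte shift/and/or swap; with
# Zbb/Zbkb the G_BSWAP is already legal and is left unchanged.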
name: bswap_i64
body: |
  bb.0:
    liveins: $x10

    ; RV64I-LABEL: name: bswap_i64
    ; RV64I: liveins: $x10
    ; RV64I-NEXT: {{ $}}
    ; RV64I-NEXT: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
    ; RV64I-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 56
    ; RV64I-NEXT: [[SHL:%[0-9]+]]:_(s64) = G_SHL [[COPY]], [[C]](s64)
    ; RV64I-NEXT: [[LSHR:%[0-9]+]]:_(s64) = G_LSHR [[COPY]], [[C]](s64)
    ; RV64I-NEXT: [[OR:%[0-9]+]]:_(s64) = G_OR [[LSHR]], [[SHL]]
    ; RV64I-NEXT: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 65280
    ; RV64I-NEXT: [[C2:%[0-9]+]]:_(s64) = G_CONSTANT i64 40
    ; RV64I-NEXT: [[AND:%[0-9]+]]:_(s64) = G_AND [[COPY]], [[C1]]
    ; RV64I-NEXT: [[SHL1:%[0-9]+]]:_(s64) = G_SHL [[AND]], [[C2]](s64)
    ; RV64I-NEXT: [[OR1:%[0-9]+]]:_(s64) = G_OR [[OR]], [[SHL1]]
    ; RV64I-NEXT: [[LSHR1:%[0-9]+]]:_(s64) = G_LSHR [[COPY]], [[C2]](s64)
    ; RV64I-NEXT: [[AND1:%[0-9]+]]:_(s64) = G_AND [[LSHR1]], [[C1]]
    ; RV64I-NEXT: [[OR2:%[0-9]+]]:_(s64) = G_OR [[OR1]], [[AND1]]
    ; RV64I-NEXT: [[C3:%[0-9]+]]:_(s64) = G_CONSTANT i64 16711680
    ; RV64I-NEXT: [[C4:%[0-9]+]]:_(s64) = G_CONSTANT i64 24
    ; RV64I-NEXT: [[AND2:%[0-9]+]]:_(s64) = G_AND [[COPY]], [[C3]]
    ; RV64I-NEXT: [[SHL2:%[0-9]+]]:_(s64) = G_SHL [[AND2]], [[C4]](s64)
    ; RV64I-NEXT: [[OR3:%[0-9]+]]:_(s64) = G_OR [[OR2]], [[SHL2]]
    ; RV64I-NEXT: [[LSHR2:%[0-9]+]]:_(s64) = G_LSHR [[COPY]], [[C4]](s64)
    ; RV64I-NEXT: [[AND3:%[0-9]+]]:_(s64) = G_AND [[LSHR2]], [[C3]]
    ; RV64I-NEXT: [[OR4:%[0-9]+]]:_(s64) = G_OR [[OR3]], [[AND3]]
    ; RV64I-NEXT: [[C5:%[0-9]+]]:_(s64) = G_CONSTANT i64 -16777216
    ; RV64I-NEXT: [[C6:%[0-9]+]]:_(s64) = G_CONSTANT i64 8
    ; RV64I-NEXT: [[AND4:%[0-9]+]]:_(s64) = G_AND [[COPY]], [[C5]]
    ; RV64I-NEXT: [[SHL3:%[0-9]+]]:_(s64) = G_SHL [[AND4]], [[C6]](s64)
    ; RV64I-NEXT: [[OR5:%[0-9]+]]:_(s64) = G_OR [[OR4]], [[SHL3]]
    ; RV64I-NEXT: [[LSHR3:%[0-9]+]]:_(s64) = G_LSHR [[COPY]], [[C6]](s64)
    ; RV64I-NEXT: [[AND5:%[0-9]+]]:_(s64) = G_AND [[LSHR3]], [[C5]]
    ; RV64I-NEXT: [[OR6:%[0-9]+]]:_(s64) = G_OR [[OR5]], [[AND5]]
    ; RV64I-NEXT: $x10 = COPY [[OR6]](s64)
    ; RV64I-NEXT: PseudoRET implicit $x10
    ;
    ; RV64ZBB_OR_RV64ZBKB-LABEL: name: bswap_i64
    ; RV64ZBB_OR_RV64ZBKB: liveins: $x10
    ; RV64ZBB_OR_RV64ZBKB-NEXT: {{ $}}
    ; RV64ZBB_OR_RV64ZBKB-NEXT: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
    ; RV64ZBB_OR_RV64ZBKB-NEXT: [[BSWAP:%[0-9]+]]:_(s64) = G_BSWAP [[COPY]]
    ; RV64ZBB_OR_RV64ZBKB-NEXT: $x10 = COPY [[BSWAP]](s64)
    ; RV64ZBB_OR_RV64ZBKB-NEXT: PseudoRET implicit $x10
    %0:_(s64) = COPY $x10
    %1:_(s64) = G_BSWAP %0
    $x10 = COPY %1(s64)
    PseudoRET implicit $x10
...