llvm/test/CodeGen/AArch64/GlobalISel/legalize-cttz.mir

# NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
# RUN: llc -mtriple=aarch64 -global-isel -verify-machineinstrs -run-pass=legalizer %s -o - | FileCheck %s
# RUN: llc -mtriple=aarch64 -global-isel -verify-machineinstrs -mattr=+cssc -run-pass=legalizer %s -o - | FileCheck %s --check-prefix=CHECK-CSSC
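#
# Test legalization of G_CTTZ for scalar, vector, and odd-sized types, both
# without and with +cssc. FEAT_CSSC provides a native CTZ instruction, so with
# it G_CTTZ stays as-is instead of being lowered to G_CTLZ of G_BITREVERSE.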

...
---
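# s8 is widened to s32 with bit 8 (0x100) set in the padding so the result
# stays in range. Without CSSC the widened G_CTTZ is then lowered to
# G_CTLZ(G_BITREVERSE); with CSSC it remains a G_CTTZ.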
name:            s8
tracksRegLiveness: true
body:             |
  bb.0:
    liveins: $w0
    ; CHECK-LABEL: name: s8
    ; CHECK: liveins: $w0
    ; CHECK-NEXT: {{  $}}
    ; CHECK-NEXT: [[DEF:%[0-9]+]]:_(s32) = G_IMPLICIT_DEF
    ; CHECK-NEXT: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 256
    ; CHECK-NEXT: [[OR:%[0-9]+]]:_(s32) = G_OR [[DEF]], [[C]]
    ; CHECK-NEXT: [[BITREVERSE:%[0-9]+]]:_(s32) = G_BITREVERSE [[OR]]
    ; CHECK-NEXT: [[CTLZ:%[0-9]+]]:_(s32) = G_CTLZ [[BITREVERSE]](s32)
    ; CHECK-NEXT: $w0 = COPY [[CTLZ]](s32)
    ; CHECK-NEXT: RET_ReallyLR implicit $w0
    ;
    ; CHECK-CSSC-LABEL: name: s8
    ; CHECK-CSSC: liveins: $w0
    ; CHECK-CSSC-NEXT: {{  $}}
    ; CHECK-CSSC-NEXT: [[DEF:%[0-9]+]]:_(s32) = G_IMPLICIT_DEF
    ; CHECK-CSSC-NEXT: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 256
    ; CHECK-CSSC-NEXT: [[OR:%[0-9]+]]:_(s32) = G_OR [[DEF]], [[C]]
    ; CHECK-CSSC-NEXT: [[CTTZ:%[0-9]+]]:_(s32) = G_CTTZ [[OR]](s32)
    ; CHECK-CSSC-NEXT: $w0 = COPY [[CTTZ]](s32)
    ; CHECK-CSSC-NEXT: RET_ReallyLR implicit $w0
    %val:_(s8) = G_IMPLICIT_DEF
    %cttz:_(s8) = G_CTTZ %val(s8)
    %ext:_(s32) = G_ANYEXT %cttz(s8)
    $w0 = COPY %ext(s32)
    RET_ReallyLR implicit $w0

...
---
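# Same widening as s8, but with bit 16 (0x10000) set in the padding.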
name:            s16
tracksRegLiveness: true
body:             |
  bb.0:
    liveins: $w0
    ; CHECK-LABEL: name: s16
    ; CHECK: liveins: $w0
    ; CHECK-NEXT: {{  $}}
    ; CHECK-NEXT: [[DEF:%[0-9]+]]:_(s32) = G_IMPLICIT_DEF
    ; CHECK-NEXT: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 65536
    ; CHECK-NEXT: [[OR:%[0-9]+]]:_(s32) = G_OR [[DEF]], [[C]]
    ; CHECK-NEXT: [[BITREVERSE:%[0-9]+]]:_(s32) = G_BITREVERSE [[OR]]
    ; CHECK-NEXT: [[CTLZ:%[0-9]+]]:_(s32) = G_CTLZ [[BITREVERSE]](s32)
    ; CHECK-NEXT: $w0 = COPY [[CTLZ]](s32)
    ; CHECK-NEXT: RET_ReallyLR implicit $w0
    ;
    ; CHECK-CSSC-LABEL: name: s16
    ; CHECK-CSSC: liveins: $w0
    ; CHECK-CSSC-NEXT: {{  $}}
    ; CHECK-CSSC-NEXT: [[DEF:%[0-9]+]]:_(s32) = G_IMPLICIT_DEF
    ; CHECK-CSSC-NEXT: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 65536
    ; CHECK-CSSC-NEXT: [[OR:%[0-9]+]]:_(s32) = G_OR [[DEF]], [[C]]
    ; CHECK-CSSC-NEXT: [[CTTZ:%[0-9]+]]:_(s32) = G_CTTZ [[OR]](s32)
    ; CHECK-CSSC-NEXT: $w0 = COPY [[CTTZ]](s32)
    ; CHECK-CSSC-NEXT: RET_ReallyLR implicit $w0
    %val:_(s16) = G_IMPLICIT_DEF
    %cttz:_(s16) = G_CTTZ %val(s16)
    %ext:_(s32) = G_ANYEXT %cttz(s16)
    $w0 = COPY %ext(s32)
    RET_ReallyLR implicit $w0

...
---
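# s32 is already a legal width: without CSSC the G_CTTZ is lowered to
# G_CTLZ(G_BITREVERSE); with CSSC it is left unchanged.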
name:            s32
alignment:       4
tracksRegLiveness: true
body:             |
  bb.0:
    liveins: $w0

    ; CHECK-LABEL: name: s32
    ; CHECK: liveins: $w0
    ; CHECK-NEXT: {{  $}}
    ; CHECK-NEXT: %val:_(s32) = COPY $w0
    ; CHECK-NEXT: [[BITREVERSE:%[0-9]+]]:_(s32) = G_BITREVERSE %val
    ; CHECK-NEXT: [[CTLZ:%[0-9]+]]:_(s32) = G_CTLZ [[BITREVERSE]](s32)
    ; CHECK-NEXT: $w0 = COPY [[CTLZ]](s32)
    ; CHECK-NEXT: RET_ReallyLR implicit $w0
    ;
    ; CHECK-CSSC-LABEL: name: s32
    ; CHECK-CSSC: liveins: $w0
    ; CHECK-CSSC-NEXT: {{  $}}
    ; CHECK-CSSC-NEXT: %val:_(s32) = COPY $w0
    ; CHECK-CSSC-NEXT: [[CTTZ:%[0-9]+]]:_(s32) = G_CTTZ %val(s32)
    ; CHECK-CSSC-NEXT: $w0 = COPY [[CTTZ]](s32)
    ; CHECK-CSSC-NEXT: RET_ReallyLR implicit $w0
    %val:_(s32) = COPY $w0
    %1:_(s32) = G_CTTZ %val(s32)
    $w0 = COPY %1(s32)
    RET_ReallyLR implicit $w0

...
---
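# Same as s32, but at 64 bits.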
name:            s64
alignment:       4
tracksRegLiveness: true
body:             |
  bb.0:
    liveins: $x0

    ; CHECK-LABEL: name: s64
    ; CHECK: liveins: $x0
    ; CHECK-NEXT: {{  $}}
    ; CHECK-NEXT: %val:_(s64) = COPY $x0
    ; CHECK-NEXT: [[BITREVERSE:%[0-9]+]]:_(s64) = G_BITREVERSE %val
    ; CHECK-NEXT: [[CTLZ:%[0-9]+]]:_(s64) = G_CTLZ [[BITREVERSE]](s64)
    ; CHECK-NEXT: $x0 = COPY [[CTLZ]](s64)
    ; CHECK-NEXT: RET_ReallyLR implicit $x0
    ;
    ; CHECK-CSSC-LABEL: name: s64
    ; CHECK-CSSC: liveins: $x0
    ; CHECK-CSSC-NEXT: {{  $}}
    ; CHECK-CSSC-NEXT: %val:_(s64) = COPY $x0
    ; CHECK-CSSC-NEXT: [[CTTZ:%[0-9]+]]:_(s64) = G_CTTZ %val(s64)
    ; CHECK-CSSC-NEXT: $x0 = COPY [[CTTZ]](s64)
    ; CHECK-CSSC-NEXT: RET_ReallyLR implicit $x0
    %val:_(s64) = COPY $x0
    %1:_(s64) = G_CTTZ %val(s64)
    $x0 = COPY %1(s64)
    RET_ReallyLR implicit $x0
...
---
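# Vector G_CTTZ has no CSSC form, so both configurations lower it to
# G_CTPOP(~x & (x - 1)), with the popcount done on <16 x s8> and reduced back
# to <4 x s32> via two G_UADDLP steps.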
name:            v4s32
alignment:       4
tracksRegLiveness: true
body:             |
  bb.0:
    liveins: $q0

    ; CHECK-LABEL: name: v4s32
    ; CHECK: liveins: $q0
    ; CHECK-NEXT: {{  $}}
    ; CHECK-NEXT: %val:_(<4 x s32>) = COPY $q0
    ; CHECK-NEXT: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 -1
    ; CHECK-NEXT: [[BUILD_VECTOR:%[0-9]+]]:_(<4 x s32>) = G_BUILD_VECTOR [[C]](s32), [[C]](s32), [[C]](s32), [[C]](s32)
    ; CHECK-NEXT: [[XOR:%[0-9]+]]:_(<4 x s32>) = G_XOR %val, [[BUILD_VECTOR]]
    ; CHECK-NEXT: [[ADD:%[0-9]+]]:_(<4 x s32>) = G_ADD %val, [[BUILD_VECTOR]]
    ; CHECK-NEXT: [[AND:%[0-9]+]]:_(<4 x s32>) = G_AND [[XOR]], [[ADD]]
    ; CHECK-NEXT: [[BITCAST:%[0-9]+]]:_(<16 x s8>) = G_BITCAST [[AND]](<4 x s32>)
    ; CHECK-NEXT: [[CTPOP:%[0-9]+]]:_(<16 x s8>) = G_CTPOP [[BITCAST]](<16 x s8>)
    ; CHECK-NEXT: [[UADDLP:%[0-9]+]]:_(<8 x s16>) = G_UADDLP [[CTPOP]]
    ; CHECK-NEXT: [[UADDLP1:%[0-9]+]]:_(<4 x s32>) = G_UADDLP [[UADDLP]]
    ; CHECK-NEXT: $q0 = COPY [[UADDLP1]](<4 x s32>)
    ; CHECK-NEXT: RET_ReallyLR implicit $q0
    ;
    ; CHECK-CSSC-LABEL: name: v4s32
    ; CHECK-CSSC: liveins: $q0
    ; CHECK-CSSC-NEXT: {{  $}}
    ; CHECK-CSSC-NEXT: %val:_(<4 x s32>) = COPY $q0
    ; CHECK-CSSC-NEXT: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 -1
    ; CHECK-CSSC-NEXT: [[BUILD_VECTOR:%[0-9]+]]:_(<4 x s32>) = G_BUILD_VECTOR [[C]](s32), [[C]](s32), [[C]](s32), [[C]](s32)
    ; CHECK-CSSC-NEXT: [[XOR:%[0-9]+]]:_(<4 x s32>) = G_XOR %val, [[BUILD_VECTOR]]
    ; CHECK-CSSC-NEXT: [[ADD:%[0-9]+]]:_(<4 x s32>) = G_ADD %val, [[BUILD_VECTOR]]
    ; CHECK-CSSC-NEXT: [[AND:%[0-9]+]]:_(<4 x s32>) = G_AND [[XOR]], [[ADD]]
    ; CHECK-CSSC-NEXT: [[BITCAST:%[0-9]+]]:_(<16 x s8>) = G_BITCAST [[AND]](<4 x s32>)
    ; CHECK-CSSC-NEXT: [[CTPOP:%[0-9]+]]:_(<16 x s8>) = G_CTPOP [[BITCAST]](<16 x s8>)
    ; CHECK-CSSC-NEXT: [[UADDLP:%[0-9]+]]:_(<8 x s16>) = G_UADDLP [[CTPOP]]
    ; CHECK-CSSC-NEXT: [[UADDLP1:%[0-9]+]]:_(<4 x s32>) = G_UADDLP [[UADDLP]]
    ; CHECK-CSSC-NEXT: $q0 = COPY [[UADDLP1]](<4 x s32>)
    ; CHECK-CSSC-NEXT: RET_ReallyLR implicit $q0
    %val:_(<4 x s32>) = COPY $q0
    %1:_(<4 x s32>) = G_CTTZ %val(<4 x s32>)
    $q0 = COPY %1(<4 x s32>)
    RET_ReallyLR implicit $q0

...
---
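# s35 is widened to s64 with bit 35 (34359738368 = 0x800000000) set in the
# padding, then handled like the other scalar cases.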
name:            s35
alignment:       4
tracksRegLiveness: true
body:             |
  bb.1:
    liveins: $x0

    ; CHECK-LABEL: name: s35
    ; CHECK: liveins: $x0
    ; CHECK-NEXT: {{  $}}
    ; CHECK-NEXT: [[COPY:%[0-9]+]]:_(s64) = COPY $x0
    ; CHECK-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 34359738368
    ; CHECK-NEXT: [[OR:%[0-9]+]]:_(s64) = G_OR [[COPY]], [[C]]
    ; CHECK-NEXT: [[BITREVERSE:%[0-9]+]]:_(s64) = G_BITREVERSE [[OR]]
    ; CHECK-NEXT: [[CTLZ:%[0-9]+]]:_(s64) = G_CTLZ [[BITREVERSE]](s64)
    ; CHECK-NEXT: $x0 = COPY [[CTLZ]](s64)
    ; CHECK-NEXT: RET_ReallyLR implicit $x0
    ;
    ; CHECK-CSSC-LABEL: name: s35
    ; CHECK-CSSC: liveins: $x0
    ; CHECK-CSSC-NEXT: {{  $}}
    ; CHECK-CSSC-NEXT: [[COPY:%[0-9]+]]:_(s64) = COPY $x0
    ; CHECK-CSSC-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 34359738368
    ; CHECK-CSSC-NEXT: [[OR:%[0-9]+]]:_(s64) = G_OR [[COPY]], [[C]]
    ; CHECK-CSSC-NEXT: [[CTTZ:%[0-9]+]]:_(s64) = G_CTTZ [[OR]](s64)
    ; CHECK-CSSC-NEXT: $x0 = COPY [[CTTZ]](s64)
    ; CHECK-CSSC-NEXT: RET_ReallyLR implicit $x0
    %1:_(s64) = COPY $x0
    %0:_(s35) = G_TRUNC %1(s64)
    %2:_(s35) = G_CTTZ %0(s35)
    %3:_(s64) = G_ANYEXT %2(s35)
    $x0 = COPY %3(s64)
    RET_ReallyLR implicit $x0
...
---
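# s65 is widened to s128 and split into two s64 halves: if the low half is
# zero, the result is 64 plus the trailing-zero count of the high half (which
# has bit 1 set for the padding above bit 64); otherwise it is the
# trailing-zero count of the low half.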
name:            s65
alignment:       4
tracksRegLiveness: true
body:             |
  bb.1:
    liveins: $x0, $x1

    ; CHECK-LABEL: name: s65
    ; CHECK: liveins: $x0, $x1
    ; CHECK-NEXT: {{  $}}
    ; CHECK-NEXT: [[COPY:%[0-9]+]]:_(s64) = COPY $x0
    ; CHECK-NEXT: [[COPY1:%[0-9]+]]:_(s64) = COPY $x1
    ; CHECK-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 0
    ; CHECK-NEXT: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 2
    ; CHECK-NEXT: [[OR:%[0-9]+]]:_(s64) = G_OR [[COPY]], [[C]]
    ; CHECK-NEXT: [[OR1:%[0-9]+]]:_(s64) = G_OR [[COPY1]], [[C1]]
    ; CHECK-NEXT: [[ICMP:%[0-9]+]]:_(s32) = G_ICMP intpred(eq), [[OR]](s64), [[C]]
    ; CHECK-NEXT: [[BITREVERSE:%[0-9]+]]:_(s64) = G_BITREVERSE [[OR1]]
    ; CHECK-NEXT: [[CTLZ:%[0-9]+]]:_(s64) = G_CTLZ [[BITREVERSE]](s64)
    ; CHECK-NEXT: [[C2:%[0-9]+]]:_(s64) = G_CONSTANT i64 64
    ; CHECK-NEXT: [[UADDO:%[0-9]+]]:_(s64), [[UADDO1:%[0-9]+]]:_(s32) = G_UADDO [[CTLZ]], [[C2]]
    ; CHECK-NEXT: [[UADDE:%[0-9]+]]:_(s64), [[UADDE1:%[0-9]+]]:_(s32) = G_UADDE [[C]], [[C]], [[UADDO1]]
    ; CHECK-NEXT: [[BITREVERSE1:%[0-9]+]]:_(s64) = G_BITREVERSE [[OR]]
    ; CHECK-NEXT: [[CTLZ1:%[0-9]+]]:_(s64) = G_CTLZ [[BITREVERSE1]](s64)
    ; CHECK-NEXT: [[SELECT:%[0-9]+]]:_(s64) = G_SELECT [[ICMP]](s32), [[UADDO]], [[CTLZ1]]
    ; CHECK-NEXT: [[SELECT1:%[0-9]+]]:_(s64) = G_SELECT [[ICMP]](s32), [[UADDE]], [[C]]
    ; CHECK-NEXT: $x0 = COPY [[SELECT]](s64)
    ; CHECK-NEXT: $x1 = COPY [[SELECT1]](s64)
    ; CHECK-NEXT: RET_ReallyLR implicit $x0, implicit $x1
    ;
    ; CHECK-CSSC-LABEL: name: s65
    ; CHECK-CSSC: liveins: $x0, $x1
    ; CHECK-CSSC-NEXT: {{  $}}
    ; CHECK-CSSC-NEXT: [[COPY:%[0-9]+]]:_(s64) = COPY $x0
    ; CHECK-CSSC-NEXT: [[COPY1:%[0-9]+]]:_(s64) = COPY $x1
    ; CHECK-CSSC-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 0
    ; CHECK-CSSC-NEXT: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 2
    ; CHECK-CSSC-NEXT: [[OR:%[0-9]+]]:_(s64) = G_OR [[COPY]], [[C]]
    ; CHECK-CSSC-NEXT: [[OR1:%[0-9]+]]:_(s64) = G_OR [[COPY1]], [[C1]]
    ; CHECK-CSSC-NEXT: [[ICMP:%[0-9]+]]:_(s32) = G_ICMP intpred(eq), [[OR]](s64), [[C]]
    ; CHECK-CSSC-NEXT: [[CTTZ:%[0-9]+]]:_(s64) = G_CTTZ [[OR1]](s64)
    ; CHECK-CSSC-NEXT: [[C2:%[0-9]+]]:_(s64) = G_CONSTANT i64 64
    ; CHECK-CSSC-NEXT: [[UADDO:%[0-9]+]]:_(s64), [[UADDO1:%[0-9]+]]:_(s32) = G_UADDO [[CTTZ]], [[C2]]
    ; CHECK-CSSC-NEXT: [[UADDE:%[0-9]+]]:_(s64), [[UADDE1:%[0-9]+]]:_(s32) = G_UADDE [[C]], [[C]], [[UADDO1]]
    ; CHECK-CSSC-NEXT: [[CTTZ1:%[0-9]+]]:_(s64) = G_CTTZ [[OR]](s64)
    ; CHECK-CSSC-NEXT: [[SELECT:%[0-9]+]]:_(s64) = G_SELECT [[ICMP]](s32), [[UADDO]], [[CTTZ1]]
    ; CHECK-CSSC-NEXT: [[SELECT1:%[0-9]+]]:_(s64) = G_SELECT [[ICMP]](s32), [[UADDE]], [[C]]
    ; CHECK-CSSC-NEXT: $x0 = COPY [[SELECT]](s64)
    ; CHECK-CSSC-NEXT: $x1 = COPY [[SELECT1]](s64)
    ; CHECK-CSSC-NEXT: RET_ReallyLR implicit $x0, implicit $x1
    %1:_(s64) = COPY $x0
    %2:_(s64) = COPY $x1
    %3:_(s128) = G_MERGE_VALUES %1(s64), %2(s64)
    %0:_(s65) = G_TRUNC %3(s128)
    %4:_(s65) = G_CTTZ %0(s65)
    %7:_(s128) = G_ANYEXT %4(s65)
    %5:_(s64), %6:_(s64) = G_UNMERGE_VALUES %7(s128)
    $x0 = COPY %5(s64)
    $x1 = COPY %6(s64)
    RET_ReallyLR implicit $x0, implicit $x1
...