llvm/test/CodeGen/AArch64/GlobalISel/prelegalizercombiner-invert-cmp.mir

# NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
# RUN: llc -mtriple aarch64-apple-ios -run-pass=aarch64-prelegalizer-combiner --aarch64prelegalizercombiner-only-enable-rule="not_cmp_fold" %s -o - -verify-machineinstrs | FileCheck %s

# Need asserts for the only-enable-rule to work.
# REQUIRES: asserts

# Check that we fold an inverted compare result into just inverting the
# condition code.
---
name: icmp
tracksRegLiveness: true
body:             |
  bb.1:
    liveins: $x0

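    ; The compare feeding a xor-with-true is inverted: sgt becomes sle and the
    ; G_XOR is removed.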
    ; CHECK-LABEL: name: icmp
    ; CHECK: liveins: $x0
    ; CHECK: [[COPY:%[0-9]+]]:_(s64) = COPY $x0
    ; CHECK: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 1
    ; CHECK: [[ICMP:%[0-9]+]]:_(s1) = G_ICMP intpred(sle), [[COPY]](s64), [[C]]
    ; CHECK: [[ANYEXT:%[0-9]+]]:_(s32) = G_ANYEXT [[ICMP]](s1)
    ; CHECK: $w0 = COPY [[ANYEXT]](s32)
    ; CHECK: RET_ReallyLR implicit $w0
    %0:_(s64) = COPY $x0
    %1:_(s64) = G_CONSTANT i64 1
    %2:_(s1) = G_CONSTANT i1 1
    %3:_(s1) = G_ICMP intpred(sgt), %0(s64), %1
    %4:_(s1) = G_XOR %3, %2
    %5:_(s32) = G_ANYEXT %4
    $w0 = COPY %5(s32)
    RET_ReallyLR implicit $w0
...
---
name: fcmp
tracksRegLiveness: true
body:             |
  bb.1:
    liveins: $x0

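    ; Same fold for G_FCMP: ogt is inverted to ule and the G_XOR is removed.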
    ; CHECK-LABEL: name: fcmp
    ; CHECK: liveins: $x0
    ; CHECK: [[COPY:%[0-9]+]]:_(s64) = COPY $x0
    ; CHECK: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 1
    ; CHECK: [[FCMP:%[0-9]+]]:_(s1) = G_FCMP floatpred(ule), [[COPY]](s64), [[C]]
    ; CHECK: [[ANYEXT:%[0-9]+]]:_(s32) = G_ANYEXT [[FCMP]](s1)
    ; CHECK: $w0 = COPY [[ANYEXT]](s32)
    ; CHECK: RET_ReallyLR implicit $w0
    %0:_(s64) = COPY $x0
    %1:_(s64) = G_CONSTANT i64 1
    %2:_(s1) = G_CONSTANT i1 1
    %3:_(s1) = G_FCMP floatpred(ogt), %0(s64), %1
    %4:_(s1) = G_XOR %3, %2
    %5:_(s32) = G_ANYEXT %4
    $w0 = COPY %5(s32)
    RET_ReallyLR implicit $w0
...
---
name: icmp_not_xor_with_1
tracksRegLiveness: true
body:             |
  bb.1:
    liveins: $x0

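    ; Negative test: xor with 0 is not a logical not, so the compare must not
    ; be inverted.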
    ; CHECK-LABEL: name: icmp_not_xor_with_1
    ; CHECK: liveins: $x0
    ; CHECK: [[COPY:%[0-9]+]]:_(s64) = COPY $x0
    ; CHECK: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 1
    ; CHECK: [[C1:%[0-9]+]]:_(s1) = G_CONSTANT i1 false
    ; CHECK: [[ICMP:%[0-9]+]]:_(s1) = G_ICMP intpred(sgt), [[COPY]](s64), [[C]]
    ; CHECK: [[XOR:%[0-9]+]]:_(s1) = G_XOR [[ICMP]], [[C1]]
    ; CHECK: [[ANYEXT:%[0-9]+]]:_(s32) = G_ANYEXT [[XOR]](s1)
    ; CHECK: $w0 = COPY [[ANYEXT]](s32)
    ; CHECK: RET_ReallyLR implicit $w0
    %0:_(s64) = COPY $x0
    %1:_(s64) = G_CONSTANT i64 1
    %2:_(s1) = G_CONSTANT i1 0
    %3:_(s1) = G_ICMP intpred(sgt), %0(s64), %1
    %4:_(s1) = G_XOR %3, %2
    %5:_(s32) = G_ANYEXT %4
    $w0 = COPY %5(s32)
    RET_ReallyLR implicit $w0
...
---
name: icmp_not_xor_with_wrong_bool_contents
tracksRegLiveness: true
body:             |
  bb.1:
    liveins: $x0

    ; Even though bit 0 of the constant is 1, we require the upper bits to be
    ; zero to match AArch64's zero-or-one boolean contents, so the compare is
    ; not inverted here.
    ; CHECK-LABEL: name: icmp_not_xor_with_wrong_bool_contents
    ; CHECK: liveins: $x0
    ; CHECK: [[COPY:%[0-9]+]]:_(s64) = COPY $x0
    ; CHECK: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 1
    ; CHECK: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 7
    ; CHECK: [[ICMP:%[0-9]+]]:_(s32) = G_ICMP intpred(sgt), [[COPY]](s64), [[C]]
    ; CHECK: [[XOR:%[0-9]+]]:_(s32) = G_XOR [[ICMP]], [[C1]]
    ; CHECK: $w0 = COPY [[XOR]](s32)
    ; CHECK: RET_ReallyLR implicit $w0
    %0:_(s64) = COPY $x0
    %1:_(s64) = G_CONSTANT i64 1
    %2:_(s32) = G_CONSTANT i32 7
    %3:_(s32) = G_ICMP intpred(sgt), %0(s64), %1
    %4:_(s32) = G_XOR %3, %2
    $w0 = COPY %4(s32)
    RET_ReallyLR implicit $w0
...
---
name: icmp_multiple_use
tracksRegLiveness: true
body:             |
  bb.1:
    liveins: $x0

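    ; Negative test: the compare has a use besides the G_XOR, so it is not
    ; inverted.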
    ; CHECK-LABEL: name: icmp_multiple_use
    ; CHECK: liveins: $x0
    ; CHECK: [[COPY:%[0-9]+]]:_(s64) = COPY $x0
    ; CHECK: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 1
    ; CHECK: [[C1:%[0-9]+]]:_(s1) = G_CONSTANT i1 true
    ; CHECK: [[ICMP:%[0-9]+]]:_(s1) = G_ICMP intpred(sgt), [[COPY]](s64), [[C]]
    ; CHECK: [[XOR:%[0-9]+]]:_(s1) = G_XOR [[ICMP]], [[C1]]
    ; CHECK: %other_use:_(s1) = G_AND [[ICMP]], [[C1]]
    ; CHECK: %other_use_ext:_(s32) = G_ANYEXT %other_use(s1)
    ; CHECK: [[ANYEXT:%[0-9]+]]:_(s32) = G_ANYEXT [[XOR]](s1)
    ; CHECK: $w0 = COPY [[ANYEXT]](s32)
    ; CHECK: $w1 = COPY %other_use_ext(s32)
    ; CHECK: RET_ReallyLR implicit $w0
    %0:_(s64) = COPY $x0
    %1:_(s64) = G_CONSTANT i64 1
    %2:_(s1) = G_CONSTANT i1 1
    %3:_(s1) = G_ICMP intpred(sgt), %0(s64), %1
    %4:_(s1) = G_XOR %3, %2
    %other_use:_(s1) = G_AND %3, %2
    %other_use_ext:_(s32) = G_ANYEXT %other_use(s1)
    %5:_(s32) = G_ANYEXT %4
    $w0 = COPY %5(s32)
    $w1 = COPY %other_use_ext
    RET_ReallyLR implicit $w0
...
---
name: icmp_vector
tracksRegLiveness: true
body:             |
  bb.1:
    liveins: $q0

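    ; Vector case: sgt becomes sle and the xor with the all-true splat is
    ; removed.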
    ; CHECK-LABEL: name: icmp_vector
    ; CHECK: liveins: $q0
    ; CHECK: [[COPY:%[0-9]+]]:_(<4 x s32>) = COPY $q0
    ; CHECK: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 5
    ; CHECK: %splat_op2:_(<4 x s32>) = G_BUILD_VECTOR [[C]](s32), [[C]](s32), [[C]](s32), [[C]](s32)
    ; CHECK: [[ICMP:%[0-9]+]]:_(<4 x s1>) = G_ICMP intpred(sle), [[COPY]](<4 x s32>), %splat_op2
    ; CHECK: [[ANYEXT:%[0-9]+]]:_(<4 x s32>) = G_ANYEXT [[ICMP]](<4 x s1>)
    ; CHECK: $q0 = COPY [[ANYEXT]](<4 x s32>)
    ; CHECK: RET_ReallyLR implicit $q0
    %0:_(<4 x s32>) = COPY $q0
    %1:_(s32) = G_CONSTANT i32 5
    %splat_op2:_(<4 x s32>) = G_BUILD_VECTOR %1, %1, %1, %1
    %2:_(s1) = G_CONSTANT i1 1
    %splat_true:_(<4 x s1>) = G_BUILD_VECTOR %2, %2, %2, %2
    %3:_(<4 x s1>) = G_ICMP intpred(sgt), %0(<4 x s32>), %splat_op2
    %4:_(<4 x s1>) = G_XOR %3, %splat_true
    %5:_(<4 x s32>) = G_ANYEXT %4
    $q0 = COPY %5(<4 x s32>)
    RET_ReallyLR implicit $q0
...
---
name: icmp_and_icmp
tracksRegLiveness: true
body:             |
  bb.1:
    liveins: $x0

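    ; Inverting an AND of two compares inverts both predicates and turns the
    ; G_AND into a G_OR (De Morgan).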
    ; CHECK-LABEL: name: icmp_and_icmp
    ; CHECK: liveins: $x0
    ; CHECK: [[COPY:%[0-9]+]]:_(s64) = COPY $x0
    ; CHECK: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 1
    ; CHECK: [[ICMP:%[0-9]+]]:_(s1) = G_ICMP intpred(sle), [[COPY]](s64), [[C]]
    ; CHECK: [[ICMP1:%[0-9]+]]:_(s1) = G_ICMP intpred(ule), [[COPY]](s64), [[C]]
    ; CHECK: [[OR:%[0-9]+]]:_(s1) = G_OR [[ICMP]], [[ICMP1]]
    ; CHECK: [[ANYEXT:%[0-9]+]]:_(s32) = G_ANYEXT [[OR]](s1)
    ; CHECK: $w0 = COPY [[ANYEXT]](s32)
    ; CHECK: RET_ReallyLR implicit $w0
    %0:_(s64) = COPY $x0
    %1:_(s64) = G_CONSTANT i64 1
    %2:_(s1) = G_CONSTANT i1 1
    %3:_(s1) = G_ICMP intpred(sgt), %0(s64), %1
    %4:_(s1) = G_ICMP intpred(ugt), %0(s64), %1
    %5:_(s1) = G_AND %3, %4
    %6:_(s1) = G_XOR %5, %2
    %7:_(s32) = G_ANYEXT %6
    $w0 = COPY %7(s32)
    RET_ReallyLR implicit $w0
...
---
name: icmp_or_icmp
tracksRegLiveness: true
body:             |
  bb.1:
    liveins: $x0

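    ; Inverting an OR of two compares inverts both predicates and turns the
    ; G_OR into a G_AND.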
    ; CHECK-LABEL: name: icmp_or_icmp
    ; CHECK: liveins: $x0
    ; CHECK: [[COPY:%[0-9]+]]:_(s64) = COPY $x0
    ; CHECK: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 1
    ; CHECK: [[ICMP:%[0-9]+]]:_(s1) = G_ICMP intpred(sle), [[COPY]](s64), [[C]]
    ; CHECK: [[ICMP1:%[0-9]+]]:_(s1) = G_ICMP intpred(ule), [[COPY]](s64), [[C]]
    ; CHECK: [[AND:%[0-9]+]]:_(s1) = G_AND [[ICMP]], [[ICMP1]]
    ; CHECK: [[ANYEXT:%[0-9]+]]:_(s32) = G_ANYEXT [[AND]](s1)
    ; CHECK: $w0 = COPY [[ANYEXT]](s32)
    ; CHECK: RET_ReallyLR implicit $w0
    %0:_(s64) = COPY $x0
    %1:_(s64) = G_CONSTANT i64 1
    %2:_(s1) = G_CONSTANT i1 1
    %3:_(s1) = G_ICMP intpred(sgt), %0(s64), %1
    %4:_(s1) = G_ICMP intpred(ugt), %0(s64), %1
    %5:_(s1) = G_OR %3, %4
    %6:_(s1) = G_XOR %5, %2
    %7:_(s32) = G_ANYEXT %6
    $w0 = COPY %7(s32)
    RET_ReallyLR implicit $w0
...
---
name: icmp_and_icmp_or_icmp
tracksRegLiveness: true
body:             |
  bb.1:
    liveins: $x0

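    ; The fold recurses through a tree of G_AND/G_OR whose leaves are all
    ; compares.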
    ; CHECK-LABEL: name: icmp_and_icmp_or_icmp
    ; CHECK: liveins: $x0
    ; CHECK: [[COPY:%[0-9]+]]:_(s64) = COPY $x0
    ; CHECK: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 1
    ; CHECK: [[ICMP:%[0-9]+]]:_(s1) = G_ICMP intpred(sle), [[COPY]](s64), [[C]]
    ; CHECK: [[ICMP1:%[0-9]+]]:_(s1) = G_ICMP intpred(ule), [[COPY]](s64), [[C]]
    ; CHECK: [[OR:%[0-9]+]]:_(s1) = G_OR [[ICMP]], [[ICMP1]]
    ; CHECK: [[ICMP2:%[0-9]+]]:_(s1) = G_ICMP intpred(eq), [[COPY]](s64), [[C]]
    ; CHECK: [[AND:%[0-9]+]]:_(s1) = G_AND [[OR]], [[ICMP2]]
    ; CHECK: [[ANYEXT:%[0-9]+]]:_(s32) = G_ANYEXT [[AND]](s1)
    ; CHECK: $w0 = COPY [[ANYEXT]](s32)
    ; CHECK: RET_ReallyLR implicit $w0
    %0:_(s64) = COPY $x0
    %1:_(s64) = G_CONSTANT i64 1
    %2:_(s1) = G_CONSTANT i1 1
    %3:_(s1) = G_ICMP intpred(sgt), %0(s64), %1
    %4:_(s1) = G_ICMP intpred(ugt), %0(s64), %1
    %5:_(s1) = G_AND %3, %4
    %6:_(s1) = G_ICMP intpred(ne), %0(s64), %1
    %7:_(s1) = G_OR %5, %6
    %8:_(s1) = G_XOR %7, %2
    %9:_(s32) = G_ANYEXT %8
    $w0 = COPY %9(s32)
    RET_ReallyLR implicit $w0
...
---
name: icmp_and_trunc
tracksRegLiveness: true
body:             |
  bb.1:
    liveins: $x0

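    ; Negative test: one G_AND operand is a G_TRUNC rather than a compare, so
    ; the fold does not apply and the G_XOR remains.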
    ; CHECK-LABEL: name: icmp_and_trunc
    ; CHECK: liveins: $x0
    ; CHECK: [[COPY:%[0-9]+]]:_(s64) = COPY $x0
    ; CHECK: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 1
    ; CHECK: [[C1:%[0-9]+]]:_(s1) = G_CONSTANT i1 true
    ; CHECK: [[ICMP:%[0-9]+]]:_(s1) = G_ICMP intpred(sgt), [[COPY]](s64), [[C]]
    ; CHECK: [[TRUNC:%[0-9]+]]:_(s1) = G_TRUNC [[COPY]](s64)
    ; CHECK: [[AND:%[0-9]+]]:_(s1) = G_AND [[ICMP]], [[TRUNC]]
    ; CHECK: [[XOR:%[0-9]+]]:_(s1) = G_XOR [[AND]], [[C1]]
    ; CHECK: [[ANYEXT:%[0-9]+]]:_(s32) = G_ANYEXT [[XOR]](s1)
    ; CHECK: $w0 = COPY [[ANYEXT]](s32)
    ; CHECK: RET_ReallyLR implicit $w0
    %0:_(s64) = COPY $x0
    %1:_(s64) = G_CONSTANT i64 1
    %2:_(s1) = G_CONSTANT i1 1
    %3:_(s1) = G_ICMP intpred(sgt), %0(s64), %1
    %4:_(s1) = G_TRUNC %0(s64)
    %5:_(s1) = G_AND %3, %4
    %6:_(s1) = G_XOR %5, %2
    %7:_(s32) = G_ANYEXT %6
    $w0 = COPY %7(s32)
    RET_ReallyLR implicit $w0
...