llvm/test/CodeGen/AArch64/GlobalISel/prelegalizercombiner-concat-vectors.mir

# NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
# RUN: llc -mtriple aarch64-apple-ios -run-pass=aarch64-prelegalizer-combiner %s -o - | FileCheck %s

# Check that we canonicalize concat_vectors(build_vector, build_vector)
# into build_vector.
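#
# Schematically (an illustration of the fold, not test input):
#   concat_vectors(build_vector(a, b), build_vector(c, d))
#     ==> build_vector(a, b, c, d)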
---
name: concat_to_build_vector
tracksRegLiveness: true
body:             |
  bb.1:
    liveins: $x0, $x1, $x2, $x3

    ; CHECK-LABEL: name: concat_to_build_vector
    ; CHECK: liveins: $x0, $x1, $x2, $x3
    ; CHECK-NEXT: {{  $}}
    ; CHECK-NEXT: [[COPY:%[0-9]+]]:_(s64) = COPY $x0
    ; CHECK-NEXT: [[COPY1:%[0-9]+]]:_(s64) = COPY $x1
    ; CHECK-NEXT: [[COPY2:%[0-9]+]]:_(s64) = COPY $x2
    ; CHECK-NEXT: [[COPY3:%[0-9]+]]:_(s64) = COPY $x3
    ; CHECK-NEXT: [[BUILD_VECTOR:%[0-9]+]]:_(<4 x s64>) = G_BUILD_VECTOR [[COPY]](s64), [[COPY1]](s64), [[COPY2]](s64), [[COPY3]](s64)
    ; CHECK-NEXT: RET_ReallyLR implicit [[BUILD_VECTOR]](<4 x s64>)
    %0:_(s64) = COPY $x0
    %1:_(s64) = COPY $x1
    %2:_(s64) = COPY $x2
    %3:_(s64) = COPY $x3
    %4:_(<2 x s64>) = G_BUILD_VECTOR %0(s64), %1
    %5:_(<2 x s64>) = G_BUILD_VECTOR %2(s64), %3
    %6:_(<4 x s64>) = G_CONCAT_VECTORS %4(<2 x s64>), %5
    RET_ReallyLR implicit %6
...
# Same test as concat_to_build_vector but with pointer types.
---
name: concat_to_build_vector_ptr
tracksRegLiveness: true
body:             |
  bb.1:
    liveins: $x0, $x1, $x2, $x3

    ; CHECK-LABEL: name: concat_to_build_vector_ptr
    ; CHECK: liveins: $x0, $x1, $x2, $x3
    ; CHECK-NEXT: {{  $}}
    ; CHECK-NEXT: [[COPY:%[0-9]+]]:_(p0) = COPY $x0
    ; CHECK-NEXT: [[COPY1:%[0-9]+]]:_(p0) = COPY $x1
    ; CHECK-NEXT: [[COPY2:%[0-9]+]]:_(p0) = COPY $x2
    ; CHECK-NEXT: [[COPY3:%[0-9]+]]:_(p0) = COPY $x3
    ; CHECK-NEXT: [[BUILD_VECTOR:%[0-9]+]]:_(<4 x p0>) = G_BUILD_VECTOR [[COPY]](p0), [[COPY1]](p0), [[COPY2]](p0), [[COPY3]](p0)
    ; CHECK-NEXT: RET_ReallyLR implicit [[BUILD_VECTOR]](<4 x p0>)
    %0:_(p0) = COPY $x0
    %1:_(p0) = COPY $x1
    %2:_(p0) = COPY $x2
    %3:_(p0) = COPY $x3
    %4:_(<2 x p0>) = G_BUILD_VECTOR %0(p0), %1
    %5:_(<2 x p0>) = G_BUILD_VECTOR %2(p0), %3
    %6:_(<4 x p0>) = G_CONCAT_VECTORS %4(<2 x p0>), %5
    RET_ReallyLR implicit %6
...
# Check that we canonicalize concat_vectors(undef, undef) into undef.
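#
# Schematically (illustration only):
#   concat_vectors(undef, undef) ==> undef (of the wider vector type)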
---
name: concat_to_undef
tracksRegLiveness: true
body:             |
  bb.1:
    ; CHECK-LABEL: name: concat_to_undef
    ; CHECK: [[DEF:%[0-9]+]]:_(<4 x s64>) = G_IMPLICIT_DEF
    ; CHECK-NEXT: RET_ReallyLR implicit [[DEF]](<4 x s64>)
    %4:_(<2 x s64>) = G_IMPLICIT_DEF
    %5:_(<2 x s64>) = G_IMPLICIT_DEF
    %6:_(<4 x s64>) = G_CONCAT_VECTORS %4(<2 x s64>), %5
    RET_ReallyLR implicit %6
...
# Check that when combining concat_vectors(build_vector, undef) into
# build_vector, we correctly break the undef vector into a sequence
# of undef scalars.
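#
# Schematically (illustration only; 'u' stands for an undef scalar):
#   concat_vectors(build_vector(a, b), undef)
#     ==> build_vector(a, b, u, u)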
---
name: concat_to_build_vector_with_undef
tracksRegLiveness: true
body:             |
  bb.1:
    liveins: $x0, $x1

    ; CHECK-LABEL: name: concat_to_build_vector_with_undef
    ; CHECK: liveins: $x0, $x1
    ; CHECK-NEXT: {{  $}}
    ; CHECK-NEXT: [[COPY:%[0-9]+]]:_(s64) = COPY $x0
    ; CHECK-NEXT: [[COPY1:%[0-9]+]]:_(s64) = COPY $x1
    ; CHECK-NEXT: [[DEF:%[0-9]+]]:_(s64) = G_IMPLICIT_DEF
    ; CHECK-NEXT: [[BUILD_VECTOR:%[0-9]+]]:_(<4 x s64>) = G_BUILD_VECTOR [[COPY]](s64), [[COPY1]](s64), [[DEF]](s64), [[DEF]](s64)
    ; CHECK-NEXT: RET_ReallyLR implicit [[BUILD_VECTOR]](<4 x s64>)
    %0:_(s64) = COPY $x0
    %1:_(s64) = COPY $x1
    %4:_(<2 x s64>) = G_BUILD_VECTOR %0(s64), %1
    %5:_(<2 x s64>) = G_IMPLICIT_DEF
    %6:_(<4 x s64>) = G_CONCAT_VECTORS %4(<2 x s64>), %5
    RET_ReallyLR implicit %6
...
# Same as concat_to_build_vector_with_undef but with pointer types.
---
name: concat_to_build_vector_with_undef_ptr
tracksRegLiveness: true
body:             |
  bb.1:
    liveins: $x0, $x1

    ; CHECK-LABEL: name: concat_to_build_vector_with_undef_ptr
    ; CHECK: liveins: $x0, $x1
    ; CHECK-NEXT: {{  $}}
    ; CHECK-NEXT: [[COPY:%[0-9]+]]:_(p0) = COPY $x0
    ; CHECK-NEXT: [[COPY1:%[0-9]+]]:_(p0) = COPY $x1
    ; CHECK-NEXT: [[DEF:%[0-9]+]]:_(p0) = G_IMPLICIT_DEF
    ; CHECK-NEXT: [[BUILD_VECTOR:%[0-9]+]]:_(<4 x p0>) = G_BUILD_VECTOR [[COPY]](p0), [[COPY1]](p0), [[DEF]](p0), [[DEF]](p0)
    ; CHECK-NEXT: RET_ReallyLR implicit [[BUILD_VECTOR]](<4 x p0>)
    %0:_(p0) = COPY $x0
    %1:_(p0) = COPY $x1
    %4:_(<2 x p0>) = G_BUILD_VECTOR %0(p0), %1
    %5:_(<2 x p0>) = G_IMPLICIT_DEF
    %6:_(<4 x p0>) = G_CONCAT_VECTORS %4(<2 x p0>), %5
    RET_ReallyLR implicit %6
...
# Check that we keep the concat_vectors as soon as one of the operands is
# neither undef nor a build_vector, i.e., when the concat_vectors cannot
# be flattened.
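#
# Schematically (illustration only; %live is any value that is neither
# undef nor a build_vector):
#   concat_vectors(%live, undef) stays as-is.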
---
name: concat_to_build_vector_negative_test
tracksRegLiveness: true
body:             |
  bb.1:
    liveins: $q0

    ; CHECK-LABEL: name: concat_to_build_vector_negative_test
    ; CHECK: liveins: $q0
    ; CHECK-NEXT: {{  $}}
    ; CHECK-NEXT: [[COPY:%[0-9]+]]:_(<2 x s64>) = COPY $q0
    ; CHECK-NEXT: [[DEF:%[0-9]+]]:_(<2 x s64>) = G_IMPLICIT_DEF
    ; CHECK-NEXT: [[CONCAT_VECTORS:%[0-9]+]]:_(<4 x s64>) = G_CONCAT_VECTORS [[COPY]](<2 x s64>), [[DEF]](<2 x s64>)
    ; CHECK-NEXT: RET_ReallyLR implicit [[CONCAT_VECTORS]](<4 x s64>)
    %4:_(<2 x s64>) = COPY $q0
    %5:_(<2 x s64>) = G_IMPLICIT_DEF
    %6:_(<4 x s64>) = G_CONCAT_VECTORS %4(<2 x s64>), %5
    RET_ReallyLR implicit %6
...