# NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
# RUN: llc -mtriple aarch64-apple-ios -run-pass=aarch64-prelegalizer-combiner %s -o - -verify-machineinstrs | FileCheck %s
# This test checks the combine that removes the G_TRUNC when it can be proven redundant.
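#
# A sketch of the fold being exercised (this summary is inferred from the CHECK
# lines below, not quoted from the combiner source): when a G_ICMP with an
# eq/ne predicate compares a G_TRUNC against zero, and the bits dropped by the
# truncation are all sign copies of the kept bits (here guaranteed by the
# sign-extending load), the compare can be performed directly on the wide
# value, e.g.
#
#   %load:_(s64)  = G_SEXTLOAD %v(p0) :: (load (s32))
#   %trunc:_(s32) = G_TRUNC %load(s64)
#   %cmp:_(s1)    = G_ICMP intpred(ne), %trunc(s32), %zero
#
# is expected to become
#
#   %load:_(s64) = G_SEXTLOAD %v(p0) :: (load (s32))
#   %cmp:_(s1)   = G_ICMP intpred(ne), %load(s64), %zero64
#
# where %zero64 stands for the widened i64 zero constant.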
---
name: icmp_trunc_sextload
tracksRegLiveness: true
body: |
  bb.1:
    liveins: $x0

    ; CHECK-LABEL: name: icmp_trunc_sextload
    ; CHECK: liveins: $x0
    ; CHECK: %v:_(p0) = COPY $x0
    ; CHECK: %load:_(s64) = G_SEXTLOAD %v(p0) :: (load (s32))
    ; CHECK: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 0
    ; CHECK: %cmp:_(s1) = G_ICMP intpred(ne), %load(s64), [[C]]
    ; CHECK: [[ANYEXT:%[0-9]+]]:_(s32) = G_ANYEXT %cmp(s1)
    ; CHECK: $w0 = COPY [[ANYEXT]](s32)
    ; CHECK: RET_ReallyLR implicit $w0
    %v:_(p0) = COPY $x0
    %load:_(s64) = G_SEXTLOAD %v:_(p0) :: (load (s32))
    %trunc:_(s32) = G_TRUNC %load(s64)
    %zero:_(s32) = G_CONSTANT i32 0
    %cmp:_(s1) = G_ICMP intpred(ne), %trunc(s32), %zero
    %5:_(s32) = G_ANYEXT %cmp
    $w0 = COPY %5(s32)
    RET_ReallyLR implicit $w0
...
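# Same fold with the eq predicate; both equality predicates are expected to be
# handled, as the CHECK lines below show.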
---
name: icmp_trunc_sextload_eq
tracksRegLiveness: true
body: |
  bb.1:
    liveins: $x0

    ; CHECK-LABEL: name: icmp_trunc_sextload_eq
    ; CHECK: liveins: $x0
    ; CHECK: %v:_(p0) = COPY $x0
    ; CHECK: %load:_(s64) = G_SEXTLOAD %v(p0) :: (load (s32))
    ; CHECK: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 0
    ; CHECK: %cmp:_(s1) = G_ICMP intpred(eq), %load(s64), [[C]]
    ; CHECK: [[ANYEXT:%[0-9]+]]:_(s32) = G_ANYEXT %cmp(s1)
    ; CHECK: $w0 = COPY [[ANYEXT]](s32)
    ; CHECK: RET_ReallyLR implicit $w0
    %v:_(p0) = COPY $x0
    %load:_(s64) = G_SEXTLOAD %v:_(p0) :: (load (s32))
    %trunc:_(s32) = G_TRUNC %load(s64)
    %zero:_(s32) = G_CONSTANT i32 0
    %cmp:_(s1) = G_ICMP intpred(eq), %trunc(s32), %zero
    %5:_(s32) = G_ANYEXT %cmp
    $w0 = COPY %5(s32)
    RET_ReallyLR implicit $w0
...
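# Negative test: slt is not an equality predicate, so the fold does not apply
# and the G_TRUNC is expected to remain.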
---
name: icmp_trunc_sextload_wrongpred
tracksRegLiveness: true
body: |
  bb.1:
    liveins: $x0

    ; CHECK-LABEL: name: icmp_trunc_sextload_wrongpred
    ; CHECK: liveins: $x0
    ; CHECK: %v:_(p0) = COPY $x0
    ; CHECK: %load:_(s64) = G_SEXTLOAD %v(p0) :: (load (s32))
    ; CHECK: %trunc:_(s32) = G_TRUNC %load(s64)
    ; CHECK: %zero:_(s32) = G_CONSTANT i32 0
    ; CHECK: %cmp:_(s1) = G_ICMP intpred(slt), %trunc(s32), %zero
    ; CHECK: [[ANYEXT:%[0-9]+]]:_(s32) = G_ANYEXT %cmp(s1)
    ; CHECK: $w0 = COPY [[ANYEXT]](s32)
    ; CHECK: RET_ReallyLR implicit $w0
    %v:_(p0) = COPY $x0
    %load:_(s64) = G_SEXTLOAD %v:_(p0) :: (load (s32))
    %trunc:_(s32) = G_TRUNC %load(s64)
    %zero:_(s32) = G_CONSTANT i32 0
    %cmp:_(s1) = G_ICMP intpred(slt), %trunc(s32), %zero
    %5:_(s32) = G_ANYEXT %cmp
    $w0 = COPY %5(s32)
    RET_ReallyLR implicit $w0
...
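# Negative test: the G_TRUNC narrows to s16 while the load sign-extends from
# s32, so the dropped bits (16..31) need not be sign copies of the kept bits;
# comparing the wide value would not be equivalent, and the G_TRUNC is expected
# to remain.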
---
name: icmp_trunc_sextload_extend_mismatch
tracksRegLiveness: true
body: |
  bb.1:
    liveins: $x0

    ; CHECK-LABEL: name: icmp_trunc_sextload_extend_mismatch
    ; CHECK: liveins: $x0
    ; CHECK: %v:_(p0) = COPY $x0
    ; CHECK: %load:_(s64) = G_SEXTLOAD %v(p0) :: (load (s32))
    ; CHECK: %trunc:_(s16) = G_TRUNC %load(s64)
    ; CHECK: %zero:_(s16) = G_CONSTANT i16 0
    ; CHECK: %cmp:_(s1) = G_ICMP intpred(ne), %trunc(s16), %zero
    ; CHECK: [[ANYEXT:%[0-9]+]]:_(s32) = G_ANYEXT %cmp(s1)
    ; CHECK: $w0 = COPY [[ANYEXT]](s32)
    ; CHECK: RET_ReallyLR implicit $w0
    %v:_(p0) = COPY $x0
    %load:_(s64) = G_SEXTLOAD %v:_(p0) :: (load (s32))
    %trunc:_(s16) = G_TRUNC %load(s64)
    %zero:_(s16) = G_CONSTANT i16 0
    %cmp:_(s1) = G_ICMP intpred(ne), %trunc(s16), %zero
    %5:_(s32) = G_ANYEXT %cmp
    $w0 = COPY %5(s32)
    RET_ReallyLR implicit $w0
...