; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc < %s -mtriple=i686-unknown-unknown | FileCheck %s --check-prefixes=X86
; RUN: llc < %s -mtriple=x86_64-unknown-unknown | FileCheck %s --check-prefixes=X64
; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+bmi | FileCheck %s --check-prefixes=X64

; PR52267
; InstCombine transforms an 'add' with min-signed-value into an 'xor'.
; LEA instruction selection should be able to see through that
; transform and reduce add/shift/xor instruction counts and moves.
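;
; For the sign bit the two are a true identity: X ^ SMIN == X + SMIN (mod 2^N),
; since adding the top bit can never carry into another bit. The xor constant
; can therefore be folded into an LEA displacement, e.g. for the i32 case below:
;   %r = xor i32 %x, 2147483648  ->  leal -2147483648(%rdi), %eax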

;
; XOR(X,MIN_SIGNED_VALUE)
;

define i8 @xor_sminval_i8(i8 %x) {
; X86-LABEL: xor_sminval_i8:
; X86:       # %bb.0:
; X86-NEXT:    movzbl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    addb $-128, %al
; X86-NEXT:    retl
;
; X64-LABEL: xor_sminval_i8:
; X64:       # %bb.0:
; X64-NEXT:    # kill: def $edi killed $edi def $rdi
; X64-NEXT:    leal -128(%rdi), %eax
; X64-NEXT:    # kill: def $al killed $al killed $eax
; X64-NEXT:    retq
  %r = xor i8 %x, 128
  ret i8 %r
}

; negative test - 0x7F is not the i8 sign bit, so the xor is not equivalent to an add
define i8 @xor_notsminval_i8(i8 %x) {
; X86-LABEL: xor_notsminval_i8:
; X86:       # %bb.0:
; X86-NEXT:    movzbl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    xorb $127, %al
; X86-NEXT:    retl
;
; X64-LABEL: xor_notsminval_i8:
; X64:       # %bb.0:
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    xorb $127, %al
; X64-NEXT:    # kill: def $al killed $al killed $eax
; X64-NEXT:    retq
  %r = xor i8 %x, 127
  ret i8 %r
}

define i16 @xor_sminval_i16(i16 %x) {
; X86-LABEL: xor_sminval_i16:
; X86:       # %bb.0:
; X86-NEXT:    movl $32768, %eax # imm = 0x8000
; X86-NEXT:    xorl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    # kill: def $ax killed $ax killed $eax
; X86-NEXT:    retl
;
; X64-LABEL: xor_sminval_i16:
; X64:       # %bb.0:
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    xorl $32768, %eax # imm = 0x8000
; X64-NEXT:    # kill: def $ax killed $ax killed $eax
; X64-NEXT:    retq
  %r = xor i16 %x, 32768
  ret i16 %r
}

define i32 @xor_sminval_i32(i32 %x) {
; X86-LABEL: xor_sminval_i32:
; X86:       # %bb.0:
; X86-NEXT:    movl $-2147483648, %eax # imm = 0x80000000
; X86-NEXT:    xorl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    retl
;
; X64-LABEL: xor_sminval_i32:
; X64:       # %bb.0:
; X64-NEXT:    # kill: def $edi killed $edi def $rdi
; X64-NEXT:    leal -2147483648(%rdi), %eax
; X64-NEXT:    retq
  %r = xor i32 %x, 2147483648
  ret i32 %r
}

; negative test - 0x8000 is not the i32 sign bit
define i32 @xor_notsminval_i32(i32 %x) {
; X86-LABEL: xor_notsminval_i32:
; X86:       # %bb.0:
; X86-NEXT:    movl $32768, %eax # imm = 0x8000
; X86-NEXT:    xorl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    retl
;
; X64-LABEL: xor_notsminval_i32:
; X64:       # %bb.0:
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    xorl $32768, %eax # imm = 0x8000
; X64-NEXT:    retq
  %r = xor i32 %x, 32768
  ret i32 %r
}

define i64 @xor_sminval_i64(i64 %x) {
; X86-LABEL: xor_sminval_i64:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    movl $-2147483648, %edx # imm = 0x80000000
; X86-NEXT:    xorl {{[0-9]+}}(%esp), %edx
; X86-NEXT:    retl
;
; X64-LABEL: xor_sminval_i64:
; X64:       # %bb.0:
; X64-NEXT:    movabsq $-9223372036854775808, %rax # imm = 0x8000000000000000
; X64-NEXT:    xorq %rdi, %rax
; X64-NEXT:    retq
  %r = xor i64 %x, -9223372036854775808
  ret i64 %r
}

;
; XOR(ADD/SUB(X,C),MIN_SIGNED_VALUE)
;
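; The sign-bit xor should merge with the inner add/sub constant into a single
; LEA displacement, e.g. 'sub i16 %x, 2' followed by 'xor i16 %s, 32768'
; becomes 'leal 32766(%rdi)' below (32766 == -2 + 0x8000 mod 2^16).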

define i8 @xor_add_sminval_i8(i8 %x, i8 %y) {
; X86-LABEL: xor_add_sminval_i8:
; X86:       # %bb.0:
; X86-NEXT:    movzbl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    addb {{[0-9]+}}(%esp), %al
; X86-NEXT:    addb $-128, %al
; X86-NEXT:    retl
;
; X64-LABEL: xor_add_sminval_i8:
; X64:       # %bb.0:
; X64-NEXT:    # kill: def $esi killed $esi def $rsi
; X64-NEXT:    # kill: def $edi killed $edi def $rdi
; X64-NEXT:    leal (%rdi,%rsi), %eax
; X64-NEXT:    addb $-128, %al
; X64-NEXT:    # kill: def $al killed $al killed $eax
; X64-NEXT:    retq
  %s = add i8 %x, %y
  %r = xor i8 %s, 128
  ret i8 %r
}

define i16 @xor_sub_sminval_i16(i16 %x) {
; X86-LABEL: xor_sub_sminval_i16:
; X86:       # %bb.0:
; X86-NEXT:    movl $32766, %eax # imm = 0x7FFE
; X86-NEXT:    addl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    # kill: def $ax killed $ax killed $eax
; X86-NEXT:    retl
;
; X64-LABEL: xor_sub_sminval_i16:
; X64:       # %bb.0:
; X64-NEXT:    # kill: def $edi killed $edi def $rdi
; X64-NEXT:    leal 32766(%rdi), %eax
; X64-NEXT:    # kill: def $ax killed $ax killed $eax
; X64-NEXT:    retq
  %s = sub i16 %x, 2
  %r = xor i16 %s, 32768
  ret i16 %r
}

define i32 @xor_add_sminval_i32(i32 %x) {
; X86-LABEL: xor_add_sminval_i32:
; X86:       # %bb.0:
; X86-NEXT:    movl $-2147483136, %eax # imm = 0x80000200
; X86-NEXT:    addl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    retl
;
; X64-LABEL: xor_add_sminval_i32:
; X64:       # %bb.0:
; X64-NEXT:    # kill: def $edi killed $edi def $rdi
; X64-NEXT:    leal -2147483136(%rdi), %eax
; X64-NEXT:    retq
  %s = add i32 %x, 512
  %r = xor i32 %s, 2147483648
  ret i32 %r
}

define i64 @xor_add_sminval_i64(i64 %x, i64 %y) {
; X86-LABEL: xor_add_sminval_i64:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    movl {{[0-9]+}}(%esp), %edx
; X86-NEXT:    addl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    adcl {{[0-9]+}}(%esp), %edx
; X86-NEXT:    addl $-2147483648, %edx # imm = 0x80000000
; X86-NEXT:    retl
;
; X64-LABEL: xor_add_sminval_i64:
; X64:       # %bb.0:
; X64-NEXT:    leaq (%rdi,%rsi), %rcx
; X64-NEXT:    movabsq $-9223372036854775808, %rax # imm = 0x8000000000000000
; X64-NEXT:    xorq %rcx, %rax
; X64-NEXT:    retq
  %s = add i64 %x, %y
  %r = xor i64 %s, -9223372036854775808
  ret i64 %r
}

;
; ADD/SUB(XOR(X,MIN_SIGNED_VALUE),C)
;
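; The reverse association: an add/sub constant applied after the sign-bit xor
; should likewise fold into a single LEA displacement, e.g. 'xor i16 %x, 32768'
; followed by 'add i16 %r, 2' becomes 'leal -32766(%rdi)' below
; (-32766 == 0x8002 interpreted as a signed 16-bit value).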

define i8 @sub_xor_sminval_i8(i8 %x, i8 %y) {
; X86-LABEL: sub_xor_sminval_i8:
; X86:       # %bb.0:
; X86-NEXT:    movzbl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    addb $-128, %al
; X86-NEXT:    subb {{[0-9]+}}(%esp), %al
; X86-NEXT:    retl
;
; X64-LABEL: sub_xor_sminval_i8:
; X64:       # %bb.0:
; X64-NEXT:    # kill: def $edi killed $edi def $rdi
; X64-NEXT:    leal -128(%rdi), %eax
; X64-NEXT:    subb %sil, %al
; X64-NEXT:    # kill: def $al killed $al killed $eax
; X64-NEXT:    retq
  %r = xor i8 %x, 128
  %s = sub i8 %r, %y
  ret i8 %s
}

define i16 @add_xor_sminval_i16(i16 %x) {
; X86-LABEL: add_xor_sminval_i16:
; X86:       # %bb.0:
; X86-NEXT:    movl $-32766, %eax # imm = 0x8002
; X86-NEXT:    addl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    # kill: def $ax killed $ax killed $eax
; X86-NEXT:    retl
;
; X64-LABEL: add_xor_sminval_i16:
; X64:       # %bb.0:
; X64-NEXT:    # kill: def $edi killed $edi def $rdi
; X64-NEXT:    leal -32766(%rdi), %eax
; X64-NEXT:    # kill: def $ax killed $ax killed $eax
; X64-NEXT:    retq
  %r = xor i16 %x, 32768
  %s = add i16 %r, 2
  ret i16 %s
}

define i32 @sub_xor_sminval_i32(i32 %x) {
; X86-LABEL: sub_xor_sminval_i32:
; X86:       # %bb.0:
; X86-NEXT:    movl $2147483136, %eax # imm = 0x7FFFFE00
; X86-NEXT:    addl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    retl
;
; X64-LABEL: sub_xor_sminval_i32:
; X64:       # %bb.0:
; X64-NEXT:    # kill: def $edi killed $edi def $rdi
; X64-NEXT:    leal 2147483136(%rdi), %eax
; X64-NEXT:    retq
  %r = xor i32 %x, 2147483648
  %s = sub i32 %r, 512
  ret i32 %s
}

define i64 @add_xor_sminval_i64(i64 %x, i64 %y) {
; X86-LABEL: add_xor_sminval_i64:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    movl {{[0-9]+}}(%esp), %edx
; X86-NEXT:    addl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    adcl {{[0-9]+}}(%esp), %edx
; X86-NEXT:    addl $-2147483648, %edx # imm = 0x80000000
; X86-NEXT:    retl
;
; X64-LABEL: add_xor_sminval_i64:
; X64:       # %bb.0:
; X64-NEXT:    movabsq $-9223372036854775808, %rax # imm = 0x8000000000000000
; X64-NEXT:    addq %rdi, %rax
; X64-NEXT:    addq %rsi, %rax
; X64-NEXT:    retq
  %r = xor i64 %x, -9223372036854775808
  %s = add i64 %y, %r
  ret i64 %s
}

;
; XOR(SHL(X,C),MIN_SIGNED_VALUE)
;
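; A left shift by 1, 2 or 3 maps onto the LEA scale factor (2, 4 or 8), so the
; shift and the sign-bit xor should combine into one LEA, e.g. 'shl i32 %x, 3'
; followed by 'xor i32 %s, 2147483648' becomes 'leal -2147483648(,%rdi,8)'
; below. Larger shift amounts (see the bigshl negative test) cannot use the
; scale field.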

define i8 @xor_shl_sminval_i8(i8 %x) {
; X86-LABEL: xor_shl_sminval_i8:
; X86:       # %bb.0:
; X86-NEXT:    movzbl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    addb %al, %al
; X86-NEXT:    addb $-128, %al
; X86-NEXT:    retl
;
; X64-LABEL: xor_shl_sminval_i8:
; X64:       # %bb.0:
; X64-NEXT:    # kill: def $edi killed $edi def $rdi
; X64-NEXT:    leal (%rdi,%rdi), %eax
; X64-NEXT:    addb $-128, %al
; X64-NEXT:    # kill: def $al killed $al killed $eax
; X64-NEXT:    retq
  %s = shl i8 %x, 1
  %r = xor i8 %s, 128
  ret i8 %r
}

define i16 @xor_shl_sminval_i16(i16 %x) {
; X86-LABEL: xor_shl_sminval_i16:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    shll $2, %eax
; X86-NEXT:    xorl $32768, %eax # imm = 0x8000
; X86-NEXT:    # kill: def $ax killed $ax killed $eax
; X86-NEXT:    retl
;
; X64-LABEL: xor_shl_sminval_i16:
; X64:       # %bb.0:
; X64-NEXT:    # kill: def $edi killed $edi def $rdi
; X64-NEXT:    leal (,%rdi,4), %eax
; X64-NEXT:    xorl $32768, %eax # imm = 0x8000
; X64-NEXT:    # kill: def $ax killed $ax killed $eax
; X64-NEXT:    retq
  %s = shl i16 %x, 2
  %r = xor i16 %s, 32768
  ret i16 %r
}

define i32 @xor_shl_sminval_i32(i32 %x) {
; X86-LABEL: xor_shl_sminval_i32:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    leal -2147483648(,%eax,8), %eax
; X86-NEXT:    retl
;
; X64-LABEL: xor_shl_sminval_i32:
; X64:       # %bb.0:
; X64-NEXT:    # kill: def $edi killed $edi def $rdi
; X64-NEXT:    leal -2147483648(,%rdi,8), %eax
; X64-NEXT:    retq
  %s = shl i32 %x, 3
  %r = xor i32 %s, 2147483648
  ret i32 %r
}

; negative test - a shift by 8 cannot be expressed as an LEA scale (max scale is 8, i.e. shift by 3)
define i32 @xor_bigshl_sminval_i32(i32 %x) {
; X86-LABEL: xor_bigshl_sminval_i32:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    shll $8, %eax
; X86-NEXT:    addl $-2147483648, %eax # imm = 0x80000000
; X86-NEXT:    retl
;
; X64-LABEL: xor_bigshl_sminval_i32:
; X64:       # %bb.0:
; X64-NEXT:    # kill: def $edi killed $edi def $rdi
; X64-NEXT:    shll $8, %edi
; X64-NEXT:    leal -2147483648(%rdi), %eax
; X64-NEXT:    retq
  %s = shl i32 %x, 8
  %r = xor i32 %s, 2147483648
  ret i32 %r
}

define i64 @xor_shl_sminval_i64(i64 %x) {
; X86-LABEL: xor_shl_sminval_i64:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    movl {{[0-9]+}}(%esp), %edx
; X86-NEXT:    shldl $2, %eax, %edx
; X86-NEXT:    shll $2, %eax
; X86-NEXT:    addl $-2147483648, %edx # imm = 0x80000000
; X86-NEXT:    retl
;
; X64-LABEL: xor_shl_sminval_i64:
; X64:       # %bb.0:
; X64-NEXT:    movabsq $-9223372036854775808, %rax # imm = 0x8000000000000000
; X64-NEXT:    leaq (%rax,%rdi,4), %rax
; X64-NEXT:    retq
  %s = shl i64 %x, 2
  %r = xor i64 %s, -9223372036854775808
  ret i64 %r
}