llvm/test/CodeGen/X86/tbm-intrinsics-fast-isel.ll

; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc < %s -fast-isel -mtriple=i686-unknown-unknown -mattr=+tbm | FileCheck %s --check-prefix=X86
; RUN: llc < %s -fast-isel -mtriple=x86_64-unknown-unknown -mattr=+tbm | FileCheck %s --check-prefix=X64

; NOTE: This should use IR equivalent to what is generated by clang/test/CodeGen/tbm-builtins.c

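; BEXTRI uses the llvm.x86.tbm.bextri.u32 intrinsic directly. The control
; immediate packs the start bit in bits [7:0] and the field length in bits
; [15:8], so 0xF01 extracts 15 bits starting at bit 1.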
define i32 @test__bextri_u32(i32 %a0) {
; X86-LABEL: test__bextri_u32:
; X86:       # %bb.0:
; X86-NEXT:    bextrl $3841, {{[0-9]+}}(%esp), %eax # imm = 0xF01
; X86-NEXT:    retl
;
; X64-LABEL: test__bextri_u32:
; X64:       # %bb.0:
; X64-NEXT:    bextrl $3841, %edi, %eax # imm = 0xF01
; X64-NEXT:    retq
  %1 = call i32 @llvm.x86.tbm.bextri.u32(i32 %a0, i32 3841)
  ret i32 %1
}

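; BLCFILL (fill from lowest clear bit): a & (a + 1), matching __blcfill_u32.
; Under fast-isel the pattern is not folded to a blcfill instruction; the
; checks expect the plain lea/and expansion.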
define i32 @test__blcfill_u32(i32 %a0) {
; X86-LABEL: test__blcfill_u32:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT:    leal 1(%ecx), %eax
; X86-NEXT:    andl %ecx, %eax
; X86-NEXT:    retl
;
; X64-LABEL: test__blcfill_u32:
; X64:       # %bb.0:
; X64-NEXT:    # kill: def $edi killed $edi def $rdi
; X64-NEXT:    leal 1(%rdi), %eax
; X64-NEXT:    andl %edi, %eax
; X64-NEXT:    retq
  %1 = add i32 %a0, 1
  %2 = and i32 %a0, %1
  ret i32 %2
}

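; BLCI (isolate lowest clear bit): a | ~(a + 1), matching __blci_u32.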
define i32 @test__blci_u32(i32 %a0) {
; X86-LABEL: test__blci_u32:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT:    leal 1(%ecx), %eax
; X86-NEXT:    xorl $-1, %eax
; X86-NEXT:    orl %ecx, %eax
; X86-NEXT:    retl
;
; X64-LABEL: test__blci_u32:
; X64:       # %bb.0:
; X64-NEXT:    # kill: def $edi killed $edi def $rdi
; X64-NEXT:    leal 1(%rdi), %eax
; X64-NEXT:    xorl $-1, %eax
; X64-NEXT:    orl %edi, %eax
; X64-NEXT:    retq
  %1 = add i32 %a0, 1
  %2 = xor i32 %1, -1
  %3 = or i32 %a0, %2
  ret i32 %3
}

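; BLCIC (isolate lowest clear bit and complement): ~a & (a + 1), matching __blcic_u32.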
define i32 @test__blcic_u32(i32 %a0) {
; X86-LABEL: test__blcic_u32:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    movl %eax, %ecx
; X86-NEXT:    xorl $-1, %ecx
; X86-NEXT:    addl $1, %eax
; X86-NEXT:    andl %ecx, %eax
; X86-NEXT:    retl
;
; X64-LABEL: test__blcic_u32:
; X64:       # %bb.0:
; X64-NEXT:    # kill: def $edi killed $edi def $rdi
; X64-NEXT:    leal 1(%rdi), %eax
; X64-NEXT:    movl %edi, %ecx
; X64-NEXT:    xorl $-1, %ecx
; X64-NEXT:    andl %ecx, %eax
; X64-NEXT:    retq
  %1 = xor i32 %a0, -1
  %2 = add i32 %a0, 1
  %3 = and i32 %1, %2
  ret i32 %3
}

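; BLCMSK (mask from lowest clear bit): a ^ (a + 1), matching __blcmsk_u32.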
define i32 @test__blcmsk_u32(i32 %a0) {
; X86-LABEL: test__blcmsk_u32:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT:    leal 1(%ecx), %eax
; X86-NEXT:    xorl %ecx, %eax
; X86-NEXT:    retl
;
; X64-LABEL: test__blcmsk_u32:
; X64:       # %bb.0:
; X64-NEXT:    # kill: def $edi killed $edi def $rdi
; X64-NEXT:    leal 1(%rdi), %eax
; X64-NEXT:    xorl %edi, %eax
; X64-NEXT:    retq
  %1 = add i32 %a0, 1
  %2 = xor i32 %a0, %1
  ret i32 %2
}

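; BLCS (set lowest clear bit): a | (a + 1), matching __blcs_u32.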
define i32 @test__blcs_u32(i32 %a0) {
; X86-LABEL: test__blcs_u32:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT:    leal 1(%ecx), %eax
; X86-NEXT:    orl %ecx, %eax
; X86-NEXT:    retl
;
; X64-LABEL: test__blcs_u32:
; X64:       # %bb.0:
; X64-NEXT:    # kill: def $edi killed $edi def $rdi
; X64-NEXT:    leal 1(%rdi), %eax
; X64-NEXT:    orl %edi, %eax
; X64-NEXT:    retq
  %1 = add i32 %a0, 1
  %2 = or i32 %a0, %1
  ret i32 %2
}

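; BLSFILL (fill from lowest set bit): a | (a - 1), matching __blsfill_u32.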
define i32 @test__blsfill_u32(i32 %a0) {
; X86-LABEL: test__blsfill_u32:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT:    leal -1(%ecx), %eax
; X86-NEXT:    orl %ecx, %eax
; X86-NEXT:    retl
;
; X64-LABEL: test__blsfill_u32:
; X64:       # %bb.0:
; X64-NEXT:    # kill: def $edi killed $edi def $rdi
; X64-NEXT:    leal -1(%rdi), %eax
; X64-NEXT:    orl %edi, %eax
; X64-NEXT:    retq
  %1 = sub i32 %a0, 1
  %2 = or i32 %a0, %1
  ret i32 %2
}

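; BLSIC (isolate lowest set bit and complement): ~a | (a - 1), matching __blsic_u32.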
define i32 @test__blsic_u32(i32 %a0) {
; X86-LABEL: test__blsic_u32:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    movl %eax, %ecx
; X86-NEXT:    xorl $-1, %ecx
; X86-NEXT:    subl $1, %eax
; X86-NEXT:    orl %ecx, %eax
; X86-NEXT:    retl
;
; X64-LABEL: test__blsic_u32:
; X64:       # %bb.0:
; X64-NEXT:    # kill: def $edi killed $edi def $rdi
; X64-NEXT:    leal -1(%rdi), %eax
; X64-NEXT:    movl %edi, %ecx
; X64-NEXT:    xorl $-1, %ecx
; X64-NEXT:    orl %ecx, %eax
; X64-NEXT:    retq
  %1 = xor i32 %a0, -1
  %2 = sub i32 %a0, 1
  %3 = or i32 %1, %2
  ret i32 %3
}

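; T1MSKC (inverse mask from trailing ones): ~a | (a + 1), matching __t1mskc_u32.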
define i32 @test__t1mskc_u32(i32 %a0) {
; X86-LABEL: test__t1mskc_u32:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    movl %eax, %ecx
; X86-NEXT:    xorl $-1, %ecx
; X86-NEXT:    addl $1, %eax
; X86-NEXT:    orl %ecx, %eax
; X86-NEXT:    retl
;
; X64-LABEL: test__t1mskc_u32:
; X64:       # %bb.0:
; X64-NEXT:    # kill: def $edi killed $edi def $rdi
; X64-NEXT:    leal 1(%rdi), %eax
; X64-NEXT:    movl %edi, %ecx
; X64-NEXT:    xorl $-1, %ecx
; X64-NEXT:    orl %ecx, %eax
; X64-NEXT:    retq
  %1 = xor i32 %a0, -1
  %2 = add i32 %a0, 1
  %3 = or i32 %1, %2
  ret i32 %3
}

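; TZMSK (mask from trailing zeros): ~a & (a - 1), matching __tzmsk_u32.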
define i32 @test__tzmsk_u32(i32 %a0) {
; X86-LABEL: test__tzmsk_u32:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    movl %eax, %ecx
; X86-NEXT:    xorl $-1, %ecx
; X86-NEXT:    subl $1, %eax
; X86-NEXT:    andl %ecx, %eax
; X86-NEXT:    retl
;
; X64-LABEL: test__tzmsk_u32:
; X64:       # %bb.0:
; X64-NEXT:    # kill: def $edi killed $edi def $rdi
; X64-NEXT:    leal -1(%rdi), %eax
; X64-NEXT:    movl %edi, %ecx
; X64-NEXT:    xorl $-1, %ecx
; X64-NEXT:    andl %ecx, %eax
; X64-NEXT:    retq
  %1 = xor i32 %a0, -1
  %2 = sub i32 %a0, 1
  %3 = and i32 %1, %2
  ret i32 %3
}

declare i32 @llvm.x86.tbm.bextri.u32(i32, i32)