; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py UTC_ARGS: --version 4
; RUN: llc < %s -mtriple=i686-unknown-unknown | FileCheck %s --check-prefixes=X86,X86-NOCMOV
; RUN: llc < %s -mtriple=i686-unknown-unknown -mattr=+cmov | FileCheck %s --check-prefixes=X86,X86-CMOV
; RUN: llc < %s -mtriple=x86_64-unknown-unknown | FileCheck %s --check-prefix=X64
; RUN: llc < %s -mtriple=i686-unknown-unknown -mattr=+bmi,+lzcnt | FileCheck %s --check-prefix=X86-CLZ
; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+bmi,+lzcnt | FileCheck %s --check-prefix=X64-CLZ
; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+bmi,+lzcnt,+fast-lzcnt | FileCheck %s --check-prefix=X64-FASTLZCNT
; RUN: llc < %s -mtriple=i686-unknown-unknown -mattr=+bmi,+lzcnt,+fast-lzcnt | FileCheck %s --check-prefix=X86-FASTLZCNT

declare i8 @llvm.ctlz.i8(i8, i1)
declare i16 @llvm.ctlz.i16(i16, i1)
declare i32 @llvm.ctlz.i32(i32, i1)
declare i64 @llvm.ctlz.i64(i64, i1)

define i8 @ctlz_i8(i8 %x) {
; X86-LABEL: ctlz_i8:
; X86: # %bb.0:
; X86-NEXT: movzbl {{[0-9]+}}(%esp), %eax
; X86-NEXT: bsrl %eax, %eax
; X86-NEXT: xorl $7, %eax
; X86-NEXT: # kill: def $al killed $al killed $eax
; X86-NEXT: retl
;
; X64-LABEL: ctlz_i8:
; X64: # %bb.0:
; X64-NEXT: movzbl %dil, %eax
; X64-NEXT: bsrl %eax, %eax
; X64-NEXT: xorl $7, %eax
; X64-NEXT: # kill: def $al killed $al killed $eax
; X64-NEXT: retq
;
; X86-CLZ-LABEL: ctlz_i8:
; X86-CLZ: # %bb.0:
; X86-CLZ-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-CLZ-NEXT: shll $24, %eax
; X86-CLZ-NEXT: lzcntl %eax, %eax
; X86-CLZ-NEXT: # kill: def $al killed $al killed $eax
; X86-CLZ-NEXT: retl
;
; X64-CLZ-LABEL: ctlz_i8:
; X64-CLZ: # %bb.0:
; X64-CLZ-NEXT: shll $24, %edi
; X64-CLZ-NEXT: lzcntl %edi, %eax
; X64-CLZ-NEXT: # kill: def $al killed $al killed $eax
; X64-CLZ-NEXT: retq
;
; X64-FASTLZCNT-LABEL: ctlz_i8:
; X64-FASTLZCNT: # %bb.0:
; X64-FASTLZCNT-NEXT: shll $24, %edi
; X64-FASTLZCNT-NEXT: lzcntl %edi, %eax
; X64-FASTLZCNT-NEXT: # kill: def $al killed $al killed $eax
; X64-FASTLZCNT-NEXT: retq
;
; X86-FASTLZCNT-LABEL: ctlz_i8:
; X86-FASTLZCNT: # %bb.0:
; X86-FASTLZCNT-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-FASTLZCNT-NEXT: shll $24, %eax
; X86-FASTLZCNT-NEXT: lzcntl %eax, %eax
; X86-FASTLZCNT-NEXT: # kill: def $al killed $al killed $eax
; X86-FASTLZCNT-NEXT: retl
%tmp2 = call i8 @llvm.ctlz.i8( i8 %x, i1 true )
ret i8 %tmp2
}

define i16 @ctlz_i16(i16 %x) {
; X86-LABEL: ctlz_i16:
; X86: # %bb.0:
; X86-NEXT: bsrw {{[0-9]+}}(%esp), %ax
; X86-NEXT: xorl $15, %eax
; X86-NEXT: # kill: def $ax killed $ax killed $eax
; X86-NEXT: retl
;
; X64-LABEL: ctlz_i16:
; X64: # %bb.0:
; X64-NEXT: bsrw %di, %ax
; X64-NEXT: xorl $15, %eax
; X64-NEXT: # kill: def $ax killed $ax killed $eax
; X64-NEXT: retq
;
; X86-CLZ-LABEL: ctlz_i16:
; X86-CLZ: # %bb.0:
; X86-CLZ-NEXT: lzcntw {{[0-9]+}}(%esp), %ax
; X86-CLZ-NEXT: retl
;
; X64-CLZ-LABEL: ctlz_i16:
; X64-CLZ: # %bb.0:
; X64-CLZ-NEXT: lzcntw %di, %ax
; X64-CLZ-NEXT: retq
;
; X64-FASTLZCNT-LABEL: ctlz_i16:
; X64-FASTLZCNT: # %bb.0:
; X64-FASTLZCNT-NEXT: lzcntw %di, %ax
; X64-FASTLZCNT-NEXT: retq
;
; X86-FASTLZCNT-LABEL: ctlz_i16:
; X86-FASTLZCNT: # %bb.0:
; X86-FASTLZCNT-NEXT: lzcntw {{[0-9]+}}(%esp), %ax
; X86-FASTLZCNT-NEXT: retl
%tmp2 = call i16 @llvm.ctlz.i16( i16 %x, i1 true )
ret i16 %tmp2
}

define i32 @ctlz_i32(i32 %x) {
; X86-LABEL: ctlz_i32:
; X86: # %bb.0:
; X86-NEXT: bsrl {{[0-9]+}}(%esp), %eax
; X86-NEXT: xorl $31, %eax
; X86-NEXT: retl
;
; X64-LABEL: ctlz_i32:
; X64: # %bb.0:
; X64-NEXT: bsrl %edi, %eax
; X64-NEXT: xorl $31, %eax
; X64-NEXT: retq
;
; X86-CLZ-LABEL: ctlz_i32:
; X86-CLZ: # %bb.0:
; X86-CLZ-NEXT: lzcntl {{[0-9]+}}(%esp), %eax
; X86-CLZ-NEXT: retl
;
; X64-CLZ-LABEL: ctlz_i32:
; X64-CLZ: # %bb.0:
; X64-CLZ-NEXT: lzcntl %edi, %eax
; X64-CLZ-NEXT: retq
;
; X64-FASTLZCNT-LABEL: ctlz_i32:
; X64-FASTLZCNT: # %bb.0:
; X64-FASTLZCNT-NEXT: lzcntl %edi, %eax
; X64-FASTLZCNT-NEXT: retq
;
; X86-FASTLZCNT-LABEL: ctlz_i32:
; X86-FASTLZCNT: # %bb.0:
; X86-FASTLZCNT-NEXT: lzcntl {{[0-9]+}}(%esp), %eax
; X86-FASTLZCNT-NEXT: retl
%tmp = call i32 @llvm.ctlz.i32( i32 %x, i1 true )
ret i32 %tmp
}

define i64 @ctlz_i64(i64 %x) {
; X86-NOCMOV-LABEL: ctlz_i64:
; X86-NOCMOV: # %bb.0:
; X86-NOCMOV-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NOCMOV-NEXT: testl %eax, %eax
; X86-NOCMOV-NEXT: jne .LBB3_1
; X86-NOCMOV-NEXT: # %bb.2:
; X86-NOCMOV-NEXT: bsrl {{[0-9]+}}(%esp), %eax
; X86-NOCMOV-NEXT: xorl $31, %eax
; X86-NOCMOV-NEXT: orl $32, %eax
; X86-NOCMOV-NEXT: xorl %edx, %edx
; X86-NOCMOV-NEXT: retl
; X86-NOCMOV-NEXT: .LBB3_1:
; X86-NOCMOV-NEXT: bsrl %eax, %eax
; X86-NOCMOV-NEXT: xorl $31, %eax
; X86-NOCMOV-NEXT: xorl %edx, %edx
; X86-NOCMOV-NEXT: retl
;
; X86-CMOV-LABEL: ctlz_i64:
; X86-CMOV: # %bb.0:
; X86-CMOV-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-CMOV-NEXT: bsrl %ecx, %edx
; X86-CMOV-NEXT: xorl $31, %edx
; X86-CMOV-NEXT: bsrl {{[0-9]+}}(%esp), %eax
; X86-CMOV-NEXT: xorl $31, %eax
; X86-CMOV-NEXT: orl $32, %eax
; X86-CMOV-NEXT: testl %ecx, %ecx
; X86-CMOV-NEXT: cmovnel %edx, %eax
; X86-CMOV-NEXT: xorl %edx, %edx
; X86-CMOV-NEXT: retl
;
; X64-LABEL: ctlz_i64:
; X64: # %bb.0:
; X64-NEXT: bsrq %rdi, %rax
; X64-NEXT: xorq $63, %rax
; X64-NEXT: retq
;
; X86-CLZ-LABEL: ctlz_i64:
; X86-CLZ: # %bb.0:
; X86-CLZ-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-CLZ-NEXT: testl %eax, %eax
; X86-CLZ-NEXT: jne .LBB3_1
; X86-CLZ-NEXT: # %bb.2:
; X86-CLZ-NEXT: lzcntl {{[0-9]+}}(%esp), %eax
; X86-CLZ-NEXT: addl $32, %eax
; X86-CLZ-NEXT: xorl %edx, %edx
; X86-CLZ-NEXT: retl
; X86-CLZ-NEXT: .LBB3_1:
; X86-CLZ-NEXT: lzcntl %eax, %eax
; X86-CLZ-NEXT: xorl %edx, %edx
; X86-CLZ-NEXT: retl
;
; X64-CLZ-LABEL: ctlz_i64:
; X64-CLZ: # %bb.0:
; X64-CLZ-NEXT: lzcntq %rdi, %rax
; X64-CLZ-NEXT: retq
;
; X64-FASTLZCNT-LABEL: ctlz_i64:
; X64-FASTLZCNT: # %bb.0:
; X64-FASTLZCNT-NEXT: lzcntq %rdi, %rax
; X64-FASTLZCNT-NEXT: retq
;
; X86-FASTLZCNT-LABEL: ctlz_i64:
; X86-FASTLZCNT: # %bb.0:
; X86-FASTLZCNT-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-FASTLZCNT-NEXT: testl %eax, %eax
; X86-FASTLZCNT-NEXT: jne .LBB3_1
; X86-FASTLZCNT-NEXT: # %bb.2:
; X86-FASTLZCNT-NEXT: lzcntl {{[0-9]+}}(%esp), %eax
; X86-FASTLZCNT-NEXT: addl $32, %eax
; X86-FASTLZCNT-NEXT: xorl %edx, %edx
; X86-FASTLZCNT-NEXT: retl
; X86-FASTLZCNT-NEXT: .LBB3_1:
; X86-FASTLZCNT-NEXT: lzcntl %eax, %eax
; X86-FASTLZCNT-NEXT: xorl %edx, %edx
; X86-FASTLZCNT-NEXT: retl
%tmp = call i64 @llvm.ctlz.i64( i64 %x, i1 true )
ret i64 %tmp
}

; Generate a test and branch to handle zero inputs because bsr/bsf are very slow.
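; A rough C sketch of the expansion exercised by the "zero_test" functions below
; (illustrative only, not part of the generated checks; shown for the i32 case):
;   unsigned clz32(unsigned x) { return x == 0 ? 32 : (unsigned)__builtin_clz(x); }
; With cmov available the `x == 0` guard becomes a conditional move; without
; cmov it becomes the test/je branch seen in the checks below.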
define i8 @ctlz_i8_zero_test(i8 %n) {
; X86-NOCMOV-LABEL: ctlz_i8_zero_test:
; X86-NOCMOV: # %bb.0:
; X86-NOCMOV-NEXT: movzbl {{[0-9]+}}(%esp), %eax
; X86-NOCMOV-NEXT: testb %al, %al
; X86-NOCMOV-NEXT: je .LBB4_1
; X86-NOCMOV-NEXT: # %bb.2: # %cond.false
; X86-NOCMOV-NEXT: movzbl %al, %eax
; X86-NOCMOV-NEXT: bsrl %eax, %eax
; X86-NOCMOV-NEXT: xorl $7, %eax
; X86-NOCMOV-NEXT: # kill: def $al killed $al killed $eax
; X86-NOCMOV-NEXT: retl
; X86-NOCMOV-NEXT: .LBB4_1:
; X86-NOCMOV-NEXT: movb $8, %al
; X86-NOCMOV-NEXT: # kill: def $al killed $al killed $eax
; X86-NOCMOV-NEXT: retl
;
; X86-CMOV-LABEL: ctlz_i8_zero_test:
; X86-CMOV: # %bb.0:
; X86-CMOV-NEXT: movzbl {{[0-9]+}}(%esp), %eax
; X86-CMOV-NEXT: bsrl %eax, %ecx
; X86-CMOV-NEXT: movl $15, %eax
; X86-CMOV-NEXT: cmovnel %ecx, %eax
; X86-CMOV-NEXT: xorl $7, %eax
; X86-CMOV-NEXT: # kill: def $al killed $al killed $eax
; X86-CMOV-NEXT: retl
;
; X64-LABEL: ctlz_i8_zero_test:
; X64: # %bb.0:
; X64-NEXT: movzbl %dil, %eax
; X64-NEXT: bsrl %eax, %ecx
; X64-NEXT: movl $15, %eax
; X64-NEXT: cmovnel %ecx, %eax
; X64-NEXT: xorl $7, %eax
; X64-NEXT: # kill: def $al killed $al killed $eax
; X64-NEXT: retq
;
; X86-CLZ-LABEL: ctlz_i8_zero_test:
; X86-CLZ: # %bb.0:
; X86-CLZ-NEXT: movzbl {{[0-9]+}}(%esp), %eax
; X86-CLZ-NEXT: lzcntl %eax, %eax
; X86-CLZ-NEXT: addl $-24, %eax
; X86-CLZ-NEXT: # kill: def $al killed $al killed $eax
; X86-CLZ-NEXT: retl
;
; X64-CLZ-LABEL: ctlz_i8_zero_test:
; X64-CLZ: # %bb.0:
; X64-CLZ-NEXT: movzbl %dil, %eax
; X64-CLZ-NEXT: lzcntl %eax, %eax
; X64-CLZ-NEXT: addl $-24, %eax
; X64-CLZ-NEXT: # kill: def $al killed $al killed $eax
; X64-CLZ-NEXT: retq
;
; X64-FASTLZCNT-LABEL: ctlz_i8_zero_test:
; X64-FASTLZCNT: # %bb.0:
; X64-FASTLZCNT-NEXT: movzbl %dil, %eax
; X64-FASTLZCNT-NEXT: lzcntl %eax, %eax
; X64-FASTLZCNT-NEXT: addl $-24, %eax
; X64-FASTLZCNT-NEXT: # kill: def $al killed $al killed $eax
; X64-FASTLZCNT-NEXT: retq
;
; X86-FASTLZCNT-LABEL: ctlz_i8_zero_test:
; X86-FASTLZCNT: # %bb.0:
; X86-FASTLZCNT-NEXT: movzbl {{[0-9]+}}(%esp), %eax
; X86-FASTLZCNT-NEXT: lzcntl %eax, %eax
; X86-FASTLZCNT-NEXT: addl $-24, %eax
; X86-FASTLZCNT-NEXT: # kill: def $al killed $al killed $eax
; X86-FASTLZCNT-NEXT: retl
%tmp1 = call i8 @llvm.ctlz.i8(i8 %n, i1 false)
ret i8 %tmp1
}

; Generate a test and branch to handle zero inputs because bsr/bsf are very slow.
define i16 @ctlz_i16_zero_test(i16 %n) {
; X86-NOCMOV-LABEL: ctlz_i16_zero_test:
; X86-NOCMOV: # %bb.0:
; X86-NOCMOV-NEXT: movzwl {{[0-9]+}}(%esp), %eax
; X86-NOCMOV-NEXT: testw %ax, %ax
; X86-NOCMOV-NEXT: je .LBB5_1
; X86-NOCMOV-NEXT: # %bb.2: # %cond.false
; X86-NOCMOV-NEXT: bsrw %ax, %ax
; X86-NOCMOV-NEXT: xorl $15, %eax
; X86-NOCMOV-NEXT: # kill: def $ax killed $ax killed $eax
; X86-NOCMOV-NEXT: retl
; X86-NOCMOV-NEXT: .LBB5_1:
; X86-NOCMOV-NEXT: movw $16, %ax
; X86-NOCMOV-NEXT: # kill: def $ax killed $ax killed $eax
; X86-NOCMOV-NEXT: retl
;
; X86-CMOV-LABEL: ctlz_i16_zero_test:
; X86-CMOV: # %bb.0:
; X86-CMOV-NEXT: bsrw {{[0-9]+}}(%esp), %cx
; X86-CMOV-NEXT: movw $31, %ax
; X86-CMOV-NEXT: cmovnew %cx, %ax
; X86-CMOV-NEXT: xorl $15, %eax
; X86-CMOV-NEXT: # kill: def $ax killed $ax killed $eax
; X86-CMOV-NEXT: retl
;
; X64-LABEL: ctlz_i16_zero_test:
; X64: # %bb.0:
; X64-NEXT: bsrw %di, %cx
; X64-NEXT: movw $31, %ax
; X64-NEXT: cmovnew %cx, %ax
; X64-NEXT: xorl $15, %eax
; X64-NEXT: # kill: def $ax killed $ax killed $eax
; X64-NEXT: retq
;
; X86-CLZ-LABEL: ctlz_i16_zero_test:
; X86-CLZ: # %bb.0:
; X86-CLZ-NEXT: lzcntw {{[0-9]+}}(%esp), %ax
; X86-CLZ-NEXT: retl
;
; X64-CLZ-LABEL: ctlz_i16_zero_test:
; X64-CLZ: # %bb.0:
; X64-CLZ-NEXT: lzcntw %di, %ax
; X64-CLZ-NEXT: retq
;
; X64-FASTLZCNT-LABEL: ctlz_i16_zero_test:
; X64-FASTLZCNT: # %bb.0:
; X64-FASTLZCNT-NEXT: lzcntw %di, %ax
; X64-FASTLZCNT-NEXT: retq
;
; X86-FASTLZCNT-LABEL: ctlz_i16_zero_test:
; X86-FASTLZCNT: # %bb.0:
; X86-FASTLZCNT-NEXT: lzcntw {{[0-9]+}}(%esp), %ax
; X86-FASTLZCNT-NEXT: retl
%tmp1 = call i16 @llvm.ctlz.i16(i16 %n, i1 false)
ret i16 %tmp1
}

; Generate a test and branch to handle zero inputs because bsr/bsf are very slow.
define i32 @ctlz_i32_zero_test(i32 %n) {
; X86-NOCMOV-LABEL: ctlz_i32_zero_test:
; X86-NOCMOV: # %bb.0:
; X86-NOCMOV-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NOCMOV-NEXT: testl %eax, %eax
; X86-NOCMOV-NEXT: je .LBB6_1
; X86-NOCMOV-NEXT: # %bb.2: # %cond.false
; X86-NOCMOV-NEXT: bsrl %eax, %eax
; X86-NOCMOV-NEXT: xorl $31, %eax
; X86-NOCMOV-NEXT: retl
; X86-NOCMOV-NEXT: .LBB6_1:
; X86-NOCMOV-NEXT: movl $32, %eax
; X86-NOCMOV-NEXT: retl
;
; X86-CMOV-LABEL: ctlz_i32_zero_test:
; X86-CMOV: # %bb.0:
; X86-CMOV-NEXT: bsrl {{[0-9]+}}(%esp), %ecx
; X86-CMOV-NEXT: movl $63, %eax
; X86-CMOV-NEXT: cmovnel %ecx, %eax
; X86-CMOV-NEXT: xorl $31, %eax
; X86-CMOV-NEXT: retl
;
; X64-LABEL: ctlz_i32_zero_test:
; X64: # %bb.0:
; X64-NEXT: bsrl %edi, %ecx
; X64-NEXT: movl $63, %eax
; X64-NEXT: cmovnel %ecx, %eax
; X64-NEXT: xorl $31, %eax
; X64-NEXT: retq
;
; X86-CLZ-LABEL: ctlz_i32_zero_test:
; X86-CLZ: # %bb.0:
; X86-CLZ-NEXT: lzcntl {{[0-9]+}}(%esp), %eax
; X86-CLZ-NEXT: retl
;
; X64-CLZ-LABEL: ctlz_i32_zero_test:
; X64-CLZ: # %bb.0:
; X64-CLZ-NEXT: lzcntl %edi, %eax
; X64-CLZ-NEXT: retq
;
; X64-FASTLZCNT-LABEL: ctlz_i32_zero_test:
; X64-FASTLZCNT: # %bb.0:
; X64-FASTLZCNT-NEXT: lzcntl %edi, %eax
; X64-FASTLZCNT-NEXT: retq
;
; X86-FASTLZCNT-LABEL: ctlz_i32_zero_test:
; X86-FASTLZCNT: # %bb.0:
; X86-FASTLZCNT-NEXT: lzcntl {{[0-9]+}}(%esp), %eax
; X86-FASTLZCNT-NEXT: retl
%tmp1 = call i32 @llvm.ctlz.i32(i32 %n, i1 false)
ret i32 %tmp1
}

; Generate a test and branch to handle zero inputs because bsr/bsf are very slow.
define i64 @ctlz_i64_zero_test(i64 %n) {
; X86-NOCMOV-LABEL: ctlz_i64_zero_test:
; X86-NOCMOV: # %bb.0:
; X86-NOCMOV-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NOCMOV-NEXT: bsrl {{[0-9]+}}(%esp), %edx
; X86-NOCMOV-NEXT: movl $63, %eax
; X86-NOCMOV-NEXT: je .LBB7_2
; X86-NOCMOV-NEXT: # %bb.1:
; X86-NOCMOV-NEXT: movl %edx, %eax
; X86-NOCMOV-NEXT: .LBB7_2:
; X86-NOCMOV-NEXT: testl %ecx, %ecx
; X86-NOCMOV-NEXT: jne .LBB7_3
; X86-NOCMOV-NEXT: # %bb.4:
; X86-NOCMOV-NEXT: xorl $31, %eax
; X86-NOCMOV-NEXT: addl $32, %eax
; X86-NOCMOV-NEXT: xorl %edx, %edx
; X86-NOCMOV-NEXT: retl
; X86-NOCMOV-NEXT: .LBB7_3:
; X86-NOCMOV-NEXT: bsrl %ecx, %eax
; X86-NOCMOV-NEXT: xorl $31, %eax
; X86-NOCMOV-NEXT: xorl %edx, %edx
; X86-NOCMOV-NEXT: retl
;
; X86-CMOV-LABEL: ctlz_i64_zero_test:
; X86-CMOV: # %bb.0:
; X86-CMOV-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-CMOV-NEXT: bsrl {{[0-9]+}}(%esp), %eax
; X86-CMOV-NEXT: movl $63, %edx
; X86-CMOV-NEXT: cmovnel %eax, %edx
; X86-CMOV-NEXT: xorl $31, %edx
; X86-CMOV-NEXT: addl $32, %edx
; X86-CMOV-NEXT: bsrl %ecx, %eax
; X86-CMOV-NEXT: xorl $31, %eax
; X86-CMOV-NEXT: testl %ecx, %ecx
; X86-CMOV-NEXT: cmovel %edx, %eax
; X86-CMOV-NEXT: xorl %edx, %edx
; X86-CMOV-NEXT: retl
;
; X64-LABEL: ctlz_i64_zero_test:
; X64: # %bb.0:
; X64-NEXT: bsrq %rdi, %rcx
; X64-NEXT: movl $127, %eax
; X64-NEXT: cmovneq %rcx, %rax
; X64-NEXT: xorq $63, %rax
; X64-NEXT: retq
;
; X86-CLZ-LABEL: ctlz_i64_zero_test:
; X86-CLZ: # %bb.0:
; X86-CLZ-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-CLZ-NEXT: testl %eax, %eax
; X86-CLZ-NEXT: jne .LBB7_1
; X86-CLZ-NEXT: # %bb.2:
; X86-CLZ-NEXT: lzcntl {{[0-9]+}}(%esp), %eax
; X86-CLZ-NEXT: addl $32, %eax
; X86-CLZ-NEXT: xorl %edx, %edx
; X86-CLZ-NEXT: retl
; X86-CLZ-NEXT: .LBB7_1:
; X86-CLZ-NEXT: lzcntl %eax, %eax
; X86-CLZ-NEXT: xorl %edx, %edx
; X86-CLZ-NEXT: retl
;
; X64-CLZ-LABEL: ctlz_i64_zero_test:
; X64-CLZ: # %bb.0:
; X64-CLZ-NEXT: lzcntq %rdi, %rax
; X64-CLZ-NEXT: retq
;
; X64-FASTLZCNT-LABEL: ctlz_i64_zero_test:
; X64-FASTLZCNT: # %bb.0:
; X64-FASTLZCNT-NEXT: lzcntq %rdi, %rax
; X64-FASTLZCNT-NEXT: retq
;
; X86-FASTLZCNT-LABEL: ctlz_i64_zero_test:
; X86-FASTLZCNT: # %bb.0:
; X86-FASTLZCNT-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-FASTLZCNT-NEXT: testl %eax, %eax
; X86-FASTLZCNT-NEXT: jne .LBB7_1
; X86-FASTLZCNT-NEXT: # %bb.2:
; X86-FASTLZCNT-NEXT: lzcntl {{[0-9]+}}(%esp), %eax
; X86-FASTLZCNT-NEXT: addl $32, %eax
; X86-FASTLZCNT-NEXT: xorl %edx, %edx
; X86-FASTLZCNT-NEXT: retl
; X86-FASTLZCNT-NEXT: .LBB7_1:
; X86-FASTLZCNT-NEXT: lzcntl %eax, %eax
; X86-FASTLZCNT-NEXT: xorl %edx, %edx
; X86-FASTLZCNT-NEXT: retl
%tmp1 = call i64 @llvm.ctlz.i64(i64 %n, i1 false)
ret i64 %tmp1
}

; Don't generate the cmovne when the source is known non-zero (and bsr would
; not set ZF).
; rdar://9490949
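; Informal reasoning (not from the generated checks): because (n | 1) is known
; non-zero, the zero arm of the ctlz expansion is dead, so the result is simply
;   clz(n | 1) == bsr(n | 1) ^ 31
; and no cmovne guarding a possibly-undefined bsr result is required.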
define i32 @ctlz_i32_fold_cmov(i32 %n) {
; X86-LABEL: ctlz_i32_fold_cmov:
; X86: # %bb.0:
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: orl $1, %eax
; X86-NEXT: bsrl %eax, %eax
; X86-NEXT: xorl $31, %eax
; X86-NEXT: retl
;
; X64-LABEL: ctlz_i32_fold_cmov:
; X64: # %bb.0:
; X64-NEXT: orl $1, %edi
; X64-NEXT: bsrl %edi, %eax
; X64-NEXT: xorl $31, %eax
; X64-NEXT: retq
;
; X86-CLZ-LABEL: ctlz_i32_fold_cmov:
; X86-CLZ: # %bb.0:
; X86-CLZ-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-CLZ-NEXT: orl $1, %eax
; X86-CLZ-NEXT: lzcntl %eax, %eax
; X86-CLZ-NEXT: retl
;
; X64-CLZ-LABEL: ctlz_i32_fold_cmov:
; X64-CLZ: # %bb.0:
; X64-CLZ-NEXT: orl $1, %edi
; X64-CLZ-NEXT: lzcntl %edi, %eax
; X64-CLZ-NEXT: retq
;
; X64-FASTLZCNT-LABEL: ctlz_i32_fold_cmov:
; X64-FASTLZCNT: # %bb.0:
; X64-FASTLZCNT-NEXT: orl $1, %edi
; X64-FASTLZCNT-NEXT: lzcntl %edi, %eax
; X64-FASTLZCNT-NEXT: retq
;
; X86-FASTLZCNT-LABEL: ctlz_i32_fold_cmov:
; X86-FASTLZCNT: # %bb.0:
; X86-FASTLZCNT-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-FASTLZCNT-NEXT: orl $1, %eax
; X86-FASTLZCNT-NEXT: lzcntl %eax, %eax
; X86-FASTLZCNT-NEXT: retl
%or = or i32 %n, 1
%tmp1 = call i32 @llvm.ctlz.i32(i32 %or, i1 false)
ret i32 %tmp1
}

; Don't generate any xors when a 'ctlz' intrinsic is actually used to compute
; the most significant bit, which is what 'bsr' does natively.
; NOTE: We intentionally don't select `bsr` when `fast-lzcnt` is
; available. This is because 1) `bsr` has some drawbacks, including a
; dependency on dst, 2) it has very poor performance on some of the
; `fast-lzcnt` processors, and 3) `lzcnt` runs at ALU latency/throughput,
; so `lzcnt` + `xor` still has better throughput than even a 1-uop
; (1c latency, 1c throughput) `bsr`.
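; The rewrite relies on the identity (informal notation, valid for any non-zero
; i32 n, since both sides are 5-bit values):
;   bsr(n) == 31 - ctlz32(n) == ctlz32(n) ^ 31
; so the `xor $31` either folds into `bsr` or stays as a cheap op after `lzcnt`.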
define i32 @ctlz_bsr(i32 %n) {
; X86-LABEL: ctlz_bsr:
; X86: # %bb.0:
; X86-NEXT: bsrl {{[0-9]+}}(%esp), %eax
; X86-NEXT: retl
;
; X64-LABEL: ctlz_bsr:
; X64: # %bb.0:
; X64-NEXT: bsrl %edi, %eax
; X64-NEXT: retq
;
; X86-CLZ-LABEL: ctlz_bsr:
; X86-CLZ: # %bb.0:
; X86-CLZ-NEXT: bsrl {{[0-9]+}}(%esp), %eax
; X86-CLZ-NEXT: retl
;
; X64-CLZ-LABEL: ctlz_bsr:
; X64-CLZ: # %bb.0:
; X64-CLZ-NEXT: bsrl %edi, %eax
; X64-CLZ-NEXT: retq
;
; X64-FASTLZCNT-LABEL: ctlz_bsr:
; X64-FASTLZCNT: # %bb.0:
; X64-FASTLZCNT-NEXT: lzcntl %edi, %eax
; X64-FASTLZCNT-NEXT: xorl $31, %eax
; X64-FASTLZCNT-NEXT: retq
;
; X86-FASTLZCNT-LABEL: ctlz_bsr:
; X86-FASTLZCNT: # %bb.0:
; X86-FASTLZCNT-NEXT: lzcntl {{[0-9]+}}(%esp), %eax
; X86-FASTLZCNT-NEXT: xorl $31, %eax
; X86-FASTLZCNT-NEXT: retl
%ctlz = call i32 @llvm.ctlz.i32(i32 %n, i1 true)
%bsr = xor i32 %ctlz, 31
ret i32 %bsr
}

; Generate a test and branch to handle zero inputs because bsr/bsf are very slow.
; FIXME: The compare and branch are produced late in IR (by CodeGenPrepare), and
; codegen doesn't know how to combine the $32 and $31 into $63.
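; Worked constants for the missed fold: on the no-cmov zero path the expansion
; emits `movl $32, %eax` followed by `xorl $31, %eax`, and since 32 ^ 31 == 63
; this could be a single `movl $63, %eax`, which is what the cmov-based
; variants already materialize.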
define i32 @ctlz_bsr_zero_test(i32 %n) {
; X86-NOCMOV-LABEL: ctlz_bsr_zero_test:
; X86-NOCMOV: # %bb.0:
; X86-NOCMOV-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NOCMOV-NEXT: testl %eax, %eax
; X86-NOCMOV-NEXT: je .LBB10_1
; X86-NOCMOV-NEXT: # %bb.2: # %cond.false
; X86-NOCMOV-NEXT: bsrl %eax, %eax
; X86-NOCMOV-NEXT: xorl $31, %eax
; X86-NOCMOV-NEXT: xorl $31, %eax
; X86-NOCMOV-NEXT: retl
; X86-NOCMOV-NEXT: .LBB10_1:
; X86-NOCMOV-NEXT: movl $32, %eax
; X86-NOCMOV-NEXT: xorl $31, %eax
; X86-NOCMOV-NEXT: retl
;
; X86-CMOV-LABEL: ctlz_bsr_zero_test:
; X86-CMOV: # %bb.0:
; X86-CMOV-NEXT: bsrl {{[0-9]+}}(%esp), %ecx
; X86-CMOV-NEXT: movl $63, %eax
; X86-CMOV-NEXT: cmovnel %ecx, %eax
; X86-CMOV-NEXT: retl
;
; X64-LABEL: ctlz_bsr_zero_test:
; X64: # %bb.0:
; X64-NEXT: bsrl %edi, %ecx
; X64-NEXT: movl $63, %eax
; X64-NEXT: cmovnel %ecx, %eax
; X64-NEXT: retq
;
; X86-CLZ-LABEL: ctlz_bsr_zero_test:
; X86-CLZ: # %bb.0:
; X86-CLZ-NEXT: lzcntl {{[0-9]+}}(%esp), %eax
; X86-CLZ-NEXT: xorl $31, %eax
; X86-CLZ-NEXT: retl
;
; X64-CLZ-LABEL: ctlz_bsr_zero_test:
; X64-CLZ: # %bb.0:
; X64-CLZ-NEXT: lzcntl %edi, %eax
; X64-CLZ-NEXT: xorl $31, %eax
; X64-CLZ-NEXT: retq
;
; X64-FASTLZCNT-LABEL: ctlz_bsr_zero_test:
; X64-FASTLZCNT: # %bb.0:
; X64-FASTLZCNT-NEXT: lzcntl %edi, %eax
; X64-FASTLZCNT-NEXT: xorl $31, %eax
; X64-FASTLZCNT-NEXT: retq
;
; X86-FASTLZCNT-LABEL: ctlz_bsr_zero_test:
; X86-FASTLZCNT: # %bb.0:
; X86-FASTLZCNT-NEXT: lzcntl {{[0-9]+}}(%esp), %eax
; X86-FASTLZCNT-NEXT: xorl $31, %eax
; X86-FASTLZCNT-NEXT: retl
%ctlz = call i32 @llvm.ctlz.i32(i32 %n, i1 false)
%bsr = xor i32 %ctlz, 31
ret i32 %bsr
}

define i8 @ctlz_i8_knownbits(i8 %x) {
; X86-LABEL: ctlz_i8_knownbits:
; X86: # %bb.0:
; X86-NEXT: movzbl {{[0-9]+}}(%esp), %eax
; X86-NEXT: orb $64, %al
; X86-NEXT: movzbl %al, %eax
; X86-NEXT: bsrl %eax, %eax
; X86-NEXT: xorl $7, %eax
; X86-NEXT: # kill: def $al killed $al killed $eax
; X86-NEXT: retl
;
; X64-LABEL: ctlz_i8_knownbits:
; X64: # %bb.0:
; X64-NEXT: orb $64, %dil
; X64-NEXT: movzbl %dil, %eax
; X64-NEXT: bsrl %eax, %eax
; X64-NEXT: xorl $7, %eax
; X64-NEXT: # kill: def $al killed $al killed $eax
; X64-NEXT: retq
;
; X86-CLZ-LABEL: ctlz_i8_knownbits:
; X86-CLZ: # %bb.0:
; X86-CLZ-NEXT: movzbl {{[0-9]+}}(%esp), %eax
; X86-CLZ-NEXT: orb $64, %al
; X86-CLZ-NEXT: movzbl %al, %eax
; X86-CLZ-NEXT: shll $24, %eax
; X86-CLZ-NEXT: lzcntl %eax, %eax
; X86-CLZ-NEXT: # kill: def $al killed $al killed $eax
; X86-CLZ-NEXT: retl
;
; X64-CLZ-LABEL: ctlz_i8_knownbits:
; X64-CLZ: # %bb.0:
; X64-CLZ-NEXT: orb $64, %dil
; X64-CLZ-NEXT: movzbl %dil, %eax
; X64-CLZ-NEXT: shll $24, %eax
; X64-CLZ-NEXT: lzcntl %eax, %eax
; X64-CLZ-NEXT: # kill: def $al killed $al killed $eax
; X64-CLZ-NEXT: retq
;
; X64-FASTLZCNT-LABEL: ctlz_i8_knownbits:
; X64-FASTLZCNT: # %bb.0:
; X64-FASTLZCNT-NEXT: orb $64, %dil
; X64-FASTLZCNT-NEXT: movzbl %dil, %eax
; X64-FASTLZCNT-NEXT: shll $24, %eax
; X64-FASTLZCNT-NEXT: lzcntl %eax, %eax
; X64-FASTLZCNT-NEXT: # kill: def $al killed $al killed $eax
; X64-FASTLZCNT-NEXT: retq
;
; X86-FASTLZCNT-LABEL: ctlz_i8_knownbits:
; X86-FASTLZCNT: # %bb.0:
; X86-FASTLZCNT-NEXT: movzbl {{[0-9]+}}(%esp), %eax
; X86-FASTLZCNT-NEXT: orb $64, %al
; X86-FASTLZCNT-NEXT: movzbl %al, %eax
; X86-FASTLZCNT-NEXT: shll $24, %eax
; X86-FASTLZCNT-NEXT: lzcntl %eax, %eax
; X86-FASTLZCNT-NEXT: # kill: def $al killed $al killed $eax
; X86-FASTLZCNT-NEXT: retl
%x2 = or i8 %x, 64
%tmp = call i8 @llvm.ctlz.i8(i8 %x2, i1 true )
%tmp2 = and i8 %tmp, 1
ret i8 %tmp2
}

; Make sure we can detect that the input is non-zero and avoid the cmov after BSR.
; This is relevant for 32-bit mode without lzcnt.
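; Informal sketch of the 32-bit split being checked (not from the checks):
;   ctlz64(x) = hi(x) != 0 ? ctlz32(hi(x)) : 32 + ctlz32(lo(x))
; Because the low word is or'ed with 1 it can never be zero, so the low-word
; bsr needs no zero guard; the remaining test/cmov only selects between the
; high-word and low-word results.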
define i64 @ctlz_i64_zero_test_knownneverzero(i64 %n) {
; X86-NOCMOV-LABEL: ctlz_i64_zero_test_knownneverzero:
; X86-NOCMOV: # %bb.0:
; X86-NOCMOV-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NOCMOV-NEXT: testl %eax, %eax
; X86-NOCMOV-NEXT: jne .LBB12_1
; X86-NOCMOV-NEXT: # %bb.2:
; X86-NOCMOV-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NOCMOV-NEXT: orl $1, %eax
; X86-NOCMOV-NEXT: bsrl %eax, %eax
; X86-NOCMOV-NEXT: xorl $31, %eax
; X86-NOCMOV-NEXT: orl $32, %eax
; X86-NOCMOV-NEXT: xorl %edx, %edx
; X86-NOCMOV-NEXT: retl
; X86-NOCMOV-NEXT: .LBB12_1:
; X86-NOCMOV-NEXT: bsrl %eax, %eax
; X86-NOCMOV-NEXT: xorl $31, %eax
; X86-NOCMOV-NEXT: xorl %edx, %edx
; X86-NOCMOV-NEXT: retl
;
; X86-CMOV-LABEL: ctlz_i64_zero_test_knownneverzero:
; X86-CMOV: # %bb.0:
; X86-CMOV-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-CMOV-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-CMOV-NEXT: orl $1, %eax
; X86-CMOV-NEXT: bsrl %ecx, %edx
; X86-CMOV-NEXT: xorl $31, %edx
; X86-CMOV-NEXT: bsrl %eax, %eax
; X86-CMOV-NEXT: xorl $31, %eax
; X86-CMOV-NEXT: orl $32, %eax
; X86-CMOV-NEXT: testl %ecx, %ecx
; X86-CMOV-NEXT: cmovnel %edx, %eax
; X86-CMOV-NEXT: xorl %edx, %edx
; X86-CMOV-NEXT: retl
;
; X64-LABEL: ctlz_i64_zero_test_knownneverzero:
; X64: # %bb.0:
; X64-NEXT: orq $1, %rdi
; X64-NEXT: bsrq %rdi, %rax
; X64-NEXT: xorq $63, %rax
; X64-NEXT: retq
;
; X86-CLZ-LABEL: ctlz_i64_zero_test_knownneverzero:
; X86-CLZ: # %bb.0:
; X86-CLZ-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-CLZ-NEXT: testl %eax, %eax
; X86-CLZ-NEXT: jne .LBB12_1
; X86-CLZ-NEXT: # %bb.2:
; X86-CLZ-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-CLZ-NEXT: orl $1, %eax
; X86-CLZ-NEXT: lzcntl %eax, %eax
; X86-CLZ-NEXT: orl $32, %eax
; X86-CLZ-NEXT: xorl %edx, %edx
; X86-CLZ-NEXT: retl
; X86-CLZ-NEXT: .LBB12_1:
; X86-CLZ-NEXT: lzcntl %eax, %eax
; X86-CLZ-NEXT: xorl %edx, %edx
; X86-CLZ-NEXT: retl
;
; X64-CLZ-LABEL: ctlz_i64_zero_test_knownneverzero:
; X64-CLZ: # %bb.0:
; X64-CLZ-NEXT: orq $1, %rdi
; X64-CLZ-NEXT: lzcntq %rdi, %rax
; X64-CLZ-NEXT: retq
;
; X64-FASTLZCNT-LABEL: ctlz_i64_zero_test_knownneverzero:
; X64-FASTLZCNT: # %bb.0:
; X64-FASTLZCNT-NEXT: orq $1, %rdi
; X64-FASTLZCNT-NEXT: lzcntq %rdi, %rax
; X64-FASTLZCNT-NEXT: retq
;
; X86-FASTLZCNT-LABEL: ctlz_i64_zero_test_knownneverzero:
; X86-FASTLZCNT: # %bb.0:
; X86-FASTLZCNT-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-FASTLZCNT-NEXT: testl %eax, %eax
; X86-FASTLZCNT-NEXT: jne .LBB12_1
; X86-FASTLZCNT-NEXT: # %bb.2:
; X86-FASTLZCNT-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-FASTLZCNT-NEXT: orl $1, %eax
; X86-FASTLZCNT-NEXT: lzcntl %eax, %eax
; X86-FASTLZCNT-NEXT: orl $32, %eax
; X86-FASTLZCNT-NEXT: xorl %edx, %edx
; X86-FASTLZCNT-NEXT: retl
; X86-FASTLZCNT-NEXT: .LBB12_1:
; X86-FASTLZCNT-NEXT: lzcntl %eax, %eax
; X86-FASTLZCNT-NEXT: xorl %edx, %edx
; X86-FASTLZCNT-NEXT: retl
%o = or i64 %n, 1
%tmp1 = call i64 @llvm.ctlz.i64(i64 %o, i1 false)
ret i64 %tmp1
}

; Ensure we fold away the XOR(TRUNC(XOR(BSR(X),31)),31).
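; Informal justification (X assumed non-zero; all values fit in 5 bits, so the
; truncation to i8 cannot drop the xor'ed bits):
;   trunc8(bsr(X) ^ 31) ^ 31 == trunc8(bsr(X)) ^ 31 ^ 31 == trunc8(bsr(X))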
define i8 @PR47603_trunc(i32 %0) {
; X86-LABEL: PR47603_trunc:
; X86: # %bb.0:
; X86-NEXT: bsrl {{[0-9]+}}(%esp), %eax
; X86-NEXT: # kill: def $al killed $al killed $eax
; X86-NEXT: retl
;
; X64-LABEL: PR47603_trunc:
; X64: # %bb.0:
; X64-NEXT: bsrl %edi, %eax
; X64-NEXT: # kill: def $al killed $al killed $eax
; X64-NEXT: retq
;
; X86-CLZ-LABEL: PR47603_trunc:
; X86-CLZ: # %bb.0:
; X86-CLZ-NEXT: lzcntl {{[0-9]+}}(%esp), %eax
; X86-CLZ-NEXT: xorb $31, %al
; X86-CLZ-NEXT: # kill: def $al killed $al killed $eax
; X86-CLZ-NEXT: retl
;
; X64-CLZ-LABEL: PR47603_trunc:
; X64-CLZ: # %bb.0:
; X64-CLZ-NEXT: lzcntl %edi, %eax
; X64-CLZ-NEXT: xorb $31, %al
; X64-CLZ-NEXT: # kill: def $al killed $al killed $eax
; X64-CLZ-NEXT: retq
;
; X64-FASTLZCNT-LABEL: PR47603_trunc:
; X64-FASTLZCNT: # %bb.0:
; X64-FASTLZCNT-NEXT: lzcntl %edi, %eax
; X64-FASTLZCNT-NEXT: xorb $31, %al
; X64-FASTLZCNT-NEXT: # kill: def $al killed $al killed $eax
; X64-FASTLZCNT-NEXT: retq
;
; X86-FASTLZCNT-LABEL: PR47603_trunc:
; X86-FASTLZCNT: # %bb.0:
; X86-FASTLZCNT-NEXT: lzcntl {{[0-9]+}}(%esp), %eax
; X86-FASTLZCNT-NEXT: xorb $31, %al
; X86-FASTLZCNT-NEXT: # kill: def $al killed $al killed $eax
; X86-FASTLZCNT-NEXT: retl
%2 = call i32 @llvm.ctlz.i32(i32 %0, i1 true)
%3 = xor i32 %2, 31
%4 = trunc i32 %3 to i8
ret i8 %4
}

; Ensure we fold away the XOR(ZEXT(XOR(BSR(X),31)),31).
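; Same identity as the trunc case above, with a zero-extension instead: the xor
; only touches the low 5 bits, so ZEXT(BSR(X) ^ 31) ^ 31 == ZEXT(BSR(X)), and in
; the bsr-based lowerings only the bsr result feeds the address computation.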
define i32 @PR47603_zext(i32 %a0, ptr %a1) {
; X86-LABEL: PR47603_zext:
; X86: # %bb.0:
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: bsrl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movsbl (%eax,%ecx), %eax
; X86-NEXT: retl
;
; X64-LABEL: PR47603_zext:
; X64: # %bb.0:
; X64-NEXT: bsrl %edi, %eax
; X64-NEXT: movsbl (%rsi,%rax), %eax
; X64-NEXT: retq
;
; X86-CLZ-LABEL: PR47603_zext:
; X86-CLZ: # %bb.0:
; X86-CLZ-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-CLZ-NEXT: bsrl {{[0-9]+}}(%esp), %ecx
; X86-CLZ-NEXT: movsbl (%eax,%ecx), %eax
; X86-CLZ-NEXT: retl
;
; X64-CLZ-LABEL: PR47603_zext:
; X64-CLZ: # %bb.0:
; X64-CLZ-NEXT: lzcntl %edi, %eax
; X64-CLZ-NEXT: xorq $31, %rax
; X64-CLZ-NEXT: movsbl (%rsi,%rax), %eax
; X64-CLZ-NEXT: retq
;
; X64-FASTLZCNT-LABEL: PR47603_zext:
; X64-FASTLZCNT: # %bb.0:
; X64-FASTLZCNT-NEXT: lzcntl %edi, %eax
; X64-FASTLZCNT-NEXT: xorq $31, %rax
; X64-FASTLZCNT-NEXT: movsbl (%rsi,%rax), %eax
; X64-FASTLZCNT-NEXT: retq
;
; X86-FASTLZCNT-LABEL: PR47603_zext:
; X86-FASTLZCNT: # %bb.0:
; X86-FASTLZCNT-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-FASTLZCNT-NEXT: lzcntl {{[0-9]+}}(%esp), %ecx
; X86-FASTLZCNT-NEXT: xorl $31, %ecx
; X86-FASTLZCNT-NEXT: movsbl (%eax,%ecx), %eax
; X86-FASTLZCNT-NEXT: retl
%ctlz = tail call i32 @llvm.ctlz.i32(i32 %a0, i1 true)
%xor = xor i32 %ctlz, 31
%zext = zext i32 %xor to i64
%gep = getelementptr inbounds [32 x i8], ptr %a1, i64 0, i64 %zext
%load = load i8, ptr %gep, align 1
%sext = sext i8 %load to i32
ret i32 %sext
}

define i8 @ctlz_xor7_i8_true(i8 %x) {
; X86-LABEL: ctlz_xor7_i8_true:
; X86: # %bb.0:
; X86-NEXT: movzbl {{[0-9]+}}(%esp), %eax
; X86-NEXT: bsrl %eax, %eax
; X86-NEXT: # kill: def $al killed $al killed $eax
; X86-NEXT: retl
;
; X64-LABEL: ctlz_xor7_i8_true:
; X64: # %bb.0:
; X64-NEXT: movzbl %dil, %eax
; X64-NEXT: bsrl %eax, %eax
; X64-NEXT: # kill: def $al killed $al killed $eax
; X64-NEXT: retq
;
; X86-CLZ-LABEL: ctlz_xor7_i8_true:
; X86-CLZ: # %bb.0:
; X86-CLZ-NEXT: movzbl {{[0-9]+}}(%esp), %eax
; X86-CLZ-NEXT: bsrl %eax, %eax
; X86-CLZ-NEXT: # kill: def $al killed $al killed $eax
; X86-CLZ-NEXT: retl
;
; X64-CLZ-LABEL: ctlz_xor7_i8_true:
; X64-CLZ: # %bb.0:
; X64-CLZ-NEXT: movzbl %dil, %eax
; X64-CLZ-NEXT: bsrl %eax, %eax
; X64-CLZ-NEXT: # kill: def $al killed $al killed $eax
; X64-CLZ-NEXT: retq
;
; X64-FASTLZCNT-LABEL: ctlz_xor7_i8_true:
; X64-FASTLZCNT: # %bb.0:
; X64-FASTLZCNT-NEXT: shll $24, %edi
; X64-FASTLZCNT-NEXT: lzcntl %edi, %eax
; X64-FASTLZCNT-NEXT: xorb $7, %al
; X64-FASTLZCNT-NEXT: # kill: def $al killed $al killed $eax
; X64-FASTLZCNT-NEXT: retq
;
; X86-FASTLZCNT-LABEL: ctlz_xor7_i8_true:
; X86-FASTLZCNT: # %bb.0:
; X86-FASTLZCNT-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-FASTLZCNT-NEXT: shll $24, %eax
; X86-FASTLZCNT-NEXT: lzcntl %eax, %eax
; X86-FASTLZCNT-NEXT: xorb $7, %al
; X86-FASTLZCNT-NEXT: # kill: def $al killed $al killed $eax
; X86-FASTLZCNT-NEXT: retl
%clz = call i8 @llvm.ctlz.i8(i8 %x, i1 true)
%res = xor i8 %clz, 7
ret i8 %res
}

define i8 @ctlz_xor7_i8_false(i8 %x) {
; X86-NOCMOV-LABEL: ctlz_xor7_i8_false:
; X86-NOCMOV: # %bb.0:
; X86-NOCMOV-NEXT: movzbl {{[0-9]+}}(%esp), %eax
; X86-NOCMOV-NEXT: testb %al, %al
; X86-NOCMOV-NEXT: je .LBB16_1
; X86-NOCMOV-NEXT: # %bb.2: # %cond.false
; X86-NOCMOV-NEXT: movzbl %al, %eax
; X86-NOCMOV-NEXT: bsrl %eax, %eax
; X86-NOCMOV-NEXT: xorl $7, %eax
; X86-NOCMOV-NEXT: xorb $7, %al
; X86-NOCMOV-NEXT: # kill: def $al killed $al killed $eax
; X86-NOCMOV-NEXT: retl
; X86-NOCMOV-NEXT: .LBB16_1:
; X86-NOCMOV-NEXT: movb $8, %al
; X86-NOCMOV-NEXT: xorb $7, %al
; X86-NOCMOV-NEXT: # kill: def $al killed $al killed $eax
; X86-NOCMOV-NEXT: retl
;
; X86-CMOV-LABEL: ctlz_xor7_i8_false:
; X86-CMOV: # %bb.0:
; X86-CMOV-NEXT: movzbl {{[0-9]+}}(%esp), %eax
; X86-CMOV-NEXT: bsrl %eax, %ecx
; X86-CMOV-NEXT: movl $15, %eax
; X86-CMOV-NEXT: cmovnel %ecx, %eax
; X86-CMOV-NEXT: # kill: def $al killed $al killed $eax
; X86-CMOV-NEXT: retl
;
; X64-LABEL: ctlz_xor7_i8_false:
; X64: # %bb.0:
; X64-NEXT: movzbl %dil, %eax
; X64-NEXT: bsrl %eax, %ecx
; X64-NEXT: movl $15, %eax
; X64-NEXT: cmovnel %ecx, %eax
; X64-NEXT: # kill: def $al killed $al killed $eax
; X64-NEXT: retq
;
; X86-CLZ-LABEL: ctlz_xor7_i8_false:
; X86-CLZ: # %bb.0:
; X86-CLZ-NEXT: movzbl {{[0-9]+}}(%esp), %eax
; X86-CLZ-NEXT: lzcntl %eax, %eax
; X86-CLZ-NEXT: addl $-24, %eax
; X86-CLZ-NEXT: xorb $7, %al
; X86-CLZ-NEXT: # kill: def $al killed $al killed $eax
; X86-CLZ-NEXT: retl
;
; X64-CLZ-LABEL: ctlz_xor7_i8_false:
; X64-CLZ: # %bb.0:
; X64-CLZ-NEXT: movzbl %dil, %eax
; X64-CLZ-NEXT: lzcntl %eax, %eax
; X64-CLZ-NEXT: addl $-24, %eax
; X64-CLZ-NEXT: xorb $7, %al
; X64-CLZ-NEXT: # kill: def $al killed $al killed $eax
; X64-CLZ-NEXT: retq
;
; X64-FASTLZCNT-LABEL: ctlz_xor7_i8_false:
; X64-FASTLZCNT: # %bb.0:
; X64-FASTLZCNT-NEXT: movzbl %dil, %eax
; X64-FASTLZCNT-NEXT: lzcntl %eax, %eax
; X64-FASTLZCNT-NEXT: addl $-24, %eax
; X64-FASTLZCNT-NEXT: xorb $7, %al
; X64-FASTLZCNT-NEXT: # kill: def $al killed $al killed $eax
; X64-FASTLZCNT-NEXT: retq
;
; X86-FASTLZCNT-LABEL: ctlz_xor7_i8_false:
; X86-FASTLZCNT: # %bb.0:
; X86-FASTLZCNT-NEXT: movzbl {{[0-9]+}}(%esp), %eax
; X86-FASTLZCNT-NEXT: lzcntl %eax, %eax
; X86-FASTLZCNT-NEXT: addl $-24, %eax
; X86-FASTLZCNT-NEXT: xorb $7, %al
; X86-FASTLZCNT-NEXT: # kill: def $al killed $al killed $eax
; X86-FASTLZCNT-NEXT: retl
%clz = call i8 @llvm.ctlz.i8(i8 %x, i1 false)
%res = xor i8 %clz, 7
ret i8 %res
}

define i16 @ctlz_xor15_i16_true(i16 %x) {
; X86-LABEL: ctlz_xor15_i16_true:
; X86: # %bb.0:
; X86-NEXT: bsrw {{[0-9]+}}(%esp), %ax
; X86-NEXT: retl
;
; X64-LABEL: ctlz_xor15_i16_true:
; X64: # %bb.0:
; X64-NEXT: bsrw %di, %ax
; X64-NEXT: retq
;
; X86-CLZ-LABEL: ctlz_xor15_i16_true:
; X86-CLZ: # %bb.0:
; X86-CLZ-NEXT: bsrw {{[0-9]+}}(%esp), %ax
; X86-CLZ-NEXT: retl
;
; X64-CLZ-LABEL: ctlz_xor15_i16_true:
; X64-CLZ: # %bb.0:
; X64-CLZ-NEXT: bsrw %di, %ax
; X64-CLZ-NEXT: retq
;
; X64-FASTLZCNT-LABEL: ctlz_xor15_i16_true:
; X64-FASTLZCNT: # %bb.0:
; X64-FASTLZCNT-NEXT: lzcntw %di, %ax
; X64-FASTLZCNT-NEXT: xorl $15, %eax
; X64-FASTLZCNT-NEXT: # kill: def $ax killed $ax killed $eax
; X64-FASTLZCNT-NEXT: retq
;
; X86-FASTLZCNT-LABEL: ctlz_xor15_i16_true:
; X86-FASTLZCNT: # %bb.0:
; X86-FASTLZCNT-NEXT: lzcntw {{[0-9]+}}(%esp), %ax
; X86-FASTLZCNT-NEXT: xorl $15, %eax
; X86-FASTLZCNT-NEXT: # kill: def $ax killed $ax killed $eax
; X86-FASTLZCNT-NEXT: retl
%clz = call i16 @llvm.ctlz.i16(i16 %x, i1 true)
%res = xor i16 %clz, 15
ret i16 %res
}

define i32 @ctlz_xor31_i32_false(i32 %x) {
; X86-NOCMOV-LABEL: ctlz_xor31_i32_false:
; X86-NOCMOV: # %bb.0:
; X86-NOCMOV-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NOCMOV-NEXT: testl %eax, %eax
; X86-NOCMOV-NEXT: je .LBB18_1
; X86-NOCMOV-NEXT: # %bb.2: # %cond.false
; X86-NOCMOV-NEXT: bsrl %eax, %eax
; X86-NOCMOV-NEXT: xorl $31, %eax
; X86-NOCMOV-NEXT: xorl $31, %eax
; X86-NOCMOV-NEXT: retl
; X86-NOCMOV-NEXT: .LBB18_1:
; X86-NOCMOV-NEXT: movl $32, %eax
; X86-NOCMOV-NEXT: xorl $31, %eax
; X86-NOCMOV-NEXT: retl
;
; X86-CMOV-LABEL: ctlz_xor31_i32_false:
; X86-CMOV: # %bb.0:
; X86-CMOV-NEXT: bsrl {{[0-9]+}}(%esp), %ecx
; X86-CMOV-NEXT: movl $63, %eax
; X86-CMOV-NEXT: cmovnel %ecx, %eax
; X86-CMOV-NEXT: retl
;
; X64-LABEL: ctlz_xor31_i32_false:
; X64: # %bb.0:
; X64-NEXT: bsrl %edi, %ecx
; X64-NEXT: movl $63, %eax
; X64-NEXT: cmovnel %ecx, %eax
; X64-NEXT: retq
;
; X86-CLZ-LABEL: ctlz_xor31_i32_false:
; X86-CLZ: # %bb.0:
; X86-CLZ-NEXT: lzcntl {{[0-9]+}}(%esp), %eax
; X86-CLZ-NEXT: xorl $31, %eax
; X86-CLZ-NEXT: retl
;
; X64-CLZ-LABEL: ctlz_xor31_i32_false:
; X64-CLZ: # %bb.0:
; X64-CLZ-NEXT: lzcntl %edi, %eax
; X64-CLZ-NEXT: xorl $31, %eax
; X64-CLZ-NEXT: retq
;
; X64-FASTLZCNT-LABEL: ctlz_xor31_i32_false:
; X64-FASTLZCNT: # %bb.0:
; X64-FASTLZCNT-NEXT: lzcntl %edi, %eax
; X64-FASTLZCNT-NEXT: xorl $31, %eax
; X64-FASTLZCNT-NEXT: retq
;
; X86-FASTLZCNT-LABEL: ctlz_xor31_i32_false:
; X86-FASTLZCNT: # %bb.0:
; X86-FASTLZCNT-NEXT: lzcntl {{[0-9]+}}(%esp), %eax
; X86-FASTLZCNT-NEXT: xorl $31, %eax
; X86-FASTLZCNT-NEXT: retl
%clz = call i32 @llvm.ctlz.i32(i32 %x, i1 false)
%res = xor i32 %clz, 31
ret i32 %res
}

define i64 @ctlz_xor63_i64_true(i64 %x) {
; X86-NOCMOV-LABEL: ctlz_xor63_i64_true:
; X86-NOCMOV: # %bb.0:
; X86-NOCMOV-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NOCMOV-NEXT: testl %eax, %eax
; X86-NOCMOV-NEXT: jne .LBB19_1
; X86-NOCMOV-NEXT: # %bb.2:
; X86-NOCMOV-NEXT: bsrl {{[0-9]+}}(%esp), %eax
; X86-NOCMOV-NEXT: xorl $31, %eax
; X86-NOCMOV-NEXT: orl $32, %eax
; X86-NOCMOV-NEXT: jmp .LBB19_3
; X86-NOCMOV-NEXT: .LBB19_1:
; X86-NOCMOV-NEXT: bsrl %eax, %eax
; X86-NOCMOV-NEXT: xorl $31, %eax
; X86-NOCMOV-NEXT: .LBB19_3:
; X86-NOCMOV-NEXT: xorl $63, %eax
; X86-NOCMOV-NEXT: xorl %edx, %edx
; X86-NOCMOV-NEXT: retl
;
; X86-CMOV-LABEL: ctlz_xor63_i64_true:
; X86-CMOV: # %bb.0:
; X86-CMOV-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-CMOV-NEXT: bsrl %ecx, %edx
; X86-CMOV-NEXT: xorl $31, %edx
; X86-CMOV-NEXT: bsrl {{[0-9]+}}(%esp), %eax
; X86-CMOV-NEXT: xorl $31, %eax
; X86-CMOV-NEXT: orl $32, %eax
; X86-CMOV-NEXT: testl %ecx, %ecx
; X86-CMOV-NEXT: cmovnel %edx, %eax
; X86-CMOV-NEXT: xorl $63, %eax
; X86-CMOV-NEXT: xorl %edx, %edx
; X86-CMOV-NEXT: retl
;
; X64-LABEL: ctlz_xor63_i64_true:
; X64: # %bb.0:
; X64-NEXT: bsrq %rdi, %rax
; X64-NEXT: retq
;
; X86-CLZ-LABEL: ctlz_xor63_i64_true:
; X86-CLZ: # %bb.0:
; X86-CLZ-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-CLZ-NEXT: testl %eax, %eax
; X86-CLZ-NEXT: jne .LBB19_1
; X86-CLZ-NEXT: # %bb.2:
; X86-CLZ-NEXT: lzcntl {{[0-9]+}}(%esp), %eax
; X86-CLZ-NEXT: addl $32, %eax
; X86-CLZ-NEXT: jmp .LBB19_3
; X86-CLZ-NEXT: .LBB19_1:
; X86-CLZ-NEXT: lzcntl %eax, %eax
; X86-CLZ-NEXT: .LBB19_3:
; X86-CLZ-NEXT: xorl $63, %eax
; X86-CLZ-NEXT: xorl %edx, %edx
; X86-CLZ-NEXT: retl
;
; X64-CLZ-LABEL: ctlz_xor63_i64_true:
; X64-CLZ: # %bb.0:
; X64-CLZ-NEXT: bsrq %rdi, %rax
; X64-CLZ-NEXT: retq
;
; X64-FASTLZCNT-LABEL: ctlz_xor63_i64_true:
; X64-FASTLZCNT: # %bb.0:
; X64-FASTLZCNT-NEXT: lzcntq %rdi, %rax
; X64-FASTLZCNT-NEXT: xorq $63, %rax
; X64-FASTLZCNT-NEXT: retq
;
; X86-FASTLZCNT-LABEL: ctlz_xor63_i64_true:
; X86-FASTLZCNT: # %bb.0:
; X86-FASTLZCNT-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-FASTLZCNT-NEXT: testl %eax, %eax
; X86-FASTLZCNT-NEXT: jne .LBB19_1
; X86-FASTLZCNT-NEXT: # %bb.2:
; X86-FASTLZCNT-NEXT: lzcntl {{[0-9]+}}(%esp), %eax
; X86-FASTLZCNT-NEXT: addl $32, %eax
; X86-FASTLZCNT-NEXT: jmp .LBB19_3
; X86-FASTLZCNT-NEXT: .LBB19_1:
; X86-FASTLZCNT-NEXT: lzcntl %eax, %eax
; X86-FASTLZCNT-NEXT: .LBB19_3:
; X86-FASTLZCNT-NEXT: xorl $63, %eax
; X86-FASTLZCNT-NEXT: xorl %edx, %edx
; X86-FASTLZCNT-NEXT: retl
%clz = call i64 @llvm.ctlz.i64(i64 %x, i1 true)
%res = xor i64 %clz, 63
ret i64 %res
}

define i64 @ctlz_i32_sext(i32 %x) {
; X86-NOCMOV-LABEL: ctlz_i32_sext:
; X86-NOCMOV: # %bb.0:
; X86-NOCMOV-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NOCMOV-NEXT: testl %eax, %eax
; X86-NOCMOV-NEXT: je .LBB20_1
; X86-NOCMOV-NEXT: # %bb.2: # %cond.false
; X86-NOCMOV-NEXT: bsrl %eax, %eax
; X86-NOCMOV-NEXT: xorl $31, %eax
; X86-NOCMOV-NEXT: jmp .LBB20_3
; X86-NOCMOV-NEXT: .LBB20_1:
; X86-NOCMOV-NEXT: movl $32, %eax
; X86-NOCMOV-NEXT: .LBB20_3: # %cond.end
; X86-NOCMOV-NEXT: xorl $31, %eax
; X86-NOCMOV-NEXT: xorl %edx, %edx
; X86-NOCMOV-NEXT: retl
;
; X86-CMOV-LABEL: ctlz_i32_sext:
; X86-CMOV: # %bb.0:
; X86-CMOV-NEXT: bsrl {{[0-9]+}}(%esp), %ecx
; X86-CMOV-NEXT: movl $63, %eax
; X86-CMOV-NEXT: cmovnel %ecx, %eax
; X86-CMOV-NEXT: xorl %edx, %edx
; X86-CMOV-NEXT: retl
;
; X64-LABEL: ctlz_i32_sext:
; X64: # %bb.0:
; X64-NEXT: bsrl %edi, %ecx
; X64-NEXT: movl $63, %eax
; X64-NEXT: cmovnel %ecx, %eax
; X64-NEXT: retq
;
; X86-CLZ-LABEL: ctlz_i32_sext:
; X86-CLZ: # %bb.0:
; X86-CLZ-NEXT: lzcntl {{[0-9]+}}(%esp), %eax
; X86-CLZ-NEXT: xorl $31, %eax
; X86-CLZ-NEXT: xorl %edx, %edx
; X86-CLZ-NEXT: retl
;
; X64-CLZ-LABEL: ctlz_i32_sext:
; X64-CLZ: # %bb.0:
; X64-CLZ-NEXT: lzcntl %edi, %eax
; X64-CLZ-NEXT: xorl $31, %eax
; X64-CLZ-NEXT: retq
;
; X64-FASTLZCNT-LABEL: ctlz_i32_sext:
; X64-FASTLZCNT: # %bb.0:
; X64-FASTLZCNT-NEXT: lzcntl %edi, %eax
; X64-FASTLZCNT-NEXT: xorl $31, %eax
; X64-FASTLZCNT-NEXT: retq
;
; X86-FASTLZCNT-LABEL: ctlz_i32_sext:
; X86-FASTLZCNT: # %bb.0:
; X86-FASTLZCNT-NEXT: lzcntl {{[0-9]+}}(%esp), %eax
; X86-FASTLZCNT-NEXT: xorl $31, %eax
; X86-FASTLZCNT-NEXT: xorl %edx, %edx
; X86-FASTLZCNT-NEXT: retl
%tmp = call i32 @llvm.ctlz.i32( i32 %x, i1 false)
%xor = xor i32 %tmp, 31
%ext = sext i32 %xor to i64
ret i64 %ext
}

define i64 @ctlz_i32_zext(i32 %x) {
; X86-NOCMOV-LABEL: ctlz_i32_zext:
; X86-NOCMOV: # %bb.0:
; X86-NOCMOV-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NOCMOV-NEXT: testl %eax, %eax
; X86-NOCMOV-NEXT: je .LBB21_1
; X86-NOCMOV-NEXT: # %bb.2: # %cond.false
; X86-NOCMOV-NEXT: bsrl %eax, %eax
; X86-NOCMOV-NEXT: xorl $31, %eax
; X86-NOCMOV-NEXT: jmp .LBB21_3
; X86-NOCMOV-NEXT: .LBB21_1:
; X86-NOCMOV-NEXT: movl $32, %eax
; X86-NOCMOV-NEXT: .LBB21_3: # %cond.end
; X86-NOCMOV-NEXT: xorl $31, %eax
; X86-NOCMOV-NEXT: xorl %edx, %edx
; X86-NOCMOV-NEXT: retl
;
; X86-CMOV-LABEL: ctlz_i32_zext:
; X86-CMOV: # %bb.0:
; X86-CMOV-NEXT: bsrl {{[0-9]+}}(%esp), %ecx
; X86-CMOV-NEXT: movl $63, %eax
; X86-CMOV-NEXT: cmovnel %ecx, %eax
; X86-CMOV-NEXT: xorl %edx, %edx
; X86-CMOV-NEXT: retl
;
; X64-LABEL: ctlz_i32_zext:
; X64: # %bb.0:
; X64-NEXT: bsrl %edi, %ecx
; X64-NEXT: movl $63, %eax
; X64-NEXT: cmovnel %ecx, %eax
; X64-NEXT: retq
;
; X86-CLZ-LABEL: ctlz_i32_zext:
; X86-CLZ: # %bb.0:
; X86-CLZ-NEXT: lzcntl {{[0-9]+}}(%esp), %eax
; X86-CLZ-NEXT: xorl $31, %eax
; X86-CLZ-NEXT: xorl %edx, %edx
; X86-CLZ-NEXT: retl
;
; X64-CLZ-LABEL: ctlz_i32_zext:
; X64-CLZ: # %bb.0:
; X64-CLZ-NEXT: lzcntl %edi, %eax
; X64-CLZ-NEXT: xorl $31, %eax
; X64-CLZ-NEXT: retq
;
; X64-FASTLZCNT-LABEL: ctlz_i32_zext:
; X64-FASTLZCNT: # %bb.0:
; X64-FASTLZCNT-NEXT: lzcntl %edi, %eax
; X64-FASTLZCNT-NEXT: xorl $31, %eax
; X64-FASTLZCNT-NEXT: retq
;
; X86-FASTLZCNT-LABEL: ctlz_i32_zext:
; X86-FASTLZCNT: # %bb.0:
; X86-FASTLZCNT-NEXT: lzcntl {{[0-9]+}}(%esp), %eax
; X86-FASTLZCNT-NEXT: xorl $31, %eax
; X86-FASTLZCNT-NEXT: xorl %edx, %edx
; X86-FASTLZCNT-NEXT: retl
%tmp = call i32 @llvm.ctlz.i32( i32 %x, i1 false)
%xor = xor i32 %tmp, 31
%ext = zext i32 %xor to i64
ret i64 %ext
}