; llvm/test/CodeGen/X86/divrem8_ext.ll

; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc < %s -mtriple=i686-unknown | FileCheck %s --check-prefix=X86
; RUN: llc < %s -mtriple=x86_64-unknown | FileCheck %s --check-prefix=X64

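; The quotient and remainder of the same i8 udiv should share a single divb:
; the quotient is stored from AL and the remainder is returned zero-extended
; from AH via movzbl, with no second division.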
define zeroext i8 @test_udivrem_zext_ah(i8 %x, i8 %y) {
; X86-LABEL: test_udivrem_zext_ah:
; X86:       # %bb.0:
; X86-NEXT:    movzbl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    divb {{[0-9]+}}(%esp)
; X86-NEXT:    movzbl %ah, %ecx
; X86-NEXT:    movb %al, z
; X86-NEXT:    movl %ecx, %eax
; X86-NEXT:    retl
;
; X64-LABEL: test_udivrem_zext_ah:
; X64:       # %bb.0:
; X64-NEXT:    movzbl %dil, %eax
; X64-NEXT:    divb %sil
; X64-NEXT:    movzbl %ah, %ecx
; X64-NEXT:    movb %al, z(%rip)
; X64-NEXT:    movl %ecx, %eax
; X64-NEXT:    retq
  %div = udiv i8 %x, %y
  store i8 %div, ptr @z
  %1 = urem i8 %x, %y
  ret i8 %1
}

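; The zeroext i8 remainder is read directly from AH with a single movzbl.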
define zeroext i8 @test_urem_zext_ah(i8 %x, i8 %y) {
; X86-LABEL: test_urem_zext_ah:
; X86:       # %bb.0:
; X86-NEXT:    movzbl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    divb {{[0-9]+}}(%esp)
; X86-NEXT:    movzbl %ah, %eax
; X86-NEXT:    # kill: def $al killed $al killed $eax
; X86-NEXT:    retl
;
; X64-LABEL: test_urem_zext_ah:
; X64:       # %bb.0:
; X64-NEXT:    movzbl %dil, %eax
; X64-NEXT:    divb %sil
; X64-NEXT:    movzbl %ah, %eax
; X64-NEXT:    # kill: def $al killed $al killed $eax
; X64-NEXT:    retq
  %1 = urem i8 %x, %y
  ret i8 %1
}

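; Even without an explicit extension in the IR, the remainder is copied out of
; AH with movzbl before the add, since AH cannot be encoded in an instruction
; that needs a REX prefix (e.g. one addressing %sil) on x86-64.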
define i8 @test_urem_noext_ah(i8 %x, i8 %y) {
; X86-LABEL: test_urem_noext_ah:
; X86:       # %bb.0:
; X86-NEXT:    movzbl {{[0-9]+}}(%esp), %ecx
; X86-NEXT:    movzbl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    divb %cl
; X86-NEXT:    movzbl %ah, %eax
; X86-NEXT:    addb %cl, %al
; X86-NEXT:    # kill: def $al killed $al killed $eax
; X86-NEXT:    retl
;
; X64-LABEL: test_urem_noext_ah:
; X64:       # %bb.0:
; X64-NEXT:    movzbl %dil, %eax
; X64-NEXT:    divb %sil
; X64-NEXT:    movzbl %ah, %eax
; X64-NEXT:    addb %sil, %al
; X64-NEXT:    # kill: def $al killed $al killed $eax
; X64-NEXT:    retq
  %1 = urem i8 %x, %y
  %2 = add i8 %1, %y
  ret i8 %2
}

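; zext of the i8 remainder to i64: a single movzbl from AH suffices; on x86-64
; the upper bits are cleared implicitly, while the 32-bit target zeroes the
; high half of the result in EDX with xorl.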
define i64 @test_urem_zext64_ah(i8 %x, i8 %y) {
; X86-LABEL: test_urem_zext64_ah:
; X86:       # %bb.0:
; X86-NEXT:    movzbl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    divb {{[0-9]+}}(%esp)
; X86-NEXT:    movzbl %ah, %eax
; X86-NEXT:    xorl %edx, %edx
; X86-NEXT:    retl
;
; X64-LABEL: test_urem_zext64_ah:
; X64:       # %bb.0:
; X64-NEXT:    movzbl %dil, %eax
; X64-NEXT:    divb %sil
; X64-NEXT:    movzbl %ah, %eax
; X64-NEXT:    retq
  %1 = urem i8 %x, %y
  %2 = zext i8 %1 to i64
  ret i64 %2
}

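; Signed counterpart of test_udivrem_zext_ah: one idivb produces both results;
; the quotient is stored from AL and the remainder is returned sign-extended
; from AH via movsbl.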
define signext i8 @test_sdivrem_sext_ah(i8 %x, i8 %y) {
; X86-LABEL: test_sdivrem_sext_ah:
; X86:       # %bb.0:
; X86-NEXT:    movsbl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    idivb {{[0-9]+}}(%esp)
; X86-NEXT:    movsbl %ah, %ecx
; X86-NEXT:    movb %al, z
; X86-NEXT:    movl %ecx, %eax
; X86-NEXT:    retl
;
; X64-LABEL: test_sdivrem_sext_ah:
; X64:       # %bb.0:
; X64-NEXT:    movsbl %dil, %eax
; X64-NEXT:    idivb %sil
; X64-NEXT:    movsbl %ah, %ecx
; X64-NEXT:    movb %al, z(%rip)
; X64-NEXT:    movl %ecx, %eax
; X64-NEXT:    retq
  %div = sdiv i8 %x, %y
  store i8 %div, ptr @z
  %1 = srem i8 %x, %y
  ret i8 %1
}

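; The signext i8 remainder is read directly from AH with a single movsbl.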
define signext i8 @test_srem_sext_ah(i8 %x, i8 %y) {
; X86-LABEL: test_srem_sext_ah:
; X86:       # %bb.0:
; X86-NEXT:    movsbl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    idivb {{[0-9]+}}(%esp)
; X86-NEXT:    movsbl %ah, %eax
; X86-NEXT:    # kill: def $al killed $al killed $eax
; X86-NEXT:    retl
;
; X64-LABEL: test_srem_sext_ah:
; X64:       # %bb.0:
; X64-NEXT:    movsbl %dil, %eax
; X64-NEXT:    idivb %sil
; X64-NEXT:    movsbl %ah, %eax
; X64-NEXT:    # kill: def $al killed $al killed $eax
; X64-NEXT:    retq
  %1 = srem i8 %x, %y
  ret i8 %1
}

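; As in test_urem_noext_ah, the remainder is copied out of AH (here with
; movsbl) before it is added to the divisor.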
define i8 @test_srem_noext_ah(i8 %x, i8 %y) {
; X86-LABEL: test_srem_noext_ah:
; X86:       # %bb.0:
; X86-NEXT:    movzbl {{[0-9]+}}(%esp), %ecx
; X86-NEXT:    movsbl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    idivb %cl
; X86-NEXT:    movsbl %ah, %eax
; X86-NEXT:    addb %cl, %al
; X86-NEXT:    # kill: def $al killed $al killed $eax
; X86-NEXT:    retl
;
; X64-LABEL: test_srem_noext_ah:
; X64:       # %bb.0:
; X64-NEXT:    movsbl %dil, %eax
; X64-NEXT:    idivb %sil
; X64-NEXT:    movsbl %ah, %eax
; X64-NEXT:    addb %sil, %al
; X64-NEXT:    # kill: def $al killed $al killed $eax
; X64-NEXT:    retq
  %1 = srem i8 %x, %y
  %2 = add i8 %1, %y
  ret i8 %2
}

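; sext of the i8 remainder to i64: movsbl from AH, then cltq on x86-64, or a
; sarl $31 of the copy in EDX on the 32-bit target to produce the high half.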
define i64 @test_srem_sext64_ah(i8 %x, i8 %y) {
; X86-LABEL: test_srem_sext64_ah:
; X86:       # %bb.0:
; X86-NEXT:    movsbl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    idivb {{[0-9]+}}(%esp)
; X86-NEXT:    movsbl %ah, %eax
; X86-NEXT:    movl %eax, %edx
; X86-NEXT:    sarl $31, %edx
; X86-NEXT:    retl
;
; X64-LABEL: test_srem_sext64_ah:
; X64:       # %bb.0:
; X64-NEXT:    movsbl %dil, %eax
; X64-NEXT:    idivb %sil
; X64-NEXT:    movsbl %ah, %eax
; X64-NEXT:    cltq
; X64-NEXT:    retq
  %1 = srem i8 %x, %y
  %2 = sext i8 %1 to i64
  ret i64 %2
}

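; Test from PR25754: both the remainder and the quotient of the same i8 udiv
; are zero-extended to i64 and added; a single divb should feed both AH and AL.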
define i64 @pr25754(i8 %a, i8 %c) {
; X86-LABEL: pr25754:
; X86:       # %bb.0:
; X86-NEXT:    movzbl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    divb {{[0-9]+}}(%esp)
; X86-NEXT:    movzbl %ah, %ecx
; X86-NEXT:    movzbl %al, %eax
; X86-NEXT:    addl %ecx, %eax
; X86-NEXT:    xorl %edx, %edx
; X86-NEXT:    retl
;
; X64-LABEL: pr25754:
; X64:       # %bb.0:
; X64-NEXT:    movzbl %dil, %eax
; X64-NEXT:    divb %sil
; X64-NEXT:    movzbl %ah, %ecx
; X64-NEXT:    movzbl %al, %eax
; X64-NEXT:    addq %rcx, %rax
; X64-NEXT:    retq
  %r1 = urem i8 %a, %c
  %d1 = udiv i8 %a, %c
  %r2 = zext i8 %r1 to i64
  %d2 = zext i8 %d1 to i64
  %ret = add i64 %r2, %d2
  ret i64 %ret
}

@z = external dso_local global i8