; llvm/test/CodeGen/X86/divrem.ll

; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc < %s -mtriple=i686-unknown | FileCheck %s --check-prefix=X86
; RUN: llc < %s -mtriple=x86_64-unknown | FileCheck %s --check-prefix=X64

; Signed i64 divide + remainder: quotient stored to %p, remainder to %q.
; i686 has no 64-bit divide instruction, so each operation becomes a
; libcall (__divdi3 for sdiv, __moddi3 for srem). On x86-64 the backend
; combines the sdiv/srem pair into one cqto (sign-extend rax into rdx)
; plus idivq, which yields quotient in rax and remainder in rdx.
define void @si64(i64 %x, i64 %y, ptr %p, ptr %q) nounwind {
; X86-LABEL: si64:
; X86:       # %bb.0:
; X86-NEXT:    pushl %ebp
; X86-NEXT:    pushl %ebx
; X86-NEXT:    pushl %edi
; X86-NEXT:    pushl %esi
; X86-NEXT:    movl {{[0-9]+}}(%esp), %ebx
; X86-NEXT:    movl {{[0-9]+}}(%esp), %ebp
; X86-NEXT:    pushl %ebp
; X86-NEXT:    pushl %ebx
; X86-NEXT:    pushl {{[0-9]+}}(%esp)
; X86-NEXT:    pushl {{[0-9]+}}(%esp)
; X86-NEXT:    calll __divdi3
; X86-NEXT:    addl $16, %esp
; X86-NEXT:    movl %eax, %esi
; X86-NEXT:    movl %edx, %edi
; X86-NEXT:    pushl %ebp
; X86-NEXT:    pushl %ebx
; X86-NEXT:    pushl {{[0-9]+}}(%esp)
; X86-NEXT:    pushl {{[0-9]+}}(%esp)
; X86-NEXT:    calll __moddi3
; X86-NEXT:    addl $16, %esp
; X86-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT:    movl %edi, 4(%ecx)
; X86-NEXT:    movl %esi, (%ecx)
; X86-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT:    movl %edx, 4(%ecx)
; X86-NEXT:    movl %eax, (%ecx)
; X86-NEXT:    popl %esi
; X86-NEXT:    popl %edi
; X86-NEXT:    popl %ebx
; X86-NEXT:    popl %ebp
; X86-NEXT:    retl
;
; X64-LABEL: si64:
; X64:       # %bb.0:
; X64-NEXT:    movq %rdx, %r8
; X64-NEXT:    movq %rdi, %rax
; X64-NEXT:    cqto
; X64-NEXT:    idivq %rsi
; X64-NEXT:    movq %rax, (%r8)
; X64-NEXT:    movq %rdx, (%rcx)
; X64-NEXT:    retq
	%r = sdiv i64 %x, %y
	%t = srem i64 %x, %y
	store i64 %r, ptr %p
	store i64 %t, ptr %q
	ret void
}

; Signed i32 divide + remainder. Both targets use the hardware divider:
; cltd sign-extends eax into edx:eax, then a single idivl produces the
; quotient (eax, stored to %p) and remainder (edx, stored to %q).
define void @si32(i32 %x, i32 %y, ptr %p, ptr %q) nounwind {
; X86-LABEL: si32:
; X86:       # %bb.0:
; X86-NEXT:    pushl %esi
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    cltd
; X86-NEXT:    idivl {{[0-9]+}}(%esp)
; X86-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT:    movl {{[0-9]+}}(%esp), %esi
; X86-NEXT:    movl %eax, (%esi)
; X86-NEXT:    movl %edx, (%ecx)
; X86-NEXT:    popl %esi
; X86-NEXT:    retl
;
; X64-LABEL: si32:
; X64:       # %bb.0:
; X64-NEXT:    movq %rdx, %r8
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    cltd
; X64-NEXT:    idivl %esi
; X64-NEXT:    movl %eax, (%r8)
; X64-NEXT:    movl %edx, (%rcx)
; X64-NEXT:    retq
	%r = sdiv i32 %x, %y
	%t = srem i32 %x, %y
	store i32 %r, ptr %p
	store i32 %t, ptr %q
	ret void
}

; Signed i16 divide + remainder: cwtd sign-extends ax into dx:ax, then
; one idivw yields quotient (ax) and remainder (dx). On x86-64 the
; "kill" comment marks the implicit truncation of eax down to ax before
; the 16-bit divide.
define void @si16(i16 %x, i16 %y, ptr %p, ptr %q) nounwind {
; X86-LABEL: si16:
; X86:       # %bb.0:
; X86-NEXT:    pushl %esi
; X86-NEXT:    movzwl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    cwtd
; X86-NEXT:    idivw {{[0-9]+}}(%esp)
; X86-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT:    movl {{[0-9]+}}(%esp), %esi
; X86-NEXT:    movw %ax, (%esi)
; X86-NEXT:    movw %dx, (%ecx)
; X86-NEXT:    popl %esi
; X86-NEXT:    retl
;
; X64-LABEL: si16:
; X64:       # %bb.0:
; X64-NEXT:    movq %rdx, %r8
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    # kill: def $ax killed $ax killed $eax
; X64-NEXT:    cwtd
; X64-NEXT:    idivw %si
; X64-NEXT:    movw %ax, (%r8)
; X64-NEXT:    movw %dx, (%rcx)
; X64-NEXT:    retq
	%r = sdiv i16 %x, %y
	%t = srem i16 %x, %y
	store i16 %r, ptr %p
	store i16 %t, ptr %q
	ret void
}

; Signed i8 divide + remainder. idivb divides ax (movsbl pre-sign-extends
; the dividend) and leaves quotient in al, remainder in ah; the remainder
; is moved out of ah via movsbl into a spare register before the stores,
; avoiding a direct read of the high-byte register.
define void @si8(i8 %x, i8 %y, ptr %p, ptr %q) nounwind {
; X86-LABEL: si8:
; X86:       # %bb.0:
; X86-NEXT:    pushl %ebx
; X86-NEXT:    movsbl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    idivb {{[0-9]+}}(%esp)
; X86-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT:    movl {{[0-9]+}}(%esp), %edx
; X86-NEXT:    movsbl %ah, %ebx
; X86-NEXT:    movb %al, (%edx)
; X86-NEXT:    movb %bl, (%ecx)
; X86-NEXT:    popl %ebx
; X86-NEXT:    retl
;
; X64-LABEL: si8:
; X64:       # %bb.0:
; X64-NEXT:    movsbl %dil, %eax
; X64-NEXT:    idivb %sil
; X64-NEXT:    movsbl %ah, %esi
; X64-NEXT:    movb %al, (%rdx)
; X64-NEXT:    movb %sil, (%rcx)
; X64-NEXT:    retq
	%r = sdiv i8 %x, %y
	%t = srem i8 %x, %y
	store i8 %r, ptr %p
	store i8 %t, ptr %q
	ret void
}

; Unsigned i64 divide + remainder: the unsigned counterpart of @si64.
; i686 calls the __udivdi3/__umoddi3 libcalls; x86-64 zeroes edx
; (xor %edx,%edx, the unsigned analogue of cqto) and uses a single divq
; for both quotient (rax) and remainder (rdx).
define void @ui64(i64 %x, i64 %y, ptr %p, ptr %q) nounwind {
; X86-LABEL: ui64:
; X86:       # %bb.0:
; X86-NEXT:    pushl %ebp
; X86-NEXT:    pushl %ebx
; X86-NEXT:    pushl %edi
; X86-NEXT:    pushl %esi
; X86-NEXT:    movl {{[0-9]+}}(%esp), %ebx
; X86-NEXT:    movl {{[0-9]+}}(%esp), %ebp
; X86-NEXT:    pushl %ebp
; X86-NEXT:    pushl %ebx
; X86-NEXT:    pushl {{[0-9]+}}(%esp)
; X86-NEXT:    pushl {{[0-9]+}}(%esp)
; X86-NEXT:    calll __udivdi3
; X86-NEXT:    addl $16, %esp
; X86-NEXT:    movl %eax, %esi
; X86-NEXT:    movl %edx, %edi
; X86-NEXT:    pushl %ebp
; X86-NEXT:    pushl %ebx
; X86-NEXT:    pushl {{[0-9]+}}(%esp)
; X86-NEXT:    pushl {{[0-9]+}}(%esp)
; X86-NEXT:    calll __umoddi3
; X86-NEXT:    addl $16, %esp
; X86-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT:    movl %edi, 4(%ecx)
; X86-NEXT:    movl %esi, (%ecx)
; X86-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT:    movl %edx, 4(%ecx)
; X86-NEXT:    movl %eax, (%ecx)
; X86-NEXT:    popl %esi
; X86-NEXT:    popl %edi
; X86-NEXT:    popl %ebx
; X86-NEXT:    popl %ebp
; X86-NEXT:    retl
;
; X64-LABEL: ui64:
; X64:       # %bb.0:
; X64-NEXT:    movq %rdx, %r8
; X64-NEXT:    movq %rdi, %rax
; X64-NEXT:    xorl %edx, %edx
; X64-NEXT:    divq %rsi
; X64-NEXT:    movq %rax, (%r8)
; X64-NEXT:    movq %rdx, (%rcx)
; X64-NEXT:    retq
	%r = udiv i64 %x, %y
	%t = urem i64 %x, %y
	store i64 %r, ptr %p
	store i64 %t, ptr %q
	ret void
}

; Unsigned i32 divide + remainder: edx is zeroed (rather than
; sign-extended as in @si32) before a single divl supplies both the
; quotient (eax -> %p) and the remainder (edx -> %q).
define void @ui32(i32 %x, i32 %y, ptr %p, ptr %q) nounwind {
; X86-LABEL: ui32:
; X86:       # %bb.0:
; X86-NEXT:    pushl %esi
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    xorl %edx, %edx
; X86-NEXT:    divl {{[0-9]+}}(%esp)
; X86-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT:    movl {{[0-9]+}}(%esp), %esi
; X86-NEXT:    movl %eax, (%esi)
; X86-NEXT:    movl %edx, (%ecx)
; X86-NEXT:    popl %esi
; X86-NEXT:    retl
;
; X64-LABEL: ui32:
; X64:       # %bb.0:
; X64-NEXT:    movq %rdx, %r8
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    xorl %edx, %edx
; X64-NEXT:    divl %esi
; X64-NEXT:    movl %eax, (%r8)
; X64-NEXT:    movl %edx, (%rcx)
; X64-NEXT:    retq
	%r = udiv i32 %x, %y
	%t = urem i32 %x, %y
	store i32 %r, ptr %p
	store i32 %t, ptr %q
	ret void
}

; Unsigned i16 divide + remainder: zero edx, then one divw gives the
; quotient in ax and remainder in dx. As in @si16, the x86-64 "kill"
; comment records the implicit eax -> ax truncation for the 16-bit op.
define void @ui16(i16 %x, i16 %y, ptr %p, ptr %q) nounwind {
; X86-LABEL: ui16:
; X86:       # %bb.0:
; X86-NEXT:    pushl %esi
; X86-NEXT:    movzwl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    xorl %edx, %edx
; X86-NEXT:    divw {{[0-9]+}}(%esp)
; X86-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT:    movl {{[0-9]+}}(%esp), %esi
; X86-NEXT:    movw %ax, (%esi)
; X86-NEXT:    movw %dx, (%ecx)
; X86-NEXT:    popl %esi
; X86-NEXT:    retl
;
; X64-LABEL: ui16:
; X64:       # %bb.0:
; X64-NEXT:    movq %rdx, %r8
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    # kill: def $ax killed $ax killed $eax
; X64-NEXT:    xorl %edx, %edx
; X64-NEXT:    divw %si
; X64-NEXT:    movw %ax, (%r8)
; X64-NEXT:    movw %dx, (%rcx)
; X64-NEXT:    retq
	%r = udiv i16 %x, %y
	%t = urem i16 %x, %y
	store i16 %r, ptr %p
	store i16 %t, ptr %q
	ret void
}

; Unsigned i8 divide + remainder. divb divides ax (movzbl zero-extends
; the dividend) leaving quotient in al and remainder in ah; movzbl
; copies the remainder out of ah before both bytes are stored.
define void @ui8(i8 %x, i8 %y, ptr %p, ptr %q) nounwind {
; X86-LABEL: ui8:
; X86:       # %bb.0:
; X86-NEXT:    pushl %ebx
; X86-NEXT:    movzbl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    divb {{[0-9]+}}(%esp)
; X86-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT:    movl {{[0-9]+}}(%esp), %edx
; X86-NEXT:    movzbl %ah, %ebx
; X86-NEXT:    movb %al, (%edx)
; X86-NEXT:    movb %bl, (%ecx)
; X86-NEXT:    popl %ebx
; X86-NEXT:    retl
;
; X64-LABEL: ui8:
; X64:       # %bb.0:
; X64-NEXT:    movzbl %dil, %eax
; X64-NEXT:    divb %sil
; X64-NEXT:    movzbl %ah, %esi
; X64-NEXT:    movb %al, (%rdx)
; X64-NEXT:    movb %sil, (%rcx)
; X64-NEXT:    retq
	%r = udiv i8 %x, %y
	%t = urem i8 %x, %y
	store i8 %r, ptr %p
	store i8 %t, ptr %q
	ret void
}