llvm/test/CodeGen/X86/fp-strict-scalar-fptoint.ll

; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc < %s -mtriple=i686-unknown-unknown -mattr=+sse2 -O3 | FileCheck %s --check-prefixes=SSE-X86
; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+sse2 -O3 | FileCheck %s --check-prefixes=SSE-X64
; RUN: llc < %s -mtriple=i686-unknown-unknown -mattr=+avx -O3 | FileCheck %s --check-prefixes=AVX-X86,AVX1-X86
; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+avx -O3 | FileCheck %s --check-prefixes=AVX-X64,AVX1-X64
; RUN: llc < %s -mtriple=i686-unknown-unknown -mattr=+avx512f -mattr=+avx512vl -O3 | FileCheck %s --check-prefixes=AVX-X86,AVX512-X86
; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+avx512f -mattr=+avx512vl -O3 | FileCheck %s --check-prefixes=AVX-X64,AVX512-X64
; RUN: llc < %s -mtriple=i686-unknown-unknown -mattr=-sse -O3 | FileCheck %s --check-prefixes=X87
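
; Check codegen for the strict (constrained) fptosi/fptoui intrinsics from
; f32 and f64 to i1/i8/i16/i32/i64 under "fpexcept.strict", across SSE2, AVX,
; AVX512F+VL, and x87-only configurations on both 32-bit and 64-bit targets.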

declare i1  @llvm.experimental.constrained.fptosi.i1.f32(float, metadata)
declare i8  @llvm.experimental.constrained.fptosi.i8.f32(float, metadata)
declare i16 @llvm.experimental.constrained.fptosi.i16.f32(float, metadata)
declare i32 @llvm.experimental.constrained.fptosi.i32.f32(float, metadata)
declare i64 @llvm.experimental.constrained.fptosi.i64.f32(float, metadata)
declare i1  @llvm.experimental.constrained.fptoui.i1.f32(float, metadata)
declare i8  @llvm.experimental.constrained.fptoui.i8.f32(float, metadata)
declare i16 @llvm.experimental.constrained.fptoui.i16.f32(float, metadata)
declare i32 @llvm.experimental.constrained.fptoui.i32.f32(float, metadata)
declare i64 @llvm.experimental.constrained.fptoui.i64.f32(float, metadata)

declare i1  @llvm.experimental.constrained.fptosi.i1.f64(double, metadata)
declare i8  @llvm.experimental.constrained.fptosi.i8.f64(double, metadata)
declare i16 @llvm.experimental.constrained.fptosi.i16.f64(double, metadata)
declare i32 @llvm.experimental.constrained.fptosi.i32.f64(double, metadata)
declare i64 @llvm.experimental.constrained.fptosi.i64.f64(double, metadata)
declare i1  @llvm.experimental.constrained.fptoui.i1.f64(double, metadata)
declare i8  @llvm.experimental.constrained.fptoui.i8.f64(double, metadata)
declare i16 @llvm.experimental.constrained.fptoui.i16.f64(double, metadata)
declare i32 @llvm.experimental.constrained.fptoui.i32.f64(double, metadata)
declare i64 @llvm.experimental.constrained.fptoui.i64.f64(double, metadata)

define i1 @fptosi_f32toi1(float %x) #0 {
; SSE-X86-LABEL: fptosi_f32toi1:
; SSE-X86:       # %bb.0:
; SSE-X86-NEXT:    cvttss2si {{[0-9]+}}(%esp), %eax
; SSE-X86-NEXT:    # kill: def $al killed $al killed $eax
; SSE-X86-NEXT:    retl
;
; SSE-X64-LABEL: fptosi_f32toi1:
; SSE-X64:       # %bb.0:
; SSE-X64-NEXT:    cvttss2si %xmm0, %eax
; SSE-X64-NEXT:    # kill: def $al killed $al killed $eax
; SSE-X64-NEXT:    retq
;
; AVX-X86-LABEL: fptosi_f32toi1:
; AVX-X86:       # %bb.0:
; AVX-X86-NEXT:    vcvttss2si {{[0-9]+}}(%esp), %eax
; AVX-X86-NEXT:    # kill: def $al killed $al killed $eax
; AVX-X86-NEXT:    retl
;
; AVX-X64-LABEL: fptosi_f32toi1:
; AVX-X64:       # %bb.0:
; AVX-X64-NEXT:    vcvttss2si %xmm0, %eax
; AVX-X64-NEXT:    # kill: def $al killed $al killed $eax
; AVX-X64-NEXT:    retq
;
; X87-LABEL: fptosi_f32toi1:
; X87:       # %bb.0:
; X87-NEXT:    subl $8, %esp
; X87-NEXT:    .cfi_def_cfa_offset 12
; X87-NEXT:    flds {{[0-9]+}}(%esp)
; X87-NEXT:    wait
; X87-NEXT:    fnstcw {{[0-9]+}}(%esp)
; X87-NEXT:    movzwl {{[0-9]+}}(%esp), %eax
; X87-NEXT:    orl $3072, %eax # imm = 0xC00
; X87-NEXT:    movw %ax, {{[0-9]+}}(%esp)
; X87-NEXT:    fldcw {{[0-9]+}}(%esp)
; X87-NEXT:    fistps {{[0-9]+}}(%esp)
; X87-NEXT:    fldcw {{[0-9]+}}(%esp)
; X87-NEXT:    movzbl {{[0-9]+}}(%esp), %eax
; X87-NEXT:    addl $8, %esp
; X87-NEXT:    .cfi_def_cfa_offset 4
; X87-NEXT:    retl
  %result = call i1 @llvm.experimental.constrained.fptosi.i1.f32(float %x,
                                               metadata !"fpexcept.strict") #0
  ret i1 %result
}

define i8 @fptosi_f32toi8(float %x) #0 {
; SSE-X86-LABEL: fptosi_f32toi8:
; SSE-X86:       # %bb.0:
; SSE-X86-NEXT:    cvttss2si {{[0-9]+}}(%esp), %eax
; SSE-X86-NEXT:    # kill: def $al killed $al killed $eax
; SSE-X86-NEXT:    retl
;
; SSE-X64-LABEL: fptosi_f32toi8:
; SSE-X64:       # %bb.0:
; SSE-X64-NEXT:    cvttss2si %xmm0, %eax
; SSE-X64-NEXT:    # kill: def $al killed $al killed $eax
; SSE-X64-NEXT:    retq
;
; AVX-X86-LABEL: fptosi_f32toi8:
; AVX-X86:       # %bb.0:
; AVX-X86-NEXT:    vcvttss2si {{[0-9]+}}(%esp), %eax
; AVX-X86-NEXT:    # kill: def $al killed $al killed $eax
; AVX-X86-NEXT:    retl
;
; AVX-X64-LABEL: fptosi_f32toi8:
; AVX-X64:       # %bb.0:
; AVX-X64-NEXT:    vcvttss2si %xmm0, %eax
; AVX-X64-NEXT:    # kill: def $al killed $al killed $eax
; AVX-X64-NEXT:    retq
;
; X87-LABEL: fptosi_f32toi8:
; X87:       # %bb.0:
; X87-NEXT:    subl $8, %esp
; X87-NEXT:    .cfi_def_cfa_offset 12
; X87-NEXT:    flds {{[0-9]+}}(%esp)
; X87-NEXT:    wait
; X87-NEXT:    fnstcw {{[0-9]+}}(%esp)
; X87-NEXT:    movzwl {{[0-9]+}}(%esp), %eax
; X87-NEXT:    orl $3072, %eax # imm = 0xC00
; X87-NEXT:    movw %ax, {{[0-9]+}}(%esp)
; X87-NEXT:    fldcw {{[0-9]+}}(%esp)
; X87-NEXT:    fistps {{[0-9]+}}(%esp)
; X87-NEXT:    fldcw {{[0-9]+}}(%esp)
; X87-NEXT:    movzbl {{[0-9]+}}(%esp), %eax
; X87-NEXT:    addl $8, %esp
; X87-NEXT:    .cfi_def_cfa_offset 4
; X87-NEXT:    retl
  %result = call i8 @llvm.experimental.constrained.fptosi.i8.f32(float %x,
                                               metadata !"fpexcept.strict") #0
  ret i8 %result
}

define i16 @fptosi_f32toi16(float %x) #0 {
; SSE-X86-LABEL: fptosi_f32toi16:
; SSE-X86:       # %bb.0:
; SSE-X86-NEXT:    cvttss2si {{[0-9]+}}(%esp), %eax
; SSE-X86-NEXT:    # kill: def $ax killed $ax killed $eax
; SSE-X86-NEXT:    retl
;
; SSE-X64-LABEL: fptosi_f32toi16:
; SSE-X64:       # %bb.0:
; SSE-X64-NEXT:    cvttss2si %xmm0, %eax
; SSE-X64-NEXT:    # kill: def $ax killed $ax killed $eax
; SSE-X64-NEXT:    retq
;
; AVX-X86-LABEL: fptosi_f32toi16:
; AVX-X86:       # %bb.0:
; AVX-X86-NEXT:    vcvttss2si {{[0-9]+}}(%esp), %eax
; AVX-X86-NEXT:    # kill: def $ax killed $ax killed $eax
; AVX-X86-NEXT:    retl
;
; AVX-X64-LABEL: fptosi_f32toi16:
; AVX-X64:       # %bb.0:
; AVX-X64-NEXT:    vcvttss2si %xmm0, %eax
; AVX-X64-NEXT:    # kill: def $ax killed $ax killed $eax
; AVX-X64-NEXT:    retq
;
; X87-LABEL: fptosi_f32toi16:
; X87:       # %bb.0:
; X87-NEXT:    subl $8, %esp
; X87-NEXT:    .cfi_def_cfa_offset 12
; X87-NEXT:    flds {{[0-9]+}}(%esp)
; X87-NEXT:    wait
; X87-NEXT:    fnstcw {{[0-9]+}}(%esp)
; X87-NEXT:    movzwl {{[0-9]+}}(%esp), %eax
; X87-NEXT:    orl $3072, %eax # imm = 0xC00
; X87-NEXT:    movw %ax, {{[0-9]+}}(%esp)
; X87-NEXT:    fldcw {{[0-9]+}}(%esp)
; X87-NEXT:    fistps {{[0-9]+}}(%esp)
; X87-NEXT:    fldcw {{[0-9]+}}(%esp)
; X87-NEXT:    movzwl {{[0-9]+}}(%esp), %eax
; X87-NEXT:    addl $8, %esp
; X87-NEXT:    .cfi_def_cfa_offset 4
; X87-NEXT:    retl
  %result = call i16 @llvm.experimental.constrained.fptosi.i16.f32(float %x,
                                               metadata !"fpexcept.strict") #0
  ret i16 %result
}

define i32 @fptosi_f32toi32(float %x) #0 {
; SSE-X86-LABEL: fptosi_f32toi32:
; SSE-X86:       # %bb.0:
; SSE-X86-NEXT:    cvttss2si {{[0-9]+}}(%esp), %eax
; SSE-X86-NEXT:    retl
;
; SSE-X64-LABEL: fptosi_f32toi32:
; SSE-X64:       # %bb.0:
; SSE-X64-NEXT:    cvttss2si %xmm0, %eax
; SSE-X64-NEXT:    retq
;
; AVX-X86-LABEL: fptosi_f32toi32:
; AVX-X86:       # %bb.0:
; AVX-X86-NEXT:    vcvttss2si {{[0-9]+}}(%esp), %eax
; AVX-X86-NEXT:    retl
;
; AVX-X64-LABEL: fptosi_f32toi32:
; AVX-X64:       # %bb.0:
; AVX-X64-NEXT:    vcvttss2si %xmm0, %eax
; AVX-X64-NEXT:    retq
;
; X87-LABEL: fptosi_f32toi32:
; X87:       # %bb.0:
; X87-NEXT:    subl $8, %esp
; X87-NEXT:    .cfi_def_cfa_offset 12
; X87-NEXT:    flds {{[0-9]+}}(%esp)
; X87-NEXT:    wait
; X87-NEXT:    fnstcw (%esp)
; X87-NEXT:    movzwl (%esp), %eax
; X87-NEXT:    orl $3072, %eax # imm = 0xC00
; X87-NEXT:    movw %ax, {{[0-9]+}}(%esp)
; X87-NEXT:    fldcw {{[0-9]+}}(%esp)
; X87-NEXT:    fistpl {{[0-9]+}}(%esp)
; X87-NEXT:    fldcw (%esp)
; X87-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X87-NEXT:    addl $8, %esp
; X87-NEXT:    .cfi_def_cfa_offset 4
; X87-NEXT:    retl
  %result = call i32 @llvm.experimental.constrained.fptosi.i32.f32(float %x,
                                               metadata !"fpexcept.strict") #0
  ret i32 %result
}

define i64 @fptosi_f32toi64(float %x) #0 {
; SSE-X86-LABEL: fptosi_f32toi64:
; SSE-X86:       # %bb.0:
; SSE-X86-NEXT:    pushl %ebp
; SSE-X86-NEXT:    .cfi_def_cfa_offset 8
; SSE-X86-NEXT:    .cfi_offset %ebp, -8
; SSE-X86-NEXT:    movl %esp, %ebp
; SSE-X86-NEXT:    .cfi_def_cfa_register %ebp
; SSE-X86-NEXT:    andl $-8, %esp
; SSE-X86-NEXT:    subl $16, %esp
; SSE-X86-NEXT:    movss {{.*#+}} xmm0 = mem[0],zero,zero,zero
; SSE-X86-NEXT:    movss %xmm0, {{[0-9]+}}(%esp)
; SSE-X86-NEXT:    flds {{[0-9]+}}(%esp)
; SSE-X86-NEXT:    wait
; SSE-X86-NEXT:    fnstcw {{[0-9]+}}(%esp)
; SSE-X86-NEXT:    movzwl {{[0-9]+}}(%esp), %eax
; SSE-X86-NEXT:    orl $3072, %eax # imm = 0xC00
; SSE-X86-NEXT:    movw %ax, {{[0-9]+}}(%esp)
; SSE-X86-NEXT:    fldcw {{[0-9]+}}(%esp)
; SSE-X86-NEXT:    fistpll {{[0-9]+}}(%esp)
; SSE-X86-NEXT:    fldcw {{[0-9]+}}(%esp)
; SSE-X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; SSE-X86-NEXT:    movl {{[0-9]+}}(%esp), %edx
; SSE-X86-NEXT:    movl %ebp, %esp
; SSE-X86-NEXT:    popl %ebp
; SSE-X86-NEXT:    .cfi_def_cfa %esp, 4
; SSE-X86-NEXT:    retl
;
; SSE-X64-LABEL: fptosi_f32toi64:
; SSE-X64:       # %bb.0:
; SSE-X64-NEXT:    cvttss2si %xmm0, %rax
; SSE-X64-NEXT:    retq
;
; AVX-X86-LABEL: fptosi_f32toi64:
; AVX-X86:       # %bb.0:
; AVX-X86-NEXT:    pushl %ebp
; AVX-X86-NEXT:    .cfi_def_cfa_offset 8
; AVX-X86-NEXT:    .cfi_offset %ebp, -8
; AVX-X86-NEXT:    movl %esp, %ebp
; AVX-X86-NEXT:    .cfi_def_cfa_register %ebp
; AVX-X86-NEXT:    andl $-8, %esp
; AVX-X86-NEXT:    subl $8, %esp
; AVX-X86-NEXT:    vmovss {{.*#+}} xmm0 = mem[0],zero,zero,zero
; AVX-X86-NEXT:    vmovss %xmm0, (%esp)
; AVX-X86-NEXT:    flds (%esp)
; AVX-X86-NEXT:    fisttpll (%esp)
; AVX-X86-NEXT:    wait
; AVX-X86-NEXT:    movl (%esp), %eax
; AVX-X86-NEXT:    movl {{[0-9]+}}(%esp), %edx
; AVX-X86-NEXT:    movl %ebp, %esp
; AVX-X86-NEXT:    popl %ebp
; AVX-X86-NEXT:    .cfi_def_cfa %esp, 4
; AVX-X86-NEXT:    retl
;
; AVX-X64-LABEL: fptosi_f32toi64:
; AVX-X64:       # %bb.0:
; AVX-X64-NEXT:    vcvttss2si %xmm0, %rax
; AVX-X64-NEXT:    retq
;
; X87-LABEL: fptosi_f32toi64:
; X87:       # %bb.0:
; X87-NEXT:    pushl %ebp
; X87-NEXT:    .cfi_def_cfa_offset 8
; X87-NEXT:    .cfi_offset %ebp, -8
; X87-NEXT:    movl %esp, %ebp
; X87-NEXT:    .cfi_def_cfa_register %ebp
; X87-NEXT:    andl $-8, %esp
; X87-NEXT:    subl $16, %esp
; X87-NEXT:    flds 8(%ebp)
; X87-NEXT:    wait
; X87-NEXT:    fnstcw {{[0-9]+}}(%esp)
; X87-NEXT:    movzwl {{[0-9]+}}(%esp), %eax
; X87-NEXT:    orl $3072, %eax # imm = 0xC00
; X87-NEXT:    movw %ax, {{[0-9]+}}(%esp)
; X87-NEXT:    fldcw {{[0-9]+}}(%esp)
; X87-NEXT:    fistpll {{[0-9]+}}(%esp)
; X87-NEXT:    fldcw {{[0-9]+}}(%esp)
; X87-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X87-NEXT:    movl {{[0-9]+}}(%esp), %edx
; X87-NEXT:    movl %ebp, %esp
; X87-NEXT:    popl %ebp
; X87-NEXT:    .cfi_def_cfa %esp, 4
; X87-NEXT:    retl
  %result = call i64 @llvm.experimental.constrained.fptosi.i64.f32(float %x,
                                               metadata !"fpexcept.strict") #0
  ret i64 %result
}

define i1 @fptoui_f32toi1(float %x) #0 {
; SSE-X86-LABEL: fptoui_f32toi1:
; SSE-X86:       # %bb.0:
; SSE-X86-NEXT:    cvttss2si {{[0-9]+}}(%esp), %eax
; SSE-X86-NEXT:    # kill: def $al killed $al killed $eax
; SSE-X86-NEXT:    retl
;
; SSE-X64-LABEL: fptoui_f32toi1:
; SSE-X64:       # %bb.0:
; SSE-X64-NEXT:    cvttss2si %xmm0, %eax
; SSE-X64-NEXT:    # kill: def $al killed $al killed $eax
; SSE-X64-NEXT:    retq
;
; AVX-X86-LABEL: fptoui_f32toi1:
; AVX-X86:       # %bb.0:
; AVX-X86-NEXT:    vcvttss2si {{[0-9]+}}(%esp), %eax
; AVX-X86-NEXT:    # kill: def $al killed $al killed $eax
; AVX-X86-NEXT:    retl
;
; AVX-X64-LABEL: fptoui_f32toi1:
; AVX-X64:       # %bb.0:
; AVX-X64-NEXT:    vcvttss2si %xmm0, %eax
; AVX-X64-NEXT:    # kill: def $al killed $al killed $eax
; AVX-X64-NEXT:    retq
;
; X87-LABEL: fptoui_f32toi1:
; X87:       # %bb.0:
; X87-NEXT:    subl $8, %esp
; X87-NEXT:    .cfi_def_cfa_offset 12
; X87-NEXT:    flds {{[0-9]+}}(%esp)
; X87-NEXT:    wait
; X87-NEXT:    fnstcw {{[0-9]+}}(%esp)
; X87-NEXT:    movzwl {{[0-9]+}}(%esp), %eax
; X87-NEXT:    orl $3072, %eax # imm = 0xC00
; X87-NEXT:    movw %ax, {{[0-9]+}}(%esp)
; X87-NEXT:    fldcw {{[0-9]+}}(%esp)
; X87-NEXT:    fistps {{[0-9]+}}(%esp)
; X87-NEXT:    fldcw {{[0-9]+}}(%esp)
; X87-NEXT:    movzbl {{[0-9]+}}(%esp), %eax
; X87-NEXT:    addl $8, %esp
; X87-NEXT:    .cfi_def_cfa_offset 4
; X87-NEXT:    retl
  %result = call i1 @llvm.experimental.constrained.fptoui.i1.f32(float %x,
                                               metadata !"fpexcept.strict") #0
  ret i1 %result
}

define i8 @fptoui_f32toi8(float %x) #0 {
; SSE-X86-LABEL: fptoui_f32toi8:
; SSE-X86:       # %bb.0:
; SSE-X86-NEXT:    cvttss2si {{[0-9]+}}(%esp), %eax
; SSE-X86-NEXT:    # kill: def $al killed $al killed $eax
; SSE-X86-NEXT:    retl
;
; SSE-X64-LABEL: fptoui_f32toi8:
; SSE-X64:       # %bb.0:
; SSE-X64-NEXT:    cvttss2si %xmm0, %eax
; SSE-X64-NEXT:    # kill: def $al killed $al killed $eax
; SSE-X64-NEXT:    retq
;
; AVX-X86-LABEL: fptoui_f32toi8:
; AVX-X86:       # %bb.0:
; AVX-X86-NEXT:    vcvttss2si {{[0-9]+}}(%esp), %eax
; AVX-X86-NEXT:    # kill: def $al killed $al killed $eax
; AVX-X86-NEXT:    retl
;
; AVX-X64-LABEL: fptoui_f32toi8:
; AVX-X64:       # %bb.0:
; AVX-X64-NEXT:    vcvttss2si %xmm0, %eax
; AVX-X64-NEXT:    # kill: def $al killed $al killed $eax
; AVX-X64-NEXT:    retq
;
; X87-LABEL: fptoui_f32toi8:
; X87:       # %bb.0:
; X87-NEXT:    subl $8, %esp
; X87-NEXT:    .cfi_def_cfa_offset 12
; X87-NEXT:    flds {{[0-9]+}}(%esp)
; X87-NEXT:    wait
; X87-NEXT:    fnstcw {{[0-9]+}}(%esp)
; X87-NEXT:    movzwl {{[0-9]+}}(%esp), %eax
; X87-NEXT:    orl $3072, %eax # imm = 0xC00
; X87-NEXT:    movw %ax, {{[0-9]+}}(%esp)
; X87-NEXT:    fldcw {{[0-9]+}}(%esp)
; X87-NEXT:    fistps {{[0-9]+}}(%esp)
; X87-NEXT:    fldcw {{[0-9]+}}(%esp)
; X87-NEXT:    movzbl {{[0-9]+}}(%esp), %eax
; X87-NEXT:    addl $8, %esp
; X87-NEXT:    .cfi_def_cfa_offset 4
; X87-NEXT:    retl
  %result = call i8 @llvm.experimental.constrained.fptoui.i8.f32(float %x,
                                               metadata !"fpexcept.strict") #0
  ret i8 %result
}

define i16 @fptoui_f32toi16(float %x) #0 {
; SSE-X86-LABEL: fptoui_f32toi16:
; SSE-X86:       # %bb.0:
; SSE-X86-NEXT:    cvttss2si {{[0-9]+}}(%esp), %eax
; SSE-X86-NEXT:    # kill: def $ax killed $ax killed $eax
; SSE-X86-NEXT:    retl
;
; SSE-X64-LABEL: fptoui_f32toi16:
; SSE-X64:       # %bb.0:
; SSE-X64-NEXT:    cvttss2si %xmm0, %eax
; SSE-X64-NEXT:    # kill: def $ax killed $ax killed $eax
; SSE-X64-NEXT:    retq
;
; AVX-X86-LABEL: fptoui_f32toi16:
; AVX-X86:       # %bb.0:
; AVX-X86-NEXT:    vcvttss2si {{[0-9]+}}(%esp), %eax
; AVX-X86-NEXT:    # kill: def $ax killed $ax killed $eax
; AVX-X86-NEXT:    retl
;
; AVX-X64-LABEL: fptoui_f32toi16:
; AVX-X64:       # %bb.0:
; AVX-X64-NEXT:    vcvttss2si %xmm0, %eax
; AVX-X64-NEXT:    # kill: def $ax killed $ax killed $eax
; AVX-X64-NEXT:    retq
;
; X87-LABEL: fptoui_f32toi16:
; X87:       # %bb.0:
; X87-NEXT:    subl $8, %esp
; X87-NEXT:    .cfi_def_cfa_offset 12
; X87-NEXT:    flds {{[0-9]+}}(%esp)
; X87-NEXT:    wait
; X87-NEXT:    fnstcw (%esp)
; X87-NEXT:    movzwl (%esp), %eax
; X87-NEXT:    orl $3072, %eax # imm = 0xC00
; X87-NEXT:    movw %ax, {{[0-9]+}}(%esp)
; X87-NEXT:    fldcw {{[0-9]+}}(%esp)
; X87-NEXT:    fistpl {{[0-9]+}}(%esp)
; X87-NEXT:    fldcw (%esp)
; X87-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X87-NEXT:    # kill: def $ax killed $ax killed $eax
; X87-NEXT:    addl $8, %esp
; X87-NEXT:    .cfi_def_cfa_offset 4
; X87-NEXT:    retl
  %result = call i16 @llvm.experimental.constrained.fptoui.i16.f32(float %x,
                                               metadata !"fpexcept.strict") #0
  ret i16 %result
}

define i32 @fptoui_f32toi32(float %x) #0 {
; SSE-X86-LABEL: fptoui_f32toi32:
; SSE-X86:       # %bb.0:
; SSE-X86-NEXT:    movss {{.*#+}} xmm0 = mem[0],zero,zero,zero
; SSE-X86-NEXT:    movss {{.*#+}} xmm2 = [2.14748365E+9,0.0E+0,0.0E+0,0.0E+0]
; SSE-X86-NEXT:    comiss %xmm0, %xmm2
; SSE-X86-NEXT:    xorps %xmm1, %xmm1
; SSE-X86-NEXT:    ja .LBB8_2
; SSE-X86-NEXT:  # %bb.1:
; SSE-X86-NEXT:    movaps %xmm2, %xmm1
; SSE-X86-NEXT:  .LBB8_2:
; SSE-X86-NEXT:    setbe %al
; SSE-X86-NEXT:    movzbl %al, %ecx
; SSE-X86-NEXT:    shll $31, %ecx
; SSE-X86-NEXT:    subss %xmm1, %xmm0
; SSE-X86-NEXT:    cvttss2si %xmm0, %eax
; SSE-X86-NEXT:    xorl %ecx, %eax
; SSE-X86-NEXT:    retl
;
; SSE-X64-LABEL: fptoui_f32toi32:
; SSE-X64:       # %bb.0:
; SSE-X64-NEXT:    cvttss2si %xmm0, %rax
; SSE-X64-NEXT:    # kill: def $eax killed $eax killed $rax
; SSE-X64-NEXT:    retq
;
; AVX1-X86-LABEL: fptoui_f32toi32:
; AVX1-X86:       # %bb.0:
; AVX1-X86-NEXT:    pushl %ebp
; AVX1-X86-NEXT:    .cfi_def_cfa_offset 8
; AVX1-X86-NEXT:    .cfi_offset %ebp, -8
; AVX1-X86-NEXT:    movl %esp, %ebp
; AVX1-X86-NEXT:    .cfi_def_cfa_register %ebp
; AVX1-X86-NEXT:    andl $-8, %esp
; AVX1-X86-NEXT:    subl $8, %esp
; AVX1-X86-NEXT:    vmovss {{.*#+}} xmm0 = mem[0],zero,zero,zero
; AVX1-X86-NEXT:    vmovss %xmm0, (%esp)
; AVX1-X86-NEXT:    flds (%esp)
; AVX1-X86-NEXT:    fisttpll (%esp)
; AVX1-X86-NEXT:    wait
; AVX1-X86-NEXT:    movl (%esp), %eax
; AVX1-X86-NEXT:    movl %ebp, %esp
; AVX1-X86-NEXT:    popl %ebp
; AVX1-X86-NEXT:    .cfi_def_cfa %esp, 4
; AVX1-X86-NEXT:    retl
;
; AVX1-X64-LABEL: fptoui_f32toi32:
; AVX1-X64:       # %bb.0:
; AVX1-X64-NEXT:    vcvttss2si %xmm0, %rax
; AVX1-X64-NEXT:    # kill: def $eax killed $eax killed $rax
; AVX1-X64-NEXT:    retq
;
; AVX512-X86-LABEL: fptoui_f32toi32:
; AVX512-X86:       # %bb.0:
; AVX512-X86-NEXT:    vcvttss2usi {{[0-9]+}}(%esp), %eax
; AVX512-X86-NEXT:    retl
;
; AVX512-X64-LABEL: fptoui_f32toi32:
; AVX512-X64:       # %bb.0:
; AVX512-X64-NEXT:    vcvttss2usi %xmm0, %eax
; AVX512-X64-NEXT:    retq
;
; X87-LABEL: fptoui_f32toi32:
; X87:       # %bb.0:
; X87-NEXT:    pushl %ebp
; X87-NEXT:    .cfi_def_cfa_offset 8
; X87-NEXT:    .cfi_offset %ebp, -8
; X87-NEXT:    movl %esp, %ebp
; X87-NEXT:    .cfi_def_cfa_register %ebp
; X87-NEXT:    andl $-8, %esp
; X87-NEXT:    subl $16, %esp
; X87-NEXT:    flds 8(%ebp)
; X87-NEXT:    wait
; X87-NEXT:    fnstcw {{[0-9]+}}(%esp)
; X87-NEXT:    movzwl {{[0-9]+}}(%esp), %eax
; X87-NEXT:    orl $3072, %eax # imm = 0xC00
; X87-NEXT:    movw %ax, {{[0-9]+}}(%esp)
; X87-NEXT:    fldcw {{[0-9]+}}(%esp)
; X87-NEXT:    fistpll {{[0-9]+}}(%esp)
; X87-NEXT:    fldcw {{[0-9]+}}(%esp)
; X87-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X87-NEXT:    movl %ebp, %esp
; X87-NEXT:    popl %ebp
; X87-NEXT:    .cfi_def_cfa %esp, 4
; X87-NEXT:    retl
  %result = call i32 @llvm.experimental.constrained.fptoui.i32.f32(float %x,
                                               metadata !"fpexcept.strict") #0
  ret i32 %result
}

define i64 @fptoui_f32toi64(float %x) #0 {
; SSE-X86-LABEL: fptoui_f32toi64:
; SSE-X86:       # %bb.0:
; SSE-X86-NEXT:    pushl %ebp
; SSE-X86-NEXT:    .cfi_def_cfa_offset 8
; SSE-X86-NEXT:    .cfi_offset %ebp, -8
; SSE-X86-NEXT:    movl %esp, %ebp
; SSE-X86-NEXT:    .cfi_def_cfa_register %ebp
; SSE-X86-NEXT:    andl $-8, %esp
; SSE-X86-NEXT:    subl $16, %esp
; SSE-X86-NEXT:    movss {{.*#+}} xmm0 = mem[0],zero,zero,zero
; SSE-X86-NEXT:    movss {{.*#+}} xmm1 = [9.22337203E+18,0.0E+0,0.0E+0,0.0E+0]
; SSE-X86-NEXT:    comiss %xmm0, %xmm1
; SSE-X86-NEXT:    jbe .LBB9_2
; SSE-X86-NEXT:  # %bb.1:
; SSE-X86-NEXT:    xorps %xmm1, %xmm1
; SSE-X86-NEXT:  .LBB9_2:
; SSE-X86-NEXT:    subss %xmm1, %xmm0
; SSE-X86-NEXT:    movss %xmm0, {{[0-9]+}}(%esp)
; SSE-X86-NEXT:    setbe %al
; SSE-X86-NEXT:    flds {{[0-9]+}}(%esp)
; SSE-X86-NEXT:    wait
; SSE-X86-NEXT:    fnstcw {{[0-9]+}}(%esp)
; SSE-X86-NEXT:    movzwl {{[0-9]+}}(%esp), %ecx
; SSE-X86-NEXT:    orl $3072, %ecx # imm = 0xC00
; SSE-X86-NEXT:    movw %cx, {{[0-9]+}}(%esp)
; SSE-X86-NEXT:    fldcw {{[0-9]+}}(%esp)
; SSE-X86-NEXT:    fistpll {{[0-9]+}}(%esp)
; SSE-X86-NEXT:    fldcw {{[0-9]+}}(%esp)
; SSE-X86-NEXT:    movzbl %al, %edx
; SSE-X86-NEXT:    shll $31, %edx
; SSE-X86-NEXT:    xorl {{[0-9]+}}(%esp), %edx
; SSE-X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; SSE-X86-NEXT:    movl %ebp, %esp
; SSE-X86-NEXT:    popl %ebp
; SSE-X86-NEXT:    .cfi_def_cfa %esp, 4
; SSE-X86-NEXT:    retl
;
; SSE-X64-LABEL: fptoui_f32toi64:
; SSE-X64:       # %bb.0:
; SSE-X64-NEXT:    movss {{.*#+}} xmm2 = [9.22337203E+18,0.0E+0,0.0E+0,0.0E+0]
; SSE-X64-NEXT:    comiss %xmm2, %xmm0
; SSE-X64-NEXT:    xorps %xmm1, %xmm1
; SSE-X64-NEXT:    jb .LBB9_2
; SSE-X64-NEXT:  # %bb.1:
; SSE-X64-NEXT:    movaps %xmm2, %xmm1
; SSE-X64-NEXT:  .LBB9_2:
; SSE-X64-NEXT:    subss %xmm1, %xmm0
; SSE-X64-NEXT:    cvttss2si %xmm0, %rcx
; SSE-X64-NEXT:    setae %al
; SSE-X64-NEXT:    movzbl %al, %eax
; SSE-X64-NEXT:    shlq $63, %rax
; SSE-X64-NEXT:    xorq %rcx, %rax
; SSE-X64-NEXT:    retq
;
; AVX1-X86-LABEL: fptoui_f32toi64:
; AVX1-X86:       # %bb.0:
; AVX1-X86-NEXT:    pushl %ebp
; AVX1-X86-NEXT:    .cfi_def_cfa_offset 8
; AVX1-X86-NEXT:    .cfi_offset %ebp, -8
; AVX1-X86-NEXT:    movl %esp, %ebp
; AVX1-X86-NEXT:    .cfi_def_cfa_register %ebp
; AVX1-X86-NEXT:    andl $-8, %esp
; AVX1-X86-NEXT:    subl $8, %esp
; AVX1-X86-NEXT:    vmovss {{.*#+}} xmm0 = mem[0],zero,zero,zero
; AVX1-X86-NEXT:    vmovss {{.*#+}} xmm1 = [9.22337203E+18,0.0E+0,0.0E+0,0.0E+0]
; AVX1-X86-NEXT:    vcomiss %xmm0, %xmm1
; AVX1-X86-NEXT:    jbe .LBB9_2
; AVX1-X86-NEXT:  # %bb.1:
; AVX1-X86-NEXT:    vxorps %xmm1, %xmm1, %xmm1
; AVX1-X86-NEXT:  .LBB9_2:
; AVX1-X86-NEXT:    vsubss %xmm1, %xmm0, %xmm0
; AVX1-X86-NEXT:    vmovss %xmm0, (%esp)
; AVX1-X86-NEXT:    flds (%esp)
; AVX1-X86-NEXT:    fisttpll (%esp)
; AVX1-X86-NEXT:    wait
; AVX1-X86-NEXT:    setbe %al
; AVX1-X86-NEXT:    movzbl %al, %edx
; AVX1-X86-NEXT:    shll $31, %edx
; AVX1-X86-NEXT:    xorl {{[0-9]+}}(%esp), %edx
; AVX1-X86-NEXT:    movl (%esp), %eax
; AVX1-X86-NEXT:    movl %ebp, %esp
; AVX1-X86-NEXT:    popl %ebp
; AVX1-X86-NEXT:    .cfi_def_cfa %esp, 4
; AVX1-X86-NEXT:    retl
;
; AVX1-X64-LABEL: fptoui_f32toi64:
; AVX1-X64:       # %bb.0:
; AVX1-X64-NEXT:    vmovss {{.*#+}} xmm1 = [9.22337203E+18,0.0E+0,0.0E+0,0.0E+0]
; AVX1-X64-NEXT:    vcomiss %xmm1, %xmm0
; AVX1-X64-NEXT:    vxorps %xmm2, %xmm2, %xmm2
; AVX1-X64-NEXT:    jb .LBB9_2
; AVX1-X64-NEXT:  # %bb.1:
; AVX1-X64-NEXT:    vmovaps %xmm1, %xmm2
; AVX1-X64-NEXT:  .LBB9_2:
; AVX1-X64-NEXT:    vsubss %xmm2, %xmm0, %xmm0
; AVX1-X64-NEXT:    vcvttss2si %xmm0, %rcx
; AVX1-X64-NEXT:    setae %al
; AVX1-X64-NEXT:    movzbl %al, %eax
; AVX1-X64-NEXT:    shlq $63, %rax
; AVX1-X64-NEXT:    xorq %rcx, %rax
; AVX1-X64-NEXT:    retq
;
; AVX512-X86-LABEL: fptoui_f32toi64:
; AVX512-X86:       # %bb.0:
; AVX512-X86-NEXT:    pushl %ebp
; AVX512-X86-NEXT:    .cfi_def_cfa_offset 8
; AVX512-X86-NEXT:    .cfi_offset %ebp, -8
; AVX512-X86-NEXT:    movl %esp, %ebp
; AVX512-X86-NEXT:    .cfi_def_cfa_register %ebp
; AVX512-X86-NEXT:    andl $-8, %esp
; AVX512-X86-NEXT:    subl $8, %esp
; AVX512-X86-NEXT:    vmovss {{.*#+}} xmm0 = mem[0],zero,zero,zero
; AVX512-X86-NEXT:    vmovss {{.*#+}} xmm1 = [9.22337203E+18,0.0E+0,0.0E+0,0.0E+0]
; AVX512-X86-NEXT:    xorl %edx, %edx
; AVX512-X86-NEXT:    vcomiss %xmm0, %xmm1
; AVX512-X86-NEXT:    setbe %dl
; AVX512-X86-NEXT:    kmovw %edx, %k1
; AVX512-X86-NEXT:    vmovss %xmm1, %xmm1, %xmm1 {%k1} {z}
; AVX512-X86-NEXT:    vsubss %xmm1, %xmm0, %xmm0
; AVX512-X86-NEXT:    vmovss %xmm0, (%esp)
; AVX512-X86-NEXT:    flds (%esp)
; AVX512-X86-NEXT:    fisttpll (%esp)
; AVX512-X86-NEXT:    wait
; AVX512-X86-NEXT:    shll $31, %edx
; AVX512-X86-NEXT:    xorl {{[0-9]+}}(%esp), %edx
; AVX512-X86-NEXT:    movl (%esp), %eax
; AVX512-X86-NEXT:    movl %ebp, %esp
; AVX512-X86-NEXT:    popl %ebp
; AVX512-X86-NEXT:    .cfi_def_cfa %esp, 4
; AVX512-X86-NEXT:    retl
;
; AVX512-X64-LABEL: fptoui_f32toi64:
; AVX512-X64:       # %bb.0:
; AVX512-X64-NEXT:    vcvttss2usi %xmm0, %rax
; AVX512-X64-NEXT:    retq
;
; X87-LABEL: fptoui_f32toi64:
; X87:       # %bb.0:
; X87-NEXT:    pushl %ebp
; X87-NEXT:    .cfi_def_cfa_offset 8
; X87-NEXT:    .cfi_offset %ebp, -8
; X87-NEXT:    movl %esp, %ebp
; X87-NEXT:    .cfi_def_cfa_register %ebp
; X87-NEXT:    andl $-8, %esp
; X87-NEXT:    subl $16, %esp
; X87-NEXT:    flds 8(%ebp)
; X87-NEXT:    flds {{\.?LCPI[0-9]+_[0-9]+}}
; X87-NEXT:    fcom %st(1)
; X87-NEXT:    wait
; X87-NEXT:    fnstsw %ax
; X87-NEXT:    xorl %edx, %edx
; X87-NEXT:    # kill: def $ah killed $ah killed $ax
; X87-NEXT:    sahf
; X87-NEXT:    setbe %al
; X87-NEXT:    fldz
; X87-NEXT:    jbe .LBB9_2
; X87-NEXT:  # %bb.1:
; X87-NEXT:    fstp %st(1)
; X87-NEXT:    fldz
; X87-NEXT:  .LBB9_2:
; X87-NEXT:    fstp %st(0)
; X87-NEXT:    fsubrp %st, %st(1)
; X87-NEXT:    wait
; X87-NEXT:    fnstcw {{[0-9]+}}(%esp)
; X87-NEXT:    movzwl {{[0-9]+}}(%esp), %ecx
; X87-NEXT:    orl $3072, %ecx # imm = 0xC00
; X87-NEXT:    movw %cx, {{[0-9]+}}(%esp)
; X87-NEXT:    fldcw {{[0-9]+}}(%esp)
; X87-NEXT:    fistpll {{[0-9]+}}(%esp)
; X87-NEXT:    fldcw {{[0-9]+}}(%esp)
; X87-NEXT:    movb %al, %dl
; X87-NEXT:    shll $31, %edx
; X87-NEXT:    xorl {{[0-9]+}}(%esp), %edx
; X87-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X87-NEXT:    movl %ebp, %esp
; X87-NEXT:    popl %ebp
; X87-NEXT:    .cfi_def_cfa %esp, 4
; X87-NEXT:    retl
  %result = call i64 @llvm.experimental.constrained.fptoui.i64.f32(float %x,
                                               metadata !"fpexcept.strict") #0
  ret i64 %result
}

define i8 @fptosi_f64toi8(double %x) #0 {
; SSE-X86-LABEL: fptosi_f64toi8:
; SSE-X86:       # %bb.0:
; SSE-X86-NEXT:    cvttsd2si {{[0-9]+}}(%esp), %eax
; SSE-X86-NEXT:    # kill: def $al killed $al killed $eax
; SSE-X86-NEXT:    retl
;
; SSE-X64-LABEL: fptosi_f64toi8:
; SSE-X64:       # %bb.0:
; SSE-X64-NEXT:    cvttsd2si %xmm0, %eax
; SSE-X64-NEXT:    # kill: def $al killed $al killed $eax
; SSE-X64-NEXT:    retq
;
; AVX-X86-LABEL: fptosi_f64toi8:
; AVX-X86:       # %bb.0:
; AVX-X86-NEXT:    vcvttsd2si {{[0-9]+}}(%esp), %eax
; AVX-X86-NEXT:    # kill: def $al killed $al killed $eax
; AVX-X86-NEXT:    retl
;
; AVX-X64-LABEL: fptosi_f64toi8:
; AVX-X64:       # %bb.0:
; AVX-X64-NEXT:    vcvttsd2si %xmm0, %eax
; AVX-X64-NEXT:    # kill: def $al killed $al killed $eax
; AVX-X64-NEXT:    retq
;
; X87-LABEL: fptosi_f64toi8:
; X87:       # %bb.0:
; X87-NEXT:    subl $8, %esp
; X87-NEXT:    .cfi_def_cfa_offset 12
; X87-NEXT:    fldl {{[0-9]+}}(%esp)
; X87-NEXT:    wait
; X87-NEXT:    fnstcw {{[0-9]+}}(%esp)
; X87-NEXT:    movzwl {{[0-9]+}}(%esp), %eax
; X87-NEXT:    orl $3072, %eax # imm = 0xC00
; X87-NEXT:    movw %ax, {{[0-9]+}}(%esp)
; X87-NEXT:    fldcw {{[0-9]+}}(%esp)
; X87-NEXT:    fistps {{[0-9]+}}(%esp)
; X87-NEXT:    fldcw {{[0-9]+}}(%esp)
; X87-NEXT:    movzbl {{[0-9]+}}(%esp), %eax
; X87-NEXT:    addl $8, %esp
; X87-NEXT:    .cfi_def_cfa_offset 4
; X87-NEXT:    retl
  %result = call i8 @llvm.experimental.constrained.fptosi.i8.f64(double %x,
                                               metadata !"fpexcept.strict") #0
  ret i8 %result
}

define i16 @fptosi_f64toi16(double %x) #0 {
; SSE-X86-LABEL: fptosi_f64toi16:
; SSE-X86:       # %bb.0:
; SSE-X86-NEXT:    cvttsd2si {{[0-9]+}}(%esp), %eax
; SSE-X86-NEXT:    # kill: def $ax killed $ax killed $eax
; SSE-X86-NEXT:    retl
;
; SSE-X64-LABEL: fptosi_f64toi16:
; SSE-X64:       # %bb.0:
; SSE-X64-NEXT:    cvttsd2si %xmm0, %eax
; SSE-X64-NEXT:    # kill: def $ax killed $ax killed $eax
; SSE-X64-NEXT:    retq
;
; AVX-X86-LABEL: fptosi_f64toi16:
; AVX-X86:       # %bb.0:
; AVX-X86-NEXT:    vcvttsd2si {{[0-9]+}}(%esp), %eax
; AVX-X86-NEXT:    # kill: def $ax killed $ax killed $eax
; AVX-X86-NEXT:    retl
;
; AVX-X64-LABEL: fptosi_f64toi16:
; AVX-X64:       # %bb.0:
; AVX-X64-NEXT:    vcvttsd2si %xmm0, %eax
; AVX-X64-NEXT:    # kill: def $ax killed $ax killed $eax
; AVX-X64-NEXT:    retq
;
; X87-LABEL: fptosi_f64toi16:
; X87:       # %bb.0:
; X87-NEXT:    subl $8, %esp
; X87-NEXT:    .cfi_def_cfa_offset 12
; X87-NEXT:    fldl {{[0-9]+}}(%esp)
; X87-NEXT:    wait
; X87-NEXT:    fnstcw {{[0-9]+}}(%esp)
; X87-NEXT:    movzwl {{[0-9]+}}(%esp), %eax
; X87-NEXT:    orl $3072, %eax # imm = 0xC00
; X87-NEXT:    movw %ax, {{[0-9]+}}(%esp)
; X87-NEXT:    fldcw {{[0-9]+}}(%esp)
; X87-NEXT:    fistps {{[0-9]+}}(%esp)
; X87-NEXT:    fldcw {{[0-9]+}}(%esp)
; X87-NEXT:    movzwl {{[0-9]+}}(%esp), %eax
; X87-NEXT:    addl $8, %esp
; X87-NEXT:    .cfi_def_cfa_offset 4
; X87-NEXT:    retl
  %result = call i16 @llvm.experimental.constrained.fptosi.i16.f64(double %x,
                                               metadata !"fpexcept.strict") #0
  ret i16 %result
}

define i32 @fptosi_f64toi32(double %x) #0 {
; SSE-X86-LABEL: fptosi_f64toi32:
; SSE-X86:       # %bb.0:
; SSE-X86-NEXT:    cvttsd2si {{[0-9]+}}(%esp), %eax
; SSE-X86-NEXT:    retl
;
; SSE-X64-LABEL: fptosi_f64toi32:
; SSE-X64:       # %bb.0:
; SSE-X64-NEXT:    cvttsd2si %xmm0, %eax
; SSE-X64-NEXT:    retq
;
; AVX-X86-LABEL: fptosi_f64toi32:
; AVX-X86:       # %bb.0:
; AVX-X86-NEXT:    vcvttsd2si {{[0-9]+}}(%esp), %eax
; AVX-X86-NEXT:    retl
;
; AVX-X64-LABEL: fptosi_f64toi32:
; AVX-X64:       # %bb.0:
; AVX-X64-NEXT:    vcvttsd2si %xmm0, %eax
; AVX-X64-NEXT:    retq
;
; X87-LABEL: fptosi_f64toi32:
; X87:       # %bb.0:
; X87-NEXT:    subl $8, %esp
; X87-NEXT:    .cfi_def_cfa_offset 12
; X87-NEXT:    fldl {{[0-9]+}}(%esp)
; X87-NEXT:    wait
; X87-NEXT:    fnstcw (%esp)
; X87-NEXT:    movzwl (%esp), %eax
; X87-NEXT:    orl $3072, %eax # imm = 0xC00
; X87-NEXT:    movw %ax, {{[0-9]+}}(%esp)
; X87-NEXT:    fldcw {{[0-9]+}}(%esp)
; X87-NEXT:    fistpl {{[0-9]+}}(%esp)
; X87-NEXT:    fldcw (%esp)
; X87-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X87-NEXT:    addl $8, %esp
; X87-NEXT:    .cfi_def_cfa_offset 4
; X87-NEXT:    retl
  %result = call i32 @llvm.experimental.constrained.fptosi.i32.f64(double %x,
                                               metadata !"fpexcept.strict") #0
  ret i32 %result
}

define i64 @fptosi_f64toi64(double %x) #0 {
; SSE-X86-LABEL: fptosi_f64toi64:
; SSE-X86:       # %bb.0:
; SSE-X86-NEXT:    pushl %ebp
; SSE-X86-NEXT:    .cfi_def_cfa_offset 8
; SSE-X86-NEXT:    .cfi_offset %ebp, -8
; SSE-X86-NEXT:    movl %esp, %ebp
; SSE-X86-NEXT:    .cfi_def_cfa_register %ebp
; SSE-X86-NEXT:    andl $-8, %esp
; SSE-X86-NEXT:    subl $16, %esp
; SSE-X86-NEXT:    movsd {{.*#+}} xmm0 = mem[0],zero
; SSE-X86-NEXT:    movsd %xmm0, {{[0-9]+}}(%esp)
; SSE-X86-NEXT:    fldl {{[0-9]+}}(%esp)
; SSE-X86-NEXT:    wait
; SSE-X86-NEXT:    fnstcw {{[0-9]+}}(%esp)
; SSE-X86-NEXT:    movzwl {{[0-9]+}}(%esp), %eax
; SSE-X86-NEXT:    orl $3072, %eax # imm = 0xC00
; SSE-X86-NEXT:    movw %ax, {{[0-9]+}}(%esp)
; SSE-X86-NEXT:    fldcw {{[0-9]+}}(%esp)
; SSE-X86-NEXT:    fistpll {{[0-9]+}}(%esp)
; SSE-X86-NEXT:    fldcw {{[0-9]+}}(%esp)
; SSE-X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; SSE-X86-NEXT:    movl {{[0-9]+}}(%esp), %edx
; SSE-X86-NEXT:    movl %ebp, %esp
; SSE-X86-NEXT:    popl %ebp
; SSE-X86-NEXT:    .cfi_def_cfa %esp, 4
; SSE-X86-NEXT:    retl
;
; SSE-X64-LABEL: fptosi_f64toi64:
; SSE-X64:       # %bb.0:
; SSE-X64-NEXT:    cvttsd2si %xmm0, %rax
; SSE-X64-NEXT:    retq
;
; AVX-X86-LABEL: fptosi_f64toi64:
; AVX-X86:       # %bb.0:
; AVX-X86-NEXT:    pushl %ebp
; AVX-X86-NEXT:    .cfi_def_cfa_offset 8
; AVX-X86-NEXT:    .cfi_offset %ebp, -8
; AVX-X86-NEXT:    movl %esp, %ebp
; AVX-X86-NEXT:    .cfi_def_cfa_register %ebp
; AVX-X86-NEXT:    andl $-8, %esp
; AVX-X86-NEXT:    subl $8, %esp
; AVX-X86-NEXT:    vmovsd {{.*#+}} xmm0 = mem[0],zero
; AVX-X86-NEXT:    vmovsd %xmm0, (%esp)
; AVX-X86-NEXT:    fldl (%esp)
; AVX-X86-NEXT:    fisttpll (%esp)
; AVX-X86-NEXT:    wait
; AVX-X86-NEXT:    movl (%esp), %eax
; AVX-X86-NEXT:    movl {{[0-9]+}}(%esp), %edx
; AVX-X86-NEXT:    movl %ebp, %esp
; AVX-X86-NEXT:    popl %ebp
; AVX-X86-NEXT:    .cfi_def_cfa %esp, 4
; AVX-X86-NEXT:    retl
;
; AVX-X64-LABEL: fptosi_f64toi64:
; AVX-X64:       # %bb.0:
; AVX-X64-NEXT:    vcvttsd2si %xmm0, %rax
; AVX-X64-NEXT:    retq
;
; X87-LABEL: fptosi_f64toi64:
; X87:       # %bb.0:
; X87-NEXT:    pushl %ebp
; X87-NEXT:    .cfi_def_cfa_offset 8
; X87-NEXT:    .cfi_offset %ebp, -8
; X87-NEXT:    movl %esp, %ebp
; X87-NEXT:    .cfi_def_cfa_register %ebp
; X87-NEXT:    andl $-8, %esp
; X87-NEXT:    subl $16, %esp
; X87-NEXT:    fldl 8(%ebp)
; X87-NEXT:    wait
; X87-NEXT:    fnstcw {{[0-9]+}}(%esp)
; X87-NEXT:    movzwl {{[0-9]+}}(%esp), %eax
; X87-NEXT:    orl $3072, %eax # imm = 0xC00
; X87-NEXT:    movw %ax, {{[0-9]+}}(%esp)
; X87-NEXT:    fldcw {{[0-9]+}}(%esp)
; X87-NEXT:    fistpll {{[0-9]+}}(%esp)
; X87-NEXT:    fldcw {{[0-9]+}}(%esp)
; X87-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X87-NEXT:    movl {{[0-9]+}}(%esp), %edx
; X87-NEXT:    movl %ebp, %esp
; X87-NEXT:    popl %ebp
; X87-NEXT:    .cfi_def_cfa %esp, 4
; X87-NEXT:    retl
  %result = call i64 @llvm.experimental.constrained.fptosi.i64.f64(double %x,
                                               metadata !"fpexcept.strict") #0
  ret i64 %result
}

define i1 @fptoui_f64toi1(double %x) #0 {
; SSE-X86-LABEL: fptoui_f64toi1:
; SSE-X86:       # %bb.0:
; SSE-X86-NEXT:    cvttsd2si {{[0-9]+}}(%esp), %eax
; SSE-X86-NEXT:    # kill: def $al killed $al killed $eax
; SSE-X86-NEXT:    retl
;
; SSE-X64-LABEL: fptoui_f64toi1:
; SSE-X64:       # %bb.0:
; SSE-X64-NEXT:    cvttsd2si %xmm0, %eax
; SSE-X64-NEXT:    # kill: def $al killed $al killed $eax
; SSE-X64-NEXT:    retq
;
; AVX-X86-LABEL: fptoui_f64toi1:
; AVX-X86:       # %bb.0:
; AVX-X86-NEXT:    vcvttsd2si {{[0-9]+}}(%esp), %eax
; AVX-X86-NEXT:    # kill: def $al killed $al killed $eax
; AVX-X86-NEXT:    retl
;
; AVX-X64-LABEL: fptoui_f64toi1:
; AVX-X64:       # %bb.0:
; AVX-X64-NEXT:    vcvttsd2si %xmm0, %eax
; AVX-X64-NEXT:    # kill: def $al killed $al killed $eax
; AVX-X64-NEXT:    retq
;
; X87-LABEL: fptoui_f64toi1:
; X87:       # %bb.0:
; X87-NEXT:    subl $8, %esp
; X87-NEXT:    .cfi_def_cfa_offset 12
; X87-NEXT:    fldl {{[0-9]+}}(%esp)
; X87-NEXT:    wait
; X87-NEXT:    fnstcw {{[0-9]+}}(%esp)
; X87-NEXT:    movzwl {{[0-9]+}}(%esp), %eax
; X87-NEXT:    orl $3072, %eax # imm = 0xC00
; X87-NEXT:    movw %ax, {{[0-9]+}}(%esp)
; X87-NEXT:    fldcw {{[0-9]+}}(%esp)
; X87-NEXT:    fistps {{[0-9]+}}(%esp)
; X87-NEXT:    fldcw {{[0-9]+}}(%esp)
; X87-NEXT:    movzbl {{[0-9]+}}(%esp), %eax
; X87-NEXT:    addl $8, %esp
; X87-NEXT:    .cfi_def_cfa_offset 4
; X87-NEXT:    retl
  %result = call i1 @llvm.experimental.constrained.fptoui.i1.f64(double %x,
                                               metadata !"fpexcept.strict") #0
  ret i1 %result
}

define i8 @fptoui_f64toi8(double %x) #0 {
; SSE-X86-LABEL: fptoui_f64toi8:
; SSE-X86:       # %bb.0:
; SSE-X86-NEXT:    cvttsd2si {{[0-9]+}}(%esp), %eax
; SSE-X86-NEXT:    # kill: def $al killed $al killed $eax
; SSE-X86-NEXT:    retl
;
; SSE-X64-LABEL: fptoui_f64toi8:
; SSE-X64:       # %bb.0:
; SSE-X64-NEXT:    cvttsd2si %xmm0, %eax
; SSE-X64-NEXT:    # kill: def $al killed $al killed $eax
; SSE-X64-NEXT:    retq
;
; AVX-X86-LABEL: fptoui_f64toi8:
; AVX-X86:       # %bb.0:
; AVX-X86-NEXT:    vcvttsd2si {{[0-9]+}}(%esp), %eax
; AVX-X86-NEXT:    # kill: def $al killed $al killed $eax
; AVX-X86-NEXT:    retl
;
; AVX-X64-LABEL: fptoui_f64toi8:
; AVX-X64:       # %bb.0:
; AVX-X64-NEXT:    vcvttsd2si %xmm0, %eax
; AVX-X64-NEXT:    # kill: def $al killed $al killed $eax
; AVX-X64-NEXT:    retq
;
; X87-LABEL: fptoui_f64toi8:
; X87:       # %bb.0:
; X87-NEXT:    subl $8, %esp
; X87-NEXT:    .cfi_def_cfa_offset 12
; X87-NEXT:    fldl {{[0-9]+}}(%esp)
; X87-NEXT:    wait
; X87-NEXT:    fnstcw {{[0-9]+}}(%esp)
; X87-NEXT:    movzwl {{[0-9]+}}(%esp), %eax
; X87-NEXT:    orl $3072, %eax # imm = 0xC00
; X87-NEXT:    movw %ax, {{[0-9]+}}(%esp)
; X87-NEXT:    fldcw {{[0-9]+}}(%esp)
; X87-NEXT:    fistps {{[0-9]+}}(%esp)
; X87-NEXT:    fldcw {{[0-9]+}}(%esp)
; X87-NEXT:    movzbl {{[0-9]+}}(%esp), %eax
; X87-NEXT:    addl $8, %esp
; X87-NEXT:    .cfi_def_cfa_offset 4
; X87-NEXT:    retl
  %result = call i8 @llvm.experimental.constrained.fptoui.i8.f64(double %x,
                                               metadata !"fpexcept.strict") #0
  ret i8 %result
}

define i16 @fptoui_f64toi16(double %x) #0 {
; SSE-X86-LABEL: fptoui_f64toi16:
; SSE-X86:       # %bb.0:
; SSE-X86-NEXT:    cvttsd2si {{[0-9]+}}(%esp), %eax
; SSE-X86-NEXT:    # kill: def $ax killed $ax killed $eax
; SSE-X86-NEXT:    retl
;
; SSE-X64-LABEL: fptoui_f64toi16:
; SSE-X64:       # %bb.0:
; SSE-X64-NEXT:    cvttsd2si %xmm0, %eax
; SSE-X64-NEXT:    # kill: def $ax killed $ax killed $eax
; SSE-X64-NEXT:    retq
;
; AVX-X86-LABEL: fptoui_f64toi16:
; AVX-X86:       # %bb.0:
; AVX-X86-NEXT:    vcvttsd2si {{[0-9]+}}(%esp), %eax
; AVX-X86-NEXT:    # kill: def $ax killed $ax killed $eax
; AVX-X86-NEXT:    retl
;
; AVX-X64-LABEL: fptoui_f64toi16:
; AVX-X64:       # %bb.0:
; AVX-X64-NEXT:    vcvttsd2si %xmm0, %eax
; AVX-X64-NEXT:    # kill: def $ax killed $ax killed $eax
; AVX-X64-NEXT:    retq
;
; X87-LABEL: fptoui_f64toi16:
; X87:       # %bb.0:
; X87-NEXT:    subl $8, %esp
; X87-NEXT:    .cfi_def_cfa_offset 12
; X87-NEXT:    fldl {{[0-9]+}}(%esp)
; X87-NEXT:    wait
; X87-NEXT:    fnstcw (%esp)
; X87-NEXT:    movzwl (%esp), %eax
; X87-NEXT:    orl $3072, %eax # imm = 0xC00
; X87-NEXT:    movw %ax, {{[0-9]+}}(%esp)
; X87-NEXT:    fldcw {{[0-9]+}}(%esp)
; X87-NEXT:    fistpl {{[0-9]+}}(%esp)
; X87-NEXT:    fldcw (%esp)
; X87-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X87-NEXT:    # kill: def $ax killed $ax killed $eax
; X87-NEXT:    addl $8, %esp
; X87-NEXT:    .cfi_def_cfa_offset 4
; X87-NEXT:    retl
  %result = call i16 @llvm.experimental.constrained.fptoui.i16.f64(double %x,
                                               metadata !"fpexcept.strict") #0
  ret i16 %result
}

define i32 @fptoui_f64toi32(double %x) #0 {
; SSE-X86-LABEL: fptoui_f64toi32:
; SSE-X86:       # %bb.0:
; SSE-X86-NEXT:    movsd {{.*#+}} xmm0 = mem[0],zero
; SSE-X86-NEXT:    movsd {{.*#+}} xmm2 = [2.147483648E+9,0.0E+0]
; SSE-X86-NEXT:    comisd %xmm0, %xmm2
; SSE-X86-NEXT:    xorpd %xmm1, %xmm1
; SSE-X86-NEXT:    ja .LBB17_2
; SSE-X86-NEXT:  # %bb.1:
; SSE-X86-NEXT:    movapd %xmm2, %xmm1
; SSE-X86-NEXT:  .LBB17_2:
; SSE-X86-NEXT:    setbe %al
; SSE-X86-NEXT:    movzbl %al, %ecx
; SSE-X86-NEXT:    shll $31, %ecx
; SSE-X86-NEXT:    subsd %xmm1, %xmm0
; SSE-X86-NEXT:    cvttsd2si %xmm0, %eax
; SSE-X86-NEXT:    xorl %ecx, %eax
; SSE-X86-NEXT:    retl
;
; SSE-X64-LABEL: fptoui_f64toi32:
; SSE-X64:       # %bb.0:
; SSE-X64-NEXT:    cvttsd2si %xmm0, %rax
; SSE-X64-NEXT:    # kill: def $eax killed $eax killed $rax
; SSE-X64-NEXT:    retq
;
; AVX1-X86-LABEL: fptoui_f64toi32:
; AVX1-X86:       # %bb.0:
; AVX1-X86-NEXT:    pushl %ebp
; AVX1-X86-NEXT:    .cfi_def_cfa_offset 8
; AVX1-X86-NEXT:    .cfi_offset %ebp, -8
; AVX1-X86-NEXT:    movl %esp, %ebp
; AVX1-X86-NEXT:    .cfi_def_cfa_register %ebp
; AVX1-X86-NEXT:    andl $-8, %esp
; AVX1-X86-NEXT:    subl $8, %esp
; AVX1-X86-NEXT:    vmovsd {{.*#+}} xmm0 = mem[0],zero
; AVX1-X86-NEXT:    vmovsd %xmm0, (%esp)
; AVX1-X86-NEXT:    fldl (%esp)
; AVX1-X86-NEXT:    fisttpll (%esp)
; AVX1-X86-NEXT:    wait
; AVX1-X86-NEXT:    movl (%esp), %eax
; AVX1-X86-NEXT:    movl %ebp, %esp
; AVX1-X86-NEXT:    popl %ebp
; AVX1-X86-NEXT:    .cfi_def_cfa %esp, 4
; AVX1-X86-NEXT:    retl
;
; AVX1-X64-LABEL: fptoui_f64toi32:
; AVX1-X64:       # %bb.0:
; AVX1-X64-NEXT:    vcvttsd2si %xmm0, %rax
; AVX1-X64-NEXT:    # kill: def $eax killed $eax killed $rax
; AVX1-X64-NEXT:    retq
;
; AVX512-X86-LABEL: fptoui_f64toi32:
; AVX512-X86:       # %bb.0:
; AVX512-X86-NEXT:    vcvttsd2usi {{[0-9]+}}(%esp), %eax
; AVX512-X86-NEXT:    retl
;
; AVX512-X64-LABEL: fptoui_f64toi32:
; AVX512-X64:       # %bb.0:
; AVX512-X64-NEXT:    vcvttsd2usi %xmm0, %eax
; AVX512-X64-NEXT:    retq
;
; X87-LABEL: fptoui_f64toi32:
; X87:       # %bb.0:
; X87-NEXT:    pushl %ebp
; X87-NEXT:    .cfi_def_cfa_offset 8
; X87-NEXT:    .cfi_offset %ebp, -8
; X87-NEXT:    movl %esp, %ebp
; X87-NEXT:    .cfi_def_cfa_register %ebp
; X87-NEXT:    andl $-8, %esp
; X87-NEXT:    subl $16, %esp
; X87-NEXT:    fldl 8(%ebp)
; X87-NEXT:    wait
; X87-NEXT:    fnstcw {{[0-9]+}}(%esp)
; X87-NEXT:    movzwl {{[0-9]+}}(%esp), %eax
; X87-NEXT:    orl $3072, %eax # imm = 0xC00
; X87-NEXT:    movw %ax, {{[0-9]+}}(%esp)
; X87-NEXT:    fldcw {{[0-9]+}}(%esp)
; X87-NEXT:    fistpll {{[0-9]+}}(%esp)
; X87-NEXT:    fldcw {{[0-9]+}}(%esp)
; X87-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X87-NEXT:    movl %ebp, %esp
; X87-NEXT:    popl %ebp
; X87-NEXT:    .cfi_def_cfa %esp, 4
; X87-NEXT:    retl
  %result = call i32 @llvm.experimental.constrained.fptoui.i32.f64(double %x,
                                               metadata !"fpexcept.strict") #0
  ret i32 %result
}

define i64 @fptoui_f64toi64(double %x) #0 {
; SSE-X86-LABEL: fptoui_f64toi64:
; SSE-X86:       # %bb.0:
; SSE-X86-NEXT:    pushl %ebp
; SSE-X86-NEXT:    .cfi_def_cfa_offset 8
; SSE-X86-NEXT:    .cfi_offset %ebp, -8
; SSE-X86-NEXT:    movl %esp, %ebp
; SSE-X86-NEXT:    .cfi_def_cfa_register %ebp
; SSE-X86-NEXT:    andl $-8, %esp
; SSE-X86-NEXT:    subl $16, %esp
; SSE-X86-NEXT:    movsd {{.*#+}} xmm0 = mem[0],zero
; SSE-X86-NEXT:    movsd {{.*#+}} xmm1 = [9.2233720368547758E+18,0.0E+0]
; SSE-X86-NEXT:    comisd %xmm0, %xmm1
; SSE-X86-NEXT:    jbe .LBB18_2
; SSE-X86-NEXT:  # %bb.1:
; SSE-X86-NEXT:    xorpd %xmm1, %xmm1
; SSE-X86-NEXT:  .LBB18_2:
; SSE-X86-NEXT:    subsd %xmm1, %xmm0
; SSE-X86-NEXT:    movsd %xmm0, {{[0-9]+}}(%esp)
; SSE-X86-NEXT:    setbe %al
; SSE-X86-NEXT:    fldl {{[0-9]+}}(%esp)
; SSE-X86-NEXT:    wait
; SSE-X86-NEXT:    fnstcw {{[0-9]+}}(%esp)
; SSE-X86-NEXT:    movzwl {{[0-9]+}}(%esp), %ecx
; SSE-X86-NEXT:    orl $3072, %ecx # imm = 0xC00
; SSE-X86-NEXT:    movw %cx, {{[0-9]+}}(%esp)
; SSE-X86-NEXT:    fldcw {{[0-9]+}}(%esp)
; SSE-X86-NEXT:    fistpll {{[0-9]+}}(%esp)
; SSE-X86-NEXT:    fldcw {{[0-9]+}}(%esp)
; SSE-X86-NEXT:    movzbl %al, %edx
; SSE-X86-NEXT:    shll $31, %edx
; SSE-X86-NEXT:    xorl {{[0-9]+}}(%esp), %edx
; SSE-X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; SSE-X86-NEXT:    movl %ebp, %esp
; SSE-X86-NEXT:    popl %ebp
; SSE-X86-NEXT:    .cfi_def_cfa %esp, 4
; SSE-X86-NEXT:    retl
;
; SSE-X64-LABEL: fptoui_f64toi64:
; SSE-X64:       # %bb.0:
; SSE-X64-NEXT:    movsd {{.*#+}} xmm2 = [9.2233720368547758E+18,0.0E+0]
; SSE-X64-NEXT:    comisd %xmm2, %xmm0
; SSE-X64-NEXT:    xorpd %xmm1, %xmm1
; SSE-X64-NEXT:    jb .LBB18_2
; SSE-X64-NEXT:  # %bb.1:
; SSE-X64-NEXT:    movapd %xmm2, %xmm1
; SSE-X64-NEXT:  .LBB18_2:
; SSE-X64-NEXT:    subsd %xmm1, %xmm0
; SSE-X64-NEXT:    cvttsd2si %xmm0, %rcx
; SSE-X64-NEXT:    setae %al
; SSE-X64-NEXT:    movzbl %al, %eax
; SSE-X64-NEXT:    shlq $63, %rax
; SSE-X64-NEXT:    xorq %rcx, %rax
; SSE-X64-NEXT:    retq
;
; AVX1-X86-LABEL: fptoui_f64toi64:
; AVX1-X86:       # %bb.0:
; AVX1-X86-NEXT:    pushl %ebp
; AVX1-X86-NEXT:    .cfi_def_cfa_offset 8
; AVX1-X86-NEXT:    .cfi_offset %ebp, -8
; AVX1-X86-NEXT:    movl %esp, %ebp
; AVX1-X86-NEXT:    .cfi_def_cfa_register %ebp
; AVX1-X86-NEXT:    andl $-8, %esp
; AVX1-X86-NEXT:    subl $8, %esp
; AVX1-X86-NEXT:    vmovsd {{.*#+}} xmm0 = mem[0],zero
; AVX1-X86-NEXT:    vmovsd {{.*#+}} xmm1 = [9.2233720368547758E+18,0.0E+0]
; AVX1-X86-NEXT:    vcomisd %xmm0, %xmm1
; AVX1-X86-NEXT:    jbe .LBB18_2
; AVX1-X86-NEXT:  # %bb.1:
; AVX1-X86-NEXT:    vxorpd %xmm1, %xmm1, %xmm1
; AVX1-X86-NEXT:  .LBB18_2:
; AVX1-X86-NEXT:    vsubsd %xmm1, %xmm0, %xmm0
; AVX1-X86-NEXT:    vmovsd %xmm0, (%esp)
; AVX1-X86-NEXT:    fldl (%esp)
; AVX1-X86-NEXT:    fisttpll (%esp)
; AVX1-X86-NEXT:    wait
; AVX1-X86-NEXT:    setbe %al
; AVX1-X86-NEXT:    movzbl %al, %edx
; AVX1-X86-NEXT:    shll $31, %edx
; AVX1-X86-NEXT:    xorl {{[0-9]+}}(%esp), %edx
; AVX1-X86-NEXT:    movl (%esp), %eax
; AVX1-X86-NEXT:    movl %ebp, %esp
; AVX1-X86-NEXT:    popl %ebp
; AVX1-X86-NEXT:    .cfi_def_cfa %esp, 4
; AVX1-X86-NEXT:    retl
;
; AVX1-X64-LABEL: fptoui_f64toi64:
; AVX1-X64:       # %bb.0:
; AVX1-X64-NEXT:    vmovsd {{.*#+}} xmm1 = [9.2233720368547758E+18,0.0E+0]
; AVX1-X64-NEXT:    vcomisd %xmm1, %xmm0
; AVX1-X64-NEXT:    vxorpd %xmm2, %xmm2, %xmm2
; AVX1-X64-NEXT:    jb .LBB18_2
; AVX1-X64-NEXT:  # %bb.1:
; AVX1-X64-NEXT:    vmovapd %xmm1, %xmm2
; AVX1-X64-NEXT:  .LBB18_2:
; AVX1-X64-NEXT:    vsubsd %xmm2, %xmm0, %xmm0
; AVX1-X64-NEXT:    vcvttsd2si %xmm0, %rcx
; AVX1-X64-NEXT:    setae %al
; AVX1-X64-NEXT:    movzbl %al, %eax
; AVX1-X64-NEXT:    shlq $63, %rax
; AVX1-X64-NEXT:    xorq %rcx, %rax
; AVX1-X64-NEXT:    retq
;
; AVX512-X86-LABEL: fptoui_f64toi64:
; AVX512-X86:       # %bb.0:
; AVX512-X86-NEXT:    pushl %ebp
; AVX512-X86-NEXT:    .cfi_def_cfa_offset 8
; AVX512-X86-NEXT:    .cfi_offset %ebp, -8
; AVX512-X86-NEXT:    movl %esp, %ebp
; AVX512-X86-NEXT:    .cfi_def_cfa_register %ebp
; AVX512-X86-NEXT:    andl $-8, %esp
; AVX512-X86-NEXT:    subl $8, %esp
; AVX512-X86-NEXT:    vmovsd {{.*#+}} xmm0 = mem[0],zero
; AVX512-X86-NEXT:    vmovsd {{.*#+}} xmm1 = [9.2233720368547758E+18,0.0E+0]
; AVX512-X86-NEXT:    xorl %edx, %edx
; AVX512-X86-NEXT:    vcomisd %xmm0, %xmm1
; AVX512-X86-NEXT:    setbe %dl
; AVX512-X86-NEXT:    kmovw %edx, %k1
; AVX512-X86-NEXT:    vmovsd %xmm1, %xmm1, %xmm1 {%k1} {z}
; AVX512-X86-NEXT:    vsubsd %xmm1, %xmm0, %xmm0
; AVX512-X86-NEXT:    vmovsd %xmm0, (%esp)
; AVX512-X86-NEXT:    fldl (%esp)
; AVX512-X86-NEXT:    fisttpll (%esp)
; AVX512-X86-NEXT:    wait
; AVX512-X86-NEXT:    shll $31, %edx
; AVX512-X86-NEXT:    xorl {{[0-9]+}}(%esp), %edx
; AVX512-X86-NEXT:    movl (%esp), %eax
; AVX512-X86-NEXT:    movl %ebp, %esp
; AVX512-X86-NEXT:    popl %ebp
; AVX512-X86-NEXT:    .cfi_def_cfa %esp, 4
; AVX512-X86-NEXT:    retl
;
; AVX512-X64-LABEL: fptoui_f64toi64:
; AVX512-X64:       # %bb.0:
; AVX512-X64-NEXT:    vcvttsd2usi %xmm0, %rax
; AVX512-X64-NEXT:    retq
;
; X87-LABEL: fptoui_f64toi64:
; X87:       # %bb.0:
; X87-NEXT:    pushl %ebp
; X87-NEXT:    .cfi_def_cfa_offset 8
; X87-NEXT:    .cfi_offset %ebp, -8
; X87-NEXT:    movl %esp, %ebp
; X87-NEXT:    .cfi_def_cfa_register %ebp
; X87-NEXT:    andl $-8, %esp
; X87-NEXT:    subl $16, %esp
; X87-NEXT:    fldl 8(%ebp)
; X87-NEXT:    flds {{\.?LCPI[0-9]+_[0-9]+}}
; X87-NEXT:    fcom %st(1)
; X87-NEXT:    wait
; X87-NEXT:    fnstsw %ax
; X87-NEXT:    xorl %edx, %edx
; X87-NEXT:    # kill: def $ah killed $ah killed $ax
; X87-NEXT:    sahf
; X87-NEXT:    setbe %al
; X87-NEXT:    fldz
; X87-NEXT:    jbe .LBB18_2
; X87-NEXT:  # %bb.1:
; X87-NEXT:    fstp %st(1)
; X87-NEXT:    fldz
; X87-NEXT:  .LBB18_2:
; X87-NEXT:    fstp %st(0)
; X87-NEXT:    fsubrp %st, %st(1)
; X87-NEXT:    wait
; X87-NEXT:    fnstcw {{[0-9]+}}(%esp)
; X87-NEXT:    movzwl {{[0-9]+}}(%esp), %ecx
; X87-NEXT:    orl $3072, %ecx # imm = 0xC00
; X87-NEXT:    movw %cx, {{[0-9]+}}(%esp)
; X87-NEXT:    fldcw {{[0-9]+}}(%esp)
; X87-NEXT:    fistpll {{[0-9]+}}(%esp)
; X87-NEXT:    fldcw {{[0-9]+}}(%esp)
; X87-NEXT:    movb %al, %dl
; X87-NEXT:    shll $31, %edx
; X87-NEXT:    xorl {{[0-9]+}}(%esp), %edx
; X87-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X87-NEXT:    movl %ebp, %esp
; X87-NEXT:    popl %ebp
; X87-NEXT:    .cfi_def_cfa %esp, 4
; X87-NEXT:    retl
  %result = call i64 @llvm.experimental.constrained.fptoui.i64.f64(double %x,
                                               metadata !"fpexcept.strict") #0
  ret i64 %result
}

attributes #0 = { strictfp }