; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc < %s -verify-machineinstrs -mtriple=x86_64-unknown-unknown --show-mc-encoding -mattr=+avx10.2-512 | FileCheck %s --check-prefixes=CHECK,X64
; RUN: llc < %s -verify-machineinstrs -mtriple=i686-unknown-unknown --show-mc-encoding -mattr=+avx10.2-512 | FileCheck %s --check-prefixes=CHECK,X86
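; VCVT2PS2PHX packs two <16 x float> sources into one <32 x half> result; the tests below cover the plain, merge-masked, zero-masked and {rz-sae} forms (rounding operand 4 = current rounding mode, 11 = round-toward-zero with SAE).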
define <32 x half> @test_int_x86_avx10_vcvt2ps2phx512(<16 x float> %A, <16 x float> %B) {
; CHECK-LABEL: test_int_x86_avx10_vcvt2ps2phx512:
; CHECK: # %bb.0:
; CHECK-NEXT: vcvt2ps2phx %zmm1, %zmm0, %zmm0 # encoding: [0x62,0xf2,0x7d,0x48,0x67,0xc1]
; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
%ret = call <32 x half> @llvm.x86.avx10.mask.vcvt2ps2phx.512(<16 x float> %A, <16 x float> %B, <32 x half> zeroinitializer, i32 -1, i32 4)
ret <32 x half> %ret
}
define <32 x half> @test_int_x86_avx10_vcvt2ps2phx512_mask(<32 x half> %W, i32 %U, <16 x float> %A, <16 x float> %B) {
; X64-LABEL: test_int_x86_avx10_vcvt2ps2phx512_mask:
; X64: # %bb.0:
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vcvt2ps2phx %zmm2, %zmm1, %zmm0 {%k1} # encoding: [0x62,0xf2,0x75,0x49,0x67,0xc2]
; X64-NEXT: retq # encoding: [0xc3]
;
; X86-LABEL: test_int_x86_avx10_vcvt2ps2phx512_mask:
; X86: # %bb.0:
; X86-NEXT: kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
; X86-NEXT: vcvt2ps2phx %zmm2, %zmm1, %zmm0 {%k1} # encoding: [0x62,0xf2,0x75,0x49,0x67,0xc2]
; X86-NEXT: retl # encoding: [0xc3]
%ret = call <32 x half> @llvm.x86.avx10.mask.vcvt2ps2phx.512(<16 x float> %A, <16 x float> %B, <32 x half> %W, i32 %U, i32 4)
ret <32 x half> %ret
}
define <32 x half> @test_int_x86_avx10_vcvt2ps2phx512_maskz(i32 %U, <16 x float> %A, <16 x float> %B) {
; X64-LABEL: test_int_x86_avx10_vcvt2ps2phx512_maskz:
; X64: # %bb.0:
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vcvt2ps2phx %zmm1, %zmm0, %zmm0 {%k1} {z} # encoding: [0x62,0xf2,0x7d,0xc9,0x67,0xc1]
; X64-NEXT: retq # encoding: [0xc3]
;
; X86-LABEL: test_int_x86_avx10_vcvt2ps2phx512_maskz:
; X86: # %bb.0:
; X86-NEXT: kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
; X86-NEXT: vcvt2ps2phx %zmm1, %zmm0, %zmm0 {%k1} {z} # encoding: [0x62,0xf2,0x7d,0xc9,0x67,0xc1]
; X86-NEXT: retl # encoding: [0xc3]
%ret = call <32 x half> @llvm.x86.avx10.mask.vcvt2ps2phx.512(<16 x float> %A, <16 x float> %B, <32 x half> zeroinitializer, i32 %U, i32 4)
ret <32 x half> %ret
}
define <32 x half> @test_int_x86_avx10_vcvt2ps2phx512_round(<16 x float> %A, <16 x float> %B) {
; CHECK-LABEL: test_int_x86_avx10_vcvt2ps2phx512_round:
; CHECK: # %bb.0:
; CHECK-NEXT: vcvt2ps2phx {rz-sae}, %zmm1, %zmm0, %zmm0 # encoding: [0x62,0xf2,0x7d,0x78,0x67,0xc1]
; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
%ret = call <32 x half> @llvm.x86.avx10.mask.vcvt2ps2phx.512(<16 x float> %A, <16 x float> %B, <32 x half> zeroinitializer, i32 -1, i32 11)
ret <32 x half> %ret
}
define <32 x half> @test_int_x86_avx10_vcvt2ps2phx512_round_mask(<32 x half> %W, i32 %U, <16 x float> %A, <16 x float> %B) {
; X64-LABEL: test_int_x86_avx10_vcvt2ps2phx512_round_mask:
; X64: # %bb.0:
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vcvt2ps2phx {rz-sae}, %zmm2, %zmm1, %zmm0 {%k1} # encoding: [0x62,0xf2,0x75,0x79,0x67,0xc2]
; X64-NEXT: retq # encoding: [0xc3]
;
; X86-LABEL: test_int_x86_avx10_vcvt2ps2phx512_round_mask:
; X86: # %bb.0:
; X86-NEXT: kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
; X86-NEXT: vcvt2ps2phx {rz-sae}, %zmm2, %zmm1, %zmm0 {%k1} # encoding: [0x62,0xf2,0x75,0x79,0x67,0xc2]
; X86-NEXT: retl # encoding: [0xc3]
%ret = call <32 x half> @llvm.x86.avx10.mask.vcvt2ps2phx.512(<16 x float> %A, <16 x float> %B, <32 x half> %W, i32 %U, i32 11)
ret <32 x half> %ret
}
define <32 x half> @test_int_x86_avx10_vcvt2ps2phx512_round_maskz(i32 %U, <16 x float> %A, <16 x float> %B) {
; X64-LABEL: test_int_x86_avx10_vcvt2ps2phx512_round_maskz:
; X64: # %bb.0:
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vcvt2ps2phx {rz-sae}, %zmm1, %zmm0, %zmm0 {%k1} {z} # encoding: [0x62,0xf2,0x7d,0xf9,0x67,0xc1]
; X64-NEXT: retq # encoding: [0xc3]
;
; X86-LABEL: test_int_x86_avx10_vcvt2ps2phx512_round_maskz:
; X86: # %bb.0:
; X86-NEXT: kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
; X86-NEXT: vcvt2ps2phx {rz-sae}, %zmm1, %zmm0, %zmm0 {%k1} {z} # encoding: [0x62,0xf2,0x7d,0xf9,0x67,0xc1]
; X86-NEXT: retl # encoding: [0xc3]
%ret = call <32 x half> @llvm.x86.avx10.mask.vcvt2ps2phx.512(<16 x float> %A, <16 x float> %B, <32 x half> zeroinitializer, i32 %U, i32 11)
ret <32 x half> %ret
}
declare <32 x half> @llvm.x86.avx10.mask.vcvt2ps2phx.512(<16 x float>, <16 x float>, <32 x half>, i32, i32)
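; VCVTBIASPH2BF8 converts a <32 x half> source to a <32 x i8> bf8 (E5M2) result, using bias values supplied in the <64 x i8> operand.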
define <32 x i8> @test_int_x86_avx10_vcvtbiasph2bf8512(<64 x i8> %A, <32 x half> %B) nounwind {
; CHECK-LABEL: test_int_x86_avx10_vcvtbiasph2bf8512:
; CHECK: # %bb.0:
; CHECK-NEXT: vcvtbiasph2bf8 %zmm1, %zmm0, %ymm0 # encoding: [0x62,0xf2,0x7c,0x48,0x74,0xc1]
; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
%ret = call <32 x i8> @llvm.x86.avx10.mask.vcvtbiasph2bf8512(<64 x i8> %A, <32 x half> %B, <32 x i8> undef, i32 -1)
ret <32 x i8> %ret
}
define <32 x i8> @test_int_x86_avx10_mask_vcvtbiasph2bf8512(<32 x i8> %W, i32 %U, <64 x i8> %A, <32 x half> %B) nounwind {
; X64-LABEL: test_int_x86_avx10_mask_vcvtbiasph2bf8512:
; X64: # %bb.0:
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vcvtbiasph2bf8 %zmm2, %zmm1, %ymm0 {%k1} # encoding: [0x62,0xf2,0x74,0x49,0x74,0xc2]
; X64-NEXT: retq # encoding: [0xc3]
;
; X86-LABEL: test_int_x86_avx10_mask_vcvtbiasph2bf8512:
; X86: # %bb.0:
; X86-NEXT: kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
; X86-NEXT: vcvtbiasph2bf8 %zmm2, %zmm1, %ymm0 {%k1} # encoding: [0x62,0xf2,0x74,0x49,0x74,0xc2]
; X86-NEXT: retl # encoding: [0xc3]
%ret = call <32 x i8> @llvm.x86.avx10.mask.vcvtbiasph2bf8512(<64 x i8> %A, <32 x half> %B, <32 x i8> %W, i32 %U)
ret <32 x i8> %ret
}
declare <32 x i8> @llvm.x86.avx10.mask.vcvtbiasph2bf8512(<64 x i8> %A, <32 x half> %B, <32 x i8> %W, i32 %U)
define <32 x i8> @test_int_x86_avx10_maskz_vcvtbiasph2bf8512(<64 x i8> %A, <32 x half> %B, i32 %U) nounwind {
; X64-LABEL: test_int_x86_avx10_maskz_vcvtbiasph2bf8512:
; X64: # %bb.0:
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vcvtbiasph2bf8 %zmm1, %zmm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf2,0x7c,0xc9,0x74,0xc1]
; X64-NEXT: retq # encoding: [0xc3]
;
; X86-LABEL: test_int_x86_avx10_maskz_vcvtbiasph2bf8512:
; X86: # %bb.0:
; X86-NEXT: kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
; X86-NEXT: vcvtbiasph2bf8 %zmm1, %zmm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf2,0x7c,0xc9,0x74,0xc1]
; X86-NEXT: retl # encoding: [0xc3]
%ret = call <32 x i8> @llvm.x86.avx10.mask.vcvtbiasph2bf8512(<64 x i8> %A, <32 x half> %B, <32 x i8> zeroinitializer, i32 %U)
ret <32 x i8> %ret
}
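; VCVTBIASPH2BF8S is the saturating form of the bias conversion above.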
define <32 x i8> @test_int_x86_avx10_vcvtbiasph2bf8s512(<64 x i8> %A, <32 x half> %B) nounwind {
; CHECK-LABEL: test_int_x86_avx10_vcvtbiasph2bf8s512:
; CHECK: # %bb.0:
; CHECK-NEXT: vcvtbiasph2bf8s %zmm1, %zmm0, %ymm0 # encoding: [0x62,0xf5,0x7c,0x48,0x74,0xc1]
; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
%ret = call <32 x i8> @llvm.x86.avx10.mask.vcvtbiasph2bf8s512(<64 x i8> %A, <32 x half> %B, <32 x i8> undef, i32 -1)
ret <32 x i8> %ret
}
define <32 x i8> @test_int_x86_avx10_mask_vcvtbiasph2bf8s512(<32 x i8> %W, i32 %U, <64 x i8> %A, <32 x half> %B) nounwind {
; X64-LABEL: test_int_x86_avx10_mask_vcvtbiasph2bf8s512:
; X64: # %bb.0:
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vcvtbiasph2bf8s %zmm2, %zmm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x74,0x49,0x74,0xc2]
; X64-NEXT: retq # encoding: [0xc3]
;
; X86-LABEL: test_int_x86_avx10_mask_vcvtbiasph2bf8s512:
; X86: # %bb.0:
; X86-NEXT: kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
; X86-NEXT: vcvtbiasph2bf8s %zmm2, %zmm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x74,0x49,0x74,0xc2]
; X86-NEXT: retl # encoding: [0xc3]
%ret = call <32 x i8> @llvm.x86.avx10.mask.vcvtbiasph2bf8s512(<64 x i8> %A, <32 x half> %B, <32 x i8> %W, i32 %U)
ret <32 x i8> %ret
}
declare <32 x i8> @llvm.x86.avx10.mask.vcvtbiasph2bf8s512(<64 x i8> %A, <32 x half> %B, <32 x i8> %W, i32 %U)
define <32 x i8> @test_int_x86_avx10_maskz_vcvtbiasph2bf8s512(<64 x i8> %A, <32 x half> %B, i32 %U) nounwind {
; X64-LABEL: test_int_x86_avx10_maskz_vcvtbiasph2bf8s512:
; X64: # %bb.0:
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vcvtbiasph2bf8s %zmm1, %zmm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x7c,0xc9,0x74,0xc1]
; X64-NEXT: retq # encoding: [0xc3]
;
; X86-LABEL: test_int_x86_avx10_maskz_vcvtbiasph2bf8s512:
; X86: # %bb.0:
; X86-NEXT: kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
; X86-NEXT: vcvtbiasph2bf8s %zmm1, %zmm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x7c,0xc9,0x74,0xc1]
; X86-NEXT: retl # encoding: [0xc3]
%ret = call <32 x i8> @llvm.x86.avx10.mask.vcvtbiasph2bf8s512(<64 x i8> %A, <32 x half> %B, <32 x i8> zeroinitializer, i32 %U)
ret <32 x i8> %ret
}
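; VCVTBIASPH2HF8 is the bias conversion producing hf8 (E4M3) rather than bf8 results.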
define <32 x i8> @test_int_x86_avx10_vcvtbiasph2hf8512(<64 x i8> %A, <32 x half> %B) nounwind {
; CHECK-LABEL: test_int_x86_avx10_vcvtbiasph2hf8512:
; CHECK: # %bb.0:
; CHECK-NEXT: vcvtbiasph2hf8 %zmm1, %zmm0, %ymm0 # encoding: [0x62,0xf5,0x7c,0x48,0x18,0xc1]
; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
%ret = call <32 x i8> @llvm.x86.avx10.mask.vcvtbiasph2hf8512(<64 x i8> %A, <32 x half> %B, <32 x i8> undef, i32 -1)
ret <32 x i8> %ret
}
define <32 x i8> @test_int_x86_avx10_mask_vcvtbiasph2hf8512(<32 x i8> %W, i32 %U, <64 x i8> %A, <32 x half> %B) nounwind {
; X64-LABEL: test_int_x86_avx10_mask_vcvtbiasph2hf8512:
; X64: # %bb.0:
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vcvtbiasph2hf8 %zmm2, %zmm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x74,0x49,0x18,0xc2]
; X64-NEXT: retq # encoding: [0xc3]
;
; X86-LABEL: test_int_x86_avx10_mask_vcvtbiasph2hf8512:
; X86: # %bb.0:
; X86-NEXT: kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
; X86-NEXT: vcvtbiasph2hf8 %zmm2, %zmm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x74,0x49,0x18,0xc2]
; X86-NEXT: retl # encoding: [0xc3]
%ret = call <32 x i8> @llvm.x86.avx10.mask.vcvtbiasph2hf8512(<64 x i8> %A, <32 x half> %B, <32 x i8> %W, i32 %U)
ret <32 x i8> %ret
}
declare <32 x i8> @llvm.x86.avx10.mask.vcvtbiasph2hf8512(<64 x i8> %A, <32 x half> %B, <32 x i8> %W, i32 %U)
define <32 x i8> @test_int_x86_avx10_maskz_vcvtbiasph2hf8512(<64 x i8> %A, <32 x half> %B, i32 %U) nounwind {
; X64-LABEL: test_int_x86_avx10_maskz_vcvtbiasph2hf8512:
; X64: # %bb.0:
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vcvtbiasph2hf8 %zmm1, %zmm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x7c,0xc9,0x18,0xc1]
; X64-NEXT: retq # encoding: [0xc3]
;
; X86-LABEL: test_int_x86_avx10_maskz_vcvtbiasph2hf8512:
; X86: # %bb.0:
; X86-NEXT: kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
; X86-NEXT: vcvtbiasph2hf8 %zmm1, %zmm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x7c,0xc9,0x18,0xc1]
; X86-NEXT: retl # encoding: [0xc3]
%ret = call <32 x i8> @llvm.x86.avx10.mask.vcvtbiasph2hf8512(<64 x i8> %A, <32 x half> %B, <32 x i8> zeroinitializer, i32 %U)
ret <32 x i8> %ret
}
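; VCVTBIASPH2HF8S is the saturating form of the hf8 bias conversion.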
define <32 x i8> @test_int_x86_avx10_vcvtbiasph2hf8s512(<64 x i8> %A, <32 x half> %B) nounwind {
; CHECK-LABEL: test_int_x86_avx10_vcvtbiasph2hf8s512:
; CHECK: # %bb.0:
; CHECK-NEXT: vcvtbiasph2hf8s %zmm1, %zmm0, %ymm0 # encoding: [0x62,0xf5,0x7c,0x48,0x1b,0xc1]
; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
%ret = call <32 x i8> @llvm.x86.avx10.mask.vcvtbiasph2hf8s512(<64 x i8> %A, <32 x half> %B, <32 x i8> undef, i32 -1)
ret <32 x i8> %ret
}
define <32 x i8> @test_int_x86_avx10_mask_vcvtbiasph2hf8s512(<32 x i8> %W, i32 %U, <64 x i8> %A, <32 x half> %B) nounwind {
; X64-LABEL: test_int_x86_avx10_mask_vcvtbiasph2hf8s512:
; X64: # %bb.0:
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vcvtbiasph2hf8s %zmm2, %zmm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x74,0x49,0x1b,0xc2]
; X64-NEXT: retq # encoding: [0xc3]
;
; X86-LABEL: test_int_x86_avx10_mask_vcvtbiasph2hf8s512:
; X86: # %bb.0:
; X86-NEXT: kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
; X86-NEXT: vcvtbiasph2hf8s %zmm2, %zmm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x74,0x49,0x1b,0xc2]
; X86-NEXT: retl # encoding: [0xc3]
%ret = call <32 x i8> @llvm.x86.avx10.mask.vcvtbiasph2hf8s512(<64 x i8> %A, <32 x half> %B, <32 x i8> %W, i32 %U)
ret <32 x i8> %ret
}
declare <32 x i8> @llvm.x86.avx10.mask.vcvtbiasph2hf8s512(<64 x i8> %A, <32 x half> %B, <32 x i8> %W, i32 %U)
define <32 x i8> @test_int_x86_avx10_maskz_vcvtbiasph2hf8s512(<64 x i8> %A, <32 x half> %B, i32 %U) nounwind {
; X64-LABEL: test_int_x86_avx10_maskz_vcvtbiasph2hf8s512:
; X64: # %bb.0:
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vcvtbiasph2hf8s %zmm1, %zmm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x7c,0xc9,0x1b,0xc1]
; X64-NEXT: retq # encoding: [0xc3]
;
; X86-LABEL: test_int_x86_avx10_maskz_vcvtbiasph2hf8s512:
; X86: # %bb.0:
; X86-NEXT: kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
; X86-NEXT: vcvtbiasph2hf8s %zmm1, %zmm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x7c,0xc9,0x1b,0xc1]
; X86-NEXT: retl # encoding: [0xc3]
%ret = call <32 x i8> @llvm.x86.avx10.mask.vcvtbiasph2hf8s512(<64 x i8> %A, <32 x half> %B, <32 x i8> zeroinitializer, i32 %U)
ret <32 x i8> %ret
}
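; VCVTNE2PH2BF8 packs two <32 x half> sources into one <64 x i8> bf8 result; masking is expressed in IR as a select over the unmasked intrinsic result.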
define <64 x i8> @test_int_x86_avx10_vcvtne2ph2bf8512(<32 x half> %A, <32 x half> %B) nounwind {
; CHECK-LABEL: test_int_x86_avx10_vcvtne2ph2bf8512:
; CHECK: # %bb.0:
; CHECK-NEXT: vcvtne2ph2bf8 %zmm1, %zmm0, %zmm0 # encoding: [0x62,0xf2,0x7f,0x48,0x74,0xc1]
; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
%ret = call <64 x i8> @llvm.x86.avx10.vcvtne2ph2bf8512(<32 x half> %A, <32 x half> %B)
ret <64 x i8> %ret
}
define <8 x i64> @test_int_x86_avx10_vcvtne2ph2bf8512_mask(<8 x i64> %C, i64 %U, <32 x half> %A, <32 x half> %B) nounwind {
; X64-LABEL: test_int_x86_avx10_vcvtne2ph2bf8512_mask:
; X64: # %bb.0:
; X64-NEXT: kmovq %rdi, %k1 # encoding: [0xc4,0xe1,0xfb,0x92,0xcf]
; X64-NEXT: vcvtne2ph2bf8 %zmm2, %zmm1, %zmm0 {%k1} # encoding: [0x62,0xf2,0x77,0x49,0x74,0xc2]
; X64-NEXT: retq # encoding: [0xc3]
;
; X86-LABEL: test_int_x86_avx10_vcvtne2ph2bf8512_mask:
; X86: # %bb.0:
; X86-NEXT: kmovq {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf8,0x90,0x4c,0x24,0x04]
; X86-NEXT: vcvtne2ph2bf8 %zmm2, %zmm1, %zmm0 {%k1} # encoding: [0x62,0xf2,0x77,0x49,0x74,0xc2]
; X86-NEXT: retl # encoding: [0xc3]
%1 = call <64 x i8> @llvm.x86.avx10.vcvtne2ph2bf8512(<32 x half> %A, <32 x half> %B)
%2 = bitcast <8 x i64> %C to <64 x i8>
%3 = bitcast i64 %U to <64 x i1>
%4 = select <64 x i1> %3, <64 x i8> %1, <64 x i8> %2
%5 = bitcast <64 x i8> %4 to <8 x i64>
ret <8 x i64> %5
}
define <8 x i64> @test_int_x86_avx10_vcvtne2ph2bf8512_maskz(i64 %U, <32 x half> %A, <32 x half> %B) nounwind {
; X64-LABEL: test_int_x86_avx10_vcvtne2ph2bf8512_maskz:
; X64: # %bb.0:
; X64-NEXT: kmovq %rdi, %k1 # encoding: [0xc4,0xe1,0xfb,0x92,0xcf]
; X64-NEXT: vcvtne2ph2bf8 %zmm1, %zmm0, %zmm0 {%k1} {z} # encoding: [0x62,0xf2,0x7f,0xc9,0x74,0xc1]
; X64-NEXT: retq # encoding: [0xc3]
;
; X86-LABEL: test_int_x86_avx10_vcvtne2ph2bf8512_maskz:
; X86: # %bb.0:
; X86-NEXT: kmovq {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf8,0x90,0x4c,0x24,0x04]
; X86-NEXT: vcvtne2ph2bf8 %zmm1, %zmm0, %zmm0 {%k1} {z} # encoding: [0x62,0xf2,0x7f,0xc9,0x74,0xc1]
; X86-NEXT: retl # encoding: [0xc3]
%1 = call <64 x i8> @llvm.x86.avx10.vcvtne2ph2bf8512(<32 x half> %A, <32 x half> %B)
%2 = bitcast i64 %U to <64 x i1>
%3 = select <64 x i1> %2, <64 x i8> %1, <64 x i8> zeroinitializer
%4 = bitcast <64 x i8> %3 to <8 x i64>
ret <8 x i64> %4
}
declare <64 x i8> @llvm.x86.avx10.vcvtne2ph2bf8512(<32 x half> %A, <32 x half> %B)
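; VCVTNE2PH2BF8S is the saturating form of the two-source bf8 conversion.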
define <64 x i8> @test_int_x86_avx10_vcvtne2ph2bf8s512(<32 x half> %A, <32 x half> %B) nounwind {
; CHECK-LABEL: test_int_x86_avx10_vcvtne2ph2bf8s512:
; CHECK: # %bb.0:
; CHECK-NEXT: vcvtne2ph2bf8s %zmm1, %zmm0, %zmm0 # encoding: [0x62,0xf5,0x7f,0x48,0x74,0xc1]
; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
%ret = call <64 x i8> @llvm.x86.avx10.vcvtne2ph2bf8s512(<32 x half> %A, <32 x half> %B)
ret <64 x i8> %ret
}
declare <64 x i8> @llvm.x86.avx10.vcvtne2ph2bf8s512(<32 x half> %A, <32 x half> %B)
define <8 x i64> @test_int_x86_avx10_vcvtne2ph2bf8s512_mask(<8 x i64> %C, i64 %U, <32 x half> %A, <32 x half> %B) nounwind {
; X64-LABEL: test_int_x86_avx10_vcvtne2ph2bf8s512_mask:
; X64: # %bb.0:
; X64-NEXT: kmovq %rdi, %k1 # encoding: [0xc4,0xe1,0xfb,0x92,0xcf]
; X64-NEXT: vcvtne2ph2bf8s %zmm2, %zmm1, %zmm0 {%k1} # encoding: [0x62,0xf5,0x77,0x49,0x74,0xc2]
; X64-NEXT: retq # encoding: [0xc3]
;
; X86-LABEL: test_int_x86_avx10_vcvtne2ph2bf8s512_mask:
; X86: # %bb.0:
; X86-NEXT: kmovq {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf8,0x90,0x4c,0x24,0x04]
; X86-NEXT: vcvtne2ph2bf8s %zmm2, %zmm1, %zmm0 {%k1} # encoding: [0x62,0xf5,0x77,0x49,0x74,0xc2]
; X86-NEXT: retl # encoding: [0xc3]
%1 = call <64 x i8> @llvm.x86.avx10.vcvtne2ph2bf8s512(<32 x half> %A, <32 x half> %B)
%2 = bitcast <8 x i64> %C to <64 x i8>
%3 = bitcast i64 %U to <64 x i1>
%4 = select <64 x i1> %3, <64 x i8> %1, <64 x i8> %2
%5 = bitcast <64 x i8> %4 to <8 x i64>
ret <8 x i64> %5
}
define <8 x i64> @test_int_x86_avx10_vcvtne2ph2bf8s512_maskz(i64 %U, <32 x half> %A, <32 x half> %B) nounwind {
; X64-LABEL: test_int_x86_avx10_vcvtne2ph2bf8s512_maskz:
; X64: # %bb.0:
; X64-NEXT: kmovq %rdi, %k1 # encoding: [0xc4,0xe1,0xfb,0x92,0xcf]
; X64-NEXT: vcvtne2ph2bf8s %zmm1, %zmm0, %zmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7f,0xc9,0x74,0xc1]
; X64-NEXT: retq # encoding: [0xc3]
;
; X86-LABEL: test_int_x86_avx10_vcvtne2ph2bf8s512_maskz:
; X86: # %bb.0:
; X86-NEXT: kmovq {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf8,0x90,0x4c,0x24,0x04]
; X86-NEXT: vcvtne2ph2bf8s %zmm1, %zmm0, %zmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7f,0xc9,0x74,0xc1]
; X86-NEXT: retl # encoding: [0xc3]
%1 = call <64 x i8> @llvm.x86.avx10.vcvtne2ph2bf8s512(<32 x half> %A, <32 x half> %B)
%2 = bitcast i64 %U to <64 x i1>
%3 = select <64 x i1> %2, <64 x i8> %1, <64 x i8> zeroinitializer
%4 = bitcast <64 x i8> %3 to <8 x i64>
ret <8 x i64> %4
}
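; VCVTNE2PH2HF8 is the two-source conversion producing hf8 results.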
define <64 x i8> @test_int_x86_avx10_vcvtne2ph2hf8512(<32 x half> %A, <32 x half> %B) nounwind {
; CHECK-LABEL: test_int_x86_avx10_vcvtne2ph2hf8512:
; CHECK: # %bb.0:
; CHECK-NEXT: vcvtne2ph2hf8 %zmm1, %zmm0, %zmm0 # encoding: [0x62,0xf5,0x7f,0x48,0x18,0xc1]
; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
%ret = call <64 x i8> @llvm.x86.avx10.vcvtne2ph2hf8512(<32 x half> %A, <32 x half> %B)
ret <64 x i8> %ret
}
define <8 x i64> @test_int_x86_avx10_vcvtne2ph2hf8512_mask(<8 x i64> %C, i64 %U, <32 x half> %A, <32 x half> %B) nounwind {
; X64-LABEL: test_int_x86_avx10_vcvtne2ph2hf8512_mask:
; X64: # %bb.0:
; X64-NEXT: kmovq %rdi, %k1 # encoding: [0xc4,0xe1,0xfb,0x92,0xcf]
; X64-NEXT: vcvtne2ph2hf8 %zmm2, %zmm1, %zmm0 {%k1} # encoding: [0x62,0xf5,0x77,0x49,0x18,0xc2]
; X64-NEXT: retq # encoding: [0xc3]
;
; X86-LABEL: test_int_x86_avx10_vcvtne2ph2hf8512_mask:
; X86: # %bb.0:
; X86-NEXT: kmovq {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf8,0x90,0x4c,0x24,0x04]
; X86-NEXT: vcvtne2ph2hf8 %zmm2, %zmm1, %zmm0 {%k1} # encoding: [0x62,0xf5,0x77,0x49,0x18,0xc2]
; X86-NEXT: retl # encoding: [0xc3]
%1 = call <64 x i8> @llvm.x86.avx10.vcvtne2ph2hf8512(<32 x half> %A, <32 x half> %B)
%2 = bitcast <8 x i64> %C to <64 x i8>
%3 = bitcast i64 %U to <64 x i1>
%4 = select <64 x i1> %3, <64 x i8> %1, <64 x i8> %2
%5 = bitcast <64 x i8> %4 to <8 x i64>
ret <8 x i64> %5
}
define <8 x i64> @test_int_x86_avx10_vcvtne2ph2hf8512_maskz(i64 %U, <32 x half> %A, <32 x half> %B) nounwind {
; X64-LABEL: test_int_x86_avx10_vcvtne2ph2hf8512_maskz:
; X64: # %bb.0:
; X64-NEXT: kmovq %rdi, %k1 # encoding: [0xc4,0xe1,0xfb,0x92,0xcf]
; X64-NEXT: vcvtne2ph2hf8 %zmm1, %zmm0, %zmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7f,0xc9,0x18,0xc1]
; X64-NEXT: retq # encoding: [0xc3]
;
; X86-LABEL: test_int_x86_avx10_vcvtne2ph2hf8512_maskz:
; X86: # %bb.0:
; X86-NEXT: kmovq {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf8,0x90,0x4c,0x24,0x04]
; X86-NEXT: vcvtne2ph2hf8 %zmm1, %zmm0, %zmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7f,0xc9,0x18,0xc1]
; X86-NEXT: retl # encoding: [0xc3]
%1 = call <64 x i8> @llvm.x86.avx10.vcvtne2ph2hf8512(<32 x half> %A, <32 x half> %B)
%2 = bitcast i64 %U to <64 x i1>
%3 = select <64 x i1> %2, <64 x i8> %1, <64 x i8> zeroinitializer
%4 = bitcast <64 x i8> %3 to <8 x i64>
ret <8 x i64> %4
}
declare <64 x i8> @llvm.x86.avx10.vcvtne2ph2hf8512(<32 x half> %A, <32 x half> %B)
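; VCVTNE2PH2HF8S is the saturating form of the two-source hf8 conversion.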
define <64 x i8> @test_int_x86_avx10_vcvtne2ph2hf8s512(<32 x half> %A, <32 x half> %B) nounwind {
; CHECK-LABEL: test_int_x86_avx10_vcvtne2ph2hf8s512:
; CHECK: # %bb.0:
; CHECK-NEXT: vcvtne2ph2hf8s %zmm1, %zmm0, %zmm0 # encoding: [0x62,0xf5,0x7f,0x48,0x1b,0xc1]
; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
%ret = call <64 x i8> @llvm.x86.avx10.vcvtne2ph2hf8s512(<32 x half> %A, <32 x half> %B)
ret <64 x i8> %ret
}
define <8 x i64> @test_int_x86_avx10_vcvtne2ph2hf8s512_mask(<8 x i64> %C, i64 %U, <32 x half> %A, <32 x half> %B) nounwind {
; X64-LABEL: test_int_x86_avx10_vcvtne2ph2hf8s512_mask:
; X64: # %bb.0:
; X64-NEXT: kmovq %rdi, %k1 # encoding: [0xc4,0xe1,0xfb,0x92,0xcf]
; X64-NEXT: vcvtne2ph2hf8s %zmm2, %zmm1, %zmm0 {%k1} # encoding: [0x62,0xf5,0x77,0x49,0x1b,0xc2]
; X64-NEXT: retq # encoding: [0xc3]
;
; X86-LABEL: test_int_x86_avx10_vcvtne2ph2hf8s512_mask:
; X86: # %bb.0:
; X86-NEXT: kmovq {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf8,0x90,0x4c,0x24,0x04]
; X86-NEXT: vcvtne2ph2hf8s %zmm2, %zmm1, %zmm0 {%k1} # encoding: [0x62,0xf5,0x77,0x49,0x1b,0xc2]
; X86-NEXT: retl # encoding: [0xc3]
%1 = call <64 x i8> @llvm.x86.avx10.vcvtne2ph2hf8s512(<32 x half> %A, <32 x half> %B)
%2 = bitcast <8 x i64> %C to <64 x i8>
%3 = bitcast i64 %U to <64 x i1>
%4 = select <64 x i1> %3, <64 x i8> %1, <64 x i8> %2
%5 = bitcast <64 x i8> %4 to <8 x i64>
ret <8 x i64> %5
}
define <8 x i64> @test_int_x86_avx10_vcvtne2ph2hf8s512_maskz(i64 %U, <32 x half> %A, <32 x half> %B) nounwind {
; X64-LABEL: test_int_x86_avx10_vcvtne2ph2hf8s512_maskz:
; X64: # %bb.0:
; X64-NEXT: kmovq %rdi, %k1 # encoding: [0xc4,0xe1,0xfb,0x92,0xcf]
; X64-NEXT: vcvtne2ph2hf8s %zmm1, %zmm0, %zmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7f,0xc9,0x1b,0xc1]
; X64-NEXT: retq # encoding: [0xc3]
;
; X86-LABEL: test_int_x86_avx10_vcvtne2ph2hf8s512_maskz:
; X86: # %bb.0:
; X86-NEXT: kmovq {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf8,0x90,0x4c,0x24,0x04]
; X86-NEXT: vcvtne2ph2hf8s %zmm1, %zmm0, %zmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7f,0xc9,0x1b,0xc1]
; X86-NEXT: retl # encoding: [0xc3]
%1 = call <64 x i8> @llvm.x86.avx10.vcvtne2ph2hf8s512(<32 x half> %A, <32 x half> %B)
%2 = bitcast i64 %U to <64 x i1>
%3 = select <64 x i1> %2, <64 x i8> %1, <64 x i8> zeroinitializer
%4 = bitcast <64 x i8> %3 to <8 x i64>
ret <8 x i64> %4
}
declare <64 x i8> @llvm.x86.avx10.vcvtne2ph2hf8s512(<32 x half> %A, <32 x half> %B)
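; VCVTHF82PH widens <32 x i8> hf8 inputs to <32 x half>; the merge-masked test converts into the passthrough register and copies it to zmm0 for the return.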
define <32 x half> @test_int_x86_avx10_vcvthf82ph512(<32 x i8> %A) nounwind {
; CHECK-LABEL: test_int_x86_avx10_vcvthf82ph512:
; CHECK: # %bb.0:
; CHECK-NEXT: vcvthf82ph %ymm0, %zmm0 # encoding: [0x62,0xf5,0x7f,0x48,0x1e,0xc0]
; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
%ret = call <32 x half> @llvm.x86.avx10.mask.vcvthf82ph512(<32 x i8> %A, <32 x half> undef, i32 -1)
ret <32 x half> %ret
}
define <32 x half> @test_int_x86_avx10_mask_vcvthf82ph512(<32 x i8> %A, <32 x half> %B, i32 %C) nounwind {
; X64-LABEL: test_int_x86_avx10_mask_vcvthf82ph512:
; X64: # %bb.0:
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vcvthf82ph %ymm0, %zmm1 {%k1} # encoding: [0x62,0xf5,0x7f,0x49,0x1e,0xc8]
; X64-NEXT: vmovdqa64 %zmm1, %zmm0 # encoding: [0x62,0xf1,0xfd,0x48,0x6f,0xc1]
; X64-NEXT: retq # encoding: [0xc3]
;
; X86-LABEL: test_int_x86_avx10_mask_vcvthf82ph512:
; X86: # %bb.0:
; X86-NEXT: kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
; X86-NEXT: vcvthf82ph %ymm0, %zmm1 {%k1} # encoding: [0x62,0xf5,0x7f,0x49,0x1e,0xc8]
; X86-NEXT: vmovdqa64 %zmm1, %zmm0 # encoding: [0x62,0xf1,0xfd,0x48,0x6f,0xc1]
; X86-NEXT: retl # encoding: [0xc3]
%ret = call <32 x half> @llvm.x86.avx10.mask.vcvthf82ph512(<32 x i8> %A, <32 x half> %B, i32 %C)
ret <32 x half> %ret
}
declare <32 x half> @llvm.x86.avx10.mask.vcvthf82ph512(<32 x i8> %A, <32 x half> %B, i32 %C)
define <32 x half> @test_int_x86_avx10_maskz_vcvthf82ph512(<32 x i8> %A, i32 %B) nounwind {
; X64-LABEL: test_int_x86_avx10_maskz_vcvthf82ph512:
; X64: # %bb.0:
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vcvthf82ph %ymm0, %zmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7f,0xc9,0x1e,0xc0]
; X64-NEXT: retq # encoding: [0xc3]
;
; X86-LABEL: test_int_x86_avx10_maskz_vcvthf82ph512:
; X86: # %bb.0:
; X86-NEXT: kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
; X86-NEXT: vcvthf82ph %ymm0, %zmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7f,0xc9,0x1e,0xc0]
; X86-NEXT: retl # encoding: [0xc3]
%ret = call <32 x half> @llvm.x86.avx10.mask.vcvthf82ph512(<32 x i8> %A, <32 x half> zeroinitializer, i32 %B)
ret <32 x half> %ret
}
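; VCVTNEPH2BF8 narrows a single <32 x half> source to a <32 x i8> bf8 result.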
define <32 x i8> @test_int_x86_avx10_vcvtneph2bf8512(<32 x half> %A) nounwind {
; CHECK-LABEL: test_int_x86_avx10_vcvtneph2bf8512:
; CHECK: # %bb.0:
; CHECK-NEXT: vcvtneph2bf8 %zmm0, %ymm0 # encoding: [0x62,0xf2,0x7e,0x48,0x74,0xc0]
; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
%ret = call <32 x i8> @llvm.x86.avx10.mask.vcvtneph2bf8512(<32 x half> %A, <32 x i8> undef, i32 -1)
ret <32 x i8> %ret
}
define <32 x i8> @test_int_x86_avx10_mask_vcvtneph2bf8512(<32 x i8> %B, <32 x half> %A, i32 %C) nounwind {
; X64-LABEL: test_int_x86_avx10_mask_vcvtneph2bf8512:
; X64: # %bb.0:
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vcvtneph2bf8 %zmm1, %ymm0 {%k1} # encoding: [0x62,0xf2,0x7e,0x49,0x74,0xc1]
; X64-NEXT: retq # encoding: [0xc3]
;
; X86-LABEL: test_int_x86_avx10_mask_vcvtneph2bf8512:
; X86: # %bb.0:
; X86-NEXT: kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
; X86-NEXT: vcvtneph2bf8 %zmm1, %ymm0 {%k1} # encoding: [0x62,0xf2,0x7e,0x49,0x74,0xc1]
; X86-NEXT: retl # encoding: [0xc3]
%ret = call <32 x i8> @llvm.x86.avx10.mask.vcvtneph2bf8512(<32 x half> %A, <32 x i8> %B, i32 %C)
ret <32 x i8> %ret
}
declare <32 x i8> @llvm.x86.avx10.mask.vcvtneph2bf8512(<32 x half> %A, <32 x i8> %B, i32 %C)
define <32 x i8> @test_int_x86_avx10_maskz_vcvtneph2bf8512(<32 x half> %A, i32 %B) nounwind {
; X64-LABEL: test_int_x86_avx10_maskz_vcvtneph2bf8512:
; X64: # %bb.0:
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vcvtneph2bf8 %zmm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf2,0x7e,0xc9,0x74,0xc0]
; X64-NEXT: retq # encoding: [0xc3]
;
; X86-LABEL: test_int_x86_avx10_maskz_vcvtneph2bf8512:
; X86: # %bb.0:
; X86-NEXT: kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
; X86-NEXT: vcvtneph2bf8 %zmm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf2,0x7e,0xc9,0x74,0xc0]
; X86-NEXT: retl # encoding: [0xc3]
%ret = call <32 x i8> @llvm.x86.avx10.mask.vcvtneph2bf8512(<32 x half> %A, <32 x i8> zeroinitializer, i32 %B)
ret <32 x i8> %ret
}
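; VCVTNEPH2BF8S is the saturating form of the single-source bf8 conversion.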
define <32 x i8> @test_int_x86_avx10_vcvtneph2bf8s512(<32 x half> %A) nounwind {
; CHECK-LABEL: test_int_x86_avx10_vcvtneph2bf8s512:
; CHECK: # %bb.0:
; CHECK-NEXT: vcvtneph2bf8s %zmm0, %ymm0 # encoding: [0x62,0xf5,0x7e,0x48,0x74,0xc0]
; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
%ret = call <32 x i8> @llvm.x86.avx10.mask.vcvtneph2bf8s512(<32 x half> %A, <32 x i8> undef, i32 -1)
ret <32 x i8> %ret
}
define <32 x i8> @test_int_x86_avx10_mask_vcvtneph2bf8s512(<32 x i8> %B, <32 x half> %A, i32 %C) nounwind {
; X64-LABEL: test_int_x86_avx10_mask_vcvtneph2bf8s512:
; X64: # %bb.0:
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vcvtneph2bf8s %zmm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x7e,0x49,0x74,0xc1]
; X64-NEXT: retq # encoding: [0xc3]
;
; X86-LABEL: test_int_x86_avx10_mask_vcvtneph2bf8s512:
; X86: # %bb.0:
; X86-NEXT: kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
; X86-NEXT: vcvtneph2bf8s %zmm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x7e,0x49,0x74,0xc1]
; X86-NEXT: retl # encoding: [0xc3]
%ret = call <32 x i8> @llvm.x86.avx10.mask.vcvtneph2bf8s512(<32 x half> %A, <32 x i8> %B, i32 %C)
ret <32 x i8> %ret
}
declare <32 x i8> @llvm.x86.avx10.mask.vcvtneph2bf8s512(<32 x half> %A, <32 x i8> %B, i32 %C)
define <32 x i8> @test_int_x86_avx10_maskz_vcvtneph2bf8s512(<32 x half> %A, i32 %B) nounwind {
; X64-LABEL: test_int_x86_avx10_maskz_vcvtneph2bf8s512:
; X64: # %bb.0:
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vcvtneph2bf8s %zmm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x7e,0xc9,0x74,0xc0]
; X64-NEXT: retq # encoding: [0xc3]
;
; X86-LABEL: test_int_x86_avx10_maskz_vcvtneph2bf8s512:
; X86: # %bb.0:
; X86-NEXT: kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
; X86-NEXT: vcvtneph2bf8s %zmm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x7e,0xc9,0x74,0xc0]
; X86-NEXT: retl # encoding: [0xc3]
%ret = call <32 x i8> @llvm.x86.avx10.mask.vcvtneph2bf8s512(<32 x half> %A, <32 x i8> zeroinitializer, i32 %B)
ret <32 x i8> %ret
}
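; VCVTNEPH2HF8 is the single-source conversion producing hf8 results.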
define <32 x i8> @test_int_x86_avx10_vcvtneph2hf8512(<32 x half> %A) nounwind {
; CHECK-LABEL: test_int_x86_avx10_vcvtneph2hf8512:
; CHECK: # %bb.0:
; CHECK-NEXT: vcvtneph2hf8 %zmm0, %ymm0 # encoding: [0x62,0xf5,0x7e,0x48,0x18,0xc0]
; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
%ret = call <32 x i8> @llvm.x86.avx10.mask.vcvtneph2hf8512(<32 x half> %A, <32 x i8> undef, i32 -1)
ret <32 x i8> %ret
}
define <32 x i8> @test_int_x86_avx10_mask_vcvtneph2hf8512(<32 x i8> %B, <32 x half> %A, i32 %C) nounwind {
; X64-LABEL: test_int_x86_avx10_mask_vcvtneph2hf8512:
; X64: # %bb.0:
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vcvtneph2hf8 %zmm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x7e,0x49,0x18,0xc1]
; X64-NEXT: retq # encoding: [0xc3]
;
; X86-LABEL: test_int_x86_avx10_mask_vcvtneph2hf8512:
; X86: # %bb.0:
; X86-NEXT: kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
; X86-NEXT: vcvtneph2hf8 %zmm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x7e,0x49,0x18,0xc1]
; X86-NEXT: retl # encoding: [0xc3]
%ret = call <32 x i8> @llvm.x86.avx10.mask.vcvtneph2hf8512(<32 x half> %A, <32 x i8> %B, i32 %C)
ret <32 x i8> %ret
}
declare <32 x i8> @llvm.x86.avx10.mask.vcvtneph2hf8512(<32 x half> %A, <32 x i8> %B, i32 %C)
define <32 x i8> @test_int_x86_avx10_maskz_vcvtneph2hf8512(<32 x half> %A, i32 %B) nounwind {
; X64-LABEL: test_int_x86_avx10_maskz_vcvtneph2hf8512:
; X64: # %bb.0:
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vcvtneph2hf8 %zmm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x7e,0xc9,0x18,0xc0]
; X64-NEXT: retq # encoding: [0xc3]
;
; X86-LABEL: test_int_x86_avx10_maskz_vcvtneph2hf8512:
; X86: # %bb.0:
; X86-NEXT: kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
; X86-NEXT: vcvtneph2hf8 %zmm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x7e,0xc9,0x18,0xc0]
; X86-NEXT: retl # encoding: [0xc3]
%ret = call <32 x i8> @llvm.x86.avx10.mask.vcvtneph2hf8512(<32 x half> %A, <32 x i8> zeroinitializer, i32 %B)
ret <32 x i8> %ret
}
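; VCVTNEPH2HF8S is the saturating form of the single-source hf8 conversion.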
define <32 x i8> @test_int_x86_avx10_vcvtneph2hf8s512(<32 x half> %A) nounwind {
; CHECK-LABEL: test_int_x86_avx10_vcvtneph2hf8s512:
; CHECK: # %bb.0:
; CHECK-NEXT: vcvtneph2hf8s %zmm0, %ymm0 # encoding: [0x62,0xf5,0x7e,0x48,0x1b,0xc0]
; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
%ret = call <32 x i8> @llvm.x86.avx10.mask.vcvtneph2hf8s512(<32 x half> %A, <32 x i8> undef, i32 -1)
ret <32 x i8> %ret
}
define <32 x i8> @test_int_x86_avx10_mask_vcvtneph2hf8s512(<32 x i8> %B, <32 x half> %A, i32 %C) nounwind {
; X64-LABEL: test_int_x86_avx10_mask_vcvtneph2hf8s512:
; X64: # %bb.0:
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vcvtneph2hf8s %zmm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x7e,0x49,0x1b,0xc1]
; X64-NEXT: retq # encoding: [0xc3]
;
; X86-LABEL: test_int_x86_avx10_mask_vcvtneph2hf8s512:
; X86: # %bb.0:
; X86-NEXT: kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
; X86-NEXT: vcvtneph2hf8s %zmm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x7e,0x49,0x1b,0xc1]
; X86-NEXT: retl # encoding: [0xc3]
%ret = call <32 x i8> @llvm.x86.avx10.mask.vcvtneph2hf8s512(<32 x half> %A, <32 x i8> %B, i32 %C)
ret <32 x i8> %ret
}
declare <32 x i8> @llvm.x86.avx10.mask.vcvtneph2hf8s512(<32 x half> %A, <32 x i8> %B, i32 %C)
define <32 x i8> @test_int_x86_avx10_maskz_vcvtneph2hf8s512(<32 x half> %A, i32 %B) nounwind {
; X64-LABEL: test_int_x86_avx10_maskz_vcvtneph2hf8s512:
; X64: # %bb.0:
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vcvtneph2hf8s %zmm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x7e,0xc9,0x1b,0xc0]
; X64-NEXT: retq # encoding: [0xc3]
;
; X86-LABEL: test_int_x86_avx10_maskz_vcvtneph2hf8s512:
; X86: # %bb.0:
; X86-NEXT: kmovd {{[0-9]+}}(%esp), %k1 # encoding: [0xc4,0xe1,0xf9,0x90,0x4c,0x24,0x04]
; X86-NEXT: vcvtneph2hf8s %zmm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x7e,0xc9,0x1b,0xc0]
; X86-NEXT: retl # encoding: [0xc3]
%ret = call <32 x i8> @llvm.x86.avx10.mask.vcvtneph2hf8s512(<32 x half> %A, <32 x i8> zeroinitializer, i32 %B)
ret <32 x i8> %ret
}