; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc < %s -verify-machineinstrs -mtriple=x86_64 --show-mc-encoding -mattr=+avx10.2-256 | FileCheck %s --check-prefixes=CHECK,X64
; RUN: llc < %s -verify-machineinstrs -mtriple=i686 --show-mc-encoding -mattr=+avx10.2-256 | FileCheck %s --check-prefixes=CHECK,X86
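; Saturating FP-to-8-bit-integer conversion tests for AVX10.2-256. Each group below
; covers one instruction in its unmasked, merge-masked and zero-masked forms; the
; 256-bit FP16/FP32 groups also exercise the embedded-rounding/SAE encodings.
;
; VCVTNEBF162IBS: packed BF16 -> saturated signed 8-bit integer values.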
define dso_local <2 x i64> @test_mm_ipcvtnebf16_epi8(<8 x bfloat> noundef %__A) {
; CHECK-LABEL: test_mm_ipcvtnebf16_epi8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vcvtnebf162ibs %xmm0, %xmm0 # encoding: [0x62,0xf5,0x7f,0x08,0x69,0xc0]
; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
entry:
%0 = tail call <8 x i16> @llvm.x86.avx10.vcvtnebf162ibs128(<8 x bfloat> %__A)
%1 = bitcast <8 x i16> %0 to <2 x i64>
ret <2 x i64> %1
}
define dso_local <2 x i64> @test_mm_mask_ipcvtnebf16_epi8(<2 x i64> noundef %__S, i8 noundef zeroext %__A, <8 x bfloat> noundef %__B) {
; X64-LABEL: test_mm_mask_ipcvtnebf16_epi8:
; X64: # %bb.0: # %entry
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vcvtnebf162ibs %xmm1, %xmm0 {%k1} # encoding: [0x62,0xf5,0x7f,0x09,0x69,0xc1]
; X64-NEXT: retq # encoding: [0xc3]
;
; X86-LABEL: test_mm_mask_ipcvtnebf16_epi8:
; X86: # %bb.0: # %entry
; X86-NEXT: kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
; X86-NEXT: vcvtnebf162ibs %xmm1, %xmm0 {%k1} # encoding: [0x62,0xf5,0x7f,0x09,0x69,0xc1]
; X86-NEXT: retl # encoding: [0xc3]
entry:
%0 = bitcast <2 x i64> %__S to <8 x i16>
%1 = tail call <8 x i16> @llvm.x86.avx10.vcvtnebf162ibs128(<8 x bfloat> %__B)
%2 = bitcast i8 %__A to <8 x i1>
%3 = select <8 x i1> %2, <8 x i16> %1, <8 x i16> %0
%4 = bitcast <8 x i16> %3 to <2 x i64>
ret <2 x i64> %4
}
declare <8 x i16> @llvm.x86.avx10.vcvtnebf162ibs128(<8 x bfloat>)
define dso_local <2 x i64> @test_mm_maskz_ipcvtnebf16_epi8(i8 noundef zeroext %__A, <8 x bfloat> noundef %__B) {
; X64-LABEL: test_mm_maskz_ipcvtnebf16_epi8:
; X64: # %bb.0: # %entry
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vcvtnebf162ibs %xmm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7f,0x89,0x69,0xc0]
; X64-NEXT: retq # encoding: [0xc3]
;
; X86-LABEL: test_mm_maskz_ipcvtnebf16_epi8:
; X86: # %bb.0: # %entry
; X86-NEXT: kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
; X86-NEXT: vcvtnebf162ibs %xmm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7f,0x89,0x69,0xc0]
; X86-NEXT: retl # encoding: [0xc3]
entry:
%0 = tail call <8 x i16> @llvm.x86.avx10.vcvtnebf162ibs128(<8 x bfloat> %__B)
%1 = bitcast i8 %__A to <8 x i1>
%2 = select <8 x i1> %1, <8 x i16> %0, <8 x i16> zeroinitializer
%3 = bitcast <8 x i16> %2 to <2 x i64>
ret <2 x i64> %3
}
define dso_local <4 x i64> @test_mm256_ipcvtnebf16_epi8(<16 x bfloat> noundef %__A) local_unnamed_addr #2 {
; CHECK-LABEL: test_mm256_ipcvtnebf16_epi8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vcvtnebf162ibs %ymm0, %ymm0 # encoding: [0x62,0xf5,0x7f,0x28,0x69,0xc0]
; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
entry:
%0 = tail call <16 x i16> @llvm.x86.avx10.vcvtnebf162ibs256(<16 x bfloat> %__A)
%1 = bitcast <16 x i16> %0 to <4 x i64>
ret <4 x i64> %1
}
define dso_local <4 x i64> @test_mm256_mask_ipcvtnebf16_epi8(<4 x i64> noundef %__S, i16 noundef zeroext %__A, <16 x bfloat> noundef %__B) local_unnamed_addr #2 {
; X64-LABEL: test_mm256_mask_ipcvtnebf16_epi8:
; X64: # %bb.0: # %entry
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vcvtnebf162ibs %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x7f,0x29,0x69,0xc1]
; X64-NEXT: retq # encoding: [0xc3]
;
; X86-LABEL: test_mm256_mask_ipcvtnebf16_epi8:
; X86: # %bb.0: # %entry
; X86-NEXT: kmovw {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf8,0x90,0x4c,0x24,0x04]
; X86-NEXT: vcvtnebf162ibs %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x7f,0x29,0x69,0xc1]
; X86-NEXT: retl # encoding: [0xc3]
entry:
%0 = bitcast <4 x i64> %__S to <16 x i16>
%1 = tail call <16 x i16> @llvm.x86.avx10.vcvtnebf162ibs256(<16 x bfloat> %__B)
%2 = bitcast i16 %__A to <16 x i1>
%3 = select <16 x i1> %2, <16 x i16> %1, <16 x i16> %0
%4 = bitcast <16 x i16> %3 to <4 x i64>
ret <4 x i64> %4
}
declare <16 x i16> @llvm.x86.avx10.vcvtnebf162ibs256(<16 x bfloat>)
define dso_local <4 x i64> @test_mm256_maskz_ipcvtnebf16_epi8(i16 noundef zeroext %__A, <16 x bfloat> noundef %__B) local_unnamed_addr #2 {
; X64-LABEL: test_mm256_maskz_ipcvtnebf16_epi8:
; X64: # %bb.0: # %entry
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vcvtnebf162ibs %ymm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x7f,0xa9,0x69,0xc0]
; X64-NEXT: retq # encoding: [0xc3]
;
; X86-LABEL: test_mm256_maskz_ipcvtnebf16_epi8:
; X86: # %bb.0: # %entry
; X86-NEXT: kmovw {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf8,0x90,0x4c,0x24,0x04]
; X86-NEXT: vcvtnebf162ibs %ymm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x7f,0xa9,0x69,0xc0]
; X86-NEXT: retl # encoding: [0xc3]
entry:
%0 = tail call <16 x i16> @llvm.x86.avx10.vcvtnebf162ibs256(<16 x bfloat> %__B)
%1 = bitcast i16 %__A to <16 x i1>
%2 = select <16 x i1> %1, <16 x i16> %0, <16 x i16> zeroinitializer
%3 = bitcast <16 x i16> %2 to <4 x i64>
ret <4 x i64> %3
}
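; VCVTNEBF162IUBS: packed BF16 -> saturated unsigned 8-bit integer values.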
define dso_local <2 x i64> @test_mm_ipcvtnebf16_epu8(<8 x bfloat> noundef %__A) {
; CHECK-LABEL: test_mm_ipcvtnebf16_epu8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vcvtnebf162iubs %xmm0, %xmm0 # encoding: [0x62,0xf5,0x7f,0x08,0x6b,0xc0]
; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
entry:
%0 = tail call <8 x i16> @llvm.x86.avx10.vcvtnebf162iubs128(<8 x bfloat> %__A)
%1 = bitcast <8 x i16> %0 to <2 x i64>
ret <2 x i64> %1
}
define dso_local <2 x i64> @test_mm_mask_ipcvtnebf16_epu8(<2 x i64> noundef %__S, i8 noundef zeroext %__A, <8 x bfloat> noundef %__B) {
; X64-LABEL: test_mm_mask_ipcvtnebf16_epu8:
; X64: # %bb.0: # %entry
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vcvtnebf162iubs %xmm1, %xmm0 {%k1} # encoding: [0x62,0xf5,0x7f,0x09,0x6b,0xc1]
; X64-NEXT: retq # encoding: [0xc3]
;
; X86-LABEL: test_mm_mask_ipcvtnebf16_epu8:
; X86: # %bb.0: # %entry
; X86-NEXT: kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
; X86-NEXT: vcvtnebf162iubs %xmm1, %xmm0 {%k1} # encoding: [0x62,0xf5,0x7f,0x09,0x6b,0xc1]
; X86-NEXT: retl # encoding: [0xc3]
entry:
%0 = bitcast <2 x i64> %__S to <8 x i16>
%1 = tail call <8 x i16> @llvm.x86.avx10.vcvtnebf162iubs128(<8 x bfloat> %__B)
%2 = bitcast i8 %__A to <8 x i1>
%3 = select <8 x i1> %2, <8 x i16> %1, <8 x i16> %0
%4 = bitcast <8 x i16> %3 to <2 x i64>
ret <2 x i64> %4
}
declare <8 x i16> @llvm.x86.avx10.vcvtnebf162iubs128(<8 x bfloat>)
define dso_local <2 x i64> @test_mm_maskz_ipcvtnebf16_epu8(i8 noundef zeroext %__A, <8 x bfloat> noundef %__B) {
; X64-LABEL: test_mm_maskz_ipcvtnebf16_epu8:
; X64: # %bb.0: # %entry
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vcvtnebf162iubs %xmm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7f,0x89,0x6b,0xc0]
; X64-NEXT: retq # encoding: [0xc3]
;
; X86-LABEL: test_mm_maskz_ipcvtnebf16_epu8:
; X86: # %bb.0: # %entry
; X86-NEXT: kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
; X86-NEXT: vcvtnebf162iubs %xmm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7f,0x89,0x6b,0xc0]
; X86-NEXT: retl # encoding: [0xc3]
entry:
%0 = tail call <8 x i16> @llvm.x86.avx10.vcvtnebf162iubs128(<8 x bfloat> %__B)
%1 = bitcast i8 %__A to <8 x i1>
%2 = select <8 x i1> %1, <8 x i16> %0, <8 x i16> zeroinitializer
%3 = bitcast <8 x i16> %2 to <2 x i64>
ret <2 x i64> %3
}
define dso_local <4 x i64> @test_mm256_ipcvtnebf16_epu8(<16 x bfloat> noundef %__A) local_unnamed_addr #2 {
; CHECK-LABEL: test_mm256_ipcvtnebf16_epu8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vcvtnebf162iubs %ymm0, %ymm0 # encoding: [0x62,0xf5,0x7f,0x28,0x6b,0xc0]
; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
entry:
%0 = tail call <16 x i16> @llvm.x86.avx10.vcvtnebf162iubs256(<16 x bfloat> %__A)
%1 = bitcast <16 x i16> %0 to <4 x i64>
ret <4 x i64> %1
}
define dso_local <4 x i64> @test_mm256_mask_ipcvtnebf16_epu8(<4 x i64> noundef %__S, i16 noundef zeroext %__A, <16 x bfloat> noundef %__B) local_unnamed_addr #2 {
; X64-LABEL: test_mm256_mask_ipcvtnebf16_epu8:
; X64: # %bb.0: # %entry
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vcvtnebf162iubs %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x7f,0x29,0x6b,0xc1]
; X64-NEXT: retq # encoding: [0xc3]
;
; X86-LABEL: test_mm256_mask_ipcvtnebf16_epu8:
; X86: # %bb.0: # %entry
; X86-NEXT: kmovw {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf8,0x90,0x4c,0x24,0x04]
; X86-NEXT: vcvtnebf162iubs %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x7f,0x29,0x6b,0xc1]
; X86-NEXT: retl # encoding: [0xc3]
entry:
%0 = bitcast <4 x i64> %__S to <16 x i16>
%1 = tail call <16 x i16> @llvm.x86.avx10.vcvtnebf162iubs256(<16 x bfloat> %__B)
%2 = bitcast i16 %__A to <16 x i1>
%3 = select <16 x i1> %2, <16 x i16> %1, <16 x i16> %0
%4 = bitcast <16 x i16> %3 to <4 x i64>
ret <4 x i64> %4
}
define dso_local <4 x i64> @test_mm256_maskz_ipcvtnebf16_epu8(i16 noundef zeroext %__A, <16 x bfloat> noundef %__B) local_unnamed_addr #2 {
; X64-LABEL: test_mm256_maskz_ipcvtnebf16_epu8:
; X64: # %bb.0: # %entry
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vcvtnebf162iubs %ymm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x7f,0xa9,0x6b,0xc0]
; X64-NEXT: retq # encoding: [0xc3]
;
; X86-LABEL: test_mm256_maskz_ipcvtnebf16_epu8:
; X86: # %bb.0: # %entry
; X86-NEXT: kmovw {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf8,0x90,0x4c,0x24,0x04]
; X86-NEXT: vcvtnebf162iubs %ymm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x7f,0xa9,0x6b,0xc0]
; X86-NEXT: retl # encoding: [0xc3]
entry:
%0 = tail call <16 x i16> @llvm.x86.avx10.vcvtnebf162iubs256(<16 x bfloat> %__B)
%1 = bitcast i16 %__A to <16 x i1>
%2 = select <16 x i1> %1, <16 x i16> %0, <16 x i16> zeroinitializer
%3 = bitcast <16 x i16> %2 to <4 x i64>
ret <4 x i64> %3
}
declare <16 x i16> @llvm.x86.avx10.vcvtnebf162iubs256(<16 x bfloat>)
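; VCVTPH2IBS: packed FP16 -> saturated signed 8-bit integer values,
; plus the {rz-sae} rounding form for 256-bit vectors.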
define dso_local <2 x i64> @test_mm_ipcvtph_epi8(<8 x half> noundef %__A) {
; CHECK-LABEL: test_mm_ipcvtph_epi8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vcvtph2ibs %xmm0, %xmm0 # encoding: [0x62,0xf5,0x7c,0x08,0x69,0xc0]
; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
entry:
%0 = tail call <8 x i16> @llvm.x86.avx10.mask.vcvtph2ibs128(<8 x half> %__A, <8 x i16> zeroinitializer, i8 -1)
%1 = bitcast <8 x i16> %0 to <2 x i64>
ret <2 x i64> %1
}
define dso_local <2 x i64> @test_mm_mask_ipcvtph_epi8(<2 x i64> noundef %__S, i8 noundef zeroext %__A, <8 x half> noundef %__B) {
; X64-LABEL: test_mm_mask_ipcvtph_epi8:
; X64: # %bb.0: # %entry
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vcvtph2ibs %xmm1, %xmm0 {%k1} # encoding: [0x62,0xf5,0x7c,0x09,0x69,0xc1]
; X64-NEXT: retq # encoding: [0xc3]
;
; X86-LABEL: test_mm_mask_ipcvtph_epi8:
; X86: # %bb.0: # %entry
; X86-NEXT: kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
; X86-NEXT: vcvtph2ibs %xmm1, %xmm0 {%k1} # encoding: [0x62,0xf5,0x7c,0x09,0x69,0xc1]
; X86-NEXT: retl # encoding: [0xc3]
entry:
%0 = bitcast <2 x i64> %__S to <8 x i16>
%1 = tail call <8 x i16> @llvm.x86.avx10.mask.vcvtph2ibs128(<8 x half> %__B, <8 x i16> %0, i8 %__A)
%2 = bitcast <8 x i16> %1 to <2 x i64>
ret <2 x i64> %2
}
declare <8 x i16> @llvm.x86.avx10.mask.vcvtph2ibs128(<8 x half>, <8 x i16>, i8)
define dso_local <2 x i64> @test_mm_maskz_ipcvtph_epi8(i8 noundef zeroext %__A, <8 x half> noundef %__B) {
; X64-LABEL: test_mm_maskz_ipcvtph_epi8:
; X64: # %bb.0: # %entry
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vcvtph2ibs %xmm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7c,0x89,0x69,0xc0]
; X64-NEXT: retq # encoding: [0xc3]
;
; X86-LABEL: test_mm_maskz_ipcvtph_epi8:
; X86: # %bb.0: # %entry
; X86-NEXT: kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
; X86-NEXT: vcvtph2ibs %xmm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7c,0x89,0x69,0xc0]
; X86-NEXT: retl # encoding: [0xc3]
entry:
%0 = tail call <8 x i16> @llvm.x86.avx10.mask.vcvtph2ibs128(<8 x half> %__B, <8 x i16> zeroinitializer, i8 %__A)
%1 = bitcast <8 x i16> %0 to <2 x i64>
ret <2 x i64> %1
}
define dso_local <4 x i64> @test_mm256_ipcvtph_epi8(<16 x half> noundef %__A) local_unnamed_addr #2 {
; CHECK-LABEL: test_mm256_ipcvtph_epi8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vcvtph2ibs %ymm0, %ymm0 # encoding: [0x62,0xf5,0x7c,0x28,0x69,0xc0]
; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
entry:
%0 = tail call <16 x i16> @llvm.x86.avx10.mask.vcvtph2ibs256(<16 x half> %__A, <16 x i16> zeroinitializer, i16 -1, i32 4)
%1 = bitcast <16 x i16> %0 to <4 x i64>
ret <4 x i64> %1
}
define dso_local <4 x i64> @test_mm256_mask_ipcvtph_epi8(<4 x i64> noundef %__S, i16 noundef zeroext %__A, <16 x half> noundef %__B) local_unnamed_addr #2 {
; X64-LABEL: test_mm256_mask_ipcvtph_epi8:
; X64: # %bb.0: # %entry
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vcvtph2ibs %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x7c,0x29,0x69,0xc1]
; X64-NEXT: retq # encoding: [0xc3]
;
; X86-LABEL: test_mm256_mask_ipcvtph_epi8:
; X86: # %bb.0: # %entry
; X86-NEXT: kmovw {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf8,0x90,0x4c,0x24,0x04]
; X86-NEXT: vcvtph2ibs %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x7c,0x29,0x69,0xc1]
; X86-NEXT: retl # encoding: [0xc3]
entry:
%0 = bitcast <4 x i64> %__S to <16 x i16>
%1 = tail call <16 x i16> @llvm.x86.avx10.mask.vcvtph2ibs256(<16 x half> %__B, <16 x i16> %0, i16 %__A, i32 4)
%2 = bitcast <16 x i16> %1 to <4 x i64>
ret <4 x i64> %2
}
define dso_local <4 x i64> @test_mm256_maskz_ipcvtph_epi8(i16 noundef zeroext %__A, <16 x half> noundef %__B) local_unnamed_addr #2 {
; X64-LABEL: test_mm256_maskz_ipcvtph_epi8:
; X64: # %bb.0: # %entry
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vcvtph2ibs %ymm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x7c,0xa9,0x69,0xc0]
; X64-NEXT: retq # encoding: [0xc3]
;
; X86-LABEL: test_mm256_maskz_ipcvtph_epi8:
; X86: # %bb.0: # %entry
; X86-NEXT: kmovw {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf8,0x90,0x4c,0x24,0x04]
; X86-NEXT: vcvtph2ibs %ymm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x7c,0xa9,0x69,0xc0]
; X86-NEXT: retl # encoding: [0xc3]
entry:
%0 = tail call <16 x i16> @llvm.x86.avx10.mask.vcvtph2ibs256(<16 x half> %__B, <16 x i16> zeroinitializer, i16 %__A, i32 4)
%1 = bitcast <16 x i16> %0 to <4 x i64>
ret <4 x i64> %1
}
define dso_local <4 x i64> @test_mm256_ipcvtph_epi8_round(<16 x half> noundef %__A) {
; CHECK-LABEL: test_mm256_ipcvtph_epi8_round:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vcvtph2ibs {rz-sae}, %ymm0, %ymm0 # encoding: [0x62,0xf5,0x78,0x78,0x69,0xc0]
; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
entry:
%0 = tail call <16 x i16> @llvm.x86.avx10.mask.vcvtph2ibs256(<16 x half> %__A, <16 x i16> zeroinitializer, i16 -1, i32 11)
%1 = bitcast <16 x i16> %0 to <4 x i64>
ret <4 x i64> %1
}
define dso_local <4 x i64> @test_mm256_mask_ipcvtph_epi8_round(<4 x i64> noundef %__S, i16 noundef zeroext %__A, <16 x half> noundef %__B) {
; X64-LABEL: test_mm256_mask_ipcvtph_epi8_round:
; X64: # %bb.0: # %entry
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vcvtph2ibs {rz-sae}, %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x78,0x79,0x69,0xc1]
; X64-NEXT: retq # encoding: [0xc3]
;
; X86-LABEL: test_mm256_mask_ipcvtph_epi8_round:
; X86: # %bb.0: # %entry
; X86-NEXT: kmovw {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf8,0x90,0x4c,0x24,0x04]
; X86-NEXT: vcvtph2ibs {rz-sae}, %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x78,0x79,0x69,0xc1]
; X86-NEXT: retl # encoding: [0xc3]
entry:
%0 = bitcast <4 x i64> %__S to <16 x i16>
%1 = tail call <16 x i16> @llvm.x86.avx10.mask.vcvtph2ibs256(<16 x half> %__B, <16 x i16> %0, i16 %__A, i32 11)
%2 = bitcast <16 x i16> %1 to <4 x i64>
ret <4 x i64> %2
}
define dso_local <4 x i64> @test_mm256_maskz_ipcvtph_epi8_round(i16 noundef zeroext %__A, <16 x half> noundef %__B) {
; X64-LABEL: test_mm256_maskz_ipcvtph_epi8_round:
; X64: # %bb.0: # %entry
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vcvtph2ibs {rz-sae}, %ymm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x78,0xf9,0x69,0xc0]
; X64-NEXT: retq # encoding: [0xc3]
;
; X86-LABEL: test_mm256_maskz_ipcvtph_epi8_round:
; X86: # %bb.0: # %entry
; X86-NEXT: kmovw {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf8,0x90,0x4c,0x24,0x04]
; X86-NEXT: vcvtph2ibs {rz-sae}, %ymm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x78,0xf9,0x69,0xc0]
; X86-NEXT: retl # encoding: [0xc3]
entry:
%0 = tail call <16 x i16> @llvm.x86.avx10.mask.vcvtph2ibs256(<16 x half> %__B, <16 x i16> zeroinitializer, i16 %__A, i32 11)
%1 = bitcast <16 x i16> %0 to <4 x i64>
ret <4 x i64> %1
}
declare <16 x i16> @llvm.x86.avx10.mask.vcvtph2ibs256(<16 x half>, <16 x i16>, i16, i32)
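; VCVTPH2IUBS: packed FP16 -> saturated unsigned 8-bit integer values,
; plus the {rz-sae} rounding form for 256-bit vectors.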
define dso_local <2 x i64> @test_mm_ipcvtph_epu8(<8 x half> noundef %__A) {
; CHECK-LABEL: test_mm_ipcvtph_epu8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vcvtph2iubs %xmm0, %xmm0 # encoding: [0x62,0xf5,0x7c,0x08,0x6b,0xc0]
; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
entry:
%0 = tail call <8 x i16> @llvm.x86.avx10.mask.vcvtph2iubs128(<8 x half> %__A, <8 x i16> zeroinitializer, i8 -1)
%1 = bitcast <8 x i16> %0 to <2 x i64>
ret <2 x i64> %1
}
define dso_local <2 x i64> @test_mm_mask_ipcvtph_epu8(<2 x i64> noundef %__S, i8 noundef zeroext %__A, <8 x half> noundef %__B) {
; X64-LABEL: test_mm_mask_ipcvtph_epu8:
; X64: # %bb.0: # %entry
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vcvtph2iubs %xmm1, %xmm0 {%k1} # encoding: [0x62,0xf5,0x7c,0x09,0x6b,0xc1]
; X64-NEXT: retq # encoding: [0xc3]
;
; X86-LABEL: test_mm_mask_ipcvtph_epu8:
; X86: # %bb.0: # %entry
; X86-NEXT: kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
; X86-NEXT: vcvtph2iubs %xmm1, %xmm0 {%k1} # encoding: [0x62,0xf5,0x7c,0x09,0x6b,0xc1]
; X86-NEXT: retl # encoding: [0xc3]
entry:
%0 = bitcast <2 x i64> %__S to <8 x i16>
%1 = tail call <8 x i16> @llvm.x86.avx10.mask.vcvtph2iubs128(<8 x half> %__B, <8 x i16> %0, i8 %__A)
%2 = bitcast <8 x i16> %1 to <2 x i64>
ret <2 x i64> %2
}
declare <8 x i16> @llvm.x86.avx10.mask.vcvtph2iubs128(<8 x half>, <8 x i16>, i8)
define dso_local <2 x i64> @test_mm_maskz_ipcvtph_epu8(i8 noundef zeroext %__A, <8 x half> noundef %__B) {
; X64-LABEL: test_mm_maskz_ipcvtph_epu8:
; X64: # %bb.0: # %entry
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vcvtph2iubs %xmm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7c,0x89,0x6b,0xc0]
; X64-NEXT: retq # encoding: [0xc3]
;
; X86-LABEL: test_mm_maskz_ipcvtph_epu8:
; X86: # %bb.0: # %entry
; X86-NEXT: kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
; X86-NEXT: vcvtph2iubs %xmm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7c,0x89,0x6b,0xc0]
; X86-NEXT: retl # encoding: [0xc3]
entry:
%0 = tail call <8 x i16> @llvm.x86.avx10.mask.vcvtph2iubs128(<8 x half> %__B, <8 x i16> zeroinitializer, i8 %__A)
%1 = bitcast <8 x i16> %0 to <2 x i64>
ret <2 x i64> %1
}
define dso_local <4 x i64> @test_mm256_ipcvtph_epu8(<16 x half> noundef %__A) local_unnamed_addr #2 {
; CHECK-LABEL: test_mm256_ipcvtph_epu8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vcvtph2iubs %ymm0, %ymm0 # encoding: [0x62,0xf5,0x7c,0x28,0x6b,0xc0]
; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
entry:
%0 = tail call <16 x i16> @llvm.x86.avx10.mask.vcvtph2iubs256(<16 x half> %__A, <16 x i16> zeroinitializer, i16 -1, i32 4)
%1 = bitcast <16 x i16> %0 to <4 x i64>
ret <4 x i64> %1
}
define dso_local <4 x i64> @test_mm256_mask_ipcvtph_epu8(<4 x i64> noundef %__S, i16 noundef zeroext %__A, <16 x half> noundef %__B) local_unnamed_addr #2 {
; X64-LABEL: test_mm256_mask_ipcvtph_epu8:
; X64: # %bb.0: # %entry
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vcvtph2iubs %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x7c,0x29,0x6b,0xc1]
; X64-NEXT: retq # encoding: [0xc3]
;
; X86-LABEL: test_mm256_mask_ipcvtph_epu8:
; X86: # %bb.0: # %entry
; X86-NEXT: kmovw {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf8,0x90,0x4c,0x24,0x04]
; X86-NEXT: vcvtph2iubs %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x7c,0x29,0x6b,0xc1]
; X86-NEXT: retl # encoding: [0xc3]
entry:
%0 = bitcast <4 x i64> %__S to <16 x i16>
%1 = tail call <16 x i16> @llvm.x86.avx10.mask.vcvtph2iubs256(<16 x half> %__B, <16 x i16> %0, i16 %__A, i32 4)
%2 = bitcast <16 x i16> %1 to <4 x i64>
ret <4 x i64> %2
}
define dso_local <4 x i64> @test_mm256_maskz_ipcvtph_epu8(i16 noundef zeroext %__A, <16 x half> noundef %__B) local_unnamed_addr #2 {
; X64-LABEL: test_mm256_maskz_ipcvtph_epu8:
; X64: # %bb.0: # %entry
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vcvtph2iubs %ymm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x7c,0xa9,0x6b,0xc0]
; X64-NEXT: retq # encoding: [0xc3]
;
; X86-LABEL: test_mm256_maskz_ipcvtph_epu8:
; X86: # %bb.0: # %entry
; X86-NEXT: kmovw {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf8,0x90,0x4c,0x24,0x04]
; X86-NEXT: vcvtph2iubs %ymm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x7c,0xa9,0x6b,0xc0]
; X86-NEXT: retl # encoding: [0xc3]
entry:
%0 = tail call <16 x i16> @llvm.x86.avx10.mask.vcvtph2iubs256(<16 x half> %__B, <16 x i16> zeroinitializer, i16 %__A, i32 4)
%1 = bitcast <16 x i16> %0 to <4 x i64>
ret <4 x i64> %1
}
define dso_local <4 x i64> @test_mm256_ipcvtph_epu8_round(<16 x half> noundef %__A) {
; CHECK-LABEL: test_mm256_ipcvtph_epu8_round:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vcvtph2iubs {rz-sae}, %ymm0, %ymm0 # encoding: [0x62,0xf5,0x78,0x78,0x6b,0xc0]
; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
entry:
%0 = tail call <16 x i16> @llvm.x86.avx10.mask.vcvtph2iubs256(<16 x half> %__A, <16 x i16> zeroinitializer, i16 -1, i32 11)
%1 = bitcast <16 x i16> %0 to <4 x i64>
ret <4 x i64> %1
}
define dso_local <4 x i64> @test_mm256_mask_ipcvtph_epu8_round(<4 x i64> noundef %__S, i16 noundef zeroext %__A, <16 x half> noundef %__B) {
; X64-LABEL: test_mm256_mask_ipcvtph_epu8_round:
; X64: # %bb.0: # %entry
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vcvtph2iubs {rz-sae}, %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x78,0x79,0x6b,0xc1]
; X64-NEXT: retq # encoding: [0xc3]
;
; X86-LABEL: test_mm256_mask_ipcvtph_epu8_round:
; X86: # %bb.0: # %entry
; X86-NEXT: kmovw {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf8,0x90,0x4c,0x24,0x04]
; X86-NEXT: vcvtph2iubs {rz-sae}, %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x78,0x79,0x6b,0xc1]
; X86-NEXT: retl # encoding: [0xc3]
entry:
%0 = bitcast <4 x i64> %__S to <16 x i16>
%1 = tail call <16 x i16> @llvm.x86.avx10.mask.vcvtph2iubs256(<16 x half> %__B, <16 x i16> %0, i16 %__A, i32 11)
%2 = bitcast <16 x i16> %1 to <4 x i64>
ret <4 x i64> %2
}
define dso_local <4 x i64> @test_mm256_maskz_ipcvtph_epu8_round(i16 noundef zeroext %__A, <16 x half> noundef %__B) {
; X64-LABEL: test_mm256_maskz_ipcvtph_epu8_round:
; X64: # %bb.0: # %entry
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vcvtph2iubs {rz-sae}, %ymm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x78,0xf9,0x6b,0xc0]
; X64-NEXT: retq # encoding: [0xc3]
;
; X86-LABEL: test_mm256_maskz_ipcvtph_epu8_round:
; X86: # %bb.0: # %entry
; X86-NEXT: kmovw {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf8,0x90,0x4c,0x24,0x04]
; X86-NEXT: vcvtph2iubs {rz-sae}, %ymm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x78,0xf9,0x6b,0xc0]
; X86-NEXT: retl # encoding: [0xc3]
entry:
%0 = tail call <16 x i16> @llvm.x86.avx10.mask.vcvtph2iubs256(<16 x half> %__B, <16 x i16> zeroinitializer, i16 %__A, i32 11)
%1 = bitcast <16 x i16> %0 to <4 x i64>
ret <4 x i64> %1
}
declare <16 x i16> @llvm.x86.avx10.mask.vcvtph2iubs256(<16 x half>, <16 x i16>, i16, i32)
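; VCVTPS2IBS: packed FP32 -> saturated signed 8-bit integer values,
; plus the {rz-sae} rounding form for 256-bit vectors.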
define dso_local <2 x i64> @test_mm_ipcvtps_epi8(<4 x float> noundef %__A) {
; CHECK-LABEL: test_mm_ipcvtps_epi8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vcvtps2ibs %xmm0, %xmm0 # encoding: [0x62,0xf5,0x7d,0x08,0x69,0xc0]
; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
entry:
%0 = tail call <4 x i32> @llvm.x86.avx10.mask.vcvtps2ibs128(<4 x float> %__A, <4 x i32> zeroinitializer, i8 -1)
%1 = bitcast <4 x i32> %0 to <2 x i64>
ret <2 x i64> %1
}
define dso_local <2 x i64> @test_mm_mask_ipcvtps_epi8(<2 x i64> noundef %__S, i8 noundef zeroext %__A, <4 x float> noundef %__B) {
; X64-LABEL: test_mm_mask_ipcvtps_epi8:
; X64: # %bb.0: # %entry
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vcvtps2ibs %xmm1, %xmm0 {%k1} # encoding: [0x62,0xf5,0x7d,0x09,0x69,0xc1]
; X64-NEXT: retq # encoding: [0xc3]
;
; X86-LABEL: test_mm_mask_ipcvtps_epi8:
; X86: # %bb.0: # %entry
; X86-NEXT: kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
; X86-NEXT: vcvtps2ibs %xmm1, %xmm0 {%k1} # encoding: [0x62,0xf5,0x7d,0x09,0x69,0xc1]
; X86-NEXT: retl # encoding: [0xc3]
entry:
%0 = bitcast <2 x i64> %__S to <4 x i32>
%1 = tail call <4 x i32> @llvm.x86.avx10.mask.vcvtps2ibs128(<4 x float> %__B, <4 x i32> %0, i8 %__A)
%2 = bitcast <4 x i32> %1 to <2 x i64>
ret <2 x i64> %2
}
define dso_local <2 x i64> @test_mm_maskz_ipcvtps_epi8(i8 noundef zeroext %__A, <4 x float> noundef %__B) {
; X64-LABEL: test_mm_maskz_ipcvtps_epi8:
; X64: # %bb.0: # %entry
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vcvtps2ibs %xmm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7d,0x89,0x69,0xc0]
; X64-NEXT: retq # encoding: [0xc3]
;
; X86-LABEL: test_mm_maskz_ipcvtps_epi8:
; X86: # %bb.0: # %entry
; X86-NEXT: kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
; X86-NEXT: vcvtps2ibs %xmm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7d,0x89,0x69,0xc0]
; X86-NEXT: retl # encoding: [0xc3]
entry:
%0 = tail call <4 x i32> @llvm.x86.avx10.mask.vcvtps2ibs128(<4 x float> %__B, <4 x i32> zeroinitializer, i8 %__A)
%1 = bitcast <4 x i32> %0 to <2 x i64>
ret <2 x i64> %1
}
declare <4 x i32> @llvm.x86.avx10.mask.vcvtps2ibs128(<4 x float>, <4 x i32>, i8)
define dso_local <4 x i64> @test_mm256_ipcvtps_epi8(<8 x float> noundef %__A) local_unnamed_addr #2 {
; CHECK-LABEL: test_mm256_ipcvtps_epi8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vcvtps2ibs %ymm0, %ymm0 # encoding: [0x62,0xf5,0x7d,0x28,0x69,0xc0]
; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
entry:
%0 = tail call <8 x i32> @llvm.x86.avx10.mask.vcvtps2ibs256(<8 x float> %__A, <8 x i32> zeroinitializer, i8 -1, i32 4)
%1 = bitcast <8 x i32> %0 to <4 x i64>
ret <4 x i64> %1
}
define dso_local <4 x i64> @test_mm256_mask_ipcvtps_epi8(<4 x i64> noundef %__S, i8 noundef zeroext %__A, <8 x float> noundef %__B) local_unnamed_addr #2 {
; X64-LABEL: test_mm256_mask_ipcvtps_epi8:
; X64: # %bb.0: # %entry
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vcvtps2ibs %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x7d,0x29,0x69,0xc1]
; X64-NEXT: retq # encoding: [0xc3]
;
; X86-LABEL: test_mm256_mask_ipcvtps_epi8:
; X86: # %bb.0: # %entry
; X86-NEXT: kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
; X86-NEXT: vcvtps2ibs %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x7d,0x29,0x69,0xc1]
; X86-NEXT: retl # encoding: [0xc3]
entry:
%0 = bitcast <4 x i64> %__S to <8 x i32>
%1 = tail call <8 x i32> @llvm.x86.avx10.mask.vcvtps2ibs256(<8 x float> %__B, <8 x i32> %0, i8 %__A, i32 4)
%2 = bitcast <8 x i32> %1 to <4 x i64>
ret <4 x i64> %2
}
define dso_local <4 x i64> @test_mm256_maskz_ipcvtps_epi8(i8 noundef zeroext %__A, <8 x float> noundef %__B) local_unnamed_addr #2 {
; X64-LABEL: test_mm256_maskz_ipcvtps_epi8:
; X64: # %bb.0: # %entry
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vcvtps2ibs %ymm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x7d,0xa9,0x69,0xc0]
; X64-NEXT: retq # encoding: [0xc3]
;
; X86-LABEL: test_mm256_maskz_ipcvtps_epi8:
; X86: # %bb.0: # %entry
; X86-NEXT: kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
; X86-NEXT: vcvtps2ibs %ymm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x7d,0xa9,0x69,0xc0]
; X86-NEXT: retl # encoding: [0xc3]
entry:
%0 = tail call <8 x i32> @llvm.x86.avx10.mask.vcvtps2ibs256(<8 x float> %__B, <8 x i32> zeroinitializer, i8 %__A, i32 4)
%1 = bitcast <8 x i32> %0 to <4 x i64>
ret <4 x i64> %1
}
define dso_local <4 x i64> @test_mm256_ipcvtps_epi8_round(<8 x float> noundef %__A) {
; CHECK-LABEL: test_mm256_ipcvtps_epi8_round:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vcvtps2ibs {rz-sae}, %ymm0, %ymm0 # encoding: [0x62,0xf5,0x79,0x78,0x69,0xc0]
; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
entry:
%0 = tail call <8 x i32> @llvm.x86.avx10.mask.vcvtps2ibs256(<8 x float> %__A, <8 x i32> zeroinitializer, i8 -1, i32 11)
%1 = bitcast <8 x i32> %0 to <4 x i64>
ret <4 x i64> %1
}
define dso_local <4 x i64> @test_mm256_mask_ipcvtps_epi8_round(<4 x i64> noundef %__S, i8 noundef zeroext %__A, <8 x float> noundef %__B) {
; X64-LABEL: test_mm256_mask_ipcvtps_epi8_round:
; X64: # %bb.0: # %entry
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vcvtps2ibs {rz-sae}, %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x79,0x79,0x69,0xc1]
; X64-NEXT: retq # encoding: [0xc3]
;
; X86-LABEL: test_mm256_mask_ipcvtps_epi8_round:
; X86: # %bb.0: # %entry
; X86-NEXT: kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
; X86-NEXT: vcvtps2ibs {rz-sae}, %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x79,0x79,0x69,0xc1]
; X86-NEXT: retl # encoding: [0xc3]
entry:
%0 = bitcast <4 x i64> %__S to <8 x i32>
%1 = tail call <8 x i32> @llvm.x86.avx10.mask.vcvtps2ibs256(<8 x float> %__B, <8 x i32> %0, i8 %__A, i32 11)
%2 = bitcast <8 x i32> %1 to <4 x i64>
ret <4 x i64> %2
}
define dso_local <4 x i64> @test_mm256_maskz_ipcvtps_epi8_round(i8 noundef zeroext %__A, <8 x float> noundef %__B) {
; X64-LABEL: test_mm256_maskz_ipcvtps_epi8_round:
; X64: # %bb.0: # %entry
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vcvtps2ibs {rz-sae}, %ymm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x79,0xf9,0x69,0xc0]
; X64-NEXT: retq # encoding: [0xc3]
;
; X86-LABEL: test_mm256_maskz_ipcvtps_epi8_round:
; X86: # %bb.0: # %entry
; X86-NEXT: kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
; X86-NEXT: vcvtps2ibs {rz-sae}, %ymm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x79,0xf9,0x69,0xc0]
; X86-NEXT: retl # encoding: [0xc3]
entry:
%0 = tail call <8 x i32> @llvm.x86.avx10.mask.vcvtps2ibs256(<8 x float> %__B, <8 x i32> zeroinitializer, i8 %__A, i32 11)
%1 = bitcast <8 x i32> %0 to <4 x i64>
ret <4 x i64> %1
}
declare <8 x i32> @llvm.x86.avx10.mask.vcvtps2ibs256(<8 x float>, <8 x i32>, i8, i32)
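; VCVTPS2IUBS: packed FP32 -> saturated unsigned 8-bit integer values,
; plus the {rz-sae} rounding form for 256-bit vectors.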
define dso_local <2 x i64> @test_mm_ipcvtps_epu8(<4 x float> noundef %__A) {
; CHECK-LABEL: test_mm_ipcvtps_epu8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vcvtps2iubs %xmm0, %xmm0 # encoding: [0x62,0xf5,0x7d,0x08,0x6b,0xc0]
; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
entry:
%0 = tail call <4 x i32> @llvm.x86.avx10.mask.vcvtps2iubs128(<4 x float> %__A, <4 x i32> zeroinitializer, i8 -1)
%1 = bitcast <4 x i32> %0 to <2 x i64>
ret <2 x i64> %1
}
define dso_local <2 x i64> @test_mm_mask_ipcvtps_epu8(<2 x i64> noundef %__S, i8 noundef zeroext %__A, <4 x float> noundef %__B) {
; X64-LABEL: test_mm_mask_ipcvtps_epu8:
; X64: # %bb.0: # %entry
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vcvtps2iubs %xmm1, %xmm0 {%k1} # encoding: [0x62,0xf5,0x7d,0x09,0x6b,0xc1]
; X64-NEXT: retq # encoding: [0xc3]
;
; X86-LABEL: test_mm_mask_ipcvtps_epu8:
; X86: # %bb.0: # %entry
; X86-NEXT: kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
; X86-NEXT: vcvtps2iubs %xmm1, %xmm0 {%k1} # encoding: [0x62,0xf5,0x7d,0x09,0x6b,0xc1]
; X86-NEXT: retl # encoding: [0xc3]
entry:
%0 = bitcast <2 x i64> %__S to <4 x i32>
%1 = tail call <4 x i32> @llvm.x86.avx10.mask.vcvtps2iubs128(<4 x float> %__B, <4 x i32> %0, i8 %__A)
%2 = bitcast <4 x i32> %1 to <2 x i64>
ret <2 x i64> %2
}
define dso_local <2 x i64> @test_mm_maskz_ipcvtps_epu8(i8 noundef zeroext %__A, <4 x float> noundef %__B) {
; X64-LABEL: test_mm_maskz_ipcvtps_epu8:
; X64: # %bb.0: # %entry
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vcvtps2iubs %xmm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7d,0x89,0x6b,0xc0]
; X64-NEXT: retq # encoding: [0xc3]
;
; X86-LABEL: test_mm_maskz_ipcvtps_epu8:
; X86: # %bb.0: # %entry
; X86-NEXT: kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
; X86-NEXT: vcvtps2iubs %xmm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7d,0x89,0x6b,0xc0]
; X86-NEXT: retl # encoding: [0xc3]
entry:
%0 = tail call <4 x i32> @llvm.x86.avx10.mask.vcvtps2iubs128(<4 x float> %__B, <4 x i32> zeroinitializer, i8 %__A)
%1 = bitcast <4 x i32> %0 to <2 x i64>
ret <2 x i64> %1
}
declare <4 x i32> @llvm.x86.avx10.mask.vcvtps2iubs128(<4 x float>, <4 x i32>, i8)
define dso_local <4 x i64> @test_mm256_ipcvtps_epu8(<8 x float> noundef %__A) local_unnamed_addr #2 {
; CHECK-LABEL: test_mm256_ipcvtps_epu8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vcvtps2iubs %ymm0, %ymm0 # encoding: [0x62,0xf5,0x7d,0x28,0x6b,0xc0]
; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
entry:
%0 = tail call <8 x i32> @llvm.x86.avx10.mask.vcvtps2iubs256(<8 x float> %__A, <8 x i32> zeroinitializer, i8 -1, i32 4)
%1 = bitcast <8 x i32> %0 to <4 x i64>
ret <4 x i64> %1
}
define dso_local <4 x i64> @test_mm256_mask_ipcvtps_epu8(<4 x i64> noundef %__S, i8 noundef zeroext %__A, <8 x float> noundef %__B) local_unnamed_addr #2 {
; X64-LABEL: test_mm256_mask_ipcvtps_epu8:
; X64: # %bb.0: # %entry
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vcvtps2iubs %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x7d,0x29,0x6b,0xc1]
; X64-NEXT: retq # encoding: [0xc3]
;
; X86-LABEL: test_mm256_mask_ipcvtps_epu8:
; X86: # %bb.0: # %entry
; X86-NEXT: kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
; X86-NEXT: vcvtps2iubs %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x7d,0x29,0x6b,0xc1]
; X86-NEXT: retl # encoding: [0xc3]
entry:
%0 = bitcast <4 x i64> %__S to <8 x i32>
%1 = tail call <8 x i32> @llvm.x86.avx10.mask.vcvtps2iubs256(<8 x float> %__B, <8 x i32> %0, i8 %__A, i32 4)
%2 = bitcast <8 x i32> %1 to <4 x i64>
ret <4 x i64> %2
}
define dso_local <4 x i64> @test_mm256_maskz_ipcvtps_epu8(i8 noundef zeroext %__A, <8 x float> noundef %__B) local_unnamed_addr #2 {
; X64-LABEL: test_mm256_maskz_ipcvtps_epu8:
; X64: # %bb.0: # %entry
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vcvtps2iubs %ymm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x7d,0xa9,0x6b,0xc0]
; X64-NEXT: retq # encoding: [0xc3]
;
; X86-LABEL: test_mm256_maskz_ipcvtps_epu8:
; X86: # %bb.0: # %entry
; X86-NEXT: kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
; X86-NEXT: vcvtps2iubs %ymm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x7d,0xa9,0x6b,0xc0]
; X86-NEXT: retl # encoding: [0xc3]
entry:
%0 = tail call <8 x i32> @llvm.x86.avx10.mask.vcvtps2iubs256(<8 x float> %__B, <8 x i32> zeroinitializer, i8 %__A, i32 4)
%1 = bitcast <8 x i32> %0 to <4 x i64>
ret <4 x i64> %1
}
define dso_local <4 x i64> @test_mm256_ipcvtps_epu8_round(<8 x float> noundef %__A) {
; CHECK-LABEL: test_mm256_ipcvtps_epu8_round:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vcvtps2iubs {rz-sae}, %ymm0, %ymm0 # encoding: [0x62,0xf5,0x79,0x78,0x6b,0xc0]
; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
entry:
%0 = tail call <8 x i32> @llvm.x86.avx10.mask.vcvtps2iubs256(<8 x float> %__A, <8 x i32> zeroinitializer, i8 -1, i32 11)
%1 = bitcast <8 x i32> %0 to <4 x i64>
ret <4 x i64> %1
}
define dso_local <4 x i64> @test_mm256_mask_ipcvtps_epu8_round(<4 x i64> noundef %__S, i8 noundef zeroext %__A, <8 x float> noundef %__B) {
; X64-LABEL: test_mm256_mask_ipcvtps_epu8_round:
; X64: # %bb.0: # %entry
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vcvtps2iubs {rz-sae}, %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x79,0x79,0x6b,0xc1]
; X64-NEXT: retq # encoding: [0xc3]
;
; X86-LABEL: test_mm256_mask_ipcvtps_epu8_round:
; X86: # %bb.0: # %entry
; X86-NEXT: kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
; X86-NEXT: vcvtps2iubs {rz-sae}, %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x79,0x79,0x6b,0xc1]
; X86-NEXT: retl # encoding: [0xc3]
entry:
%0 = bitcast <4 x i64> %__S to <8 x i32>
%1 = tail call <8 x i32> @llvm.x86.avx10.mask.vcvtps2iubs256(<8 x float> %__B, <8 x i32> %0, i8 %__A, i32 11)
%2 = bitcast <8 x i32> %1 to <4 x i64>
ret <4 x i64> %2
}
define dso_local <4 x i64> @test_mm256_maskz_ipcvtps_epu8_round(i8 noundef zeroext %__A, <8 x float> noundef %__B) {
; X64-LABEL: test_mm256_maskz_ipcvtps_epu8_round:
; X64: # %bb.0: # %entry
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vcvtps2iubs {rz-sae}, %ymm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x79,0xf9,0x6b,0xc0]
; X64-NEXT: retq # encoding: [0xc3]
;
; X86-LABEL: test_mm256_maskz_ipcvtps_epu8_round:
; X86: # %bb.0: # %entry
; X86-NEXT: kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
; X86-NEXT: vcvtps2iubs {rz-sae}, %ymm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x79,0xf9,0x6b,0xc0]
; X86-NEXT: retl # encoding: [0xc3]
entry:
%0 = tail call <8 x i32> @llvm.x86.avx10.mask.vcvtps2iubs256(<8 x float> %__B, <8 x i32> zeroinitializer, i8 %__A, i32 11)
%1 = bitcast <8 x i32> %0 to <4 x i64>
ret <4 x i64> %1
}
declare <8 x i32> @llvm.x86.avx10.mask.vcvtps2iubs256(<8 x float>, <8 x i32>, i8, i32)
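; VCVTTNEBF162IBS: truncating (round-toward-zero) packed BF16 -> saturated signed 8-bit integer values.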
define dso_local <2 x i64> @test_mm_ipcvttnebf16_epi8(<8 x bfloat> noundef %__A) {
; CHECK-LABEL: test_mm_ipcvttnebf16_epi8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vcvttnebf162ibs %xmm0, %xmm0 # encoding: [0x62,0xf5,0x7f,0x08,0x68,0xc0]
; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
entry:
%0 = tail call <8 x i16> @llvm.x86.avx10.vcvttnebf162ibs128(<8 x bfloat> %__A)
%1 = bitcast <8 x i16> %0 to <2 x i64>
ret <2 x i64> %1
}
define dso_local <2 x i64> @test_mm_mask_ipcvttnebf16_epi8(<2 x i64> noundef %__S, i8 noundef zeroext %__A, <8 x bfloat> noundef %__B) {
; X64-LABEL: test_mm_mask_ipcvttnebf16_epi8:
; X64: # %bb.0: # %entry
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vcvttnebf162ibs %xmm1, %xmm0 {%k1} # encoding: [0x62,0xf5,0x7f,0x09,0x68,0xc1]
; X64-NEXT: retq # encoding: [0xc3]
;
; X86-LABEL: test_mm_mask_ipcvttnebf16_epi8:
; X86: # %bb.0: # %entry
; X86-NEXT: kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
; X86-NEXT: vcvttnebf162ibs %xmm1, %xmm0 {%k1} # encoding: [0x62,0xf5,0x7f,0x09,0x68,0xc1]
; X86-NEXT: retl # encoding: [0xc3]
entry:
%0 = bitcast <2 x i64> %__S to <8 x i16>
%1 = tail call <8 x i16> @llvm.x86.avx10.vcvttnebf162ibs128(<8 x bfloat> %__B)
%2 = bitcast i8 %__A to <8 x i1>
%3 = select <8 x i1> %2, <8 x i16> %1, <8 x i16> %0
%4 = bitcast <8 x i16> %3 to <2 x i64>
ret <2 x i64> %4
}
declare <8 x i16> @llvm.x86.avx10.vcvttnebf162ibs128(<8 x bfloat>)
define dso_local <2 x i64> @test_mm_maskz_ipcvttnebf16_epi8(i8 noundef zeroext %__A, <8 x bfloat> noundef %__B) {
; X64-LABEL: test_mm_maskz_ipcvttnebf16_epi8:
; X64: # %bb.0: # %entry
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vcvttnebf162ibs %xmm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7f,0x89,0x68,0xc0]
; X64-NEXT: retq # encoding: [0xc3]
;
; X86-LABEL: test_mm_maskz_ipcvttnebf16_epi8:
; X86: # %bb.0: # %entry
; X86-NEXT: kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
; X86-NEXT: vcvttnebf162ibs %xmm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7f,0x89,0x68,0xc0]
; X86-NEXT: retl # encoding: [0xc3]
entry:
%0 = tail call <8 x i16> @llvm.x86.avx10.vcvttnebf162ibs128(<8 x bfloat> %__B)
%1 = bitcast i8 %__A to <8 x i1>
%2 = select <8 x i1> %1, <8 x i16> %0, <8 x i16> zeroinitializer
%3 = bitcast <8 x i16> %2 to <2 x i64>
ret <2 x i64> %3
}
define dso_local <4 x i64> @test_mm256_ipcvttnebf16_epi8(<16 x bfloat> noundef %__A) local_unnamed_addr #2 {
; CHECK-LABEL: test_mm256_ipcvttnebf16_epi8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vcvttnebf162ibs %ymm0, %ymm0 # encoding: [0x62,0xf5,0x7f,0x28,0x68,0xc0]
; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
entry:
%0 = tail call <16 x i16> @llvm.x86.avx10.vcvttnebf162ibs256(<16 x bfloat> %__A)
%1 = bitcast <16 x i16> %0 to <4 x i64>
ret <4 x i64> %1
}
define dso_local <4 x i64> @test_mm256_mask_ipcvttnebf16_epi8(<4 x i64> noundef %__S, i16 noundef zeroext %__A, <16 x bfloat> noundef %__B) local_unnamed_addr #2 {
; X64-LABEL: test_mm256_mask_ipcvttnebf16_epi8:
; X64: # %bb.0: # %entry
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vcvttnebf162ibs %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x7f,0x29,0x68,0xc1]
; X64-NEXT: retq # encoding: [0xc3]
;
; X86-LABEL: test_mm256_mask_ipcvttnebf16_epi8:
; X86: # %bb.0: # %entry
; X86-NEXT: kmovw {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf8,0x90,0x4c,0x24,0x04]
; X86-NEXT: vcvttnebf162ibs %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x7f,0x29,0x68,0xc1]
; X86-NEXT: retl # encoding: [0xc3]
entry:
%0 = bitcast <4 x i64> %__S to <16 x i16>
%1 = tail call <16 x i16> @llvm.x86.avx10.vcvttnebf162ibs256(<16 x bfloat> %__B)
%2 = bitcast i16 %__A to <16 x i1>
%3 = select <16 x i1> %2, <16 x i16> %1, <16 x i16> %0
%4 = bitcast <16 x i16> %3 to <4 x i64>
ret <4 x i64> %4
}
define dso_local <4 x i64> @test_mm256_maskz_ipcvttnebf16_epi8(i16 noundef zeroext %__A, <16 x bfloat> noundef %__B) local_unnamed_addr #2 {
; X64-LABEL: test_mm256_maskz_ipcvttnebf16_epi8:
; X64: # %bb.0: # %entry
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vcvttnebf162ibs %ymm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x7f,0xa9,0x68,0xc0]
; X64-NEXT: retq # encoding: [0xc3]
;
; X86-LABEL: test_mm256_maskz_ipcvttnebf16_epi8:
; X86: # %bb.0: # %entry
; X86-NEXT: kmovw {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf8,0x90,0x4c,0x24,0x04]
; X86-NEXT: vcvttnebf162ibs %ymm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x7f,0xa9,0x68,0xc0]
; X86-NEXT: retl # encoding: [0xc3]
entry:
%0 = tail call <16 x i16> @llvm.x86.avx10.vcvttnebf162ibs256(<16 x bfloat> %__B)
%1 = bitcast i16 %__A to <16 x i1>
%2 = select <16 x i1> %1, <16 x i16> %0, <16 x i16> zeroinitializer
%3 = bitcast <16 x i16> %2 to <4 x i64>
ret <4 x i64> %3
}
declare <16 x i16> @llvm.x86.avx10.vcvttnebf162ibs256(<16 x bfloat>)
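; VCVTTNEBF162IUBS: truncating packed BF16 -> saturated unsigned 8-bit integer values.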
define dso_local <2 x i64> @test_mm_ipcvttnebf16_epu8(<8 x bfloat> noundef %__A) {
; CHECK-LABEL: test_mm_ipcvttnebf16_epu8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vcvttnebf162iubs %xmm0, %xmm0 # encoding: [0x62,0xf5,0x7f,0x08,0x6a,0xc0]
; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
entry:
%0 = tail call <8 x i16> @llvm.x86.avx10.vcvttnebf162iubs128(<8 x bfloat> %__A)
%1 = bitcast <8 x i16> %0 to <2 x i64>
ret <2 x i64> %1
}
define dso_local <2 x i64> @test_mm_mask_ipcvttnebf16_epu8(<2 x i64> noundef %__S, i8 noundef zeroext %__A, <8 x bfloat> noundef %__B) {
; X64-LABEL: test_mm_mask_ipcvttnebf16_epu8:
; X64: # %bb.0: # %entry
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vcvttnebf162iubs %xmm1, %xmm0 {%k1} # encoding: [0x62,0xf5,0x7f,0x09,0x6a,0xc1]
; X64-NEXT: retq # encoding: [0xc3]
;
; X86-LABEL: test_mm_mask_ipcvttnebf16_epu8:
; X86: # %bb.0: # %entry
; X86-NEXT: kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
; X86-NEXT: vcvttnebf162iubs %xmm1, %xmm0 {%k1} # encoding: [0x62,0xf5,0x7f,0x09,0x6a,0xc1]
; X86-NEXT: retl # encoding: [0xc3]
entry:
%0 = bitcast <2 x i64> %__S to <8 x i16>
%1 = tail call <8 x i16> @llvm.x86.avx10.vcvttnebf162iubs128(<8 x bfloat> %__B)
%2 = bitcast i8 %__A to <8 x i1>
%3 = select <8 x i1> %2, <8 x i16> %1, <8 x i16> %0
%4 = bitcast <8 x i16> %3 to <2 x i64>
ret <2 x i64> %4
}
declare <8 x i16> @llvm.x86.avx10.vcvttnebf162iubs128(<8 x bfloat>)
define dso_local <2 x i64> @test_mm_maskz_ipcvttnebf16_epu8(i8 noundef zeroext %__A, <8 x bfloat> noundef %__B) {
; X64-LABEL: test_mm_maskz_ipcvttnebf16_epu8:
; X64: # %bb.0: # %entry
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vcvttnebf162iubs %xmm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7f,0x89,0x6a,0xc0]
; X64-NEXT: retq # encoding: [0xc3]
;
; X86-LABEL: test_mm_maskz_ipcvttnebf16_epu8:
; X86: # %bb.0: # %entry
; X86-NEXT: kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
; X86-NEXT: vcvttnebf162iubs %xmm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7f,0x89,0x6a,0xc0]
; X86-NEXT: retl # encoding: [0xc3]
entry:
%0 = tail call <8 x i16> @llvm.x86.avx10.vcvttnebf162iubs128(<8 x bfloat> %__B)
%1 = bitcast i8 %__A to <8 x i1>
%2 = select <8 x i1> %1, <8 x i16> %0, <8 x i16> zeroinitializer
%3 = bitcast <8 x i16> %2 to <2 x i64>
ret <2 x i64> %3
}
define dso_local <4 x i64> @test_mm256_ipcvttnebf16_epu8(<16 x bfloat> noundef %__A) local_unnamed_addr #2 {
; CHECK-LABEL: test_mm256_ipcvttnebf16_epu8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vcvttnebf162iubs %ymm0, %ymm0 # encoding: [0x62,0xf5,0x7f,0x28,0x6a,0xc0]
; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
entry:
%0 = tail call <16 x i16> @llvm.x86.avx10.vcvttnebf162iubs256(<16 x bfloat> %__A)
%1 = bitcast <16 x i16> %0 to <4 x i64>
ret <4 x i64> %1
}
define dso_local <4 x i64> @test_mm256_mask_ipcvttnebf16_epu8(<4 x i64> noundef %__S, i16 noundef zeroext %__A, <16 x bfloat> noundef %__B) local_unnamed_addr #2 {
; X64-LABEL: test_mm256_mask_ipcvttnebf16_epu8:
; X64: # %bb.0: # %entry
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vcvttnebf162iubs %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x7f,0x29,0x6a,0xc1]
; X64-NEXT: retq # encoding: [0xc3]
;
; X86-LABEL: test_mm256_mask_ipcvttnebf16_epu8:
; X86: # %bb.0: # %entry
; X86-NEXT: kmovw {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf8,0x90,0x4c,0x24,0x04]
; X86-NEXT: vcvttnebf162iubs %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x7f,0x29,0x6a,0xc1]
; X86-NEXT: retl # encoding: [0xc3]
entry:
%0 = bitcast <4 x i64> %__S to <16 x i16>
%1 = tail call <16 x i16> @llvm.x86.avx10.vcvttnebf162iubs256(<16 x bfloat> %__B)
%2 = bitcast i16 %__A to <16 x i1>
%3 = select <16 x i1> %2, <16 x i16> %1, <16 x i16> %0
%4 = bitcast <16 x i16> %3 to <4 x i64>
ret <4 x i64> %4
}
define dso_local <4 x i64> @test_mm256_maskz_ipcvttnebf16_epu8(i16 noundef zeroext %__A, <16 x bfloat> noundef %__B) local_unnamed_addr #2 {
; X64-LABEL: test_mm256_maskz_ipcvttnebf16_epu8:
; X64: # %bb.0: # %entry
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vcvttnebf162iubs %ymm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x7f,0xa9,0x6a,0xc0]
; X64-NEXT: retq # encoding: [0xc3]
;
; X86-LABEL: test_mm256_maskz_ipcvttnebf16_epu8:
; X86: # %bb.0: # %entry
; X86-NEXT: kmovw {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf8,0x90,0x4c,0x24,0x04]
; X86-NEXT: vcvttnebf162iubs %ymm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x7f,0xa9,0x6a,0xc0]
; X86-NEXT: retl # encoding: [0xc3]
entry:
%0 = tail call <16 x i16> @llvm.x86.avx10.vcvttnebf162iubs256(<16 x bfloat> %__B)
%1 = bitcast i16 %__A to <16 x i1>
%2 = select <16 x i1> %1, <16 x i16> %0, <16 x i16> zeroinitializer
%3 = bitcast <16 x i16> %2 to <4 x i64>
ret <4 x i64> %3
}
declare <16 x i16> @llvm.x86.avx10.vcvttnebf162iubs256(<16 x bfloat>)
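; VCVTTPH2IBS: truncating packed FP16 -> saturated signed 8-bit integer values,
; plus the {sae} form for 256-bit vectors.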
define dso_local <2 x i64> @test_mm_ipcvttph_epi8(<8 x half> noundef %__A) {
; CHECK-LABEL: test_mm_ipcvttph_epi8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vcvttph2ibs %xmm0, %xmm0 # encoding: [0x62,0xf5,0x7c,0x08,0x68,0xc0]
; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
entry:
%0 = tail call <8 x i16> @llvm.x86.avx10.mask.vcvttph2ibs128(<8 x half> %__A, <8 x i16> zeroinitializer, i8 -1)
%1 = bitcast <8 x i16> %0 to <2 x i64>
ret <2 x i64> %1
}
define dso_local <2 x i64> @test_mm_mask_ipcvttph_epi8(<2 x i64> noundef %__S, i8 noundef zeroext %__A, <8 x half> noundef %__B) {
; X64-LABEL: test_mm_mask_ipcvttph_epi8:
; X64: # %bb.0: # %entry
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vcvttph2ibs %xmm1, %xmm0 {%k1} # encoding: [0x62,0xf5,0x7c,0x09,0x68,0xc1]
; X64-NEXT: retq # encoding: [0xc3]
;
; X86-LABEL: test_mm_mask_ipcvttph_epi8:
; X86: # %bb.0: # %entry
; X86-NEXT: kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
; X86-NEXT: vcvttph2ibs %xmm1, %xmm0 {%k1} # encoding: [0x62,0xf5,0x7c,0x09,0x68,0xc1]
; X86-NEXT: retl # encoding: [0xc3]
entry:
%0 = bitcast <2 x i64> %__S to <8 x i16>
%1 = tail call <8 x i16> @llvm.x86.avx10.mask.vcvttph2ibs128(<8 x half> %__B, <8 x i16> %0, i8 %__A)
%2 = bitcast <8 x i16> %1 to <2 x i64>
ret <2 x i64> %2
}
declare <8 x i16> @llvm.x86.avx10.mask.vcvttph2ibs128(<8 x half>, <8 x i16>, i8)
define dso_local <2 x i64> @test_mm_maskz_ipcvttph_epi8(i8 noundef zeroext %__A, <8 x half> noundef %__B) {
; X64-LABEL: test_mm_maskz_ipcvttph_epi8:
; X64: # %bb.0: # %entry
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vcvttph2ibs %xmm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7c,0x89,0x68,0xc0]
; X64-NEXT: retq # encoding: [0xc3]
;
; X86-LABEL: test_mm_maskz_ipcvttph_epi8:
; X86: # %bb.0: # %entry
; X86-NEXT: kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
; X86-NEXT: vcvttph2ibs %xmm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7c,0x89,0x68,0xc0]
; X86-NEXT: retl # encoding: [0xc3]
entry:
%0 = tail call <8 x i16> @llvm.x86.avx10.mask.vcvttph2ibs128(<8 x half> %__B, <8 x i16> zeroinitializer, i8 %__A)
%1 = bitcast <8 x i16> %0 to <2 x i64>
ret <2 x i64> %1
}
define dso_local <4 x i64> @test_mm256_ipcvttph_epi8(<16 x half> noundef %__A) local_unnamed_addr #2 {
; CHECK-LABEL: test_mm256_ipcvttph_epi8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vcvttph2ibs %ymm0, %ymm0 # encoding: [0x62,0xf5,0x7c,0x28,0x68,0xc0]
; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
entry:
%0 = tail call <16 x i16> @llvm.x86.avx10.mask.vcvttph2ibs256(<16 x half> %__A, <16 x i16> zeroinitializer, i16 -1, i32 4)
%1 = bitcast <16 x i16> %0 to <4 x i64>
ret <4 x i64> %1
}
define dso_local <4 x i64> @test_mm256_mask_ipcvttph_epi8(<4 x i64> noundef %__S, i16 noundef zeroext %__A, <16 x half> noundef %__B) local_unnamed_addr #2 {
; X64-LABEL: test_mm256_mask_ipcvttph_epi8:
; X64: # %bb.0: # %entry
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vcvttph2ibs %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x7c,0x29,0x68,0xc1]
; X64-NEXT: retq # encoding: [0xc3]
;
; X86-LABEL: test_mm256_mask_ipcvttph_epi8:
; X86: # %bb.0: # %entry
; X86-NEXT: kmovw {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf8,0x90,0x4c,0x24,0x04]
; X86-NEXT: vcvttph2ibs %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x7c,0x29,0x68,0xc1]
; X86-NEXT: retl # encoding: [0xc3]
entry:
%0 = bitcast <4 x i64> %__S to <16 x i16>
%1 = tail call <16 x i16> @llvm.x86.avx10.mask.vcvttph2ibs256(<16 x half> %__B, <16 x i16> %0, i16 %__A, i32 4)
%2 = bitcast <16 x i16> %1 to <4 x i64>
ret <4 x i64> %2
}
define dso_local <4 x i64> @test_mm256_maskz_ipcvttph_epi8(i16 noundef zeroext %__A, <16 x half> noundef %__B) local_unnamed_addr #2 {
; X64-LABEL: test_mm256_maskz_ipcvttph_epi8:
; X64: # %bb.0: # %entry
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vcvttph2ibs %ymm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x7c,0xa9,0x68,0xc0]
; X64-NEXT: retq # encoding: [0xc3]
;
; X86-LABEL: test_mm256_maskz_ipcvttph_epi8:
; X86: # %bb.0: # %entry
; X86-NEXT: kmovw {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf8,0x90,0x4c,0x24,0x04]
; X86-NEXT: vcvttph2ibs %ymm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x7c,0xa9,0x68,0xc0]
; X86-NEXT: retl # encoding: [0xc3]
entry:
%0 = tail call <16 x i16> @llvm.x86.avx10.mask.vcvttph2ibs256(<16 x half> %__B, <16 x i16> zeroinitializer, i16 %__A, i32 4)
%1 = bitcast <16 x i16> %0 to <4 x i64>
ret <4 x i64> %1
}
define dso_local <4 x i64> @test_mm256_ipcvttph_epi8_round(<16 x half> noundef %__A) {
; CHECK-LABEL: test_mm256_ipcvttph_epi8_round:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vcvttph2ibs {sae}, %ymm0, %ymm0 # encoding: [0x62,0xf5,0x78,0x18,0x68,0xc0]
; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
entry:
%0 = tail call <16 x i16> @llvm.x86.avx10.mask.vcvttph2ibs256(<16 x half> %__A, <16 x i16> zeroinitializer, i16 -1, i32 8)
%1 = bitcast <16 x i16> %0 to <4 x i64>
ret <4 x i64> %1
}
define dso_local <4 x i64> @test_mm256_mask_ipcvttph_epi8_round(<4 x i64> noundef %__S, i16 noundef zeroext %__A, <16 x half> noundef %__B) {
; X64-LABEL: test_mm256_mask_ipcvttph_epi8_round:
; X64: # %bb.0: # %entry
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vcvttph2ibs {sae}, %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x78,0x19,0x68,0xc1]
; X64-NEXT: retq # encoding: [0xc3]
;
; X86-LABEL: test_mm256_mask_ipcvttph_epi8_round:
; X86: # %bb.0: # %entry
; X86-NEXT: kmovw {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf8,0x90,0x4c,0x24,0x04]
; X86-NEXT: vcvttph2ibs {sae}, %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x78,0x19,0x68,0xc1]
; X86-NEXT: retl # encoding: [0xc3]
entry:
%0 = bitcast <4 x i64> %__S to <16 x i16>
%1 = tail call <16 x i16> @llvm.x86.avx10.mask.vcvttph2ibs256(<16 x half> %__B, <16 x i16> %0, i16 %__A, i32 8)
%2 = bitcast <16 x i16> %1 to <4 x i64>
ret <4 x i64> %2
}
define dso_local <4 x i64> @test_mm256_maskz_ipcvttph_epi8_round(i16 noundef zeroext %__A, <16 x half> noundef %__B) {
; X64-LABEL: test_mm256_maskz_ipcvttph_epi8_round:
; X64: # %bb.0: # %entry
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vcvttph2ibs {sae}, %ymm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x78,0x99,0x68,0xc0]
; X64-NEXT: retq # encoding: [0xc3]
;
; X86-LABEL: test_mm256_maskz_ipcvttph_epi8_round:
; X86: # %bb.0: # %entry
; X86-NEXT: kmovw {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf8,0x90,0x4c,0x24,0x04]
; X86-NEXT: vcvttph2ibs {sae}, %ymm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x78,0x99,0x68,0xc0]
; X86-NEXT: retl # encoding: [0xc3]
entry:
%0 = tail call <16 x i16> @llvm.x86.avx10.mask.vcvttph2ibs256(<16 x half> %__B, <16 x i16> zeroinitializer, i16 %__A, i32 8)
%1 = bitcast <16 x i16> %0 to <4 x i64>
ret <4 x i64> %1
}
declare <16 x i16> @llvm.x86.avx10.mask.vcvttph2ibs256(<16 x half>, <16 x i16>, i16, i32)
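; VCVTTPH2IUBS: truncating packed FP16 -> saturated unsigned 8-bit integer values,
; plus the {sae} form for 256-bit vectors.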
define dso_local <2 x i64> @test_mm_ipcvttph_epu8(<8 x half> noundef %__A) {
; CHECK-LABEL: test_mm_ipcvttph_epu8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vcvttph2iubs %xmm0, %xmm0 # encoding: [0x62,0xf5,0x7c,0x08,0x6a,0xc0]
; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
entry:
%0 = tail call <8 x i16> @llvm.x86.avx10.mask.vcvttph2iubs128(<8 x half> %__A, <8 x i16> zeroinitializer, i8 -1)
%1 = bitcast <8 x i16> %0 to <2 x i64>
ret <2 x i64> %1
}
define dso_local <2 x i64> @test_mm_mask_ipcvttph_epu8(<2 x i64> noundef %__S, i8 noundef zeroext %__A, <8 x half> noundef %__B) {
; X64-LABEL: test_mm_mask_ipcvttph_epu8:
; X64: # %bb.0: # %entry
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vcvttph2iubs %xmm1, %xmm0 {%k1} # encoding: [0x62,0xf5,0x7c,0x09,0x6a,0xc1]
; X64-NEXT: retq # encoding: [0xc3]
;
; X86-LABEL: test_mm_mask_ipcvttph_epu8:
; X86: # %bb.0: # %entry
; X86-NEXT: kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
; X86-NEXT: vcvttph2iubs %xmm1, %xmm0 {%k1} # encoding: [0x62,0xf5,0x7c,0x09,0x6a,0xc1]
; X86-NEXT: retl # encoding: [0xc3]
entry:
%0 = bitcast <2 x i64> %__S to <8 x i16>
%1 = tail call <8 x i16> @llvm.x86.avx10.mask.vcvttph2iubs128(<8 x half> %__B, <8 x i16> %0, i8 %__A)
%2 = bitcast <8 x i16> %1 to <2 x i64>
ret <2 x i64> %2
}
declare <8 x i16> @llvm.x86.avx10.mask.vcvttph2iubs128(<8 x half>, <8 x i16>, i8)
define dso_local <2 x i64> @test_mm_maskz_ipcvttph_epu8(i8 noundef zeroext %__A, <8 x half> noundef %__B) {
; X64-LABEL: test_mm_maskz_ipcvttph_epu8:
; X64: # %bb.0: # %entry
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vcvttph2iubs %xmm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7c,0x89,0x6a,0xc0]
; X64-NEXT: retq # encoding: [0xc3]
;
; X86-LABEL: test_mm_maskz_ipcvttph_epu8:
; X86: # %bb.0: # %entry
; X86-NEXT: kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
; X86-NEXT: vcvttph2iubs %xmm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7c,0x89,0x6a,0xc0]
; X86-NEXT: retl # encoding: [0xc3]
entry:
%0 = tail call <8 x i16> @llvm.x86.avx10.mask.vcvttph2iubs128(<8 x half> %__B, <8 x i16> zeroinitializer, i8 %__A)
%1 = bitcast <8 x i16> %0 to <2 x i64>
ret <2 x i64> %1
}
define dso_local <4 x i64> @test_mm256_ipcvttph_epu8(<16 x half> noundef %__A) local_unnamed_addr #2 {
; CHECK-LABEL: test_mm256_ipcvttph_epu8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vcvttph2iubs %ymm0, %ymm0 # encoding: [0x62,0xf5,0x7c,0x28,0x6a,0xc0]
; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
entry:
%0 = tail call <16 x i16> @llvm.x86.avx10.mask.vcvttph2iubs256(<16 x half> %__A, <16 x i16> zeroinitializer, i16 -1, i32 4)
%1 = bitcast <16 x i16> %0 to <4 x i64>
ret <4 x i64> %1
}
define dso_local <4 x i64> @test_mm256_mask_ipcvttph_epu8(<4 x i64> noundef %__S, i16 noundef zeroext %__A, <16 x half> noundef %__B) local_unnamed_addr #2 {
; X64-LABEL: test_mm256_mask_ipcvttph_epu8:
; X64: # %bb.0: # %entry
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vcvttph2iubs %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x7c,0x29,0x6a,0xc1]
; X64-NEXT: retq # encoding: [0xc3]
;
; X86-LABEL: test_mm256_mask_ipcvttph_epu8:
; X86: # %bb.0: # %entry
; X86-NEXT: kmovw {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf8,0x90,0x4c,0x24,0x04]
; X86-NEXT: vcvttph2iubs %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x7c,0x29,0x6a,0xc1]
; X86-NEXT: retl # encoding: [0xc3]
entry:
%0 = bitcast <4 x i64> %__S to <16 x i16>
%1 = tail call <16 x i16> @llvm.x86.avx10.mask.vcvttph2iubs256(<16 x half> %__B, <16 x i16> %0, i16 %__A, i32 4)
%2 = bitcast <16 x i16> %1 to <4 x i64>
ret <4 x i64> %2
}
define dso_local <4 x i64> @test_mm256_maskz_ipcvttph_epu8(i16 noundef zeroext %__A, <16 x half> noundef %__B) local_unnamed_addr #2 {
; X64-LABEL: test_mm256_maskz_ipcvttph_epu8:
; X64: # %bb.0: # %entry
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vcvttph2iubs %ymm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x7c,0xa9,0x6a,0xc0]
; X64-NEXT: retq # encoding: [0xc3]
;
; X86-LABEL: test_mm256_maskz_ipcvttph_epu8:
; X86: # %bb.0: # %entry
; X86-NEXT: kmovw {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf8,0x90,0x4c,0x24,0x04]
; X86-NEXT: vcvttph2iubs %ymm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x7c,0xa9,0x6a,0xc0]
; X86-NEXT: retl # encoding: [0xc3]
entry:
%0 = tail call <16 x i16> @llvm.x86.avx10.mask.vcvttph2iubs256(<16 x half> %__B, <16 x i16> zeroinitializer, i16 %__A, i32 4)
%1 = bitcast <16 x i16> %0 to <4 x i64>
ret <4 x i64> %1
}
define dso_local <4 x i64> @test_mm256_ipcvttph_epu8_round(<16 x half> noundef %__A) {
; CHECK-LABEL: test_mm256_ipcvttph_epu8_round:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vcvttph2iubs {sae}, %ymm0, %ymm0 # encoding: [0x62,0xf5,0x78,0x18,0x6a,0xc0]
; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
entry:
%0 = tail call <16 x i16> @llvm.x86.avx10.mask.vcvttph2iubs256(<16 x half> %__A, <16 x i16> zeroinitializer, i16 -1, i32 8)
%1 = bitcast <16 x i16> %0 to <4 x i64>
ret <4 x i64> %1
}
define dso_local <4 x i64> @test_mm256_mask_ipcvttph_epu8_round(<4 x i64> noundef %__S, i16 noundef zeroext %__A, <16 x half> noundef %__B) {
; X64-LABEL: test_mm256_mask_ipcvttph_epu8_round:
; X64: # %bb.0: # %entry
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vcvttph2iubs {sae}, %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x78,0x19,0x6a,0xc1]
; X64-NEXT: retq # encoding: [0xc3]
;
; X86-LABEL: test_mm256_mask_ipcvttph_epu8_round:
; X86: # %bb.0: # %entry
; X86-NEXT: kmovw {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf8,0x90,0x4c,0x24,0x04]
; X86-NEXT: vcvttph2iubs {sae}, %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x78,0x19,0x6a,0xc1]
; X86-NEXT: retl # encoding: [0xc3]
entry:
%0 = bitcast <4 x i64> %__S to <16 x i16>
%1 = tail call <16 x i16> @llvm.x86.avx10.mask.vcvttph2iubs256(<16 x half> %__B, <16 x i16> %0, i16 %__A, i32 8)
%2 = bitcast <16 x i16> %1 to <4 x i64>
ret <4 x i64> %2
}
define dso_local <4 x i64> @test_mm256_maskz_ipcvttph_epu8_round(i16 noundef zeroext %__A, <16 x half> noundef %__B) {
; X64-LABEL: test_mm256_maskz_ipcvttph_epu8_round:
; X64: # %bb.0: # %entry
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vcvttph2iubs {sae}, %ymm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x78,0x99,0x6a,0xc0]
; X64-NEXT: retq # encoding: [0xc3]
;
; X86-LABEL: test_mm256_maskz_ipcvttph_epu8_round:
; X86: # %bb.0: # %entry
; X86-NEXT: kmovw {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf8,0x90,0x4c,0x24,0x04]
; X86-NEXT: vcvttph2iubs {sae}, %ymm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x78,0x99,0x6a,0xc0]
; X86-NEXT: retl # encoding: [0xc3]
entry:
%0 = tail call <16 x i16> @llvm.x86.avx10.mask.vcvttph2iubs256(<16 x half> %__B, <16 x i16> zeroinitializer, i16 %__A, i32 8)
%1 = bitcast <16 x i16> %0 to <4 x i64>
ret <4 x i64> %1
}
declare <16 x i16> @llvm.x86.avx10.mask.vcvttph2iubs256(<16 x half>, <16 x i16>, i16, i32)
define dso_local <2 x i64> @test_mm_ipcvttps_epi8(<4 x float> noundef %__A) {
; CHECK-LABEL: test_mm_ipcvttps_epi8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vcvttps2ibs %xmm0, %xmm0 # encoding: [0x62,0xf5,0x7d,0x08,0x68,0xc0]
; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
entry:
%0 = tail call <4 x i32> @llvm.x86.avx10.mask.vcvttps2ibs128(<4 x float> %__A, <4 x i32> zeroinitializer, i8 -1)
%1 = bitcast <4 x i32> %0 to <2 x i64>
ret <2 x i64> %1
}
define dso_local <2 x i64> @test_mm_mask_ipcvttps_epi8(<2 x i64> noundef %__S, i8 noundef zeroext %__A, <4 x float> noundef %__B) {
; X64-LABEL: test_mm_mask_ipcvttps_epi8:
; X64: # %bb.0: # %entry
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vcvttps2ibs %xmm1, %xmm0 {%k1} # encoding: [0x62,0xf5,0x7d,0x09,0x68,0xc1]
; X64-NEXT: retq # encoding: [0xc3]
;
; X86-LABEL: test_mm_mask_ipcvttps_epi8:
; X86: # %bb.0: # %entry
; X86-NEXT: kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
; X86-NEXT: vcvttps2ibs %xmm1, %xmm0 {%k1} # encoding: [0x62,0xf5,0x7d,0x09,0x68,0xc1]
; X86-NEXT: retl # encoding: [0xc3]
entry:
%0 = bitcast <2 x i64> %__S to <4 x i32>
%1 = tail call <4 x i32> @llvm.x86.avx10.mask.vcvttps2ibs128(<4 x float> %__B, <4 x i32> %0, i8 %__A)
%2 = bitcast <4 x i32> %1 to <2 x i64>
ret <2 x i64> %2
}
define dso_local <2 x i64> @test_mm_maskz_ipcvttps_epi8(i8 noundef zeroext %__A, <4 x float> noundef %__B) {
; X64-LABEL: test_mm_maskz_ipcvttps_epi8:
; X64: # %bb.0: # %entry
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vcvttps2ibs %xmm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7d,0x89,0x68,0xc0]
; X64-NEXT: retq # encoding: [0xc3]
;
; X86-LABEL: test_mm_maskz_ipcvttps_epi8:
; X86: # %bb.0: # %entry
; X86-NEXT: kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
; X86-NEXT: vcvttps2ibs %xmm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7d,0x89,0x68,0xc0]
; X86-NEXT: retl # encoding: [0xc3]
entry:
%0 = tail call <4 x i32> @llvm.x86.avx10.mask.vcvttps2ibs128(<4 x float> %__B, <4 x i32> zeroinitializer, i8 %__A)
%1 = bitcast <4 x i32> %0 to <2 x i64>
ret <2 x i64> %1
}
declare <4 x i32> @llvm.x86.avx10.mask.vcvttps2ibs128(<4 x float>, <4 x i32>, i8)
define dso_local <4 x i64> @test_mm256_ipcvttps_epi8(<8 x float> noundef %__A) local_unnamed_addr #2 {
; CHECK-LABEL: test_mm256_ipcvttps_epi8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vcvttps2ibs %ymm0, %ymm0 # encoding: [0x62,0xf5,0x7d,0x28,0x68,0xc0]
; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
entry:
%0 = tail call <8 x i32> @llvm.x86.avx10.mask.vcvttps2ibs256(<8 x float> %__A, <8 x i32> zeroinitializer, i8 -1, i32 4)
%1 = bitcast <8 x i32> %0 to <4 x i64>
ret <4 x i64> %1
}
define dso_local <4 x i64> @test_mm256_mask_ipcvttps_epi8(<4 x i64> noundef %__S, i8 noundef zeroext %__A, <8 x float> noundef %__B) local_unnamed_addr #2 {
; X64-LABEL: test_mm256_mask_ipcvttps_epi8:
; X64: # %bb.0: # %entry
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vcvttps2ibs %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x7d,0x29,0x68,0xc1]
; X64-NEXT: retq # encoding: [0xc3]
;
; X86-LABEL: test_mm256_mask_ipcvttps_epi8:
; X86: # %bb.0: # %entry
; X86-NEXT: kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
; X86-NEXT: vcvttps2ibs %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x7d,0x29,0x68,0xc1]
; X86-NEXT: retl # encoding: [0xc3]
entry:
%0 = bitcast <4 x i64> %__S to <8 x i32>
%1 = tail call <8 x i32> @llvm.x86.avx10.mask.vcvttps2ibs256(<8 x float> %__B, <8 x i32> %0, i8 %__A, i32 4)
%2 = bitcast <8 x i32> %1 to <4 x i64>
ret <4 x i64> %2
}
define dso_local <4 x i64> @test_mm256_maskz_ipcvttps_epi8(i8 noundef zeroext %__A, <8 x float> noundef %__B) local_unnamed_addr #2 {
; X64-LABEL: test_mm256_maskz_ipcvttps_epi8:
; X64: # %bb.0: # %entry
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vcvttps2ibs %ymm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x7d,0xa9,0x68,0xc0]
; X64-NEXT: retq # encoding: [0xc3]
;
; X86-LABEL: test_mm256_maskz_ipcvttps_epi8:
; X86: # %bb.0: # %entry
; X86-NEXT: kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
; X86-NEXT: vcvttps2ibs %ymm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x7d,0xa9,0x68,0xc0]
; X86-NEXT: retl # encoding: [0xc3]
entry:
%0 = tail call <8 x i32> @llvm.x86.avx10.mask.vcvttps2ibs256(<8 x float> %__B, <8 x i32> zeroinitializer, i8 %__A, i32 4)
%1 = bitcast <8 x i32> %0 to <4 x i64>
ret <4 x i64> %1
}
define dso_local <4 x i64> @test_mm256_ipcvttps_epi8_round(<8 x float> noundef %__A) {
; CHECK-LABEL: test_mm256_ipcvttps_epi8_round:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vcvttps2ibs {sae}, %ymm0, %ymm0 # encoding: [0x62,0xf5,0x79,0x18,0x68,0xc0]
; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
entry:
%0 = tail call <8 x i32> @llvm.x86.avx10.mask.vcvttps2ibs256(<8 x float> %__A, <8 x i32> zeroinitializer, i8 -1, i32 8)
%1 = bitcast <8 x i32> %0 to <4 x i64>
ret <4 x i64> %1
}
define dso_local <4 x i64> @test_mm256_mask_ipcvttps_epi8_round(<4 x i64> noundef %__S, i8 noundef zeroext %__A, <8 x float> noundef %__B) {
; X64-LABEL: test_mm256_mask_ipcvttps_epi8_round:
; X64: # %bb.0: # %entry
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vcvttps2ibs {sae}, %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x79,0x19,0x68,0xc1]
; X64-NEXT: retq # encoding: [0xc3]
;
; X86-LABEL: test_mm256_mask_ipcvttps_epi8_round:
; X86: # %bb.0: # %entry
; X86-NEXT: kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
; X86-NEXT: vcvttps2ibs {sae}, %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x79,0x19,0x68,0xc1]
; X86-NEXT: retl # encoding: [0xc3]
entry:
%0 = bitcast <4 x i64> %__S to <8 x i32>
%1 = tail call <8 x i32> @llvm.x86.avx10.mask.vcvttps2ibs256(<8 x float> %__B, <8 x i32> %0, i8 %__A, i32 8)
%2 = bitcast <8 x i32> %1 to <4 x i64>
ret <4 x i64> %2
}
define dso_local <4 x i64> @test_mm256_maskz_ipcvttps_epi8_round(i8 noundef zeroext %__A, <8 x float> noundef %__B) {
; X64-LABEL: test_mm256_maskz_ipcvttps_epi8_round:
; X64: # %bb.0: # %entry
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vcvttps2ibs {sae}, %ymm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x79,0x99,0x68,0xc0]
; X64-NEXT: retq # encoding: [0xc3]
;
; X86-LABEL: test_mm256_maskz_ipcvttps_epi8_round:
; X86: # %bb.0: # %entry
; X86-NEXT: kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
; X86-NEXT: vcvttps2ibs {sae}, %ymm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x79,0x99,0x68,0xc0]
; X86-NEXT: retl # encoding: [0xc3]
entry:
%0 = tail call <8 x i32> @llvm.x86.avx10.mask.vcvttps2ibs256(<8 x float> %__B, <8 x i32> zeroinitializer, i8 %__A, i32 8)
%1 = bitcast <8 x i32> %0 to <4 x i64>
ret <4 x i64> %1
}
declare <8 x i32> @llvm.x86.avx10.mask.vcvttps2ibs256(<8 x float>, <8 x i32>, i8, i32)
define dso_local <2 x i64> @test_mm_ipcvttps_epu8(<4 x float> noundef %__A) {
; CHECK-LABEL: test_mm_ipcvttps_epu8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vcvttps2iubs %xmm0, %xmm0 # encoding: [0x62,0xf5,0x7d,0x08,0x6a,0xc0]
; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
entry:
%0 = tail call <4 x i32> @llvm.x86.avx10.mask.vcvttps2iubs128(<4 x float> %__A, <4 x i32> zeroinitializer, i8 -1)
%1 = bitcast <4 x i32> %0 to <2 x i64>
ret <2 x i64> %1
}
define dso_local <2 x i64> @test_mm_mask_ipcvttps_epu8(<2 x i64> noundef %__S, i8 noundef zeroext %__A, <4 x float> noundef %__B) {
; X64-LABEL: test_mm_mask_ipcvttps_epu8:
; X64: # %bb.0: # %entry
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vcvttps2iubs %xmm1, %xmm0 {%k1} # encoding: [0x62,0xf5,0x7d,0x09,0x6a,0xc1]
; X64-NEXT: retq # encoding: [0xc3]
;
; X86-LABEL: test_mm_mask_ipcvttps_epu8:
; X86: # %bb.0: # %entry
; X86-NEXT: kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
; X86-NEXT: vcvttps2iubs %xmm1, %xmm0 {%k1} # encoding: [0x62,0xf5,0x7d,0x09,0x6a,0xc1]
; X86-NEXT: retl # encoding: [0xc3]
entry:
%0 = bitcast <2 x i64> %__S to <4 x i32>
%1 = tail call <4 x i32> @llvm.x86.avx10.mask.vcvttps2iubs128(<4 x float> %__B, <4 x i32> %0, i8 %__A)
%2 = bitcast <4 x i32> %1 to <2 x i64>
ret <2 x i64> %2
}
define dso_local <2 x i64> @test_mm_maskz_ipcvttps_epu8(i8 noundef zeroext %__A, <4 x float> noundef %__B) {
; X64-LABEL: test_mm_maskz_ipcvttps_epu8:
; X64: # %bb.0: # %entry
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vcvttps2iubs %xmm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7d,0x89,0x6a,0xc0]
; X64-NEXT: retq # encoding: [0xc3]
;
; X86-LABEL: test_mm_maskz_ipcvttps_epu8:
; X86: # %bb.0: # %entry
; X86-NEXT: kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
; X86-NEXT: vcvttps2iubs %xmm0, %xmm0 {%k1} {z} # encoding: [0x62,0xf5,0x7d,0x89,0x6a,0xc0]
; X86-NEXT: retl # encoding: [0xc3]
entry:
%0 = tail call <4 x i32> @llvm.x86.avx10.mask.vcvttps2iubs128(<4 x float> %__B, <4 x i32> zeroinitializer, i8 %__A)
%1 = bitcast <4 x i32> %0 to <2 x i64>
ret <2 x i64> %1
}
declare <4 x i32> @llvm.x86.avx10.mask.vcvttps2iubs128(<4 x float>, <4 x i32>, i8)
define dso_local <4 x i64> @test_mm256_ipcvttps_epu8(<8 x float> noundef %__A) local_unnamed_addr #2 {
; CHECK-LABEL: test_mm256_ipcvttps_epu8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vcvttps2iubs %ymm0, %ymm0 # encoding: [0x62,0xf5,0x7d,0x28,0x6a,0xc0]
; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
entry:
%0 = tail call <8 x i32> @llvm.x86.avx10.mask.vcvttps2iubs256(<8 x float> %__A, <8 x i32> zeroinitializer, i8 -1, i32 4)
%1 = bitcast <8 x i32> %0 to <4 x i64>
ret <4 x i64> %1
}
define dso_local <4 x i64> @test_mm256_mask_ipcvttps_epu8(<4 x i64> noundef %__S, i8 noundef zeroext %__A, <8 x float> noundef %__B) local_unnamed_addr #2 {
; X64-LABEL: test_mm256_mask_ipcvttps_epu8:
; X64: # %bb.0: # %entry
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vcvttps2iubs %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x7d,0x29,0x6a,0xc1]
; X64-NEXT: retq # encoding: [0xc3]
;
; X86-LABEL: test_mm256_mask_ipcvttps_epu8:
; X86: # %bb.0: # %entry
; X86-NEXT: kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
; X86-NEXT: vcvttps2iubs %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x7d,0x29,0x6a,0xc1]
; X86-NEXT: retl # encoding: [0xc3]
entry:
%0 = bitcast <4 x i64> %__S to <8 x i32>
%1 = tail call <8 x i32> @llvm.x86.avx10.mask.vcvttps2iubs256(<8 x float> %__B, <8 x i32> %0, i8 %__A, i32 4)
%2 = bitcast <8 x i32> %1 to <4 x i64>
ret <4 x i64> %2
}
define dso_local <4 x i64> @test_mm256_maskz_ipcvttps_epu8(i8 noundef zeroext %__A, <8 x float> noundef %__B) local_unnamed_addr #2 {
; X64-LABEL: test_mm256_maskz_ipcvttps_epu8:
; X64: # %bb.0: # %entry
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vcvttps2iubs %ymm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x7d,0xa9,0x6a,0xc0]
; X64-NEXT: retq # encoding: [0xc3]
;
; X86-LABEL: test_mm256_maskz_ipcvttps_epu8:
; X86: # %bb.0: # %entry
; X86-NEXT: kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
; X86-NEXT: vcvttps2iubs %ymm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x7d,0xa9,0x6a,0xc0]
; X86-NEXT: retl # encoding: [0xc3]
entry:
%0 = tail call <8 x i32> @llvm.x86.avx10.mask.vcvttps2iubs256(<8 x float> %__B, <8 x i32> zeroinitializer, i8 %__A, i32 4)
%1 = bitcast <8 x i32> %0 to <4 x i64>
ret <4 x i64> %1
}
define dso_local <4 x i64> @test_mm256_ipcvttps_epu8_round(<8 x float> noundef %__A) {
; CHECK-LABEL: test_mm256_ipcvttps_epu8_round:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vcvttps2iubs {sae}, %ymm0, %ymm0 # encoding: [0x62,0xf5,0x79,0x18,0x6a,0xc0]
; CHECK-NEXT: ret{{[l|q]}} # encoding: [0xc3]
entry:
%0 = tail call <8 x i32> @llvm.x86.avx10.mask.vcvttps2iubs256(<8 x float> %__A, <8 x i32> zeroinitializer, i8 -1, i32 8)
%1 = bitcast <8 x i32> %0 to <4 x i64>
ret <4 x i64> %1
}
define dso_local <4 x i64> @test_mm256_mask_ipcvttps_epu8_round(<4 x i64> noundef %__S, i8 noundef zeroext %__A, <8 x float> noundef %__B) {
; X64-LABEL: test_mm256_mask_ipcvttps_epu8_round:
; X64: # %bb.0: # %entry
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vcvttps2iubs {sae}, %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x79,0x19,0x6a,0xc1]
; X64-NEXT: retq # encoding: [0xc3]
;
; X86-LABEL: test_mm256_mask_ipcvttps_epu8_round:
; X86: # %bb.0: # %entry
; X86-NEXT: kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
; X86-NEXT: vcvttps2iubs {sae}, %ymm1, %ymm0 {%k1} # encoding: [0x62,0xf5,0x79,0x19,0x6a,0xc1]
; X86-NEXT: retl # encoding: [0xc3]
entry:
%0 = bitcast <4 x i64> %__S to <8 x i32>
%1 = tail call <8 x i32> @llvm.x86.avx10.mask.vcvttps2iubs256(<8 x float> %__B, <8 x i32> %0, i8 %__A, i32 8)
%2 = bitcast <8 x i32> %1 to <4 x i64>
ret <4 x i64> %2
}
define dso_local <4 x i64> @test_mm256_maskz_ipcvttps_epu8_round(i8 noundef zeroext %__A, <8 x float> noundef %__B) {
; X64-LABEL: test_mm256_maskz_ipcvttps_epu8_round:
; X64: # %bb.0: # %entry
; X64-NEXT: kmovd %edi, %k1 # encoding: [0xc5,0xfb,0x92,0xcf]
; X64-NEXT: vcvttps2iubs {sae}, %ymm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x79,0x99,0x6a,0xc0]
; X64-NEXT: retq # encoding: [0xc3]
;
; X86-LABEL: test_mm256_maskz_ipcvttps_epu8_round:
; X86: # %bb.0: # %entry
; X86-NEXT: kmovb {{[0-9]+}}(%esp), %k1 # encoding: [0xc5,0xf9,0x90,0x4c,0x24,0x04]
; X86-NEXT: vcvttps2iubs {sae}, %ymm0, %ymm0 {%k1} {z} # encoding: [0x62,0xf5,0x79,0x99,0x6a,0xc0]
; X86-NEXT: retl # encoding: [0xc3]
entry:
%0 = tail call <8 x i32> @llvm.x86.avx10.mask.vcvttps2iubs256(<8 x float> %__B, <8 x i32> zeroinitializer, i8 %__A, i32 8)
%1 = bitcast <8 x i32> %0 to <4 x i64>
ret <4 x i64> %1
}
declare <8 x i32> @llvm.x86.avx10.mask.vcvttps2iubs256(<8 x float>, <8 x i32>, i8, i32)