11 "Never use <avx10_2_512convertintrin.h> directly; include <immintrin.h> instead."
16#ifndef __AVX10_2_512CONVERTINTRIN_H
17#define __AVX10_2_512CONVERTINTRIN_H
20#define __DEFAULT_FN_ATTRS512 \
21 __attribute__((__always_inline__, __nodebug__, __target__("avx10.2-512"), \
22 __min_vector_width__(512)))
26 return (__m512h)__builtin_ia32_vcvt2ps2phx512_mask(
27 (__v16sf)__A, (__v16sf)__B, (__v32hf)_mm512_setzero_ph(), (
__mmask32)(-1),
32_mm512_mask_cvtx2ps_ph(__m512h __W,
__mmask32 __U, __m512 __A, __m512 __B) {
33 return (__m512h)__builtin_ia32_vcvt2ps2phx512_mask(
34 (__v16sf)__A, (__v16sf)__B, (__v32hf)__W, (
__mmask32)__U,
39_mm512_maskz_cvtx2ps_ph(
__mmask32 __U, __m512 __A, __m512 __B) {
40 return (__m512h)__builtin_ia32_vcvt2ps2phx512_mask(
41 (__v16sf)__A, (__v16sf)__B, (__v32hf)_mm512_setzero_ph(), (
__mmask32)__U,
/* As _mm512_cvtx2ps_ph but with an explicit rounding-mode immediate R;
 * a macro because R must be a compile-time constant. */
#define _mm512_cvtx_round2ps_ph(A, B, R)                                       \
  ((__m512h)__builtin_ia32_vcvt2ps2phx512_mask(                                \
      (__v16sf)(A), (__v16sf)(B), (__v32hf)_mm512_undefined_ph(),              \
      (__mmask32)(-1), (const int)(R)))
/* Merge-masked variant of _mm512_cvtx_round2ps_ph; the truncated trailing
 * rounding argument is restored. */
#define _mm512_mask_cvtx_round2ps_ph(W, U, A, B, R)                            \
  ((__m512h)__builtin_ia32_vcvt2ps2phx512_mask((__v16sf)(A), (__v16sf)(B),     \
                                               (__v32hf)(W), (__mmask32)(U),   \
                                               (const int)(R)))
/* Zero-masked variant of _mm512_cvtx_round2ps_ph. */
#define _mm512_maskz_cvtx_round2ps_ph(U, A, B, R)                              \
  ((__m512h)__builtin_ia32_vcvt2ps2phx512_mask(                                \
      (__v16sf)(A), (__v16sf)(B), (__v32hf)_mm512_setzero_ph(),                \
      (__mmask32)(U), (const int)(R)))
61_mm512_cvtbiasph_pbf8(__m512i __A, __m512h __B) {
62 return (__m256i)__builtin_ia32_vcvtbiasph2bf8_512_mask(
68 __m256i __W,
__mmask32 __U, __m512i __A, __m512h __B) {
69 return (__m256i)__builtin_ia32_vcvtbiasph2bf8_512_mask(
70 (__v64qi)__A, (__v32hf)__B, (__v32qi)(__m256i)__W, (
__mmask32)__U);
74_mm512_maskz_cvtbiasph_pbf8(
__mmask32 __U, __m512i __A, __m512h __B) {
75 return (__m256i)__builtin_ia32_vcvtbiasph2bf8_512_mask(
81_mm512_cvtbiassph_pbf8(__m512i __A, __m512h __B) {
82 return (__m256i)__builtin_ia32_vcvtbiasph2bf8s_512_mask(
88 __m256i __W,
__mmask32 __U, __m512i __A, __m512h __B) {
89 return (__m256i)__builtin_ia32_vcvtbiasph2bf8s_512_mask(
90 (__v64qi)__A, (__v32hf)__B, (__v32qi)(__m256i)__W, (
__mmask32)__U);
94_mm512_maskz_cvtbiassph_pbf8(
__mmask32 __U, __m512i __A, __m512h __B) {
95 return (__m256i)__builtin_ia32_vcvtbiasph2bf8s_512_mask(
101_mm512_cvtbiasph_phf8(__m512i __A, __m512h __B) {
102 return (__m256i)__builtin_ia32_vcvtbiasph2hf8_512_mask(
108 __m256i __W,
__mmask32 __U, __m512i __A, __m512h __B) {
109 return (__m256i)__builtin_ia32_vcvtbiasph2hf8_512_mask(
110 (__v64qi)__A, (__v32hf)__B, (__v32qi)(__m256i)__W, (
__mmask32)__U);
114_mm512_maskz_cvtbiasph_phf8(
__mmask32 __U, __m512i __A, __m512h __B) {
115 return (__m256i)__builtin_ia32_vcvtbiasph2hf8_512_mask(
121_mm512_cvtbiassph_phf8(__m512i __A, __m512h __B) {
122 return (__m256i)__builtin_ia32_vcvtbiasph2hf8s_512_mask(
128 __m256i __W,
__mmask32 __U, __m512i __A, __m512h __B) {
129 return (__m256i)__builtin_ia32_vcvtbiasph2hf8s_512_mask(
130 (__v64qi)__A, (__v32hf)__B, (__v32qi)(__m256i)__W, (
__mmask32)__U);
134_mm512_maskz_cvtbiassph_phf8(
__mmask32 __U, __m512i __A, __m512h __B) {
135 return (__m256i)__builtin_ia32_vcvtbiasph2hf8s_512_mask(
141_mm512_cvtne2ph_pbf8(__m512h __A, __m512h __B) {
142 return (__m512i)__builtin_ia32_vcvtne2ph2bf8_512((__v32hf)(__A),
147 __m512i __W,
__mmask64 __U, __m512h __A, __m512h __B) {
148 return (__m512i)__builtin_ia32_selectb_512(
149 (
__mmask64)__U, (__v64qi)_mm512_cvtne2ph_pbf8(__A, __B), (__v64qi)__W);
153_mm512_maskz_cvtne2ph_pbf8(
__mmask64 __U, __m512h __A, __m512h __B) {
154 return (__m512i)__builtin_ia32_selectb_512(
155 (
__mmask64)__U, (__v64qi)_mm512_cvtne2ph_pbf8(__A, __B),
160_mm512_cvtnes2ph_pbf8(__m512h __A, __m512h __B) {
161 return (__m512i)__builtin_ia32_vcvtne2ph2bf8s_512((__v32hf)(__A),
166 __m512i __W,
__mmask64 __U, __m512h __A, __m512h __B) {
167 return (__m512i)__builtin_ia32_selectb_512(
168 (
__mmask64)__U, (__v64qi)_mm512_cvtnes2ph_pbf8(__A, __B), (__v64qi)__W);
172_mm512_maskz_cvtnes2ph_pbf8(
__mmask64 __U, __m512h __A, __m512h __B) {
173 return (__m512i)__builtin_ia32_selectb_512(
174 (
__mmask64)__U, (__v64qi)_mm512_cvtnes2ph_pbf8(__A, __B),
179_mm512_cvtne2ph_phf8(__m512h __A, __m512h __B) {
180 return (__m512i)__builtin_ia32_vcvtne2ph2hf8_512((__v32hf)(__A),
185 __m512i __W,
__mmask64 __U, __m512h __A, __m512h __B) {
186 return (__m512i)__builtin_ia32_selectb_512(
187 (
__mmask64)__U, (__v64qi)_mm512_cvtne2ph_phf8(__A, __B), (__v64qi)__W);
191_mm512_maskz_cvtne2ph_phf8(
__mmask64 __U, __m512h __A, __m512h __B) {
192 return (__m512i)__builtin_ia32_selectb_512(
193 (
__mmask64)__U, (__v64qi)_mm512_cvtne2ph_phf8(__A, __B),
198_mm512_cvtnes2ph_phf8(__m512h __A, __m512h __B) {
199 return (__m512i)__builtin_ia32_vcvtne2ph2hf8s_512((__v32hf)(__A),
204 __m512i __W,
__mmask64 __U, __m512h __A, __m512h __B) {
205 return (__m512i)__builtin_ia32_selectb_512(
206 (
__mmask64)__U, (__v64qi)_mm512_cvtnes2ph_phf8(__A, __B), (__v64qi)__W);
210_mm512_maskz_cvtnes2ph_phf8(
__mmask64 __U, __m512h __A, __m512h __B) {
211 return (__m512i)__builtin_ia32_selectb_512(
212 (
__mmask64)__U, (__v64qi)_mm512_cvtnes2ph_phf8(__A, __B),
217_mm512_cvtnehf8_ph(__m256i __A) {
218 return (__m512h)__builtin_ia32_vcvthf8_2ph512_mask(
219 (__v32qi)__A, (__v32hf)(__m512h)_mm512_undefined_ph(), (
__mmask32)-1);
223_mm512_mask_cvtnehf8_ph(__m512h __W,
__mmask32 __U, __m256i __A) {
224 return (__m512h)__builtin_ia32_vcvthf8_2ph512_mask(
225 (__v32qi)__A, (__v32hf)(__m512h)__W, (
__mmask32)__U);
229_mm512_maskz_cvtnehf8_ph(
__mmask32 __U, __m256i __A) {
230 return (__m512h)__builtin_ia32_vcvthf8_2ph512_mask(
231 (__v32qi)__A, (__v32hf)(__m512h)_mm512_setzero_ph(), (
__mmask32)__U);
235_mm512_cvtneph_pbf8(__m512h __A) {
236 return (__m256i)__builtin_ia32_vcvtneph2bf8_512_mask(
241_mm512_mask_cvtneph_pbf8(__m256i __W,
__mmask32 __U, __m512h __A) {
242 return (__m256i)__builtin_ia32_vcvtneph2bf8_512_mask(
243 (__v32hf)__A, (__v32qi)(__m256i)__W, (
__mmask32)__U);
247_mm512_maskz_cvtneph_pbf8(
__mmask32 __U, __m512h __A) {
248 return (__m256i)__builtin_ia32_vcvtneph2bf8_512_mask(
253_mm512_cvtnesph_pbf8(__m512h __A) {
254 return (__m256i)__builtin_ia32_vcvtneph2bf8s_512_mask(
259_mm512_mask_cvtnesph_pbf8(__m256i __W,
__mmask32 __U, __m512h __A) {
260 return (__m256i)__builtin_ia32_vcvtneph2bf8s_512_mask(
261 (__v32hf)__A, (__v32qi)(__m256i)__W, (
__mmask32)__U);
265_mm512_maskz_cvtnesph_pbf8(
__mmask32 __U, __m512h __A) {
266 return (__m256i)__builtin_ia32_vcvtneph2bf8s_512_mask(
271_mm512_cvtneph_phf8(__m512h __A) {
272 return (__m256i)__builtin_ia32_vcvtneph2hf8_512_mask(
277_mm512_mask_cvtneph_phf8(__m256i __W,
__mmask32 __U, __m512h __A) {
278 return (__m256i)__builtin_ia32_vcvtneph2hf8_512_mask(
279 (__v32hf)__A, (__v32qi)(__m256i)__W, (
__mmask32)__U);
283_mm512_maskz_cvtneph_phf8(
__mmask32 __U, __m512h __A) {
284 return (__m256i)__builtin_ia32_vcvtneph2hf8_512_mask(
289_mm512_cvtnesph_phf8(__m512h __A) {
290 return (__m256i)__builtin_ia32_vcvtneph2hf8s_512_mask(
295_mm512_mask_cvtnesph_phf8(__m256i __W,
__mmask32 __U, __m512h __A) {
296 return (__m256i)__builtin_ia32_vcvtneph2hf8s_512_mask(
297 (__v32hf)__A, (__v32qi)(__m256i)__W, (
__mmask32)__U);
301_mm512_maskz_cvtnesph_phf8(
__mmask32 __U, __m512h __A) {
302 return (__m256i)__builtin_ia32_vcvtneph2hf8s_512_mask(
311_mm512_mask_cvtpbf8_ph(__m512h __S,
__mmask16 __U, __m256i __A) {
312 return _mm512_castsi512_ph(
317_mm512_maskz_cvtpbf8_ph(
__mmask16 __U, __m256i __A) {
318 return _mm512_castsi512_ph(
322#undef __DEFAULT_FN_ATTRS512
/* NOTE(review): the lines below were cross-reference residue from a
 * documentation extraction — declarations of related intrinsics used by or
 * relevant to this header, not part of the header itself. Preserved here as
 * a comment so the file stays compilable; delete when restoring from
 * upstream:
 *   static __inline__ __m512i __DEFAULT_FN_ATTRS512 _mm512_cvtepi8_epi16(__m256i __A)
 *   static __inline__ __m512i __DEFAULT_FN_ATTRS512 _mm512_maskz_cvtepi8_epi16(__mmask32 __U, __m256i __A)
 *   #define __DEFAULT_FN_ATTRS512
 *   static __inline__ __m512i __DEFAULT_FN_ATTRS512 _mm512_slli_epi16(__m512i __A, unsigned int __B)
 *   unsigned long long __mmask64
 *   static __inline__ __m512i __DEFAULT_FN_ATTRS512 _mm512_mask_slli_epi16(__m512i __W, __mmask32 __U, __m512i __A, unsigned int __B)
 *   #define _MM_FROUND_CUR_DIRECTION
 *   static __inline __m512i __DEFAULT_FN_ATTRS512 _mm512_setzero_si512(void)
 *   static __inline__ __m256i __DEFAULT_FN_ATTRS _mm256_undefined_si256(void)
 *     Create a 256-bit integer vector with undefined values.
 *   static __inline __m256i __DEFAULT_FN_ATTRS _mm256_setzero_si256(void)
 *     Constructs a 256-bit integer vector initialized to zero.
 */