12#include_next <intrin0.h>
18#if defined(__x86_64__) && !defined(__arm64ec__)
26unsigned char _BitScanForward(
unsigned long *_Index,
unsigned long _Mask);
27unsigned char _BitScanReverse(
unsigned long *_Index,
unsigned long _Mask);
28void _ReadWriteBarrier(
void);
30#if defined(__aarch64__) || defined(__arm64ec__)
31unsigned int _CountLeadingZeros(
unsigned long);
32unsigned int _CountLeadingZeros64(
unsigned _int64);
34 __int64 _ExchangeHigh,
36 __int64 *_ComparandResult);
38 __int64 _ExchangeHigh,
40 __int64 *_ComparandResult);
42 __int64 _ExchangeHigh,
44 __int64 *_ComparandResult);
47#ifdef __x86_64__ && !defined(__arm64ec__)
48unsigned __int64 _umul128(
unsigned __int64,
unsigned __int64,
50unsigned __int64 __shiftleft128(
unsigned __int64 _LowPart,
51 unsigned __int64 _HighPart,
52 unsigned char _Shift);
53unsigned __int64 __shiftright128(
unsigned __int64 _LowPart,
54 unsigned __int64 _HighPart,
55 unsigned char _Shift);
58#if defined(__i386__) || (defined(__x86_64__) && !defined(__arm64ec__))
62#if defined(__x86_64__) || defined(__aarch64__)
64 __int64 _ExchangeHigh,
66 __int64 *_ComparandResult);
69#if defined(__x86_64__) || defined(__arm__) || defined(__aarch64__)
70unsigned char _BitScanForward64(
unsigned long *_Index,
unsigned __int64 _Mask);
71unsigned char _BitScanReverse64(
unsigned long *_Index,
unsigned __int64 _Mask);
74#if defined(__i386__) || defined(__x86_64__) || defined(__arm__) || \
76__int64 _InterlockedDecrement64(__int64
volatile *_Addend);
77__int64 _InterlockedExchange64(__int64
volatile *_Target, __int64 _Value);
78__int64 _InterlockedExchangeAdd64(__int64
volatile *_Addend, __int64 _Value);
79__int64 _InterlockedExchangeSub64(__int64
volatile *_Subend, __int64 _Value);
80__int64 _InterlockedIncrement64(__int64
volatile *_Addend);
81__int64 _InterlockedOr64(__int64
volatile *_Value, __int64 _Mask);
82__int64 _InterlockedXor64(__int64
volatile *_Value, __int64 _Mask);
83__int64 _InterlockedAnd64(__int64
volatile *_Value, __int64 _Mask);
86#if defined(__arm__) || defined(__aarch64__) || defined(__arm64ec__)
90char _InterlockedExchangeAdd8_acq(
char volatile *_Addend,
char _Value);
91char _InterlockedExchangeAdd8_nf(
char volatile *_Addend,
char _Value);
92char _InterlockedExchangeAdd8_rel(
char volatile *_Addend,
char _Value);
93short _InterlockedExchangeAdd16_acq(
short volatile *_Addend,
short _Value);
94short _InterlockedExchangeAdd16_nf(
short volatile *_Addend,
short _Value);
95short _InterlockedExchangeAdd16_rel(
short volatile *_Addend,
short _Value);
99__int64 _InterlockedExchangeAdd64_acq(__int64
volatile *_Addend,
101__int64 _InterlockedExchangeAdd64_nf(__int64
volatile *_Addend, __int64 _Value);
102__int64 _InterlockedExchangeAdd64_rel(__int64
volatile *_Addend,
108short _InterlockedIncrement16_acq(
short volatile *_Value);
109short _InterlockedIncrement16_nf(
short volatile *_Value);
110short _InterlockedIncrement16_rel(
short volatile *_Value);
114__int64 _InterlockedIncrement64_acq(__int64
volatile *_Value);
115__int64 _InterlockedIncrement64_nf(__int64
volatile *_Value);
116__int64 _InterlockedIncrement64_rel(__int64
volatile *_Value);
121short _InterlockedDecrement16_acq(
short volatile *_Value);
122short _InterlockedDecrement16_nf(
short volatile *_Value);
123short _InterlockedDecrement16_rel(
short volatile *_Value);
127__int64 _InterlockedDecrement64_acq(__int64
volatile *_Value);
128__int64 _InterlockedDecrement64_nf(__int64
volatile *_Value);
129__int64 _InterlockedDecrement64_rel(__int64
volatile *_Value);
134char _InterlockedAnd8_acq(
char volatile *_Value,
char _Mask);
135char _InterlockedAnd8_nf(
char volatile *_Value,
char _Mask);
136char _InterlockedAnd8_rel(
char volatile *_Value,
char _Mask);
137short _InterlockedAnd16_acq(
short volatile *_Value,
short _Mask);
138short _InterlockedAnd16_nf(
short volatile *_Value,
short _Mask);
139short _InterlockedAnd16_rel(
short volatile *_Value,
short _Mask);
143__int64 _InterlockedAnd64_acq(__int64
volatile *_Value, __int64 _Mask);
144__int64 _InterlockedAnd64_nf(__int64
volatile *_Value, __int64 _Mask);
145__int64 _InterlockedAnd64_rel(__int64
volatile *_Value, __int64 _Mask);
/* Interlocked bit test-and-set / test-and-reset with explicit ordering.
   NOTE(review): the second parameter of every declaration was truncated in
   extraction; restored from the documented MSVC signature (long _BitPos) —
   confirm against the original header. */
unsigned char _interlockedbittestandset_acq(long volatile *_BitBase,
                                            long _BitPos);
unsigned char _interlockedbittestandset_nf(long volatile *_BitBase,
                                           long _BitPos);
unsigned char _interlockedbittestandset_rel(long volatile *_BitBase,
                                            long _BitPos);
unsigned char _interlockedbittestandreset_acq(long volatile *_BitBase,
                                              long _BitPos);
unsigned char _interlockedbittestandreset_nf(long volatile *_BitBase,
                                             long _BitPos);
unsigned char _interlockedbittestandreset_rel(long volatile *_BitBase,
                                              long _BitPos);
166char _InterlockedOr8_acq(
char volatile *_Value,
char _Mask);
167char _InterlockedOr8_nf(
char volatile *_Value,
char _Mask);
168char _InterlockedOr8_rel(
char volatile *_Value,
char _Mask);
169short _InterlockedOr16_acq(
short volatile *_Value,
short _Mask);
170short _InterlockedOr16_nf(
short volatile *_Value,
short _Mask);
171short _InterlockedOr16_rel(
short volatile *_Value,
short _Mask);
175__int64 _InterlockedOr64_acq(__int64
volatile *_Value, __int64 _Mask);
176__int64 _InterlockedOr64_nf(__int64
volatile *_Value, __int64 _Mask);
177__int64 _InterlockedOr64_rel(__int64
volatile *_Value, __int64 _Mask);
182char _InterlockedXor8_acq(
char volatile *_Value,
char _Mask);
183char _InterlockedXor8_nf(
char volatile *_Value,
char _Mask);
184char _InterlockedXor8_rel(
char volatile *_Value,
char _Mask);
185short _InterlockedXor16_acq(
short volatile *_Value,
short _Mask);
186short _InterlockedXor16_nf(
short volatile *_Value,
short _Mask);
187short _InterlockedXor16_rel(
short volatile *_Value,
short _Mask);
191__int64 _InterlockedXor64_acq(__int64
volatile *_Value, __int64 _Mask);
192__int64 _InterlockedXor64_nf(__int64
volatile *_Value, __int64 _Mask);
193__int64 _InterlockedXor64_rel(__int64
volatile *_Value, __int64 _Mask);
198char _InterlockedExchange8_acq(
char volatile *_Target,
char _Value);
199char _InterlockedExchange8_nf(
char volatile *_Target,
char _Value);
200char _InterlockedExchange8_rel(
char volatile *_Target,
char _Value);
201short _InterlockedExchange16_acq(
short volatile *_Target,
short _Value);
202short _InterlockedExchange16_nf(
short volatile *_Target,
short _Value);
203short _InterlockedExchange16_rel(
short volatile *_Target,
short _Value);
207__int64 _InterlockedExchange64_acq(__int64
volatile *_Target, __int64 _Value);
208__int64 _InterlockedExchange64_nf(__int64
volatile *_Target, __int64 _Value);
209__int64 _InterlockedExchange64_rel(__int64
volatile *_Target, __int64 _Value);
/* Acquire / no-fence / release compare-exchange variants (8- and 16-bit). */
char _InterlockedCompareExchange8_acq(char volatile *_Destination,
                                      char _Exchange, char _Comparand);
char _InterlockedCompareExchange8_nf(char volatile *_Destination,
                                     char _Exchange, char _Comparand);
char _InterlockedCompareExchange8_rel(char volatile *_Destination,
                                      char _Exchange, char _Comparand);
short _InterlockedCompareExchange16_acq(short volatile *_Destination,
                                        short _Exchange, short _Comparand);
short _InterlockedCompareExchange16_nf(short volatile *_Destination,
                                       short _Exchange, short _Comparand);
short _InterlockedCompareExchange16_rel(short volatile *_Destination,
                                        short _Exchange, short _Comparand);
/* NOTE(review): extraction artifact — the declarator heads for these two
   parameter-list tails are missing, and it is not determinable from the
   surviving lines which of the _acq/_nf/_rel variants each belongs to.
   By the surrounding groups and the _InterlockedCompareExchange_{acq,nf,rel}
   entries indexed at the bottom of this file, these are presumably tails of
   "long _InterlockedCompareExchange*(long volatile *_Destination, ...)"
   declarations — confirm against the original header before repairing. */
227 long _Exchange,
long _Comparand);
231 long _Exchange,
long _Comparand);
232__int64 _InterlockedCompareExchange64_acq(__int64
volatile *_Destination,
235__int64 _InterlockedCompareExchange64_nf(__int64
volatile *_Destination,
236 __int64 _Exchange, __int64 _Comparand);
237__int64 _InterlockedCompareExchange64_rel(__int64
volatile *_Destination,
@ _InterlockedExchangeAdd_rel
@ _InterlockedIncrement_acq
@ _InterlockedExchange_nf
@ _InterlockedIncrement_nf
@ _InterlockedExchange_acq
@ _InterlockedCompareExchange128_rel
@ _InterlockedCompareExchange128_acq
@ _InterlockedCompareExchange_acq
@ _InterlockedExchangeAdd_nf
@ _InterlockedCompareExchange_nf
@ _InterlockedDecrement_rel
@ _InterlockedExchangeAdd_acq
@ _InterlockedIncrement_rel
@ _InterlockedCompareExchange128_nf
@ _InterlockedCompareExchange128
@ _InterlockedExchange_rel
@ _InterlockedDecrement_nf
@ _InterlockedDecrement_acq
@ _InterlockedCompareExchange_rel
void _mm_pause(void)
Indicates that a spin loop is being executed for the purposes of optimizing power consumption during ...