#include_next <intrin.h>

#if defined(__i386__) || defined(__x86_64__)
#include <x86intrin.h>
#endif

#if defined(__aarch64__)
#include <arm64intr.h>
#endif

/* Define the default attributes for the functions in this file. */
#define __DEFAULT_FN_ATTRS __attribute__((__always_inline__, __nodebug__))

#if defined(__MMX__)
__m64 _m_from_float(float);
float _m_to_float(__m64);
#endif
void __addfsbyte(unsigned long, unsigned char);
void __addfsdword(unsigned long, unsigned long);
void __addfsword(unsigned long, unsigned short);
void __code_seg(const char *);
static __inline__
void __cpuid(int[4], int);
static __inline__
void __cpuidex(int[4], int, int);
__int64 __emul(int, int);
unsigned __int64 __emulu(unsigned int, unsigned int);
unsigned int __getcallerseflags(void);
unsigned char __inbyte(unsigned short);
void __inbytestring(unsigned short, unsigned char *, unsigned long);
void __incfsbyte(unsigned long);
void __incfsdword(unsigned long);
void __incfsword(unsigned long);
unsigned long __indword(unsigned short);
void __indwordstring(unsigned short, unsigned long *, unsigned long);
void __invlpg(void *);
unsigned short __inword(unsigned short);
void __inwordstring(unsigned short, unsigned short *, unsigned long);
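/* Usage sketch (illustrative, not part of the header): the port-input
 * intrinsics wrap the x86 IN instruction and are only usable at ring 0
 * (kernel/driver code); in user mode they fault with #GP.
 *
 *   unsigned char read_cmos(unsigned char reg) {
 *     __outbyte(0x70, reg);   // select a CMOS register via port 0x70
 *     return __inbyte(0x71);  // read its value from port 0x71
 *   }
 */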
unsigned __int64 __ll_lshift(unsigned __int64, int);
__int64 __ll_rshift(__int64, int);
unsigned int __lzcnt(unsigned int);
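/* Note (informational): __lzcnt counts leading zero bits, so
 * __lzcnt(1) == 31 and __lzcnt(0) == 32. On CPUs without LZCNT the
 * encoding decodes as BSR and produces different results, so guard
 * its use with a CPUID feature check.
 */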
void __movsb(unsigned char *, unsigned char const *, size_t);
void __movsd(unsigned long *, unsigned long const *, size_t);
void __movsw(unsigned short *, unsigned short const *, size_t);
void __nvreg_restore_fence(void);
void __nvreg_save_fence(void);
void __outbyte(unsigned short, unsigned char);
void __outbytestring(unsigned short, unsigned char *, unsigned long);
void __outdword(unsigned short, unsigned long);
void __outdwordstring(unsigned short, unsigned long *, unsigned long);
void __outword(unsigned short, unsigned short);
void __outwordstring(unsigned short, unsigned short *, unsigned long);
unsigned long __readcr0(void);
unsigned long __readcr2(void);
unsigned long __readcr3(void);
unsigned long __readcr4(void);
unsigned long __readcr8(void);
unsigned int __readdr(unsigned int);
unsigned char __readfsbyte(unsigned long);
unsigned __int64 __readfsqword(unsigned long);
unsigned short __readfsword(unsigned long);
unsigned __int64 __readmsr(unsigned long);
unsigned __int64 __readpmc(unsigned long);
unsigned long __segmentlimit(unsigned long);
void __stosb(unsigned char *, unsigned char, size_t);
void __stosd(unsigned long *, unsigned long, size_t);
void __stosw(unsigned short *, unsigned short, size_t);
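/* Usage sketch (illustrative, not part of the header): __stosd behaves
 * like a 32-bit-element memset, storing the value into each of the __n
 * dwords starting at the destination.
 *
 *   unsigned long pattern[16];
 *   __stosd(pattern, 0xDEADBEEFUL, 16);  // fill all 16 dwords
 */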
void __svm_clgi(void);
void __svm_invlpga(void *, int);
void __svm_skinit(int);
void __svm_stgi(void);
void __svm_vmload(size_t);
void __svm_vmrun(size_t);
void __svm_vmsave(size_t);
unsigned __int64 __ull_rshift(unsigned __int64, int);
void __vmx_off(void);
void __vmx_vmptrst(unsigned __int64 *);
void __writecr0(unsigned int);
void __writecr3(unsigned int);
void __writecr4(unsigned int);
void __writecr8(unsigned int);
void __writedr(unsigned int, unsigned int);
void __writefsbyte(unsigned long, unsigned char);
void __writefsdword(unsigned long, unsigned long);
void __writefsqword(unsigned long, unsigned __int64);
void __writefsword(unsigned long, unsigned short);
void __writemsr(unsigned long, unsigned __int64);
void *_AddressOfReturnAddress(void);
unsigned char _BitScanForward(unsigned long *_Index, unsigned long _Mask);
unsigned char _BitScanReverse(unsigned long *_Index, unsigned long _Mask);
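/* Usage sketch (illustrative): _BitScanForward finds the index of the
 * lowest set bit; the return value is nonzero only when a set bit was
 * found, so check it before trusting *_Index.
 *
 *   unsigned long idx;
 *   if (_BitScanForward(&idx, 0x48UL)) {
 *     // idx == 3, the position of the lowest set bit of 0b1001000
 *   }
 */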
unsigned char _bittest(long const *, long);
unsigned char _bittestandcomplement(long *, long);
unsigned char _bittestandreset(long *, long);
unsigned char _bittestandset(long *, long);
void __cdecl _disable(void);
void __cdecl _enable(void);
long _InterlockedAddLargeStatistic(__int64 volatile *_Addend, long _Value);
unsigned char _interlockedbittestandreset(long volatile *, long);
unsigned char _interlockedbittestandset(long volatile *, long);
void *_InterlockedCompareExchangePointer_HLEAcquire(void *volatile *, void *,
                                                    void *);
void *_InterlockedCompareExchangePointer_HLERelease(void *volatile *, void *,
                                                    void *);
long _InterlockedExchangeAdd_HLEAcquire(long volatile *, long);
long _InterlockedExchangeAdd_HLERelease(long volatile *, long);
__int64 _InterlockedExchangeAdd64_HLEAcquire(__int64 volatile *, __int64);
__int64 _InterlockedExchangeAdd64_HLERelease(__int64 volatile *, __int64);
void __cdecl _invpcid(unsigned int, void *);
static __inline__ void
__attribute__((__deprecated__("use other intrinsics or C++11 atomics instead")))
_ReadBarrier(void);
static __inline__ void
__attribute__((__deprecated__("use other intrinsics or C++11 atomics instead")))
_ReadWriteBarrier(void);
unsigned int _rorx_u32(unsigned int, const unsigned int);
int _sarx_i32(int, unsigned int);
int __cdecl _setjmp(jmp_buf);
unsigned int _shlx_u32(unsigned int, unsigned int);
unsigned int _shrx_u32(unsigned int, unsigned int);
void _Store_HLERelease(long volatile *, long);
void _Store64_HLERelease(__int64 volatile *, __int64);
void _StorePointer_HLERelease(void *volatile *, void *);
static __inline__ void
__attribute__((__deprecated__("use other intrinsics or C++11 atomics instead")))
_WriteBarrier(void);
unsigned __int32 xbegin(void);
#define _XCR_XFEATURE_ENABLED_MASK 0
unsigned __int64 __cdecl _xgetbv(unsigned int);
void __cdecl _xsetbv(unsigned int, unsigned __int64);
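/* Usage sketch (illustrative): reading XCR0 to check which register
 * states the OS has enabled for XSAVE; requires OSXSAVE support.
 *
 *   unsigned __int64 xcr0 = _xgetbv(_XCR_XFEATURE_ENABLED_MASK);
 *   int avx_enabled = (xcr0 & 0x6) == 0x6;  // XMM and YMM state bits
 */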
#if defined(__x86_64__)
void __addgsbyte(unsigned long, unsigned char);
void __addgsdword(unsigned long, unsigned long);
void __addgsqword(unsigned long, unsigned __int64);
void __addgsword(unsigned long, unsigned short);
void __faststorefence(void);
void __incgsbyte(unsigned long);
void __incgsdword(unsigned long);
void __incgsqword(unsigned long);
void __incgsword(unsigned long);
unsigned __int64 __lzcnt64(unsigned __int64);
void __movsq(unsigned long long *, unsigned long long const *, size_t);
unsigned char __readgsbyte(unsigned long);
unsigned long __readgsdword(unsigned long);
unsigned __int64 __readgsqword(unsigned long);
unsigned short __readgsword(unsigned long);
unsigned __int64 __shiftleft128(unsigned __int64 _LowPart,
                                unsigned __int64 _HighPart,
                                unsigned char _Shift);
unsigned __int64 __shiftright128(unsigned __int64 _LowPart,
                                 unsigned __int64 _HighPart,
                                 unsigned char _Shift);
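/* Usage sketch (illustrative): __shiftleft128 shifts the 128-bit value
 * _HighPart:_LowPart left by _Shift (taken mod 64) and returns the high
 * 64 bits, i.e. the SHLD instruction.
 *
 *   unsigned __int64 high = __shiftleft128(0x8000000000000000ULL, 0ULL, 1);
 *   // high == 1: the top bit of _LowPart shifted into the high half
 */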
void __stosq(unsigned __int64 *, unsigned __int64, size_t);
unsigned char __vmx_on(unsigned __int64 *);
unsigned char __vmx_vmclear(unsigned __int64 *);
unsigned char __vmx_vmlaunch(void);
unsigned char __vmx_vmptrld(unsigned __int64 *);
unsigned char __vmx_vmread(size_t, size_t *);
unsigned char __vmx_vmresume(void);
unsigned char __vmx_vmwrite(size_t, size_t);
void __writegsbyte(unsigned long, unsigned char);
void __writegsdword(unsigned long, unsigned long);
void __writegsqword(unsigned long, unsigned __int64);
void __writegsword(unsigned long, unsigned short);
unsigned char _bittest64(__int64 const *, __int64);
unsigned char _bittestandcomplement64(__int64 *, __int64);
unsigned char _bittestandreset64(__int64 *, __int64);
unsigned char _bittestandset64(__int64 *, __int64);
long _InterlockedAnd_np(long volatile *_Value, long _Mask);
short _InterlockedAnd16_np(short volatile *_Value, short _Mask);
__int64 _InterlockedAnd64_np(__int64 volatile *_Value, __int64 _Mask);
char _InterlockedAnd8_np(char volatile *_Value, char _Mask);
unsigned char _interlockedbittestandreset64(__int64 volatile *, __int64);
unsigned char _interlockedbittestandset64(__int64 volatile *, __int64);
long _InterlockedCompareExchange_np(long volatile *_Destination, long _Exchange,
                                    long _Comparand);
unsigned char _InterlockedCompareExchange128(__int64 volatile *_Destination,
                                             __int64 _ExchangeHigh,
                                             __int64 _ExchangeLow,
                                             __int64 *_ComparandResult);
unsigned char _InterlockedCompareExchange128_np(__int64 volatile *_Destination,
                                                __int64 _ExchangeHigh,
                                                __int64 _ExchangeLow,
                                                __int64 *_ComparandResult);
short _InterlockedCompareExchange16_np(short volatile *_Destination,
                                       short _Exchange, short _Comparand);
__int64 _InterlockedCompareExchange64_np(__int64 volatile *_Destination,
                                         __int64 _Exchange, __int64 _Comparand);
void *_InterlockedCompareExchangePointer_np(void *volatile *_Destination,
                                            void *_Exchange, void *_Comparand);
long _InterlockedOr_np(long volatile *_Value, long _Mask);
short _InterlockedOr16_np(short volatile *_Value, short _Mask);
__int64 _InterlockedOr64_np(__int64 volatile *_Value, __int64 _Mask);
char _InterlockedOr8_np(char volatile *_Value, char _Mask);
long _InterlockedXor_np(long volatile *_Value, long _Mask);
short _InterlockedXor16_np(short volatile *_Value, short _Mask);
__int64 _InterlockedXor64_np(__int64 volatile *_Value, __int64 _Mask);
char _InterlockedXor8_np(char volatile *_Value, char _Mask);
unsigned __int64 _rorx_u64(unsigned __int64, const unsigned int);
__int64 _sarx_i64(__int64, unsigned int);
unsigned __int64 _shlx_u64(unsigned __int64, unsigned int);
unsigned __int64 _shrx_u64(unsigned __int64, unsigned int);
__int64 __mulh(__int64, __int64);
unsigned __int64 __umulh(unsigned __int64, unsigned __int64);
__int64 _mul128(__int64, __int64, __int64 *);
unsigned __int64 _umul128(unsigned __int64, unsigned __int64,
                          unsigned __int64 *);
#endif /* __x86_64__ */
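/* Usage sketch (illustrative): _umul128 returns the low 64 bits of the
 * full 64x64 -> 128-bit product and stores the high 64 bits through the
 * out-parameter.
 *
 *   unsigned __int64 hi;
 *   unsigned __int64 lo = _umul128(0xFFFFFFFFFFFFFFFFULL, 2ULL, &hi);
 *   // lo == 0xFFFFFFFFFFFFFFFEULL, hi == 1
 */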
#if defined(__x86_64__) || defined(__arm__) || defined(__aarch64__)
unsigned char _BitScanForward64(unsigned long *_Index, unsigned __int64 _Mask);
unsigned char _BitScanReverse64(unsigned long *_Index, unsigned __int64 _Mask);
__int64 _InterlockedDecrement64(__int64 volatile *_Addend);
__int64 _InterlockedExchange64(__int64 volatile *_Target, __int64 _Value);
__int64 _InterlockedExchangeAdd64(__int64 volatile *_Addend, __int64 _Value);
__int64 _InterlockedExchangeSub64(__int64 volatile *_Subend, __int64 _Value);
__int64 _InterlockedIncrement64(__int64 volatile *_Addend);
__int64 _InterlockedOr64(__int64 volatile *_Value, __int64 _Mask);
__int64 _InterlockedXor64(__int64 volatile *_Value, __int64 _Mask);
__int64 _InterlockedAnd64(__int64 volatile *_Value, __int64 _Mask);
#endif
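/* Usage sketch (illustrative): the 64-bit interlocked operations are
 * full-barrier atomics; e.g. a thread-safe counter:
 *
 *   __int64 volatile counter = 0;
 *   __int64 now = _InterlockedIncrement64(&counter);  // returns new value
 */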
/*----------------------------------------------------------------------------\
|* Interlocked Exchange Add
\*----------------------------------------------------------------------------*/
#if defined(__arm__) || defined(__aarch64__)
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd8_acq(char volatile *_Addend, char _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_ACQUIRE);
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd8_nf(char volatile *_Addend, char _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_RELAXED);
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd8_rel(char volatile *_Addend, char _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_RELEASE);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd16_acq(short volatile *_Addend, short _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_ACQUIRE);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd16_nf(short volatile *_Addend, short _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_RELAXED);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd16_rel(short volatile *_Addend, short _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_RELEASE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd_acq(long volatile *_Addend, long _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_ACQUIRE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd_nf(long volatile *_Addend, long _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_RELAXED);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd_rel(long volatile *_Addend, long _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_RELEASE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd64_acq(__int64 volatile *_Addend, __int64 _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_ACQUIRE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd64_nf(__int64 volatile *_Addend, __int64 _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_RELAXED);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd64_rel(__int64 volatile *_Addend, __int64 _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_RELEASE);
}
#endif
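/* Note on suffixes (applies to all of the _acq/_nf/_rel variants in the
 * sections below as well): _acq maps to __ATOMIC_ACQUIRE ordering,
 * _nf ("no fence") to __ATOMIC_RELAXED, and _rel to __ATOMIC_RELEASE.
 */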
/*----------------------------------------------------------------------------\
|* Interlocked Increment
\*----------------------------------------------------------------------------*/
#if defined(__arm__) || defined(__aarch64__)
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedIncrement16_acq(short volatile *_Value) {
  return __atomic_add_fetch(_Value, 1, __ATOMIC_ACQUIRE);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedIncrement16_nf(short volatile *_Value) {
  return __atomic_add_fetch(_Value, 1, __ATOMIC_RELAXED);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedIncrement16_rel(short volatile *_Value) {
  return __atomic_add_fetch(_Value, 1, __ATOMIC_RELEASE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedIncrement_acq(long volatile *_Value) {
  return __atomic_add_fetch(_Value, 1, __ATOMIC_ACQUIRE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedIncrement_nf(long volatile *_Value) {
  return __atomic_add_fetch(_Value, 1, __ATOMIC_RELAXED);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedIncrement_rel(long volatile *_Value) {
  return __atomic_add_fetch(_Value, 1, __ATOMIC_RELEASE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedIncrement64_acq(__int64 volatile *_Value) {
  return __atomic_add_fetch(_Value, 1, __ATOMIC_ACQUIRE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedIncrement64_nf(__int64 volatile *_Value) {
  return __atomic_add_fetch(_Value, 1, __ATOMIC_RELAXED);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedIncrement64_rel(__int64 volatile *_Value) {
  return __atomic_add_fetch(_Value, 1, __ATOMIC_RELEASE);
}
#endif
/*----------------------------------------------------------------------------\
|* Interlocked Decrement
\*----------------------------------------------------------------------------*/
#if defined(__arm__) || defined(__aarch64__)
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedDecrement16_acq(short volatile *_Value) {
  return __atomic_sub_fetch(_Value, 1, __ATOMIC_ACQUIRE);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedDecrement16_nf(short volatile *_Value) {
  return __atomic_sub_fetch(_Value, 1, __ATOMIC_RELAXED);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedDecrement16_rel(short volatile *_Value) {
  return __atomic_sub_fetch(_Value, 1, __ATOMIC_RELEASE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedDecrement_acq(long volatile *_Value) {
  return __atomic_sub_fetch(_Value, 1, __ATOMIC_ACQUIRE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedDecrement_nf(long volatile *_Value) {
  return __atomic_sub_fetch(_Value, 1, __ATOMIC_RELAXED);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedDecrement_rel(long volatile *_Value) {
  return __atomic_sub_fetch(_Value, 1, __ATOMIC_RELEASE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedDecrement64_acq(__int64 volatile *_Value) {
  return __atomic_sub_fetch(_Value, 1, __ATOMIC_ACQUIRE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedDecrement64_nf(__int64 volatile *_Value) {
  return __atomic_sub_fetch(_Value, 1, __ATOMIC_RELAXED);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedDecrement64_rel(__int64 volatile *_Value) {
  return __atomic_sub_fetch(_Value, 1, __ATOMIC_RELEASE);
}
#endif
/*----------------------------------------------------------------------------\
|* Interlocked And
\*----------------------------------------------------------------------------*/
#if defined(__arm__) || defined(__aarch64__)
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedAnd8_acq(char volatile *_Value, char _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedAnd8_nf(char volatile *_Value, char _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedAnd8_rel(char volatile *_Value, char _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_RELEASE);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedAnd16_acq(short volatile *_Value, short _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedAnd16_nf(short volatile *_Value, short _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedAnd16_rel(short volatile *_Value, short _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_RELEASE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedAnd_acq(long volatile *_Value, long _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedAnd_nf(long volatile *_Value, long _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedAnd_rel(long volatile *_Value, long _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_RELEASE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedAnd64_acq(__int64 volatile *_Value, __int64 _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedAnd64_nf(__int64 volatile *_Value, __int64 _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedAnd64_rel(__int64 volatile *_Value, __int64 _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_RELEASE);
}
#endif
/*----------------------------------------------------------------------------\
|* Bit Counting and Testing
\*----------------------------------------------------------------------------*/
#if defined(__arm__) || defined(__aarch64__)
unsigned char _interlockedbittestandset_acq(long volatile *_BitBase,
                                            long _BitPos);
unsigned char _interlockedbittestandset_nf(long volatile *_BitBase,
                                           long _BitPos);
unsigned char _interlockedbittestandset_rel(long volatile *_BitBase,
                                            long _BitPos);
unsigned char _interlockedbittestandreset_acq(long volatile *_BitBase,
                                              long _BitPos);
unsigned char _interlockedbittestandreset_nf(long volatile *_BitBase,
                                             long _BitPos);
unsigned char _interlockedbittestandreset_rel(long volatile *_BitBase,
                                              long _BitPos);
#endif
/*----------------------------------------------------------------------------\
|* Interlocked Or
\*----------------------------------------------------------------------------*/
#if defined(__arm__) || defined(__aarch64__)
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedOr8_acq(char volatile *_Value, char _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedOr8_nf(char volatile *_Value, char _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedOr8_rel(char volatile *_Value, char _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_RELEASE);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedOr16_acq(short volatile *_Value, short _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedOr16_nf(short volatile *_Value, short _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedOr16_rel(short volatile *_Value, short _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_RELEASE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedOr_acq(long volatile *_Value, long _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedOr_nf(long volatile *_Value, long _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedOr_rel(long volatile *_Value, long _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_RELEASE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedOr64_acq(__int64 volatile *_Value, __int64 _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedOr64_nf(__int64 volatile *_Value, __int64 _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedOr64_rel(__int64 volatile *_Value, __int64 _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_RELEASE);
}
#endif
/*----------------------------------------------------------------------------\
|* Interlocked Xor
\*----------------------------------------------------------------------------*/
#if defined(__arm__) || defined(__aarch64__)
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedXor8_acq(char volatile *_Value, char _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedXor8_nf(char volatile *_Value, char _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedXor8_rel(char volatile *_Value, char _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_RELEASE);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedXor16_acq(short volatile *_Value, short _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedXor16_nf(short volatile *_Value, short _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedXor16_rel(short volatile *_Value, short _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_RELEASE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedXor_acq(long volatile *_Value, long _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedXor_nf(long volatile *_Value, long _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedXor_rel(long volatile *_Value, long _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_RELEASE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedXor64_acq(__int64 volatile *_Value, __int64 _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedXor64_nf(__int64 volatile *_Value, __int64 _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedXor64_rel(__int64 volatile *_Value, __int64 _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_RELEASE);
}
#endif
/*----------------------------------------------------------------------------\
|* Interlocked Exchange
\*----------------------------------------------------------------------------*/
#if defined(__arm__) || defined(__aarch64__)
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedExchange8_acq(char volatile *_Target, char _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_ACQUIRE);
  return _Value;
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedExchange8_nf(char volatile *_Target, char _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_RELAXED);
  return _Value;
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedExchange8_rel(char volatile *_Target, char _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_RELEASE);
  return _Value;
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedExchange16_acq(short volatile *_Target, short _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_ACQUIRE);
  return _Value;
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedExchange16_nf(short volatile *_Target, short _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_RELAXED);
  return _Value;
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedExchange16_rel(short volatile *_Target, short _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_RELEASE);
  return _Value;
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedExchange_acq(long volatile *_Target, long _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_ACQUIRE);
  return _Value;
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedExchange_nf(long volatile *_Target, long _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_RELAXED);
  return _Value;
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedExchange_rel(long volatile *_Target, long _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_RELEASE);
  return _Value;
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedExchange64_acq(__int64 volatile *_Target, __int64 _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_ACQUIRE);
  return _Value;
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedExchange64_nf(__int64 volatile *_Target, __int64 _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_RELAXED);
  return _Value;
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedExchange64_rel(__int64 volatile *_Target, __int64 _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_RELEASE);
  return _Value;
}
#endif
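/* Usage sketch (illustrative): _InterlockedExchange_acq atomically stores
 * _Value and returns the previous contents of *_Target.
 *
 *   long volatile flag = 0;
 *   long prev = _InterlockedExchange_acq(&flag, 1);  // prev == 0 first time
 */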
/*----------------------------------------------------------------------------\
|* Interlocked Compare Exchange
\*----------------------------------------------------------------------------*/
#if defined(__arm__) || defined(__aarch64__)
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedCompareExchange8_acq(char volatile *_Destination,
                                 char _Exchange, char _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_SEQ_CST, __ATOMIC_ACQUIRE);
  return _Comparand;
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedCompareExchange8_nf(char volatile *_Destination,
                                char _Exchange, char _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_SEQ_CST, __ATOMIC_RELAXED);
  return _Comparand;
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedCompareExchange8_rel(char volatile *_Destination,
                                 char _Exchange, char _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_SEQ_CST, __ATOMIC_RELEASE);
  return _Comparand;
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedCompareExchange16_acq(short volatile *_Destination,
                                  short _Exchange, short _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_SEQ_CST, __ATOMIC_ACQUIRE);
  return _Comparand;
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedCompareExchange16_nf(short volatile *_Destination,
                                 short _Exchange, short _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_SEQ_CST, __ATOMIC_RELAXED);
  return _Comparand;
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedCompareExchange16_rel(short volatile *_Destination,
                                  short _Exchange, short _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_SEQ_CST, __ATOMIC_RELEASE);
  return _Comparand;
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedCompareExchange_acq(long volatile *_Destination,
                                long _Exchange, long _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_SEQ_CST, __ATOMIC_ACQUIRE);
  return _Comparand;
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedCompareExchange_nf(long volatile *_Destination,
                               long _Exchange, long _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_SEQ_CST, __ATOMIC_RELAXED);
  return _Comparand;
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedCompareExchange_rel(long volatile *_Destination,
                                long _Exchange, long _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_SEQ_CST, __ATOMIC_RELEASE);
  return _Comparand;
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedCompareExchange64_acq(__int64 volatile *_Destination,
                                  __int64 _Exchange, __int64 _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_SEQ_CST, __ATOMIC_ACQUIRE);
  return _Comparand;
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedCompareExchange64_nf(__int64 volatile *_Destination,
                                 __int64 _Exchange, __int64 _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_SEQ_CST, __ATOMIC_RELAXED);
  return _Comparand;
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedCompareExchange64_rel(__int64 volatile *_Destination,
                                  __int64 _Exchange, __int64 _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_SEQ_CST, __ATOMIC_RELEASE);
  return _Comparand;
}
#endif
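/* Usage sketch (illustrative): the compare-exchange intrinsics return the
 * value previously in *_Destination, so a CAS loop compares the result
 * against the expected value. (counter_ptr is a hypothetical
 * long volatile *.)
 *
 *   long old, desired;
 *   do {
 *     old = *counter_ptr;
 *     desired = old * 2;
 *   } while (_InterlockedCompareExchange_acq(counter_ptr, desired, old)
 *            != old);
 */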
/*----------------------------------------------------------------------------\
|* movs, stos
\*----------------------------------------------------------------------------*/
#if defined(__i386__) || defined(__x86_64__)
static __inline__ void __DEFAULT_FN_ATTRS
__movsb(unsigned char *__dst, unsigned char const *__src, size_t __n) {
  __asm__ __volatile__("rep movsb" : "+D"(__dst), "+S"(__src), "+c"(__n)
                       : : "memory");
}
static __inline__ void __DEFAULT_FN_ATTRS
__movsd(unsigned long *__dst, unsigned long const *__src, size_t __n) {
  __asm__ __volatile__("rep movsl" : "+D"(__dst), "+S"(__src), "+c"(__n)
                       : : "memory");
}
static __inline__ void __DEFAULT_FN_ATTRS
__movsw(unsigned short *__dst, unsigned short const *__src, size_t __n) {
  __asm__ __volatile__("rep movsw" : "+D"(__dst), "+S"(__src), "+c"(__n)
                       : : "memory");
}
static __inline__ void __DEFAULT_FN_ATTRS
__stosd(unsigned long *__dst, unsigned long __x, size_t __n) {
  __asm__ __volatile__("rep stosl" : "+D"(__dst), "+c"(__n) : "a"(__x)
                       : "memory");
}
static __inline__ void __DEFAULT_FN_ATTRS
__stosw(unsigned short *__dst, unsigned short __x, size_t __n) {
  __asm__ __volatile__("rep stosw" : "+D"(__dst), "+c"(__n) : "a"(__x)
                       : "memory");
}
#ifdef __x86_64__
static __inline__ void __DEFAULT_FN_ATTRS
__movsq(unsigned long long *__dst, unsigned long long const *__src,
        size_t __n) {
  __asm__ __volatile__("rep movsq" : "+D"(__dst), "+S"(__src), "+c"(__n)
                       : : "memory");
}
static __inline__ void __DEFAULT_FN_ATTRS
__stosq(unsigned __int64 *__dst, unsigned __int64 __x, size_t __n) {
  __asm__ __volatile__("rep stosq" : "+D"(__dst), "+c"(__n) : "a"(__x)
                       : "memory");
}
#endif
#endif
/*----------------------------------------------------------------------------\
|* Misc
\*----------------------------------------------------------------------------*/
#if defined(__i386__) || defined(__x86_64__)
static __inline__ void __DEFAULT_FN_ATTRS
__cpuid(int __info[4], int __level) {
  __asm__("cpuid" : "=a"(__info[0]), "=b"(__info[1]), "=c"(__info[2]),
                    "=d"(__info[3])
                  : "a"(__level));
}
static __inline__ void __DEFAULT_FN_ATTRS
__cpuidex(int __info[4], int __level, int __ecx) {
  __asm__("cpuid" : "=a"(__info[0]), "=b"(__info[1]), "=c"(__info[2]),
                    "=d"(__info[3])
                  : "a"(__level), "c"(__ecx));
}
static __inline__ unsigned __int64 __cdecl __DEFAULT_FN_ATTRS
_xgetbv(unsigned int __xcr_no) {
  unsigned int __eax, __edx;
  __asm__("xgetbv" : "=a"(__eax), "=d"(__edx) : "c"(__xcr_no));
  return ((unsigned __int64)__edx << 32) | __eax;
}
static __inline__ void __DEFAULT_FN_ATTRS
__halt(void) {
  __asm__ volatile("hlt");
}
static __inline__ void __DEFAULT_FN_ATTRS
__nop(void) {
  __asm__ volatile("nop");
}
#endif
/*----------------------------------------------------------------------------\
|* Privileged intrinsics
\*----------------------------------------------------------------------------*/
#if defined(__i386__) || defined(__x86_64__)
static __inline__ unsigned __int64 __DEFAULT_FN_ATTRS
__readmsr(unsigned long __register) {
  /* RDMSR loads the 64-bit MSR selected by ECX into EDX:EAX. */
  unsigned int __edx;
  unsigned int __eax;
  __asm__("rdmsr" : "=d"(__edx), "=a"(__eax) : "c"(__register));
  return (((unsigned __int64)__edx) << 32) | (unsigned __int64)__eax;
}
static __inline__ unsigned long __DEFAULT_FN_ATTRS
__readcr3(void) {
  unsigned long __cr3_val;
  __asm__ __volatile__("mov %%cr3, %0" : "=q"(__cr3_val) : : "memory");
  return __cr3_val;
}
static __inline__ void __DEFAULT_FN_ATTRS
__writecr3(unsigned int __cr3_val) {
  __asm__("mov %0, %%cr3" : : "q"(__cr3_val) : "memory");
}
#endif
#undef __DEFAULT_FN_ATTRS