#include_next <intrin.h>

#if defined(__i386__) || defined(__x86_64__)

#define __DEFAULT_FN_ATTRS __attribute__((__always_inline__, __nodebug__))
__m64 _m_from_float(float);
float _m_to_float(__m64);
void __addfsbyte(unsigned long, unsigned char);
void __addfsdword(unsigned long, unsigned long);
void __addfsword(unsigned long, unsigned short);
void __code_seg(const char *);
void __cpuidex(int[4], int, int);
__int64 __emul(int, int);
unsigned __int64 __emulu(unsigned int, unsigned int);
unsigned int __getcallerseflags(void);
unsigned char __inbyte(unsigned short);
void __inbytestring(unsigned short, unsigned char *, unsigned long);
void __incfsbyte(unsigned long);
void __incfsdword(unsigned long);
void __incfsword(unsigned long);
unsigned long __indword(unsigned short);
void __indwordstring(unsigned short, unsigned long *, unsigned long);
void __invlpg(void *);
unsigned short __inword(unsigned short);
void __inwordstring(unsigned short, unsigned short *, unsigned long);
unsigned __int64 __ll_lshift(unsigned __int64, int);
__int64 __ll_rshift(__int64, int);
unsigned int __lzcnt(unsigned int);
void __movsb(unsigned char *, unsigned char const *, size_t);
void __movsd(unsigned long *, unsigned long const *, size_t);
void __movsw(unsigned short *, unsigned short const *, size_t);
void __nvreg_restore_fence(void);
void __nvreg_save_fence(void);
void __outbyte(unsigned short, unsigned char);
void __outbytestring(unsigned short, unsigned char *, unsigned long);
void __outdword(unsigned short, unsigned long);
void __outdwordstring(unsigned short, unsigned long *, unsigned long);
void __outword(unsigned short, unsigned short);
void __outwordstring(unsigned short, unsigned short *, unsigned long);
unsigned long __readcr0(void);
unsigned long __readcr2(void);
unsigned long __readcr3(void);
unsigned long __readcr4(void);
unsigned long __readcr8(void);
unsigned int __readdr(unsigned int);
unsigned char __readfsbyte(unsigned long);
unsigned __int64 __readfsqword(unsigned long);
unsigned short __readfsword(unsigned long);
unsigned __int64 __readmsr(unsigned long);
unsigned __int64 __readpmc(unsigned long);
unsigned long __segmentlimit(unsigned long);
void __stosb(unsigned char *, unsigned char, size_t);
void __stosd(unsigned long *, unsigned long, size_t);
void __stosw(unsigned short *, unsigned short, size_t);
void __svm_clgi(void);
void __svm_invlpga(void *, int);
void __svm_skinit(int);
void __svm_stgi(void);
void __svm_vmload(size_t);
void __svm_vmrun(size_t);
void __svm_vmsave(size_t);
unsigned __int64 __ull_rshift(unsigned __int64, int);
void __vmx_off(void);
void __vmx_vmptrst(unsigned __int64 *);
void __writecr0(unsigned int);
void __writecr3(unsigned int);
void __writecr4(unsigned int);
void __writecr8(unsigned int);
void __writedr(unsigned int, unsigned int);
void __writefsbyte(unsigned long, unsigned char);
void __writefsdword(unsigned long, unsigned long);
void __writefsqword(unsigned long, unsigned __int64);
void __writefsword(unsigned long, unsigned short);
void __writemsr(unsigned long, unsigned __int64);
void *_AddressOfReturnAddress(void);
unsigned char _BitScanForward(unsigned long *_Index, unsigned long _Mask);
unsigned char _BitScanReverse(unsigned long *_Index, unsigned long _Mask);
unsigned char _bittest(long const *, long);
unsigned char _bittestandcomplement(long *, long);
unsigned char _bittestandreset(long *, long);
unsigned char _bittestandset(long *, long);
void __cdecl _disable(void);
void __cdecl _enable(void);
long _InterlockedAddLargeStatistic(__int64 volatile *_Addend, long _Value);
unsigned char _interlockedbittestandreset(long volatile *, long);
unsigned char _interlockedbittestandset(long volatile *, long);
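/* Illustrative sketch, not part of the original header: _BitScanForward scans
 * _Mask from the least significant bit, writes the index of the first set bit
 * to *_Index, and returns zero only when _Mask is zero. The __example_* helper
 * below is hypothetical. */
static __inline__ int __example_lowest_set_bit(unsigned long __mask) {
  unsigned long __index;
  if (!_BitScanForward(&__index, __mask))
    return -1;          /* no bit set */
  return (int)__index;  /* e.g. __mask == 0x18 yields 3 */
}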
long _InterlockedCompareExchange_HLEAcquire(long volatile *, long, long);
long _InterlockedCompareExchange_HLERelease(long volatile *, long, long);
__int64 _InterlockedCompareExchange64_HLEAcquire(__int64 volatile *, __int64,
                                                 __int64);
__int64 _InterlockedCompareExchange64_HLERelease(__int64 volatile *, __int64,
                                                 __int64);
void *_InterlockedCompareExchangePointer_HLEAcquire(void *volatile *, void *,
                                                    void *);
void *_InterlockedCompareExchangePointer_HLERelease(void *volatile *, void *,
                                                    void *);
long _InterlockedExchangeAdd_HLEAcquire(long volatile *, long);
long _InterlockedExchangeAdd_HLERelease(long volatile *, long);
__int64 _InterlockedExchangeAdd64_HLEAcquire(__int64 volatile *, __int64);
__int64 _InterlockedExchangeAdd64_HLERelease(__int64 volatile *, __int64);
void __cdecl _invpcid(unsigned int, void *);
static __inline__ void
__attribute__((__deprecated__("use other intrinsics or C++11 atomics instead")))
_ReadBarrier(void);
static __inline__ void
__attribute__((__deprecated__("use other intrinsics or C++11 atomics instead")))
_ReadWriteBarrier(void);
unsigned int _rorx_u32(unsigned int, const unsigned int);
int _sarx_i32(int, unsigned int);
int __cdecl _setjmp(jmp_buf);
unsigned int _shlx_u32(unsigned int, unsigned int);
unsigned int _shrx_u32(unsigned int, unsigned int);
void _Store_HLERelease(long volatile *, long);
void _Store64_HLERelease(__int64 volatile *, __int64);
void _StorePointer_HLERelease(void *volatile *, void *);
static __inline__ void
__attribute__((__deprecated__("use other intrinsics or C++11 atomics instead")))
_WriteBarrier(void);
unsigned __int32 xbegin(void);
#define _XCR_XFEATURE_ENABLED_MASK 0
unsigned __int64 __cdecl _xgetbv(unsigned int);
void __cdecl _xsetbv(unsigned int, unsigned __int64);
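/* Illustrative sketch, not part of the original header: _xgetbv reads an
 * extended control register; XCR0, selected by _XCR_XFEATURE_ENABLED_MASK,
 * reports which register states the OS has enabled. The hypothetical helper
 * below assumes the CPU and OS support XGETBV (CPUID.OSXSAVE). */
static __inline__ int __example_os_enabled_avx(void) {
  unsigned __int64 __xcr0 = _xgetbv(_XCR_XFEATURE_ENABLED_MASK);
  return (__xcr0 & 0x6) == 0x6; /* bits 1 and 2: SSE and AVX state enabled */
}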
void __addgsbyte(unsigned long, unsigned char);
void __addgsdword(unsigned long, unsigned long);
void __addgsqword(unsigned long, unsigned __int64);
void __addgsword(unsigned long, unsigned short);
void __faststorefence(void);
void __incgsbyte(unsigned long);
void __incgsdword(unsigned long);
void __incgsqword(unsigned long);
void __incgsword(unsigned long);
unsigned __int64 __lzcnt64(unsigned __int64);
void __movsq(unsigned long long *, unsigned long long const *, size_t);
unsigned char __readgsbyte(unsigned long);
unsigned long __readgsdword(unsigned long);
unsigned __int64 __readgsqword(unsigned long);
unsigned short __readgsword(unsigned long);
unsigned __int64 __shiftleft128(unsigned __int64 _LowPart,
                                unsigned __int64 _HighPart,
                                unsigned char _Shift);
unsigned __int64 __shiftright128(unsigned __int64 _LowPart,
                                 unsigned __int64 _HighPart,
                                 unsigned char _Shift);
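/* Illustrative sketch, not part of the original header: __shiftleft128 shifts
 * the 128-bit value formed by _HighPart:_LowPart left by _Shift (for shift
 * amounts in [0, 63]) and returns the new high 64 bits. The helper below is
 * hypothetical. */
static __inline__ unsigned __int64
__example_high_bits_after_shift(unsigned __int64 __lo, unsigned __int64 __hi) {
  /* For __hi == 0, __lo == 0x8000000000000000 and a shift of 1,
     the returned high part is 1. */
  return __shiftleft128(__lo, __hi, 1);
}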
void __stosq(unsigned __int64 *, unsigned __int64, size_t);
unsigned char __vmx_on(unsigned __int64 *);
unsigned char __vmx_vmclear(unsigned __int64 *);
unsigned char __vmx_vmlaunch(void);
unsigned char __vmx_vmptrld(unsigned __int64 *);
unsigned char __vmx_vmread(size_t, size_t *);
unsigned char __vmx_vmresume(void);
unsigned char __vmx_vmwrite(size_t, size_t);
void __writegsbyte(unsigned long, unsigned char);
void __writegsdword(unsigned long, unsigned long);
void __writegsqword(unsigned long, unsigned __int64);
void __writegsword(unsigned long, unsigned short);
unsigned char _BitScanForward64(unsigned long *_Index, unsigned __int64 _Mask);
unsigned char _BitScanReverse64(unsigned long *_Index, unsigned __int64 _Mask);
unsigned char _bittest64(__int64 const *, __int64);
unsigned char _bittestandcomplement64(__int64 *, __int64);
unsigned char _bittestandreset64(__int64 *, __int64);
unsigned char _bittestandset64(__int64 *, __int64);
long _InterlockedAnd_np(long volatile *_Value, long _Mask);
short _InterlockedAnd16_np(short volatile *_Value, short _Mask);
__int64 _InterlockedAnd64_np(__int64 volatile *_Value, __int64 _Mask);
char _InterlockedAnd8_np(char volatile *_Value, char _Mask);
unsigned char _interlockedbittestandreset64(__int64 volatile *, __int64);
unsigned char _interlockedbittestandset64(__int64 volatile *, __int64);
long _InterlockedCompareExchange_np(long volatile *_Destination, long _Exchange,
                                    long _Comparand);
unsigned char _InterlockedCompareExchange128(__int64 volatile *_Destination,
                                             __int64 _ExchangeHigh,
                                             __int64 _ExchangeLow,
                                             __int64 *_ComparandResult);
unsigned char _InterlockedCompareExchange128_np(__int64 volatile *_Destination,
                                                __int64 _ExchangeHigh,
                                                __int64 _ExchangeLow,
                                                __int64 *_ComparandResult);
short _InterlockedCompareExchange16_np(short volatile *_Destination,
                                       short _Exchange, short _Comparand);
__int64 _InterlockedCompareExchange64_HLEAcquire(__int64 volatile *, __int64,
                                                 __int64);
__int64 _InterlockedCompareExchange64_HLERelease(__int64 volatile *, __int64,
                                                 __int64);
__int64 _InterlockedCompareExchange64_np(__int64 volatile *_Destination,
                                         __int64 _Exchange, __int64 _Comparand);
void *_InterlockedCompareExchangePointer_np(void *volatile *_Destination,
                                            void *_Exchange, void *_Comparand);
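/* Illustrative sketch, not part of the original header:
 * _InterlockedCompareExchange128 atomically compares the 16-byte value at
 * _Destination with the two-element comparand array; on a match it stores
 * _ExchangeHigh:_ExchangeLow and returns 1, otherwise it writes the observed
 * value back into the array and returns 0. _Destination is assumed to be
 * 16-byte aligned. The wrapper below is hypothetical. */
static __inline__ unsigned char
__example_cas128(__int64 volatile *__dst /* 16-byte aligned, 2 x __int64 */,
                 __int64 __new_hi, __int64 __new_lo, __int64 __expected[2]) {
  return _InterlockedCompareExchange128(__dst, __new_hi, __new_lo, __expected);
}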
long _InterlockedOr_np(long volatile *_Value, long _Mask);
short _InterlockedOr16_np(short volatile *_Value, short _Mask);
__int64 _InterlockedOr64_np(__int64 volatile *_Value, __int64 _Mask);
char _InterlockedOr8_np(char volatile *_Value, char _Mask);
long _InterlockedXor_np(long volatile *_Value, long _Mask);
short _InterlockedXor16_np(short volatile *_Value, short _Mask);
__int64 _InterlockedXor64_np(__int64 volatile *_Value, __int64 _Mask);
char _InterlockedXor8_np(char volatile *_Value, char _Mask);
unsigned __int64 _rorx_u64(unsigned __int64, const unsigned int);
__int64 _sarx_i64(__int64, unsigned int);
unsigned __int64 _shlx_u64(unsigned __int64, unsigned int);
unsigned __int64 _shrx_u64(unsigned __int64, unsigned int);
__int64 __mulh(__int64, __int64);
unsigned __int64 __umulh(unsigned __int64, unsigned __int64);
__int64 _mul128(__int64, __int64, __int64 *);
unsigned __int64 _umul128(unsigned __int64, unsigned __int64,
                          unsigned __int64 *);
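/* Illustrative sketch, not part of the original header: _umul128 multiplies
 * two unsigned 64-bit operands, returns the low 64 bits of the 128-bit
 * product, and stores the high 64 bits through the third parameter. The
 * helper below is hypothetical. */
static __inline__ unsigned __int64
__example_mulhi_u64(unsigned __int64 __a, unsigned __int64 __b) {
  unsigned __int64 __hi;
  (void)_umul128(__a, __b, &__hi); /* same high half as __umulh(__a, __b) */
  return __hi;
}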
#if defined(__x86_64__) || defined(__arm__)

__int64 _InterlockedDecrement64(__int64 volatile *_Addend);
__int64 _InterlockedExchange64(__int64 volatile *_Target, __int64 _Value);
__int64 _InterlockedExchangeAdd64(__int64 volatile *_Addend, __int64 _Value);
__int64 _InterlockedExchangeSub64(__int64 volatile *_Subend, __int64 _Value);
__int64 _InterlockedIncrement64(__int64 volatile *_Addend);
__int64 _InterlockedOr64(__int64 volatile *_Value, __int64 _Mask);
__int64 _InterlockedXor64(__int64 volatile *_Value, __int64 _Mask);
__int64 _InterlockedAnd64(__int64 volatile *_Value, __int64 _Mask);
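/* Illustrative sketch, not part of the original header: like the 32-bit
 * forms, _InterlockedIncrement64 returns the incremented value, while
 * _InterlockedExchangeAdd64 returns the value the addend held before the
 * addition. The helper below is hypothetical and assumes a target where the
 * 64-bit interlocked intrinsics above are available. */
static __inline__ __int64 __example_bump_counter(__int64 volatile *__counter) {
  return _InterlockedIncrement64(__counter); /* new value after the increment */
}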
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_bittest(long const *_BitBase, long _BitPos) {
  return (*_BitBase >> _BitPos) & 1;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_bittestandcomplement(long *_BitBase, long _BitPos) {
  unsigned char _Res = (*_BitBase >> _BitPos) & 1;
  *_BitBase = *_BitBase ^ (1 << _BitPos);
  return _Res;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_bittestandreset(long *_BitBase, long _BitPos) {
  unsigned char _Res = (*_BitBase >> _BitPos) & 1;
  *_BitBase = *_BitBase & ~(1 << _BitPos);
  return _Res;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_bittestandset(long *_BitBase, long _BitPos) {
  unsigned char _Res = (*_BitBase >> _BitPos) & 1;
  *_BitBase = *_BitBase | (1 << _BitPos);
  return _Res;
}
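/* Illustrative sketch, not part of the original header: each _bittestand*
 * helper reports the prior state of the selected bit, so a caller can test
 * and modify in one step (these forms are not atomic; use the interlocked
 * variants for shared data). The helper below is hypothetical. */
static __inline__ int __example_claim_slot(long *__bitmap, long __slot) {
  /* Returns 1 if the slot was already taken, 0 if this call claimed it. */
  return _bittestandset(__bitmap, __slot);
}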
#if defined(__arm__) || defined(__aarch64__)
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_interlockedbittestandset_acq(long volatile *_BitBase, long _BitPos) {
  long _PrevVal = __atomic_fetch_or(_BitBase, 1l << _BitPos, __ATOMIC_ACQUIRE);
  return (_PrevVal >> _BitPos) & 1;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_interlockedbittestandset_nf(long volatile *_BitBase, long _BitPos) {
  long _PrevVal = __atomic_fetch_or(_BitBase, 1l << _BitPos, __ATOMIC_RELAXED);
  return (_PrevVal >> _BitPos) & 1;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_interlockedbittestandset_rel(long volatile *_BitBase, long _BitPos) {
  long _PrevVal = __atomic_fetch_or(_BitBase, 1l << _BitPos, __ATOMIC_RELEASE);
  return (_PrevVal >> _BitPos) & 1;
}

static __inline__ unsigned char __DEFAULT_FN_ATTRS
_bittest64(__int64 const *_BitBase, __int64 _BitPos) {
  return (*_BitBase >> _BitPos) & 1;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_bittestandcomplement64(__int64 *_BitBase, __int64 _BitPos) {
  unsigned char _Res = (*_BitBase >> _BitPos) & 1;
  *_BitBase = *_BitBase ^ (1ll << _BitPos);
  return _Res;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_bittestandreset64(__int64 *_BitBase, __int64 _BitPos) {
  unsigned char _Res = (*_BitBase >> _BitPos) & 1;
  *_BitBase = *_BitBase & ~(1ll << _BitPos);
  return _Res;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_bittestandset64(__int64 *_BitBase, __int64 _BitPos) {
  unsigned char _Res = (*_BitBase >> _BitPos) & 1;
  *_BitBase = *_BitBase | (1ll << _BitPos);
  return _Res;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_interlockedbittestandset64(__int64 volatile *_BitBase, __int64 _BitPos) {
  long long _PrevVal =
      __atomic_fetch_or(_BitBase, 1ll << _BitPos, __ATOMIC_SEQ_CST);
  return (_PrevVal >> _BitPos) & 1;
}
#if defined(__arm__) || defined(__aarch64__)
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd8_acq(char volatile *_Addend, char _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_ACQUIRE);
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd8_nf(char volatile *_Addend, char _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_RELAXED);
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd8_rel(char volatile *_Addend, char _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_RELEASE);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd16_acq(short volatile *_Addend, short _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_ACQUIRE);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd16_nf(short volatile *_Addend, short _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_RELAXED);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd16_rel(short volatile *_Addend, short _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_RELEASE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd_acq(long volatile *_Addend, long _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_ACQUIRE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd_nf(long volatile *_Addend, long _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_RELAXED);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd_rel(long volatile *_Addend, long _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_RELEASE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd64_acq(__int64 volatile *_Addend, __int64 _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_ACQUIRE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd64_nf(__int64 volatile *_Addend, __int64 _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_RELAXED);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd64_rel(__int64 volatile *_Addend, __int64 _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_RELEASE);
}
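/* Illustrative sketch, not part of the original header: the _acq/_rel/_nf
 * suffixes map to acquire, release, and relaxed memory order; the unsuffixed
 * intrinsics declared earlier are sequentially consistent. All of them return
 * the addend's previous value. The helper below is hypothetical and assumes
 * an ARM/AArch64 target where the suffixed forms above exist. */
static __inline__ long __example_fetch_add_acquire(long volatile *__p) {
  return _InterlockedExchangeAdd_acq(__p, 1); /* old value; *__p is now old+1 */
}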
#if defined(__arm__) || defined(__aarch64__)
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedIncrement16_acq(short volatile *_Value) {
  return __atomic_add_fetch(_Value, 1, __ATOMIC_ACQUIRE);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedIncrement16_nf(short volatile *_Value) {
  return __atomic_add_fetch(_Value, 1, __ATOMIC_RELAXED);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedIncrement16_rel(short volatile *_Value) {
  return __atomic_add_fetch(_Value, 1, __ATOMIC_RELEASE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedIncrement_acq(long volatile *_Value) {
  return __atomic_add_fetch(_Value, 1, __ATOMIC_ACQUIRE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedIncrement_nf(long volatile *_Value) {
  return __atomic_add_fetch(_Value, 1, __ATOMIC_RELAXED);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedIncrement_rel(long volatile *_Value) {
  return __atomic_add_fetch(_Value, 1, __ATOMIC_RELEASE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedIncrement64_acq(__int64 volatile *_Value) {
  return __atomic_add_fetch(_Value, 1, __ATOMIC_ACQUIRE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedIncrement64_nf(__int64 volatile *_Value) {
  return __atomic_add_fetch(_Value, 1, __ATOMIC_RELAXED);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedIncrement64_rel(__int64 volatile *_Value) {
  return __atomic_add_fetch(_Value, 1, __ATOMIC_RELEASE);
}
#if defined(__arm__) || defined(__aarch64__)
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedDecrement16_acq(short volatile *_Value) {
  return __atomic_sub_fetch(_Value, 1, __ATOMIC_ACQUIRE);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedDecrement16_nf(short volatile *_Value) {
  return __atomic_sub_fetch(_Value, 1, __ATOMIC_RELAXED);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedDecrement16_rel(short volatile *_Value) {
  return __atomic_sub_fetch(_Value, 1, __ATOMIC_RELEASE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedDecrement_acq(long volatile *_Value) {
  return __atomic_sub_fetch(_Value, 1, __ATOMIC_ACQUIRE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedDecrement_nf(long volatile *_Value) {
  return __atomic_sub_fetch(_Value, 1, __ATOMIC_RELAXED);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedDecrement_rel(long volatile *_Value) {
  return __atomic_sub_fetch(_Value, 1, __ATOMIC_RELEASE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedDecrement64_acq(__int64 volatile *_Value) {
  return __atomic_sub_fetch(_Value, 1, __ATOMIC_ACQUIRE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedDecrement64_nf(__int64 volatile *_Value) {
  return __atomic_sub_fetch(_Value, 1, __ATOMIC_RELAXED);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedDecrement64_rel(__int64 volatile *_Value) {
  return __atomic_sub_fetch(_Value, 1, __ATOMIC_RELEASE);
}
#if defined(__arm__) || defined(__aarch64__)
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedAnd8_acq(char volatile *_Value, char _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedAnd8_nf(char volatile *_Value, char _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedAnd8_rel(char volatile *_Value, char _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_RELEASE);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedAnd16_acq(short volatile *_Value, short _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedAnd16_nf(short volatile *_Value, short _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedAnd16_rel(short volatile *_Value, short _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_RELEASE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedAnd_acq(long volatile *_Value, long _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedAnd_nf(long volatile *_Value, long _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedAnd_rel(long volatile *_Value, long _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_RELEASE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedAnd64_acq(__int64 volatile *_Value, __int64 _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedAnd64_nf(__int64 volatile *_Value, __int64 _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedAnd64_rel(__int64 volatile *_Value, __int64 _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_RELEASE);
}
#if defined(__arm__) || defined(__aarch64__)
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedOr8_acq(char volatile *_Value, char _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedOr8_nf(char volatile *_Value, char _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedOr8_rel(char volatile *_Value, char _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_RELEASE);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedOr16_acq(short volatile *_Value, short _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedOr16_nf(short volatile *_Value, short _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedOr16_rel(short volatile *_Value, short _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_RELEASE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedOr_acq(long volatile *_Value, long _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedOr_nf(long volatile *_Value, long _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedOr_rel(long volatile *_Value, long _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_RELEASE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedOr64_acq(__int64 volatile *_Value, __int64 _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedOr64_nf(__int64 volatile *_Value, __int64 _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedOr64_rel(__int64 volatile *_Value, __int64 _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_RELEASE);
}
#if defined(__arm__) || defined(__aarch64__)
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedXor8_acq(char volatile *_Value, char _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedXor8_nf(char volatile *_Value, char _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedXor8_rel(char volatile *_Value, char _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_RELEASE);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedXor16_acq(short volatile *_Value, short _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedXor16_nf(short volatile *_Value, short _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedXor16_rel(short volatile *_Value, short _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_RELEASE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedXor_acq(long volatile *_Value, long _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedXor_nf(long volatile *_Value, long _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedXor_rel(long volatile *_Value, long _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_RELEASE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedXor64_acq(__int64 volatile *_Value, __int64 _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedXor64_nf(__int64 volatile *_Value, __int64 _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedXor64_rel(__int64 volatile *_Value, __int64 _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_RELEASE);
}
#if defined(__arm__) || defined(__aarch64__)
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedExchange8_acq(char volatile *_Target, char _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_ACQUIRE);
  return _Value;
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedExchange8_nf(char volatile *_Target, char _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_RELAXED);
  return _Value;
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedExchange8_rel(char volatile *_Target, char _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_RELEASE);
  return _Value;
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedExchange16_acq(short volatile *_Target, short _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_ACQUIRE);
  return _Value;
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedExchange16_nf(short volatile *_Target, short _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_RELAXED);
  return _Value;
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedExchange16_rel(short volatile *_Target, short _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_RELEASE);
  return _Value;
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedExchange_acq(long volatile *_Target, long _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_ACQUIRE);
  return _Value;
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedExchange_nf(long volatile *_Target, long _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_RELAXED);
  return _Value;
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedExchange_rel(long volatile *_Target, long _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_RELEASE);
  return _Value;
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedExchange64_acq(__int64 volatile *_Target, __int64 _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_ACQUIRE);
  return _Value;
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedExchange64_nf(__int64 volatile *_Target, __int64 _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_RELAXED);
  return _Value;
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedExchange64_rel(__int64 volatile *_Target, __int64 _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_RELEASE);
  return _Value;
}
#if defined(__arm__) || defined(__aarch64__)
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedCompareExchange8_acq(char volatile *_Destination,
                                 char _Exchange, char _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_SEQ_CST, __ATOMIC_ACQUIRE);
  return _Comparand;
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedCompareExchange8_nf(char volatile *_Destination,
                                char _Exchange, char _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_SEQ_CST, __ATOMIC_RELAXED);
  return _Comparand;
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedCompareExchange8_rel(char volatile *_Destination,
                                 char _Exchange, char _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_SEQ_CST, __ATOMIC_RELEASE);
  return _Comparand;
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedCompareExchange16_acq(short volatile *_Destination,
                                  short _Exchange, short _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_SEQ_CST, __ATOMIC_ACQUIRE);
  return _Comparand;
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedCompareExchange16_nf(short volatile *_Destination,
                                 short _Exchange, short _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_SEQ_CST, __ATOMIC_RELAXED);
  return _Comparand;
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedCompareExchange16_rel(short volatile *_Destination,
                                  short _Exchange, short _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_SEQ_CST, __ATOMIC_RELEASE);
  return _Comparand;
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedCompareExchange_acq(long volatile *_Destination,
                                long _Exchange, long _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_SEQ_CST, __ATOMIC_ACQUIRE);
  return _Comparand;
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedCompareExchange_nf(long volatile *_Destination,
                               long _Exchange, long _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_SEQ_CST, __ATOMIC_RELAXED);
  return _Comparand;
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedCompareExchange_rel(long volatile *_Destination,
                                long _Exchange, long _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_SEQ_CST, __ATOMIC_RELEASE);
  return _Comparand;
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedCompareExchange64_acq(__int64 volatile *_Destination,
                                  __int64 _Exchange, __int64 _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_SEQ_CST, __ATOMIC_ACQUIRE);
  return _Comparand;
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedCompareExchange64_nf(__int64 volatile *_Destination,
                                 __int64 _Exchange, __int64 _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_SEQ_CST, __ATOMIC_RELAXED);
  return _Comparand;
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedCompareExchange64_rel(__int64 volatile *_Destination,
                                  __int64 _Exchange, __int64 _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_SEQ_CST, __ATOMIC_RELEASE);
  return _Comparand;
}
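/* Illustrative sketch, not part of the original header: the compare-exchange
 * intrinsics return the value previously stored at _Destination, so a CAS
 * loop succeeds when that value equals the comparand it passed in. The
 * hypothetical helper below uses the ARM-only acquire variant defined above. */
static __inline__ long __example_atomic_max(long volatile *__dst, long __val) {
  long __old = *__dst;
  while (__old < __val) {
    long __seen = _InterlockedCompareExchange_acq(__dst, __val, __old);
    if (__seen == __old)
      break;        /* exchange took effect */
    __old = __seen; /* lost the race; retry against the newer value */
  }
  return __old;
}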
#if defined(__i386__) || defined(__x86_64__)
static __inline__ void __DEFAULT_FN_ATTRS
__movsb(unsigned char *__dst, unsigned char const *__src, size_t __n) {
  __asm__("rep movsb" : : "D"(__dst), "S"(__src), "c"(__n));
}
static __inline__ void __DEFAULT_FN_ATTRS
__movsd(unsigned long *__dst, unsigned long const *__src, size_t __n) {
  __asm__("rep movsl" : : "D"(__dst), "S"(__src), "c"(__n));
}
static __inline__ void __DEFAULT_FN_ATTRS
__movsw(unsigned short *__dst, unsigned short const *__src, size_t __n) {
  __asm__("rep movsw" : : "D"(__dst), "S"(__src), "c"(__n));
}
static __inline__ void __DEFAULT_FN_ATTRS
__stosd(unsigned long *__dst, unsigned long __x, size_t __n) {
  __asm__("rep stosl" : : "D"(__dst), "a"(__x), "c"(__n));
}
static __inline__ void __DEFAULT_FN_ATTRS
__stosw(unsigned short *__dst, unsigned short __x, size_t __n) {
  __asm__("rep stosw" : : "D"(__dst), "a"(__x), "c"(__n));
}
static __inline__ void __DEFAULT_FN_ATTRS
__movsq(unsigned long long *__dst, unsigned long long const *__src,
        size_t __n) {
  __asm__("rep movsq" : : "D"(__dst), "S"(__src), "c"(__n));
}
static __inline__ void __DEFAULT_FN_ATTRS
__stosq(unsigned __int64 *__dst, unsigned __int64 __x, size_t __n) {
  __asm__("rep stosq" : : "D"(__dst), "a"(__x), "c"(__n));
}
#if defined(__i386__) || defined(__x86_64__)
static __inline__ void __DEFAULT_FN_ATTRS
__cpuid(int __info[4], int __level) {
  __asm__("cpuid"
          : "=a"(__info[0]), "=b"(__info[1]), "=c"(__info[2]), "=d"(__info[3])
          : "a"(__level));
}
static __inline__ void __DEFAULT_FN_ATTRS
__cpuidex(int __info[4], int __level, int __ecx) {
  __asm__("cpuid"
          : "=a"(__info[0]), "=b"(__info[1]), "=c"(__info[2]), "=d"(__info[3])
          : "a"(__level), "c"(__ecx));
}
static __inline__ unsigned __int64 __cdecl __DEFAULT_FN_ATTRS
_xgetbv(unsigned int __xcr_no) {
  unsigned int __eax, __edx;
  __asm__("xgetbv" : "=a"(__eax), "=d"(__edx) : "c"(__xcr_no));
  return ((unsigned __int64)__edx << 32) | __eax;
}
static __inline__ void __DEFAULT_FN_ATTRS
__halt(void) {
  __asm__ volatile("hlt");
}
static __inline__ void __DEFAULT_FN_ATTRS
__nop(void) {
  __asm__ volatile("nop");
}
#if defined(__i386__) || defined(__x86_64__)
static __inline__ unsigned __int64 __DEFAULT_FN_ATTRS
__readmsr(unsigned long __register) {
  /* rdmsr loads the 64-bit MSR selected by ECX into EDX:EAX. */
  unsigned long __edx;
  unsigned long __eax;
  __asm__("rdmsr" : "=d"(__edx), "=a"(__eax) : "c"(__register));
  return (((unsigned __int64)__edx) << 32) | (unsigned __int64)__eax;
}
static __inline__ unsigned long __DEFAULT_FN_ATTRS
__readcr3(void) {
  unsigned long __cr3_val;
  __asm__ __volatile__("mov %%cr3, %0" : "=q"(__cr3_val) : : "memory");
  return __cr3_val;
}
static __inline__ void __DEFAULT_FN_ATTRS
__writecr3(unsigned int __cr3_val) {
  __asm__("mov %0, %%cr3" : : "q"(__cr3_val) : "memory");
}
#undef __DEFAULT_FN_ATTRS