Lines matching refs: _Value (each entry keeps its source line number; lines without a match are omitted)
180 long _InterlockedAddLargeStatistic(__int64 volatile *_Addend, long _Value);
182 long _InterlockedAnd(long volatile *_Value, long _Mask);
184 short _InterlockedAnd16(short volatile *_Value, short _Mask);
186 char _InterlockedAnd8(char volatile *_Value, char _Mask);
216 long _InterlockedExchange(long volatile *_Target, long _Value);
218 short _InterlockedExchange16(short volatile *_Target, short _Value);
220 char _InterlockedExchange8(char volatile *_Target, char _Value);
222 long __cdecl _InterlockedExchangeAdd(long volatile *_Addend, long _Value);
226 short _InterlockedExchangeAdd16(short volatile *_Addend, short _Value);
230 char _InterlockedExchangeAdd8(char volatile *_Addend, char _Value);
236 long _InterlockedOr(long volatile *_Value, long _Mask);
238 short _InterlockedOr16(short volatile *_Value, short _Mask);
240 char _InterlockedOr8(char volatile *_Value, char _Mask);
242 long _InterlockedXor(long volatile *_Value, long _Mask);
244 short _InterlockedXor16(short volatile *_Value, short _Mask);
246 char _InterlockedXor8(char volatile *_Value, char _Mask);
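
All of the interlocked intrinsics above atomically apply their operation to the target and return the value it held beforehand. A minimal sketch of the classic use of _InterlockedExchange, a test-and-set spinlock (the lock variable, helper names, and the <intrin.h> include are my assumptions, not part of the listing):

    #include <intrin.h>

    static long volatile g_lock = 0;  /* 0 = free, 1 = held */

    static void lock_acquire(void) {
      /* _InterlockedExchange returns the previous value; seeing 0 means
       * this thread is the one that flipped the lock to 1. */
      while (_InterlockedExchange(&g_lock, 1) != 0) {
        /* spin */
      }
    }

    static void lock_release(void) {
      _InterlockedExchange(&g_lock, 0);
    }
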
260 unsigned int __cdecl _rotl(unsigned int _Value, int _Shift);
262 unsigned short _rotl16(unsigned short _Value, unsigned char _Shift);
264 unsigned __int64 __cdecl _rotl64(unsigned __int64 _Value, int _Shift);
266 unsigned char _rotl8(unsigned char _Value, unsigned char _Shift);
268 unsigned int __cdecl _rotr(unsigned int _Value, int _Shift);
270 unsigned short _rotr16(unsigned short _Value, unsigned char _Shift);
272 unsigned __int64 __cdecl _rotr64(unsigned __int64 _Value, int _Shift);
274 unsigned char _rotr8(unsigned char _Value, unsigned char _Shift);
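
Each rotate intrinsic rotates _Value left (_rotl*) or right (_rotr*) by _Shift bit positions, with bits that fall off one end reappearing at the other. A few worked checks, assuming an environment where these are available via <intrin.h>:

    #include <assert.h>
    #include <intrin.h>

    int main(void) {
      assert(_rotl8(0x81, 1) == 0x03);       /* 1000 0001 -> 0000 0011 */
      assert(_rotr16(0x0001, 4) == 0x1000);  /* low bit wraps to bit 12 */
      assert(_rotl(0x80000001u, 4) == 0x18); /* bits 31,0 -> bits 3,4 */
      return 0;
    }
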
352 long _InterlockedAnd_np(long volatile *_Value, long _Mask);
353 short _InterlockedAnd16_np(short volatile *_Value, short _Mask);
354 __int64 _InterlockedAnd64_np(__int64 volatile *_Value, __int64 _Mask);
355 char _InterlockedAnd8_np(char volatile *_Value, char _Mask);
384 __int64 _InterlockedExchange64(__int64 volatile *_Target, __int64 _Value);
386 __int64 _InterlockedExchangeAdd64(__int64 volatile *_Addend, __int64 _Value);
387 void *_InterlockedExchangePointer(void *volatile *_Target, void *_Value);
390 long _InterlockedOr_np(long volatile *_Value, long _Mask);
391 short _InterlockedOr16_np(short volatile *_Value, short _Mask);
393 __int64 _InterlockedOr64(__int64 volatile *_Value, __int64 _Mask);
394 __int64 _InterlockedOr64_np(__int64 volatile *_Value, __int64 _Mask);
395 char _InterlockedOr8_np(char volatile *_Value, char _Mask);
396 long _InterlockedXor_np(long volatile *_Value, long _Mask);
397 short _InterlockedXor16_np(short volatile *_Value, short _Mask);
399 __int64 _InterlockedXor64(__int64 volatile *_Value, __int64 _Mask);
400 __int64 _InterlockedXor64_np(__int64 volatile *_Value, __int64 _Mask);
401 char _InterlockedXor8_np(char volatile *_Value, char _Mask);
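
The _np-suffixed declarations compute the same results as the unsuffixed forms; the suffix reportedly stands for "no prefetch" and affects only code generation. _InterlockedExchangePointer swaps a pointer-sized value in a single atomic step, the usual way to publish a freshly built object. A sketch (the Config type and helper name are mine, not from the listing):

    #include <intrin.h>
    #include <stdlib.h>

    typedef struct Config { int verbosity; } Config;

    static void *volatile g_config;  /* current Config shared across threads */

    static void config_publish(Config *fresh) {
      /* _InterlockedExchangePointer returns the pointer it replaced. */
      Config *old = (Config *)_InterlockedExchangePointer(&g_config, fresh);
      free(old);  /* safe only if no reader can still be using 'old' */
    }
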
448 _rotl8(unsigned char _Value, unsigned char _Shift) {
450   return _Shift ? (_Value << _Shift) | (_Value >> (8 - _Shift)) : _Value;
453 _rotr8(unsigned char _Value, unsigned char _Shift) {
455   return _Shift ? (_Value >> _Shift) | (_Value << (8 - _Shift)) : _Value;
458 _rotl16(unsigned short _Value, unsigned char _Shift) {
460   return _Shift ? (_Value << _Shift) | (_Value >> (16 - _Shift)) : _Value;
463 _rotr16(unsigned short _Value, unsigned char _Shift) {
465   return _Shift ? (_Value >> _Shift) | (_Value << (16 - _Shift)) : _Value;
468 _rotl(unsigned int _Value, int _Shift) {
470   return _Shift ? (_Value << _Shift) | (_Value >> (32 - _Shift)) : _Value;
473 _rotr(unsigned int _Value, int _Shift) {
475   return _Shift ? (_Value >> _Shift) | (_Value << (32 - _Shift)) : _Value;
478 _lrotl(unsigned long _Value, int _Shift) {
480   return _Shift ? (_Value << _Shift) | (_Value >> (32 - _Shift)) : _Value;
483 _lrotr(unsigned long _Value, int _Shift) {
485   return _Shift ? (_Value >> _Shift) | (_Value << (32 - _Shift)) : _Value;
489 _rotl64(unsigned __int64 _Value, int _Shift) {
491   return _Shift ? (_Value << _Shift) | (_Value >> (64 - _Shift)) : _Value;
495 _rotr64(unsigned __int64 _Value, int _Shift) {
497   return _Shift ? (_Value >> _Shift) | (_Value << (64 - _Shift)) : _Value;
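
Every rotate body above shares the _Shift ? ... : _Value guard. For the 32- and 64-bit variants it is load-bearing: with _Shift == 0, the expression _Value >> (32 - _Shift) would shift by the full width of the type, which is undefined behaviour in C. (The listing shows only matching lines, e.g. line 449 between 448 and 450 is omitted; any clamping of _Shift to the type width would sit in those gaps.) The same idiom in portable C, with assumed names:

    #include <stdint.h>

    /* Sketch of the guarded-rotate idiom; rotl32_sketch is my name. */
    static uint32_t rotl32_sketch(uint32_t v, unsigned s) {
      s &= 31;  /* assumed clamping step, not visible in the listing */
      /* For s == 0, v >> (32 - s) would shift a 32-bit value by 32,
       * which C leaves undefined; the ternary returns v directly. */
      return s ? (v << s) | (v >> (32 - s)) : v;
    }
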
517 __popcnt16(unsigned short _Value) {
518   return __builtin_popcount((int)_Value);
521 __popcnt(unsigned int _Value) {
522   return __builtin_popcount(_Value);
568 __popcnt64(unsigned __int64 _Value) {
569   return __builtin_popcountll(_Value);
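
The population-count bodies forward to the GCC/Clang builtins; the (int) cast in __popcnt16 zero-extends the 16-bit input before counting. A cross-check against Kernighan's bit-clearing loop (the helper is mine; assumes a GCC/Clang-style compiler so __builtin_popcount is available directly):

    #include <assert.h>

    static unsigned popcount_naive(unsigned v) {
      unsigned n = 0;
      while (v) { v &= v - 1; ++n; }  /* clears the lowest set bit */
      return n;
    }

    int main(void) {
      unsigned v = 0xF0F0u;
      assert(popcount_naive(v) == 8);
      assert((unsigned)__builtin_popcount(v) == popcount_naive(v));
      return 0;
    }
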
604 _InterlockedExchangeAdd8(char volatile *_Addend, char _Value) {
605   return __atomic_fetch_add(_Addend, _Value, __ATOMIC_SEQ_CST);
608 _InterlockedExchangeAdd16(short volatile *_Addend, short _Value) {
609   return __atomic_fetch_add(_Addend, _Value, __ATOMIC_SEQ_CST);
613 _InterlockedExchangeAdd64(__int64 volatile *_Addend, __int64 _Value) {
614   return __atomic_fetch_add(_Addend, _Value, __ATOMIC_SEQ_CST);
621 _InterlockedExchangeSub8(char volatile *_Subend, char _Value) {
622   return __atomic_fetch_sub(_Subend, _Value, __ATOMIC_SEQ_CST);
625 _InterlockedExchangeSub16(short volatile *_Subend, short _Value) {
626   return __atomic_fetch_sub(_Subend, _Value, __ATOMIC_SEQ_CST);
629 _InterlockedExchangeSub(long volatile *_Subend, long _Value) {
630   return __atomic_fetch_sub(_Subend, _Value, __ATOMIC_SEQ_CST);
634 _InterlockedExchangeSub64(__int64 volatile *_Subend, __int64 _Value) {
635   return __atomic_fetch_sub(_Subend, _Value, __ATOMIC_SEQ_CST);
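
__atomic_fetch_add and __atomic_fetch_sub return the value held before the update, which is the documented _InterlockedExchangeAdd contract. That makes the family a natural fit for handing out unique values; a sketch with assumed names:

    #include <intrin.h>

    static __int64 volatile g_next_ticket = 0;

    static __int64 take_ticket(void) {
      /* Returns the pre-increment value, so concurrent callers each
       * receive a distinct ticket. */
      return _InterlockedExchangeAdd64(&g_next_ticket, 1);
    }
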
642 _InterlockedIncrement16(short volatile *_Value) {
643   return __atomic_add_fetch(_Value, 1, __ATOMIC_SEQ_CST);
647 _InterlockedIncrement64(__int64 volatile *_Value) {
648   return __atomic_add_fetch(_Value, 1, __ATOMIC_SEQ_CST);
655 _InterlockedDecrement16(short volatile *_Value) {
656   return __atomic_sub_fetch(_Value, 1, __ATOMIC_SEQ_CST);
660 _InterlockedDecrement64(__int64 volatile *_Value) {
661   return __atomic_sub_fetch(_Value, 1, __ATOMIC_SEQ_CST);
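
By contrast, the increment and decrement bodies use __atomic_add_fetch and __atomic_sub_fetch, so they return the updated value, matching Win32 InterlockedIncrement/InterlockedDecrement semantics. The standard reference-count release pattern depends on this; a sketch (the Obj type and helper are mine):

    #include <intrin.h>
    #include <stdlib.h>

    typedef struct Obj { __int64 volatile refs; /* ... payload ... */ } Obj;

    static void obj_release(Obj *o) {
      /* Post-decrement value: exactly one thread observes zero. */
      if (_InterlockedDecrement64(&o->refs) == 0)
        free(o);
    }
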
668 _InterlockedAnd8(char volatile *_Value, char _Mask) {
669   return __atomic_fetch_and(_Value, _Mask, __ATOMIC_SEQ_CST);
672 _InterlockedAnd16(short volatile *_Value, short _Mask) {
673   return __atomic_fetch_and(_Value, _Mask, __ATOMIC_SEQ_CST);
676 _InterlockedAnd(long volatile *_Value, long _Mask) {
677   return __atomic_fetch_and(_Value, _Mask, __ATOMIC_SEQ_CST);
681 _InterlockedAnd64(__int64 volatile *_Value, __int64 _Mask) {
682   return __atomic_fetch_and(_Value, _Mask, __ATOMIC_SEQ_CST);
689 _InterlockedOr8(char volatile *_Value, char _Mask) {
690   return __atomic_fetch_or(_Value, _Mask, __ATOMIC_SEQ_CST);
693 _InterlockedOr16(short volatile *_Value, short _Mask) {
694   return __atomic_fetch_or(_Value, _Mask, __ATOMIC_SEQ_CST);
697 _InterlockedOr(long volatile *_Value, long _Mask) {
698   return __atomic_fetch_or(_Value, _Mask, __ATOMIC_SEQ_CST);
702 _InterlockedOr64(__int64 volatile *_Value, __int64 _Mask) {
703   return __atomic_fetch_or(_Value, _Mask, __ATOMIC_SEQ_CST);
710 _InterlockedXor8(char volatile *_Value, char _Mask) {
711   return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_SEQ_CST);
714 _InterlockedXor16(short volatile *_Value, short _Mask) {
715   return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_SEQ_CST);
718 _InterlockedXor(long volatile *_Value, long _Mask) {
719   return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_SEQ_CST);
723 _InterlockedXor64(__int64 volatile *_Value, __int64 _Mask) {
724   return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_SEQ_CST);
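
Like the ExchangeAdd family, the bitwise group uses the fetch-before-update builtins, so each call returns the word as it was before _Mask was applied. That enables set-and-test idioms; a sketch (the flag constant and helper are mine):

    #include <intrin.h>

    #define FLAG_DIRTY 0x1L

    static long volatile g_flags = 0;

    static int mark_dirty(void) {
      /* _InterlockedOr returns the flags as they were before the OR. */
      long prev = _InterlockedOr(&g_flags, FLAG_DIRTY);
      return (prev & FLAG_DIRTY) == 0;  /* 1 if this call set the bit */
    }
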
731 _InterlockedExchange8(char volatile *_Target, char _Value) {
732   __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_SEQ_CST);
733   return _Value;
736 _InterlockedExchange16(short volatile *_Target, short _Value) {
737   __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_SEQ_CST);
738   return _Value;
742 _InterlockedExchange64(__int64 volatile *_Target, __int64 _Value) {
743   __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_SEQ_CST);
744   return _Value;
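
The exchange bodies use the generic __atomic_exchange builtin, which stores the old contents of _Target through its third argument. Passing &_Value as both the new value and the result slot lets the parameter double as the return slot, so the final return _Value yields the previous contents of _Target. The value-returning builtin form expresses the same operation directly; a sketch assuming a GCC/Clang-style compiler:

    /* Equivalent to the _InterlockedExchange8 body above, using the
     * value-returning __atomic_exchange_n builtin. */
    static char exchange8_sketch(char volatile *t, char v) {
      return __atomic_exchange_n(t, v, __ATOMIC_SEQ_CST);
    }
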