Lines matching refs: _Value

long _InterlockedAddLargeStatistic(__int64 volatile *_Addend, long _Value);
long _InterlockedAnd(long volatile *_Value, long _Mask);
short _InterlockedAnd16(short volatile *_Value, short _Mask);
char _InterlockedAnd8(char volatile *_Value, char _Mask);
long _InterlockedExchange(long volatile *_Target, long _Value);
short _InterlockedExchange16(short volatile *_Target, short _Value);
char _InterlockedExchange8(char volatile *_Target, char _Value);
long __cdecl _InterlockedExchangeAdd(long volatile *_Addend, long _Value);
short _InterlockedExchangeAdd16(short volatile *_Addend, short _Value);
char _InterlockedExchangeAdd8(char volatile *_Addend, char _Value);
long _InterlockedOr(long volatile *_Value, long _Mask);
short _InterlockedOr16(short volatile *_Value, short _Mask);
char _InterlockedOr8(char volatile *_Value, char _Mask);
long _InterlockedXor(long volatile *_Value, long _Mask);
short _InterlockedXor16(short volatile *_Value, short _Mask);
char _InterlockedXor8(char volatile *_Value, char _Mask);
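
Most of these intrinsics hand back the value the target held before the update, which is what makes them usable as building blocks for synchronization. A minimal test-and-set spinlock sketch, assuming a Windows/MSVC-compatible toolchain; spinlock_t, spin_lock, and spin_unlock are illustrative names, not part of intrin.h:

#include <intrin.h>

/* _InterlockedExchange returns the previous value, so seeing 0 means
   this thread performed the 0 -> 1 transition and owns the lock. */
typedef struct { long volatile _Flag; } spinlock_t;

static void spin_lock(spinlock_t *_Lock) {
  while (_InterlockedExchange(&_Lock->_Flag, 1) != 0) {
    /* busy-wait until the holder writes 0 back */
  }
}

static void spin_unlock(spinlock_t *_Lock) {
  _InterlockedExchange(&_Lock->_Flag, 0);
}
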
unsigned int __cdecl _rotl(unsigned int _Value, int _Shift);
unsigned short _rotl16(unsigned short _Value, unsigned char _Shift);
unsigned __int64 __cdecl _rotl64(unsigned __int64 _Value, int _Shift);
unsigned char _rotl8(unsigned char _Value, unsigned char _Shift);
unsigned int __cdecl _rotr(unsigned int _Value, int _Shift);
unsigned short _rotr16(unsigned short _Value, unsigned char _Shift);
unsigned __int64 __cdecl _rotr64(unsigned __int64 _Value, int _Shift);
unsigned char _rotr8(unsigned char _Value, unsigned char _Shift);
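
The rotate intrinsics treat the value as a closed ring of bits: unlike a shift, nothing falls off the end. A small demo, assuming a toolchain where intrin.h provides these:

#include <intrin.h>
#include <stdio.h>

int main(void) {
  unsigned char _Byte = 0x81;             /* 1000 0001 */
  printf("%#x\n", _rotl8(_Byte, 1));      /* 0x3: the high bit wraps to bit 0 */
  printf("%#x\n", _rotr8(_Byte, 1));      /* 0xc0: the low bit wraps to bit 7 */
  printf("%u\n", _rotl(0x80000000u, 4));  /* 8: a rotate, not a shift */
  return 0;
}
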
long _InterlockedAnd_np(long volatile *_Value, long _Mask);
short _InterlockedAnd16_np(short volatile *_Value, short _Mask);
__int64 _InterlockedAnd64_np(__int64 volatile *_Value, __int64 _Mask);
char _InterlockedAnd8_np(char volatile *_Value, char _Mask);
__int64 _InterlockedExchange64(__int64 volatile *_Target, __int64 _Value);
__int64 _InterlockedExchangeAdd64(__int64 volatile *_Addend, __int64 _Value);
void *_InterlockedExchangePointer(void *volatile *_Target, void *_Value);
long _InterlockedOr_np(long volatile *_Value, long _Mask);
short _InterlockedOr16_np(short volatile *_Value, short _Mask);
__int64 _InterlockedOr64(__int64 volatile *_Value, __int64 _Mask);
__int64 _InterlockedOr64_np(__int64 volatile *_Value, __int64 _Mask);
char _InterlockedOr8_np(char volatile *_Value, char _Mask);
long _InterlockedXor_np(long volatile *_Value, long _Mask);
short _InterlockedXor16_np(short volatile *_Value, short _Mask);
__int64 _InterlockedXor64(__int64 volatile *_Value, __int64 _Mask);
__int64 _InterlockedXor64_np(__int64 volatile *_Value, __int64 _Mask);
char _InterlockedXor8_np(char volatile *_Value, char _Mask);
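
_InterlockedExchangePointer is the pointer-sized sibling of _InterlockedExchange and likewise returns the old pointer. One common use, sketched here with illustrative names (node_t and g_head are not part of intrin.h), is detaching an entire lock-free list in one step:

#include <intrin.h>
#include <stddef.h>

typedef struct node { struct node *next; int payload; } node_t;
static node_t *volatile g_head;

static node_t *detach_all(void) {
  /* after the swap the whole chain belongs to this thread alone */
  return (node_t *)_InterlockedExchangePointer((void *volatile *)&g_head, NULL);
}
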
static __inline__ unsigned char _rotl8(unsigned char _Value, unsigned char _Shift) {
  _Shift &= 0x7; /* mask the count so rotating by the full width is a no-op */
  return _Shift ? (_Value << _Shift) | (_Value >> (8 - _Shift)) : _Value;
}
static __inline__ unsigned char _rotr8(unsigned char _Value, unsigned char _Shift) {
  _Shift &= 0x7;
  return _Shift ? (_Value >> _Shift) | (_Value << (8 - _Shift)) : _Value;
}
static __inline__ unsigned short _rotl16(unsigned short _Value, unsigned char _Shift) {
  _Shift &= 0xf;
  return _Shift ? (_Value << _Shift) | (_Value >> (16 - _Shift)) : _Value;
}
static __inline__ unsigned short _rotr16(unsigned short _Value, unsigned char _Shift) {
  _Shift &= 0xf;
  return _Shift ? (_Value >> _Shift) | (_Value << (16 - _Shift)) : _Value;
}
static __inline__ unsigned int _rotl(unsigned int _Value, int _Shift) {
  _Shift &= 0x1f;
  return _Shift ? (_Value << _Shift) | (_Value >> (32 - _Shift)) : _Value;
}
static __inline__ unsigned int _rotr(unsigned int _Value, int _Shift) {
  _Shift &= 0x1f;
  return _Shift ? (_Value >> _Shift) | (_Value << (32 - _Shift)) : _Value;
}
static __inline__ unsigned long _lrotl(unsigned long _Value, int _Shift) {
  _Shift &= 0x1f;
  return _Shift ? (_Value << _Shift) | (_Value >> (32 - _Shift)) : _Value;
}
static __inline__ unsigned long _lrotr(unsigned long _Value, int _Shift) {
  _Shift &= 0x1f;
  return _Shift ? (_Value >> _Shift) | (_Value << (32 - _Shift)) : _Value;
}
static __inline__ unsigned __int64 _rotl64(unsigned __int64 _Value, int _Shift) {
  _Shift &= 0x3f;
  return _Shift ? (_Value << _Shift) | (_Value >> (64 - _Shift)) : _Value;
}
static __inline__ unsigned __int64 _rotr64(unsigned __int64 _Value, int _Shift) {
  _Shift &= 0x3f;
  return _Shift ? (_Value >> _Shift) | (_Value << (64 - _Shift)) : _Value;
}
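
With the count masked to the operand width, rotating left and then right by the same amount is always the identity. A quick self-check of that property:

#include <intrin.h>
#include <assert.h>

int main(void) {
  for (int _Shift = 0; _Shift < 32; ++_Shift)
    assert(_rotr(_rotl(0xDEADBEEFu, _Shift), _Shift) == 0xDEADBEEFu);
  return 0;
}
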
/* 0 as the last __atomic_* argument is __ATOMIC_RELAXED. */
static __inline__ char _InterlockedExchangeAdd8(char volatile *_Addend, char _Value) {
  /* __atomic_add_fetch returns the new value; subtracting recovers the old one */
  return __atomic_add_fetch(_Addend, _Value, 0) - _Value;
}
static __inline__ short _InterlockedExchangeAdd16(short volatile *_Addend, short _Value) {
  return __atomic_add_fetch(_Addend, _Value, 0) - _Value;
}
static __inline__ __int64 _InterlockedExchangeAdd64(__int64 volatile *_Addend, __int64 _Value) {
  return __atomic_add_fetch(_Addend, _Value, 0) - _Value;
}
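
Returning the pre-addition value is what makes these usable as ticket dispensers: each caller observes a distinct value even under contention. A sketch with illustrative names (g_next_id and take_id are not from intrin.h):

#include <intrin.h>

static __int64 volatile g_next_id;

static __int64 take_id(void) {
  /* the value before the +1 is unique to this caller */
  return _InterlockedExchangeAdd64(&g_next_id, 1);
}
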
static __inline__ char _InterlockedExchangeSub8(char volatile *_Subend, char _Value) {
  /* adding _Value back to the post-subtraction result yields the old value */
  return __atomic_sub_fetch(_Subend, _Value, 0) + _Value;
}
static __inline__ short _InterlockedExchangeSub16(short volatile *_Subend, short _Value) {
  return __atomic_sub_fetch(_Subend, _Value, 0) + _Value;
}
static __inline__ long _InterlockedExchangeSub(long volatile *_Subend, long _Value) {
  return __atomic_sub_fetch(_Subend, _Value, 0) + _Value;
}
static __inline__ __int64 _InterlockedExchangeSub64(__int64 volatile *_Subend, __int64 _Value) {
  return __atomic_sub_fetch(_Subend, _Value, 0) + _Value;
}
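
The subtraction variants follow the same contract, returning the value before the subtraction, so a caller can notice it drove a counter below zero and undo the damage. An illustrative sketch (g_free_slots and try_take_slot are not from intrin.h):

#include <intrin.h>

static long volatile g_free_slots = 8;

static int try_take_slot(void) {
  if (_InterlockedExchangeSub(&g_free_slots, 1) > 0)
    return 1;                                /* old count was positive: slot is ours */
  _InterlockedExchangeAdd(&g_free_slots, 1); /* pool was empty: restore the count */
  return 0;
}
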
static __inline__ short _InterlockedIncrement16(short volatile *_Value) {
  /* Increment/Decrement return the resulting (new) value */
  return __atomic_add_fetch(_Value, 1, 0);
}
static __inline__ __int64 _InterlockedIncrement64(__int64 volatile *_Value) {
  return __atomic_add_fetch(_Value, 1, 0);
}
static __inline__ short _InterlockedDecrement16(short volatile *_Value) {
  return __atomic_sub_fetch(_Value, 1, 0);
}
static __inline__ __int64 _InterlockedDecrement64(__int64 volatile *_Value) {
  return __atomic_sub_fetch(_Value, 1, 0);
}
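
Increment and Decrement differ from the Exchange* family in that they return the new value, which is exactly what reference counting needs: the one caller who sees 0 owns the cleanup. A sketch with illustrative names (object_t, retain, release):

#include <intrin.h>

typedef struct { short volatile _Refs; } object_t;

static void retain(object_t *_Obj) { _InterlockedIncrement16(&_Obj->_Refs); }

static void release(object_t *_Obj) {
  if (_InterlockedDecrement16(&_Obj->_Refs) == 0) {
    /* last reference dropped: free _Obj here */
  }
}
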
static __inline__ char _InterlockedAnd8(char volatile *_Value, char _Mask) {
  /* __atomic_fetch_and returns the original value, as _InterlockedAnd is
     documented to do; __atomic_and_fetch would return the new value */
  return __atomic_fetch_and(_Value, _Mask, 0);
}
static __inline__ short _InterlockedAnd16(short volatile *_Value, short _Mask) {
  return __atomic_fetch_and(_Value, _Mask, 0);
}
static __inline__ long _InterlockedAnd(long volatile *_Value, long _Mask) {
  return __atomic_fetch_and(_Value, _Mask, 0);
}
static __inline__ __int64 _InterlockedAnd64(__int64 volatile *_Value, __int64 _Mask) {
  return __atomic_fetch_and(_Value, _Mask, 0);
}
static __inline__ char _InterlockedOr8(char volatile *_Value, char _Mask) {
  /* fetch_or, like fetch_and above, yields the pre-update value */
  return __atomic_fetch_or(_Value, _Mask, 0);
}
static __inline__ short _InterlockedOr16(short volatile *_Value, short _Mask) {
  return __atomic_fetch_or(_Value, _Mask, 0);
}
static __inline__ long _InterlockedOr(long volatile *_Value, long _Mask) {
  return __atomic_fetch_or(_Value, _Mask, 0);
}
static __inline__ __int64 _InterlockedOr64(__int64 volatile *_Value, __int64 _Mask) {
  return __atomic_fetch_or(_Value, _Mask, 0);
}
static __inline__ char _InterlockedXor8(char volatile *_Value, char _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, 0);
}
static __inline__ short _InterlockedXor16(short volatile *_Value, short _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, 0);
}
static __inline__ long _InterlockedXor(long volatile *_Value, long _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, 0);
}
static __inline__ __int64 _InterlockedXor64(__int64 volatile *_Value, __int64 _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, 0);
}
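
Because the boolean intrinsics return the flags as they were before the update, a caller can both set a bit and learn whether it was the one to set it, all in a single atomic step. A sketch with illustrative names (FLAG_READY, g_flags):

#include <intrin.h>

enum { FLAG_READY = 0x1 };
static long volatile g_flags;

static int set_ready_once(void) {
  /* nonzero only for the single caller that flipped the bit on */
  return (_InterlockedOr(&g_flags, FLAG_READY) & FLAG_READY) == 0;
}

static void clear_ready(void) {
  _InterlockedAnd(&g_flags, ~FLAG_READY);
}
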
static __inline__ char _InterlockedExchange8(char volatile *_Target, char _Value) {
  /* the generic __atomic_exchange writes the previous value through its
     third argument, so _Value holds the old contents on return */
  __atomic_exchange(_Target, &_Value, &_Value, 0);
  return _Value;
}
static __inline__ short _InterlockedExchange16(short volatile *_Target, short _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, 0);
  return _Value;
}
static __inline__ __int64 _InterlockedExchange64(__int64 volatile *_Target, __int64 _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, 0);
  return _Value;
}
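
The &_Value-for-both-arguments trick above leans on the generic __atomic_exchange builtin reading the desired value before it stores the previous one back through its third argument. A standalone clang/GCC demo of that behavior:

#include <stdio.h>

int main(void) {
  int _Target = 7, _Value = 42;
  __atomic_exchange(&_Target, &_Value, &_Value, __ATOMIC_SEQ_CST);
  printf("target=%d old=%d\n", _Target, _Value); /* target=42 old=7 */
  return 0;
}
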