Lines Matching defs:Ptr

71 void recycle(void *Ptr) {
73 Chunk::loadHeader(Allocator.Cookie, Ptr, &Header);
75 reportInvalidChunkState(AllocatorAction::Recycling, Ptr);
78 Chunk::storeHeader(Allocator.Cookie, Ptr, &Header);
81 Ptr = untagPointer(Ptr);
82 void *BlockBegin = Allocator::getBlockBegin(Ptr, &Header);
92 void *Ptr = Cache.allocate(QuarantineClassId);
94 if (UNLIKELY(!Ptr))
97 Ptr = reinterpret_cast<void *>(reinterpret_cast<uptr>(Ptr) +
103 Chunk::storeHeader(Allocator.Cookie, Ptr, &Header);
109 storeTags(reinterpret_cast<uptr>(Ptr),
110 reinterpret_cast<uptr>(Ptr) + sizeof(QuarantineBatch));
112 return Ptr;
115 void deallocate(void *Ptr) {
119 Chunk::loadHeader(Allocator.Cookie, Ptr, &Header);
122 reportInvalidChunkState(AllocatorAction::Deallocating, Ptr);
128 Chunk::storeHeader(Allocator.Cookie, Ptr, &Header);
130 reinterpret_cast<void *>(reinterpret_cast<uptr>(Ptr) -
285 ALWAYS_INLINE void *getHeaderTaggedPointer(void *Ptr) {
287 return Ptr;
288 auto UntaggedPtr = untagPointer(Ptr);
289 if (UntaggedPtr != Ptr)
294 return addHeaderTag(Ptr);
297 ALWAYS_INLINE uptr addHeaderTag(uptr Ptr) {
299 return Ptr;
300 return addFixedTag(Ptr, 2);
303 ALWAYS_INLINE void *addHeaderTag(void *Ptr) {
304 return reinterpret_cast<void *>(addHeaderTag(reinterpret_cast<uptr>(Ptr)));
321 uptr computeOddEvenMaskForPointerMaybe(const Options &Options, uptr Ptr,
328 // Size to Ptr will flip the least significant set bit of Size in Ptr, so
331 return 0x5555U << ((Ptr >> SizeClassMap::getSizeLSBByClassId(ClassId)) & 1);
350 if (void *Ptr = GuardedAlloc.allocate(Size, Alignment)) {
355 return Ptr;
427 NOINLINE void deallocate(void *Ptr, Chunk::Origin Origin, uptr DeleteSize = 0,
429 if (UNLIKELY(!Ptr))
441 if (UNLIKELY(GuardedAlloc.pointerIsMine(Ptr))) {
442 GuardedAlloc.deallocate(Ptr);
451 if (UNLIKELY(!isAligned(reinterpret_cast<uptr>(Ptr), MinAlignment)))
452 reportMisalignedPointer(AllocatorAction::Deallocating, Ptr);
454 void *TaggedPtr = Ptr;
455 Ptr = getHeaderTaggedPointer(Ptr);
458 Chunk::loadHeader(Cookie, Ptr, &Header);
461 reportInvalidChunkState(AllocatorAction::Deallocating, Ptr);
469 reportDeallocTypeMismatch(AllocatorAction::Deallocating, Ptr,
474 const uptr Size = getSize(Ptr, &Header);
477 reportDeleteSizeMismatch(Ptr, DeleteSize, Size);
749 uptr getUsableSize(const void *Ptr) {
750 if (UNLIKELY(!Ptr))
753 return getAllocSize(Ptr);
756 uptr getAllocSize(const void *Ptr) {
760 if (UNLIKELY(GuardedAlloc.pointerIsMine(Ptr)))
761 return GuardedAlloc.getSize(Ptr);
764 Ptr = getHeaderTaggedPointer(const_cast<void *>(Ptr));
766 Chunk::loadHeader(Cookie, Ptr, &Header);
770 reportInvalidChunkState(AllocatorAction::Sizing, const_cast<void *>(Ptr));
772 return getSize(Ptr, &Header);
783 bool isOwned(const void *Ptr) {
786 if (GuardedAlloc.pointerIsMine(Ptr))
789 if (!Ptr || !isAligned(reinterpret_cast<uptr>(Ptr), MinAlignment))
791 Ptr = getHeaderTaggedPointer(const_cast<void *>(Ptr));
793 return Chunk::isValid(Cookie, Ptr, &Header) &&
978 atomic_uptr Ptr;
1046 static inline void *getBlockBegin(const void *Ptr,
1049 reinterpret_cast<uptr>(Ptr) - Chunk::getHeaderSize() -
1054 inline uptr getSize(const void *Ptr, Chunk::UnpackedHeader *Header) {
1059 Ptr = untagPointer(const_cast<void *>(Ptr));
1060 return SecondaryT::getBlockEnd(getBlockBegin(Ptr, Header)) -
1061 reinterpret_cast<uptr>(Ptr) - SizeOrUnusedBytes;
1118 void *Ptr = reinterpret_cast<void *>(UserPtr);
1119 void *TaggedPtr = Ptr;
1202 TaggedPtr = prepareTaggedChunk(Ptr, Size, OddEvenMask, BlockEnd);
1204 storePrimaryAllocationStackMaybe(Options, Ptr);
1209 Ptr = addHeaderTag(Ptr);
1210 storeTags(reinterpret_cast<uptr>(Block), reinterpret_cast<uptr>(Ptr));
1211 storeSecondaryAllocationStackMaybe(Options, Ptr, Size);
1232 Chunk::storeHeader(Cookie, Ptr, &Header);
1240 void *Ptr = getHeaderTaggedPointer(TaggedPtr);
1256 Ptr = untagPointer(Ptr);
1257 BlockBegin = getBlockBegin(Ptr, Header);
1262 retagBlock(Options, TaggedPtr, Ptr, Header, Size, BypassQuarantine);
1265 Chunk::storeHeader(Cookie, Ptr, Header);
1287 QuarantineCallback(*this, TSD->getCache()), Ptr, Size);
1291 NOINLINE void *retagBlock(const Options &Options, void *TaggedPtr, void *&Ptr,
1297 storeDeallocationStackMaybe(Options, Ptr, PrevTag, Size);
1301 Options, reinterpret_cast<uptr>(getBlockBegin(Ptr, Header)),
1305 setRandomTag(Ptr, Size, OddEvenMask | (1UL << PrevTag), &TaggedBegin,
1309 Ptr = untagPointer(Ptr);
1310 void *BlockBegin = getBlockBegin(Ptr, Header);
1313 reinterpret_cast<uptr>(Ptr));
1355 void *prepareTaggedChunk(void *Ptr, uptr Size, uptr ExcludeMask,
1362 storeTag(reinterpret_cast<uptr>(Ptr) - archMemoryTagGranuleSize());
1365 setRandomTag(Ptr, Size, ExcludeMask, &TaggedBegin, &TaggedEnd);
1388 void storePrimaryAllocationStackMaybe(const Options &Options, void *Ptr) {
1394 auto *Ptr32 = reinterpret_cast<u32 *>(Ptr);
1399 void storeRingBufferEntry(AllocationRingBuffer *RB, void *Ptr,
1409 // ensure that the compiler does not move the stores to Ptr in between the
1411 atomic_store_relaxed(&Entry->Ptr, 0);
1421 atomic_store_relaxed(&Entry->Ptr, reinterpret_cast<uptr>(Ptr));
1424 void storeSecondaryAllocationStackMaybe(const Options &Options, void *Ptr,
1434 auto *Ptr32 = reinterpret_cast<u32 *>(Ptr);
1438 storeRingBufferEntry(RB, untagPointer(Ptr), Trace, Tid, Size, 0, 0);
1441 void storeDeallocationStackMaybe(const Options &Options, void *Ptr,
1448 auto *Ptr32 = reinterpret_cast<u32 *>(Ptr);
1455 storeRingBufferEntry(RB, addFixedTag(untagPointer(Ptr), PrevTag),
1565 uptr EntryPtr = atomic_load_relaxed(&Entry->Ptr);
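
The mask computation matched at lines 321-331 above is the heart of Scudo's optional odd/even memory-tagging scheme: neighbouring blocks in a size class receive tags of opposite parity, so a linear overflow into the adjacent chunk always trips a tag mismatch. The following standalone sketch (not the Scudo sources; SizeLSB, BlockSize, and the 16-tag space are illustrative assumptions standing in for SizeClassMap::getSizeLSBByClassId and the MTE tag range) shows how the exclude mask alternates between 0x5555 and 0xAAAA for consecutive blocks:

#include <cstdint>
#include <cstdio>

using uptr = uintptr_t;

// Position of the least significant set bit of the block size, e.g. 6 for a
// 64-byte size class (stands in for SizeClassMap::getSizeLSBByClassId).
constexpr uptr SizeLSB = 6;

// Adjacent blocks differ by the block size, so bit SizeLSB of their addresses
// alternates 0,1,0,1,... Selecting on that bit yields 0x5555 (even tags
// excluded, so an odd tag is chosen) for one block and 0xAAAA (odd tags
// excluded) for its neighbours.
uptr computeOddEvenMask(uptr Ptr) {
  return 0x5555U << ((Ptr >> SizeLSB) & 1);
}

int main() {
  constexpr uptr BlockSize = uptr(1) << SizeLSB; // 64-byte blocks
  const uptr Base = 0x1000;
  for (int I = 0; I < 4; ++I) {
    const uptr Ptr = Base + I * BlockSize;
    printf("block %d: exclude mask 0x%04lx\n", I,
           static_cast<unsigned long>(computeOddEvenMask(Ptr)));
  }
  return 0;
}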
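
The storeRingBufferEntry lines (1399-1421 above) follow an invalidate/fill/publish pattern: the Ptr slot is zeroed first, the payload fields are filled in, and the real pointer is stored last, with the quoted comment indicating that fences keep the compiler from moving those stores around the stores to Ptr. A minimal sketch of that pattern, using std::atomic and a made-up Entry layout rather than the actual Scudo types:

#include <atomic>
#include <cstdint>

struct Entry {
  std::atomic<uintptr_t> Ptr{0}; // 0 means "entry not valid yet"
  uint32_t AllocationTrace = 0;
  uint32_t AllocationTid = 0;
  uintptr_t AllocationSize = 0;
};

void storeEntry(Entry &E, void *Ptr, uint32_t Trace, uint32_t Tid,
                uintptr_t Size) {
  // 1. Invalidate first so a reader (e.g. a crash handler walking the ring
  //    buffer) that finds Ptr == 0 skips a half-written entry.
  E.Ptr.store(0, std::memory_order_relaxed);
  // 2. Compiler-only fences keep the payload stores from being reordered
  //    around the stores to Ptr.
  std::atomic_signal_fence(std::memory_order_seq_cst);
  E.AllocationTrace = Trace;
  E.AllocationTid = Tid;
  E.AllocationSize = Size;
  std::atomic_signal_fence(std::memory_order_seq_cst);
  // 3. Publish last: once Ptr is non-zero, the payload is complete.
  E.Ptr.store(reinterpret_cast<uintptr_t>(Ptr), std::memory_order_relaxed);
}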