Lines matching refs:stack (the allocator code paths that take a BufferedStackTrace or record a stack-depot id)
55 u32 tid, u32 stack) { in AtomicContextStore() argument
58 context += stack; in AtomicContextStore()
63 u32 &tid, u32 &stack) { in AtomicContextLoad() argument
65 stack = context; in AtomicContextLoad()
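The two hits above (55-65) are the helpers that pack a thread id and a 32-bit stack-depot id into one 64-bit word, so a chunk's "who allocated this, and where" can be read and written with a single atomic access. A minimal standalone sketch of the same packing, using std::atomic<uint64_t> in place of the runtime's atomic_uint64_t and assuming, as the `context += stack` fragment suggests, that the tid lives in the high half:

    #include <atomic>
    #include <cstdint>

    // Pack tid into the high 32 bits and the stack id into the low 32 bits,
    // so both values are published with one relaxed atomic store.
    static void AtomicContextStore(std::atomic<uint64_t> *ctx,
                                   uint32_t tid, uint32_t stack) {
      uint64_t context = (static_cast<uint64_t>(tid) << 32) | stack;
      ctx->store(context, std::memory_order_relaxed);
    }

    // The matching load splits the word back into its two halves.
    static void AtomicContextLoad(const std::atomic<uint64_t> *ctx,
                                  uint32_t &tid, uint32_t &stack) {
      uint64_t context = ctx->load(std::memory_order_relaxed);
      stack = static_cast<uint32_t>(context);      // low 32 bits
      tid = static_cast<uint32_t>(context >> 32);  // high 32 bits
    }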
119 void SetAllocContext(u32 tid, u32 stack) { in SetAllocContext() argument
120 AtomicContextStore(&alloc_context_id, tid, stack); in SetAllocContext()
123 void GetAllocContext(u32 &tid, u32 &stack) const { in GetAllocContext()
124 AtomicContextLoad(&alloc_context_id, tid, stack); in GetAllocContext()
132 void SetFreeContext(u32 tid, u32 stack) { in SetFreeContext() argument
133 AtomicContextStore(&free_context_id, tid, stack); in SetFreeContext()
136 void GetFreeContext(u32 &tid, u32 &stack) const { in GetFreeContext()
137 AtomicContextLoad(&free_context_id, tid, stack); in GetFreeContext()
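Lines 119-137 are the chunk-header accessors built on those helpers: one packed word records the allocation context, a second records the free context. Building on the AtomicContextStore/AtomicContextLoad sketch above, a trimmed-down, hypothetical header would look like this (field and method names follow the hits; the real AsanChunk carries much more state):

    // Hypothetical, reduced chunk header: only the two atomic context words
    // that the hits above touch.
    struct ChunkContexts {
      std::atomic<uint64_t> alloc_context_id{0};  // who allocated, and where
      std::atomic<uint64_t> free_context_id{0};   // who freed, and where

      void SetAllocContext(uint32_t tid, uint32_t stack) {
        AtomicContextStore(&alloc_context_id, tid, stack);
      }
      void GetAllocContext(uint32_t &tid, uint32_t &stack) const {
        AtomicContextLoad(&alloc_context_id, tid, stack);
      }
      void SetFreeContext(uint32_t tid, uint32_t stack) {
        AtomicContextStore(&free_context_id, tid, stack);
      }
      void GetFreeContext(uint32_t &tid, uint32_t &stack) const {
        AtomicContextLoad(&free_context_id, tid, stack);
      }
    };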
194 QuarantineCallback(AllocatorCache *cache, BufferedStackTrace *stack) in QuarantineCallback()
196 stack_(stack) { in QuarantineCallback()
471 bool UpdateAllocationStack(uptr addr, BufferedStackTrace *stack) { in UpdateAllocationStack()
478 m->SetAllocContext(t ? t->tid() : kMainTid, StackDepotPut(*stack)); in UpdateAllocationStack()
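UpdateAllocationStack (471-478) shows the other half of the context: the stack value is not a pointer to the BufferedStackTrace but the u32 handle returned by StackDepotPut, which deduplicates whole traces in a global depot so the chunk header only needs four bytes per stack. The listing shows only the call sites, so the following is a deliberately simplified, single-threaded illustration of the hash-consing idea, not the sanitizer's StackDepot:

    #include <cstdint>
    #include <map>
    #include <vector>

    // Toy "stack depot": identical traces map to the same 32-bit id, so a
    // chunk header can remember an entire call stack in 4 bytes.
    class ToyStackDepot {
     public:
      uint32_t Put(const std::vector<uintptr_t> &trace) {
        auto it = ids_.find(trace);
        if (it != ids_.end())
          return it->second;  // seen before: reuse the existing id
        uint32_t id = static_cast<uint32_t>(traces_.size()) + 1;  // 0 = none
        ids_.emplace(trace, id);
        traces_.push_back(trace);
        return id;
      }
      const std::vector<uintptr_t> *Get(uint32_t id) const {
        if (id == 0 || id > traces_.size()) return nullptr;
        return &traces_[id - 1];
      }
     private:
      std::map<std::vector<uintptr_t>, uint32_t> ids_;
      std::vector<std::vector<uintptr_t>> traces_;
    };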
483 void *Allocate(uptr size, uptr alignment, BufferedStackTrace *stack, in Allocate()
490 ReportRssLimitExceeded(stack); in Allocate()
493 CHECK(stack); in Allocate()
529 ReportAllocationSizeTooBig(size, needed_size, malloc_limit, stack); in Allocate()
546 ReportOutOfMemory(size, stack); in Allocate()
572 m->SetAllocContext(t ? t->tid() : kMainTid, StackDepotPut(*stack)); in Allocate()
617 BufferedStackTrace *stack) { in AtomicallySetQuarantineFlagIfAllocated()
623 ReportInvalidFree(ptr, old_chunk_state, stack); in AtomicallySetQuarantineFlagIfAllocated()
635 void QuarantineChunk(AsanChunk *m, void *ptr, BufferedStackTrace *stack) { in QuarantineChunk()
639 m->SetFreeContext(t ? t->tid() : 0, StackDepotPut(*stack)); in QuarantineChunk()
665 quarantine.Put(GetQuarantineCache(ms), QuarantineCallback(ac, stack), m, in QuarantineChunk()
670 quarantine.Put(&fallback_quarantine_cache, QuarantineCallback(ac, stack), in QuarantineChunk()
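AtomicallySetQuarantineFlagIfAllocated (617-623) is the double-free gate: the chunk state is flipped from allocated to quarantined with one compare-exchange, so of two racing frees exactly one wins and the loser lands in ReportInvalidFree. QuarantineChunk (635-670) then records the free context and hands the chunk to the quarantine, threading the caller's stack through QuarantineCallback. A standalone sketch of the compare-exchange gate (the state values are placeholders):

    #include <atomic>
    #include <cstdint>

    enum ChunkState : uint8_t { kAvailable = 0, kAllocated = 2, kQuarantine = 3 };

    // Returns true for exactly one caller; a second free of the same chunk
    // observes the already-updated state and can be reported as invalid.
    bool AtomicallySetQuarantineFlagIfAllocated(std::atomic<uint8_t> &state) {
      uint8_t expected = kAllocated;
      if (state.compare_exchange_strong(expected, kQuarantine,
                                        std::memory_order_acquire)) {
        return true;  // we own the transition into the quarantine
      }
      // expected now holds the observed state (kQuarantine or kAvailable);
      // this is where the real code reports the invalid free.
      return false;
    }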
676 BufferedStackTrace *stack, AllocType alloc_type) { in Deallocate()
688 ReportFreeNotMalloced(p, stack); in Deallocate()
696 if (!AtomicallySetQuarantineFlagIfAllocated(m, ptr, stack)) return; in Deallocate()
700 ReportAllocTypeMismatch((uptr)ptr, stack, (AllocType)m->alloc_type, in Deallocate()
709 ReportNewDeleteTypeMismatch(p, delete_size, delete_alignment, stack); in Deallocate()
713 QuarantineChunk(m, ptr, stack); in Deallocate()
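Deallocate (676-713) layers the remaining free-time diagnostics on top of that gate: a pointer the allocator never returned (ReportFreeNotMalloced), a malloc/new versus free/delete mismatch (ReportAllocTypeMismatch), and a sized or aligned operator delete whose arguments disagree with the allocation (ReportNewDeleteTypeMismatch). A sketch of just the alloc-type check, with assumed enum values:

    #include <cstdio>

    // Assumed allocation origins; the real enum lives in the allocator headers.
    enum AllocType { FROM_MALLOC = 1, FROM_NEW = 2, FROM_NEW_BR = 3 };

    // Flag frees whose flavor does not match how the memory was obtained,
    // e.g. free() on a pointer that came from operator new.
    void CheckAllocTypeMismatch(AllocType alloc_type, AllocType dealloc_type) {
      if (alloc_type != dealloc_type) {
        std::fprintf(stderr,
                     "alloc-dealloc-mismatch: allocated as %d, freed as %d\n",
                     alloc_type, dealloc_type);
      }
    }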
716 void *Reallocate(void *old_ptr, uptr new_size, BufferedStackTrace *stack) { in Reallocate()
726 void *new_ptr = Allocate(new_size, 8, stack, FROM_MALLOC, true); in Reallocate()
730 ReportInvalidFree(old_ptr, chunk_state, stack); in Reallocate()
736 Deallocate(old_ptr, 0, 0, stack, FROM_MALLOC); in Reallocate()
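Reallocate (716-736) follows the usual allocate-copy-release shape, except that the release half quarantines the old chunk, and an old pointer found in an unexpected state triggers ReportInvalidFree. Stripped of the ASan plumbing, the shape is:

    #include <cstdlib>
    #include <cstring>

    // Sketch: new block first, copy the smaller of the two sizes, then release
    // the old block. ASan's version releases into the quarantine instead of
    // handing the memory straight back.
    void *ReallocateSketch(void *old_ptr, size_t old_size, size_t new_size) {
      void *new_ptr = malloc(new_size);
      if (!new_ptr) return nullptr;  // old_ptr stays valid on failure
      size_t copy = old_size < new_size ? old_size : new_size;
      memcpy(new_ptr, old_ptr, copy);
      free(old_ptr);
      return new_ptr;
    }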
741 void *Calloc(uptr nmemb, uptr size, BufferedStackTrace *stack) { in Calloc()
745 ReportCallocOverflow(nmemb, size, stack); in Calloc()
747 void *ptr = Allocate(nmemb * size, 8, stack, FROM_MALLOC, false); in Calloc()
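Calloc (741-747) must reject nmemb * size products that wrap before the product ever reaches Allocate; that is what the ReportCallocOverflow hit is about, and the same guard protects reallocarray further down (977). The standard overflow test as a standalone helper:

    #include <cstdint>

    // True if nmemb * size would overflow size_t, in which case calloc must
    // fail instead of silently allocating a too-small block.
    bool CallocWouldOverflow(size_t nmemb, size_t size) {
      if (nmemb == 0 || size == 0) return false;
      return nmemb > SIZE_MAX / size;
    }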
755 void ReportInvalidFree(void *ptr, u8 chunk_state, BufferedStackTrace *stack) { in ReportInvalidFree()
757 ReportDoubleFree((uptr)ptr, stack); in ReportInvalidFree()
759 ReportFreeNotMalloced((uptr)ptr, stack); in ReportInvalidFree()
762 void CommitBack(AsanThreadLocalMallocStorage *ms, BufferedStackTrace *stack) { in CommitBack()
764 quarantine.Drain(GetQuarantineCache(ms), QuarantineCallback(ac, stack)); in CommitBack()
831 void Purge(BufferedStackTrace *stack) { in Purge()
837 stack)); in Purge()
843 stack)); in Purge()
892 u32 stack = 0; in AllocTid() local
893 chunk_->GetAllocContext(tid, stack); in AllocTid()
901 u32 stack = 0; in FreeTid() local
902 chunk_->GetFreeContext(tid, stack); in FreeTid()
912 u32 stack = 0; in GetAllocStackId() local
913 chunk_->GetAllocContext(tid, stack); in GetAllocStackId()
914 return stack; in GetAllocStackId()
921 u32 stack = 0; in GetFreeStackId() local
922 chunk_->GetFreeContext(tid, stack); in GetFreeStackId()
923 return stack; in GetFreeStackId()
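Lines 892-923 are the read side: AllocTid/FreeTid and GetAllocStackId/GetFreeStackId unpack the stored contexts and return either the tid half or the stack-id half, and the id can then be resolved back into a full trace through the depot. Reusing the ToyStackDepot sketch above, the lookup direction looks roughly like this:

    #include <cstdint>
    #include <cstdio>
    #include <vector>

    // Given a chunk's stored stack id, recover and print the recorded frames.
    // depot is the ToyStackDepot from the earlier sketch; 0 means "no stack".
    void PrintAllocStack(const ToyStackDepot &depot, uint32_t stack_id) {
      const std::vector<uintptr_t> *trace = depot.Get(stack_id);
      if (!trace) {
        std::puts("  <no allocation stack recorded>");
        return;
      }
      int frame = 0;
      for (uintptr_t pc : *trace)
        std::printf("  #%d %p\n", frame++, reinterpret_cast<void *>(pc));
    }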
947 instance.CommitBack(this, &stack); in CommitBack()
954 void asan_free(void *ptr, BufferedStackTrace *stack, AllocType alloc_type) { in asan_free() argument
955 instance.Deallocate(ptr, 0, 0, stack, alloc_type); in asan_free()
959 BufferedStackTrace *stack, AllocType alloc_type) { in asan_delete() argument
960 instance.Deallocate(ptr, size, alignment, stack, alloc_type); in asan_delete()
963 void *asan_malloc(uptr size, BufferedStackTrace *stack) { in asan_malloc() argument
964 return SetErrnoOnNull(instance.Allocate(size, 8, stack, FROM_MALLOC, true)); in asan_malloc()
967 void *asan_calloc(uptr nmemb, uptr size, BufferedStackTrace *stack) { in asan_calloc() argument
968 return SetErrnoOnNull(instance.Calloc(nmemb, size, stack)); in asan_calloc()
972 BufferedStackTrace *stack) { in asan_reallocarray() argument
977 ReportReallocArrayOverflow(nmemb, size, stack); in asan_reallocarray()
979 return asan_realloc(p, nmemb * size, stack); in asan_reallocarray()
982 void *asan_realloc(void *p, uptr size, BufferedStackTrace *stack) { in asan_realloc() argument
984 return SetErrnoOnNull(instance.Allocate(size, 8, stack, FROM_MALLOC, true)); in asan_realloc()
987 instance.Deallocate(p, 0, 0, stack, FROM_MALLOC); in asan_realloc()
993 return SetErrnoOnNull(instance.Reallocate(p, size, stack)); in asan_realloc()
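The asan_realloc hits (982-993) reflect realloc's three-way contract: a null pointer behaves like malloc (984), a zero size frees the pointer and by default returns null (987), and anything else goes to Reallocate (993). The same dispatch with the internal entry points swapped for libc calls:

    #include <cstdlib>

    // Sketch of the realloc dispatch visible in the hits above; the real code
    // routes every branch through the ASan allocator and threads the caller's
    // BufferedStackTrace into each call.
    void *ReallocDispatch(void *p, size_t size) {
      if (p == nullptr)
        return malloc(size);   // realloc(nullptr, n) == malloc(n)
      if (size == 0) {
        free(p);               // free-and-return-null behavior for size 0
        return nullptr;
      }
      return realloc(p, size); // the general allocate-copy-release path
    }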
996 void *asan_valloc(uptr size, BufferedStackTrace *stack) { in asan_valloc() argument
998 instance.Allocate(size, GetPageSizeCached(), stack, FROM_MALLOC, true)); in asan_valloc()
1001 void *asan_pvalloc(uptr size, BufferedStackTrace *stack) { in asan_pvalloc() argument
1007 ReportPvallocOverflow(size, stack); in asan_pvalloc()
1012 instance.Allocate(size, PageSize, stack, FROM_MALLOC, true)); in asan_pvalloc()
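asan_valloc and asan_pvalloc (996-1012) both produce page-aligned memory; pvalloc additionally rounds the size up to a whole number of pages, and ReportPvallocOverflow (1007) fires when that rounding itself would overflow. A standalone sketch of the rounding plus its overflow check, assuming a power-of-two page size:

    #include <cstdint>

    // Round size up to a multiple of page_size, detecting the case where the
    // rounded value would not fit in size_t. Returns false on overflow.
    bool RoundUpToPageSize(size_t size, size_t page_size, size_t *rounded) {
      if (size > SIZE_MAX - page_size + 1)
        return false;                    // pvalloc overflow: report and fail
      *rounded = size == 0 ? page_size   // pvalloc(0) still returns one page
                           : (size + page_size - 1) & ~(page_size - 1);
      return true;
    }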
1015 void *asan_memalign(uptr alignment, uptr size, BufferedStackTrace *stack, in asan_memalign() argument
1021 ReportInvalidAllocationAlignment(alignment, stack); in asan_memalign()
1024 instance.Allocate(size, alignment, stack, alloc_type, true)); in asan_memalign()
1027 void *asan_aligned_alloc(uptr alignment, uptr size, BufferedStackTrace *stack) { in asan_aligned_alloc() argument
1032 ReportInvalidAlignedAllocAlignment(size, alignment, stack); in asan_aligned_alloc()
1035 instance.Allocate(size, alignment, stack, FROM_MALLOC, true)); in asan_aligned_alloc()
1039 BufferedStackTrace *stack) { in asan_posix_memalign() argument
1043 ReportInvalidPosixMemalignAlignment(alignment, stack); in asan_posix_memalign()
1045 void *ptr = instance.Allocate(size, alignment, stack, FROM_MALLOC, true); in asan_posix_memalign()
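The memalign family (1015-1045) differs mainly in which alignment rule each entry point enforces before calling Allocate: memalign wants a power of two, aligned_alloc (under the C11 wording these checks follow) also wants the size to be a multiple of the alignment, and posix_memalign additionally requires a multiple of sizeof(void*). The three predicates as standalone helpers:

    #include <cstddef>

    static bool IsPowerOfTwo(size_t x) { return x != 0 && (x & (x - 1)) == 0; }

    // memalign: alignment must be a power of two.
    bool ValidMemalignAlignment(size_t alignment) {
      return IsPowerOfTwo(alignment);
    }

    // aligned_alloc (C11 wording): power-of-two alignment and a size that is
    // a multiple of it.
    bool ValidAlignedAllocRequest(size_t alignment, size_t size) {
      return IsPowerOfTwo(alignment) && (size % alignment) == 0;
    }

    // posix_memalign: power of two and a multiple of sizeof(void*), per POSIX.
    bool ValidPosixMemalignAlignment(size_t alignment) {
      return IsPowerOfTwo(alignment) && (alignment % sizeof(void *)) == 0;
    }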
1059 ReportMallocUsableSizeNotOwned((uptr)ptr, &stack); in asan_malloc_usable_size()
1145 u32 stack = 0; in stack_trace_id() local
1146 m->GetAllocContext(tid, stack); in stack_trace_id()
1147 return stack; in stack_trace_id()
1219 ReportSanitizerGetAllocatedSizeNotOwned(ptr, &stack); in __sanitizer_get_allocated_size()
1226 instance.Purge(&stack); in __sanitizer_purge_allocator()
1231 return instance.UpdateAllocationStack((uptr)addr, &stack); in __asan_update_allocation_context()
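The tail of the listing (1145-1231) is the public surface over the stored contexts: __sanitizer_get_allocated_size and __sanitizer_purge_allocator from sanitizer/allocator_interface.h, and __asan_update_allocation_context from sanitizer/asan_interface.h, which re-stamps a live allocation with the current thread and stack via UpdateAllocationStack. Assuming those shipped declarations, a caller built with -fsanitize=address could exercise them like this:

    #include <sanitizer/allocator_interface.h>
    #include <sanitizer/asan_interface.h>
    #include <cstdio>
    #include <cstdlib>

    // Illustrative use of the interface functions the hits above implement;
    // the ASan runtime must be linked in for these symbols to do anything.
    int main() {
      void *p = malloc(128);

      // Size as tracked by the allocator for this live pointer.
      std::printf("allocated size: %zu\n", __sanitizer_get_allocated_size(p));

      // Re-record the allocation's tid/stack as if it were allocated here.
      if (__asan_update_allocation_context(p))
        std::puts("allocation context updated");

      free(p);

      // Ask the allocator to return unused cached memory to the OS.
      __sanitizer_purge_allocator();
      return 0;
    }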