Lines Matching defs:stack

57 u32 tid, u32 stack) {
60 context += stack;
65 u32 &tid, u32 &stack) {
67 stack = context;
121 void SetAllocContext(u32 tid, u32 stack) {
122 AtomicContextStore(&alloc_context_id, tid, stack);
125 void GetAllocContext(u32 &tid, u32 &stack) const {
126 AtomicContextLoad(&alloc_context_id, tid, stack);
134 void SetFreeContext(u32 tid, u32 stack) {
135 AtomicContextStore(&free_context_id, tid, stack);
138 void GetFreeContext(u32 &tid, u32 &stack) const {
139 AtomicContextLoad(&free_context_id, tid, stack);
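
The fragments at 57–67 ("context += stack", "stack = context") and the SetAllocContext/GetAllocContext/SetFreeContext/GetFreeContext wrappers at 121–139 point at one pattern: each (tid, stack-id) pair is packed into a single 64-bit word so it can be published and read with one atomic access, and a reader never pairs a tid from one event with a stack from another. A minimal self-contained sketch of that pack/unpack idea, assuming tid sits in the upper 32 bits (the bodies below are my reconstruction, not the original helpers):

    #include <atomic>
    #include <cstdint>

    using u32 = uint32_t;
    using u64 = uint64_t;

    // Pack tid into the upper half and the stack-depot id into the lower
    // half, then publish the pair with a single relaxed 64-bit store.
    static void AtomicContextStore(std::atomic<u64> *ctx, u32 tid, u32 stack) {
      u64 context = tid;
      context <<= 32;
      context += stack;                     // lower half: "context += stack"
      ctx->store(context, std::memory_order_relaxed);
    }

    // One load gives both halves back, so tid and stack are always observed
    // as a consistent pair.
    static void AtomicContextLoad(const std::atomic<u64> *ctx,
                                  u32 &tid, u32 &stack) {
      u64 context = ctx->load(std::memory_order_relaxed);
      stack = static_cast<u32>(context);    // lower half: "stack = context"
      tid = static_cast<u32>(context >> 32);
    }

    int main() {
      std::atomic<u64> alloc_context_id{0};
      u32 tid = 0, stack = 0;
      AtomicContextStore(&alloc_context_id, /*tid=*/7, /*stack=*/1234);
      AtomicContextLoad(&alloc_context_id, tid, stack);
      return (tid == 7 && stack == 1234) ? 0 : 1;
    }
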
211 QuarantineCallback(AllocatorCache *cache, BufferedStackTrace *stack)
213 stack_(stack) {
516 bool UpdateAllocationStack(uptr addr, BufferedStackTrace *stack) {
523 m->SetAllocContext(t ? t->tid() : kMainTid, StackDepotPut(*stack));
528 void *Allocate(uptr size, uptr alignment, BufferedStackTrace *stack,
535 ReportRssLimitExceeded(stack);
538 CHECK(stack);
575 ReportAllocationSizeTooBig(size, needed_size, malloc_limit, stack);
592 ReportOutOfMemory(size, stack);
609 m->SetAllocContext(t ? t->tid() : kMainTid, StackDepotPut(*stack));
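
Lines 516–523 and 528–609 end in the same move: the captured BufferedStackTrace is interned via StackDepotPut and the returned u32 id is stored next to the thread id in the chunk header. A toy stand-in for that depot, kept only to show the trace-to-compact-id interning property this code relies on (the real depot is a lock-free hash table; the names below are mine):

    #include <cstdint>
    #include <map>
    #include <vector>

    using u32 = uint32_t;
    using uptr = uintptr_t;

    // Toy stack depot: interns a captured trace and hands back a small u32
    // id that fits in the packed chunk header next to the tid.
    struct ToyStackDepot {
      std::map<std::vector<uptr>, u32> ids;
      std::vector<std::vector<uptr>> traces;

      u32 Put(const std::vector<uptr> &trace) {
        auto it = ids.find(trace);
        if (it != ids.end()) return it->second;        // seen before: reuse id
        u32 id = static_cast<u32>(traces.size()) + 1;  // 0 means "no stack"
        ids.emplace(trace, id);
        traces.push_back(trace);
        return id;
      }
      const std::vector<uptr> &Get(u32 id) const { return traces[id - 1]; }
    };

    int main() {
      ToyStackDepot depot;
      u32 a = depot.Put({0x401234, 0x401500});   // fake PCs of a malloc site
      u32 b = depot.Put({0x401234, 0x401500});   // identical trace, same id
      return (a == b && a != 0) ? 0 : 1;
    }
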
665 BufferedStackTrace *stack) {
671 ReportInvalidFree(ptr, old_chunk_state, stack);
683 void QuarantineChunk(AsanChunk *m, void *ptr, BufferedStackTrace *stack) {
687 m->SetFreeContext(t ? t->tid() : 0, StackDepotPut(*stack));
693 quarantine.Put(GetQuarantineCache(ms), QuarantineCallback(ac, stack), m,
698 quarantine.Put(&fallback_quarantine_cache, QuarantineCallback(ac, stack),
704 BufferedStackTrace *stack, AllocType alloc_type) {
716 ReportFreeNotMalloced(p, stack);
732 if (!AtomicallySetQuarantineFlagIfAllocated(m, ptr, stack)) return;
736 ReportAllocTypeMismatch((uptr)ptr, stack, (AllocType)m->alloc_type,
745 ReportNewDeleteTypeMismatch(p, delete_size, delete_alignment, stack);
753 QuarantineChunk(m, ptr, stack);
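
The free path matched at 704–753 hinges on one atomic step: AtomicallySetQuarantineFlagIfAllocated flips the chunk from allocated to quarantined exactly once, so only the winning thread records the free context and quarantines the chunk, while a second free of the same pointer lands in ReportInvalidFree. A sketch of that state flip with a compare-and-swap (state values and the reporting are placeholders, not the real ones):

    #include <atomic>
    #include <cstdint>
    #include <cstdio>

    using u8 = uint8_t;

    // Placeholder states; only the allocated -> quarantined edge matters here.
    enum ChunkState : u8 { CHUNK_INVALID = 0, CHUNK_ALLOCATED = 2, CHUNK_QUARANTINE = 3 };

    struct ToyChunk {
      std::atomic<u8> chunk_state{CHUNK_ALLOCATED};
    };

    // Returns true only for the thread that wins the transition; that thread
    // then records the free context and pushes the chunk on the quarantine.
    static bool SetQuarantineFlagIfAllocated(ToyChunk *m) {
      u8 expected = CHUNK_ALLOCATED;
      if (m->chunk_state.compare_exchange_strong(expected, CHUNK_QUARANTINE,
                                                 std::memory_order_acquire))
        return true;
      if (expected == CHUNK_QUARANTINE)
        std::puts("double free");            // stand-in for ReportInvalidFree
      else
        std::puts("free of non-heap pointer");
      return false;
    }

    int main() {
      ToyChunk c;
      bool first = SetQuarantineFlagIfAllocated(&c);   // succeeds
      bool second = SetQuarantineFlagIfAllocated(&c);  // detected double free
      return (first && !second) ? 0 : 1;
    }
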
756 void *Reallocate(void *old_ptr, uptr new_size, BufferedStackTrace *stack) {
766 void *new_ptr = Allocate(new_size, 8, stack, FROM_MALLOC, true);
770 ReportInvalidFree(old_ptr, chunk_state, stack);
776 Deallocate(old_ptr, 0, 0, stack, FROM_MALLOC);
781 void *Calloc(uptr nmemb, uptr size, BufferedStackTrace *stack) {
785 ReportCallocOverflow(nmemb, size, stack);
787 void *ptr = Allocate(nmemb * size, 8, stack, FROM_MALLOC, false);
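
Calloc at 781–787 has to form nmemb * size before handing the product to Allocate, and line 785 reports an overflow instead of letting the multiplication wrap. The guard amounts to a division-based overflow check along these lines (the helper name is mine):

    #include <cstdint>
    #include <limits>

    using uptr = uintptr_t;

    // True if nmemb * size does not fit in uptr, i.e. the product would wrap
    // and calloc would otherwise return a too-small buffer.
    static bool CallocWouldOverflow(uptr nmemb, uptr size) {
      if (size == 0) return false;
      return nmemb > std::numeric_limits<uptr>::max() / size;
    }

    int main() {
      uptr big = std::numeric_limits<uptr>::max() / 2 + 1;
      return CallocWouldOverflow(big, 2) ? 0 : 1;   // 2 * big overflows
    }
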
795 void ReportInvalidFree(void *ptr, u8 chunk_state, BufferedStackTrace *stack) {
797 ReportDoubleFree((uptr)ptr, stack);
799 ReportFreeNotMalloced((uptr)ptr, stack);
802 void CommitBack(AsanThreadLocalMallocStorage *ms, BufferedStackTrace *stack) {
804 quarantine.Drain(GetQuarantineCache(ms), QuarantineCallback(ac, stack));
875 void Purge(BufferedStackTrace *stack) {
881 stack));
887 stack));
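
The Put calls at 693–698, the Drain in CommitBack at 804, and the drains in Purge at 875–887 all talk to the same structure: a size-bounded quarantine that parks freed chunks for a while and recycles them through QuarantineCallback when it grows too large or is explicitly flushed. A toy version, just to show the Put/Drain contract (the real quarantine uses per-thread caches and is far more careful; everything below is simplified):

    #include <cstddef>
    #include <deque>

    // Toy size-bounded quarantine: freed blocks are parked here instead of
    // being reused immediately, so stale pointers keep hitting poisoned
    // memory for a while. Put() enqueues; Drain() recycles everything
    // through a callback (the role QuarantineCallback plays above).
    template <typename Callback, typename Chunk>
    class ToyQuarantine {
     public:
      explicit ToyQuarantine(size_t max_bytes) : max_bytes_(max_bytes) {}

      void Put(Callback cb, Chunk *m, size_t size) {
        q_.push_back({m, size});
        bytes_ += size;
        while (bytes_ > max_bytes_) {        // keep total parked memory bounded
          cb.Recycle(q_.front().chunk);
          bytes_ -= q_.front().size;
          q_.pop_front();
        }
      }

      void Drain(Callback cb) {              // CommitBack/Purge-style flush
        for (auto &e : q_) cb.Recycle(e.chunk);
        q_.clear();
        bytes_ = 0;
      }

     private:
      struct Entry { Chunk *chunk; size_t size; };
      std::deque<Entry> q_;
      size_t bytes_ = 0, max_bytes_;
    };

    struct DummyChunk {};
    struct DummyCallback {
      void Recycle(DummyChunk *) const { /* would return memory to the allocator */ }
    };

    int main() {
      ToyQuarantine<DummyCallback, DummyChunk> q(/*max_bytes=*/64);
      DummyChunk a, b;
      q.Put(DummyCallback{}, &a, 48);
      q.Put(DummyCallback{}, &b, 48);   // exceeds 64 bytes: oldest recycled
      q.Drain(DummyCallback{});         // flush the rest
      return 0;
    }
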
936 u32 stack = 0;
937 chunk_->GetAllocContext(tid, stack);
945 u32 stack = 0;
946 chunk_->GetFreeContext(tid, stack);
956 u32 stack = 0;
957 chunk_->GetAllocContext(tid, stack);
958 return stack;
965 u32 stack = 0;
966 chunk_->GetFreeContext(tid, stack);
967 return stack;
991 instance.CommitBack(this, &stack);
998 void asan_free(void *ptr, BufferedStackTrace *stack, AllocType alloc_type) {
999 instance.Deallocate(ptr, 0, 0, stack, alloc_type);
1003 BufferedStackTrace *stack, AllocType alloc_type) {
1004 instance.Deallocate(ptr, size, alignment, stack, alloc_type);
1007 void *asan_malloc(uptr size, BufferedStackTrace *stack) {
1008 return SetErrnoOnNull(instance.Allocate(size, 8, stack, FROM_MALLOC, true));
1011 void *asan_calloc(uptr nmemb, uptr size, BufferedStackTrace *stack) {
1012 return SetErrnoOnNull(instance.Calloc(nmemb, size, stack));
1016 BufferedStackTrace *stack) {
1021 ReportReallocArrayOverflow(nmemb, size, stack);
1023 return asan_realloc(p, nmemb * size, stack);
1026 void *asan_realloc(void *p, uptr size, BufferedStackTrace *stack) {
1028 return SetErrnoOnNull(instance.Allocate(size, 8, stack, FROM_MALLOC, true));
1031 instance.Deallocate(p, 0, 0, stack, FROM_MALLOC);
1037 return SetErrnoOnNull(instance.Reallocate(p, size, stack));
1040 void *asan_valloc(uptr size, BufferedStackTrace *stack) {
1042 instance.Allocate(size, GetPageSizeCached(), stack, FROM_MALLOC, true));
1045 void *asan_pvalloc(uptr size, BufferedStackTrace *stack) {
1051 ReportPvallocOverflow(size, stack);
1056 instance.Allocate(size, PageSize, stack, FROM_MALLOC, true));
1059 void *asan_memalign(uptr alignment, uptr size, BufferedStackTrace *stack,
1065 ReportInvalidAllocationAlignment(alignment, stack);
1068 instance.Allocate(size, alignment, stack, alloc_type, true));
1071 void *asan_aligned_alloc(uptr alignment, uptr size, BufferedStackTrace *stack) {
1076 ReportInvalidAlignedAllocAlignment(size, alignment, stack);
1079 instance.Allocate(size, alignment, stack, FROM_MALLOC, true));
1083 BufferedStackTrace *stack) {
1087 ReportInvalidPosixMemalignAlignment(alignment, stack);
1089 void *ptr = instance.Allocate(size, alignment, stack, FROM_MALLOC, true);
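
The aligned entry points at 1040–1089 each validate their arguments before calling Allocate: pvalloc checks that rounding the size up to a page cannot overflow (1051), memalign and posix_memalign require a power-of-two alignment (1065, 1087, with posix_memalign additionally needing a multiple of sizeof(void*)), and aligned_alloc rejects bad alignment/size combinations via ReportInvalidAlignedAllocAlignment (1076). The predicates below are my reading of those checks, not the exact library helpers:

    #include <cstdint>
    #include <limits>

    using uptr = uintptr_t;

    // Shared by the memalign-style entry points.
    static bool IsPowerOfTwo(uptr x) { return x != 0 && (x & (x - 1)) == 0; }

    // posix_memalign additionally demands a multiple of sizeof(void*).
    static bool IsValidPosixMemalignAlignment(uptr alignment) {
      return IsPowerOfTwo(alignment) && (alignment % sizeof(void *)) == 0;
    }

    // aligned_alloc (as historically specified) wants a power-of-two
    // alignment and a size that is a multiple of that alignment.
    static bool IsValidAlignedAllocPair(uptr alignment, uptr size) {
      return IsPowerOfTwo(alignment) && (size % alignment) == 0;
    }

    // pvalloc rounds the request up to a whole page; reject requests where
    // that rounding itself would overflow.
    static bool PvallocRoundingOverflows(uptr size, uptr page_size) {
      return size > std::numeric_limits<uptr>::max() - (page_size - 1);
    }

    int main() {
      bool ok = IsPowerOfTwo(64) &&
                IsValidPosixMemalignAlignment(64) &&
                IsValidAlignedAllocPair(64, 256) &&
                !IsValidAlignedAllocPair(64, 100) &&     // size % alignment != 0
                !PvallocRoundingOverflows(4096, 4096);
      return ok ? 0 : 1;
    }
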
1103 ReportMallocUsableSizeNotOwned((uptr)ptr, &stack);
1193 u32 stack = 0;
1194 m->GetAllocContext(tid, stack);
1195 return stack;
1251 ReportSanitizerGetAllocatedSizeNotOwned(ptr, &stack);
1269 instance.Purge(&stack);
1274 return instance.UpdateAllocationStack((uptr)addr, &stack);