Lines matching full:stack (ASan allocator, compiler-rt asan_allocator.cpp)
57 u32 tid, u32 stack) { in AtomicContextStore() argument
60 context += stack; in AtomicContextStore()
65 u32 &tid, u32 &stack) { in AtomicContextLoad() argument
67 stack = context; in AtomicContextLoad()
121 void SetAllocContext(u32 tid, u32 stack) { in SetAllocContext() argument
122 AtomicContextStore(&alloc_context_id, tid, stack); in SetAllocContext()
125 void GetAllocContext(u32 &tid, u32 &stack) const { in GetAllocContext()
126 AtomicContextLoad(&alloc_context_id, tid, stack); in GetAllocContext()
134 void SetFreeContext(u32 tid, u32 stack) { in SetFreeContext() argument
135 AtomicContextStore(&free_context_id, tid, stack); in SetFreeContext()
138 void GetFreeContext(u32 &tid, u32 &stack) const { in GetFreeContext()
139 AtomicContextLoad(&free_context_id, tid, stack); in GetFreeContext()
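The AtomicContextStore/AtomicContextLoad and Set/GetAllocContext, Set/GetFreeContext fragments above pack a thread id and a stack depot id into a single atomic 64-bit word per context. A minimal sketch of that packing, assuming tid in the high 32 bits and the stack id in the low 32 bits; std::atomic<uint64_t> stands in for the sanitizer's atomic_uint64_t wrappers, and ChunkContextSketch is an illustrative name, not the real AsanChunk:

#include <atomic>
#include <cassert>
#include <cstdint>

static void AtomicContextStore(std::atomic<uint64_t> *atomic_context,
                               uint32_t tid, uint32_t stack) {
  uint64_t context = tid;
  context <<= 32;    // tid occupies the high half of the word
  context += stack;  // stack depot id occupies the low half
  atomic_context->store(context, std::memory_order_relaxed);
}

static void AtomicContextLoad(const std::atomic<uint64_t> *atomic_context,
                              uint32_t &tid, uint32_t &stack) {
  uint64_t context = atomic_context->load(std::memory_order_relaxed);
  stack = static_cast<uint32_t>(context);      // low 32 bits
  tid = static_cast<uint32_t>(context >> 32);  // high 32 bits
}

struct ChunkContextSketch {
  std::atomic<uint64_t> alloc_context_id{0};
  std::atomic<uint64_t> free_context_id{0};

  void SetAllocContext(uint32_t tid, uint32_t stack) {
    AtomicContextStore(&alloc_context_id, tid, stack);
  }
  void GetAllocContext(uint32_t &tid, uint32_t &stack) const {
    AtomicContextLoad(&alloc_context_id, tid, stack);
  }
  void SetFreeContext(uint32_t tid, uint32_t stack) {
    AtomicContextStore(&free_context_id, tid, stack);
  }
  void GetFreeContext(uint32_t &tid, uint32_t &stack) const {
    AtomicContextLoad(&free_context_id, tid, stack);
  }
};

int main() {
  ChunkContextSketch c;
  c.SetAllocContext(/*tid=*/7, /*stack=*/42);
  uint32_t tid = 0, stack = 0;
  c.GetAllocContext(tid, stack);
  assert(tid == 7 && stack == 42);
  return 0;
}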
211 QuarantineCallback(AllocatorCache *cache, BufferedStackTrace *stack) in QuarantineCallback()
213 stack_(stack) { in QuarantineCallback()
516 bool UpdateAllocationStack(uptr addr, BufferedStackTrace *stack) { in UpdateAllocationStack()
523 m->SetAllocContext(t ? t->tid() : kMainTid, StackDepotPut(*stack)); in UpdateAllocationStack()
528 void *Allocate(uptr size, uptr alignment, BufferedStackTrace *stack, in Allocate()
535 ReportRssLimitExceeded(stack); in Allocate()
538 CHECK(stack); in Allocate()
575 ReportAllocationSizeTooBig(size, needed_size, malloc_limit, stack); in Allocate()
592 ReportOutOfMemory(size, stack); in Allocate()
609 m->SetAllocContext(t ? t->tid() : kMainTid, StackDepotPut(*stack)); in Allocate()
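Allocate and UpdateAllocationStack both record StackDepotPut(*stack): the whole allocation stack collapses to a u32 id that fits in the packed context word. A toy interning table shows the idea; the real sanitizer StackDepot is a lock-free hash table, so DepotSketch, Put, and Get below are illustrative only:

#include <cstdint>
#include <map>
#include <vector>

// Interning table: identical stack traces get the same small id, so a chunk
// only has to carry a uint32_t instead of a full trace.
struct DepotSketch {
  std::map<std::vector<uintptr_t>, uint32_t> ids;
  std::vector<std::vector<uintptr_t>> traces;

  uint32_t Put(const std::vector<uintptr_t> &trace) {
    auto it = ids.find(trace);
    if (it != ids.end()) return it->second;  // already interned
    uint32_t id = static_cast<uint32_t>(traces.size()) + 1;  // 0 means "no stack"
    ids.emplace(trace, id);
    traces.push_back(trace);
    return id;
  }

  const std::vector<uintptr_t> *Get(uint32_t id) const {
    return (id == 0 || id > traces.size()) ? nullptr : &traces[id - 1];
  }
};

int main() {
  DepotSketch depot;
  std::vector<uintptr_t> pcs = {0x1000, 0x2000};
  uint32_t id = depot.Put(pcs);
  return depot.Get(id) == nullptr;  // 0 on success
}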
665 BufferedStackTrace *stack) { in AtomicallySetQuarantineFlagIfAllocated()
671 ReportInvalidFree(ptr, old_chunk_state, stack); in AtomicallySetQuarantineFlagIfAllocated()
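AtomicallySetQuarantineFlagIfAllocated flips the chunk state to quarantined exactly once and reports an invalid free otherwise. A sketch of that guard using a compare-exchange; the state values and the reporting line are assumptions, not ASan's actual ChunkState encoding:

#include <atomic>
#include <cstdint>
#include <cstdio>

// Assumed state encoding; the real ChunkState values live in asan_allocator.
enum ChunkStateSketch : uint8_t { kAvailable = 0, kAllocated = 2, kQuarantined = 3 };

static bool SetQuarantineIfAllocated(std::atomic<uint8_t> *state, void *ptr) {
  uint8_t expected = kAllocated;
  if (state->compare_exchange_strong(expected, kQuarantined,
                                     std::memory_order_acquire))
    return true;  // this thread won the race and owns the free
  // 'expected' now holds the previous state: quarantined means double free,
  // anything else means freeing memory that was never allocated here.
  std::fprintf(stderr, "invalid free of %p (state %u)\n", ptr,
               static_cast<unsigned>(expected));
  return false;
}

int main() {
  std::atomic<uint8_t> state{kAllocated};
  int dummy = 0;
  bool first = SetQuarantineIfAllocated(&state, &dummy);   // succeeds
  bool second = SetQuarantineIfAllocated(&state, &dummy);  // reports double free
  return first && !second ? 0 : 1;
}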
683 void QuarantineChunk(AsanChunk *m, void *ptr, BufferedStackTrace *stack) { in QuarantineChunk()
687 m->SetFreeContext(t ? t->tid() : 0, StackDepotPut(*stack)); in QuarantineChunk()
693 quarantine.Put(GetQuarantineCache(ms), QuarantineCallback(ac, stack), m, in QuarantineChunk()
698 quarantine.Put(&fallback_quarantine_cache, QuarantineCallback(ac, stack), in QuarantineChunk()
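QuarantineChunk does not hand memory back to the allocator; it records the free context and parks the chunk in a quarantine (a per-thread cache, with a fallback cache when no thread-local storage is available). A toy FIFO quarantine with a byte budget illustrates why reuse is delayed; the interface below is an assumption, not the sanitizer's Quarantine/QuarantineCallback API:

#include <cstddef>
#include <cstdio>
#include <deque>
#include <functional>
#include <utility>

class QuarantineSketch {
 public:
  explicit QuarantineSketch(size_t max_bytes) : max_bytes_(max_bytes) {}

  // Park a freed chunk; evict (and only then truly recycle) the oldest
  // entries once the byte budget is exceeded.
  void Put(void *chunk, size_t size, const std::function<void(void *)> &recycle) {
    fifo_.emplace_back(chunk, size);
    bytes_ += size;
    while (bytes_ > max_bytes_ && !fifo_.empty()) {
      auto victim = fifo_.front();
      fifo_.pop_front();
      bytes_ -= victim.second;
      recycle(victim.first);  // memory becomes reusable only at this point
    }
  }

 private:
  std::deque<std::pair<void *, size_t>> fifo_;
  size_t bytes_ = 0;
  size_t max_bytes_;
};

int main() {
  QuarantineSketch q(/*max_bytes=*/256);
  char block[128];
  q.Put(block, sizeof(block), [](void *p) { std::printf("recycled %p\n", p); });
  q.Put(block, sizeof(block), [](void *p) { std::printf("recycled %p\n", p); });
  q.Put(block, sizeof(block), [](void *p) { std::printf("recycled %p\n", p); });
  return 0;
}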
704 BufferedStackTrace *stack, AllocType alloc_type) { in Deallocate()
716 ReportFreeNotMalloced(p, stack); in Deallocate()
732 if (!AtomicallySetQuarantineFlagIfAllocated(m, ptr, stack)) return; in Deallocate()
736 ReportAllocTypeMismatch((uptr)ptr, stack, (AllocType)m->alloc_type, in Deallocate()
745 ReportNewDeleteTypeMismatch(p, delete_size, delete_alignment, stack); in Deallocate()
753 QuarantineChunk(m, ptr, stack); in Deallocate()
756 void *Reallocate(void *old_ptr, uptr new_size, BufferedStackTrace *stack) { in Reallocate()
766 void *new_ptr = Allocate(new_size, 8, stack, FROM_MALLOC, true); in Reallocate()
770 ReportInvalidFree(old_ptr, chunk_state, stack); in Reallocate()
776 Deallocate(old_ptr, 0, 0, stack, FROM_MALLOC); in Reallocate()
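Reallocate allocates a fresh block, copies the payload, and routes the old block through the normal Deallocate path so it is still quarantined. A rough sketch under those assumptions; malloc/free stand in for Allocate/Deallocate, and the copy length is the smaller of the two sizes:

#include <algorithm>
#include <cstddef>
#include <cstdlib>
#include <cstring>

// 'old_size' would come from the chunk header (UsedSize) in the real allocator.
static void *ReallocSketch(void *old_ptr, size_t old_size, size_t new_size) {
  void *new_ptr = std::malloc(new_size);
  if (new_ptr && old_ptr) {
    std::memcpy(new_ptr, old_ptr, std::min(old_size, new_size));
    std::free(old_ptr);  // the real path quarantines the old chunk instead
  }
  return new_ptr;
}

int main() {
  void *p = std::malloc(16);
  p = ReallocSketch(p, 16, 64);
  std::free(p);
  return 0;
}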
781 void *Calloc(uptr nmemb, uptr size, BufferedStackTrace *stack) { in Calloc()
785 ReportCallocOverflow(nmemb, size, stack); in Calloc()
787 void *ptr = Allocate(nmemb * size, 8, stack, FROM_MALLOC, false); in Calloc()
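Calloc multiplies nmemb by size only after checking that the product fits in uptr, which is what ReportCallocOverflow guards. A hedged sketch of that check, using the standard library in place of the sanitizer allocator:

#include <cstddef>
#include <cstdint>
#include <cstdlib>
#include <cstring>

static void *CallocSketch(size_t nmemb, size_t size) {
  // Reject the request if nmemb * size would not fit in size_t; the real
  // allocator calls ReportCallocOverflow here instead of returning null.
  if (nmemb != 0 && size > SIZE_MAX / nmemb)
    return nullptr;
  void *p = std::malloc(nmemb * size);
  if (p) std::memset(p, 0, nmemb * size);
  return p;
}

int main() {
  void *ok = CallocSketch(4, 8);          // fine
  void *bad = CallocSketch(SIZE_MAX, 2);  // rejected
  std::free(ok);
  return bad == nullptr ? 0 : 1;
}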
795 void ReportInvalidFree(void *ptr, u8 chunk_state, BufferedStackTrace *stack) { in ReportInvalidFree()
797 ReportDoubleFree((uptr)ptr, stack); in ReportInvalidFree()
799 ReportFreeNotMalloced((uptr)ptr, stack); in ReportInvalidFree()
802 void CommitBack(AsanThreadLocalMallocStorage *ms, BufferedStackTrace *stack) { in CommitBack()
804 quarantine.Drain(GetQuarantineCache(ms), QuarantineCallback(ac, stack)); in CommitBack()
875 void Purge(BufferedStackTrace *stack) { in Purge()
881 stack)); in Purge()
887 stack)); in Purge()
936 u32 stack = 0; in AllocTid() local
937 chunk_->GetAllocContext(tid, stack); in AllocTid()
945 u32 stack = 0; in FreeTid() local
946 chunk_->GetFreeContext(tid, stack); in FreeTid()
956 u32 stack = 0; in GetAllocStackId() local
957 chunk_->GetAllocContext(tid, stack); in GetAllocStackId()
958 return stack; in GetAllocStackId()
965 u32 stack = 0; in GetFreeStackId() local
966 chunk_->GetFreeContext(tid, stack); in GetFreeStackId()
967 return stack; in GetFreeStackId()
991 instance.CommitBack(this, &stack); in CommitBack()
998 void asan_free(void *ptr, BufferedStackTrace *stack, AllocType alloc_type) { in asan_free() argument
999 instance.Deallocate(ptr, 0, 0, stack, alloc_type); in asan_free()
1003 BufferedStackTrace *stack, AllocType alloc_type) { in asan_delete() argument
1004 instance.Deallocate(ptr, size, alignment, stack, alloc_type); in asan_delete()
1007 void *asan_malloc(uptr size, BufferedStackTrace *stack) { in asan_malloc() argument
1008 return SetErrnoOnNull(instance.Allocate(size, 8, stack, FROM_MALLOC, true)); in asan_malloc()
1011 void *asan_calloc(uptr nmemb, uptr size, BufferedStackTrace *stack) { in asan_calloc() argument
1012 return SetErrnoOnNull(instance.Calloc(nmemb, size, stack)); in asan_calloc()
1016 BufferedStackTrace *stack) { in asan_reallocarray() argument
1021 ReportReallocArrayOverflow(nmemb, size, stack); in asan_reallocarray()
1023 return asan_realloc(p, nmemb * size, stack); in asan_reallocarray()
1026 void *asan_realloc(void *p, uptr size, BufferedStackTrace *stack) { in asan_realloc() argument
1028 return SetErrnoOnNull(instance.Allocate(size, 8, stack, FROM_MALLOC, true)); in asan_realloc()
1031 instance.Deallocate(p, 0, 0, stack, FROM_MALLOC); in asan_realloc()
1037 return SetErrnoOnNull(instance.Reallocate(p, size, stack)); in asan_realloc()
1040 void *asan_valloc(uptr size, BufferedStackTrace *stack) { in asan_valloc() argument
1042 instance.Allocate(size, GetPageSizeCached(), stack, FROM_MALLOC, true)); in asan_valloc()
1045 void *asan_pvalloc(uptr size, BufferedStackTrace *stack) { in asan_pvalloc() argument
1051 ReportPvallocOverflow(size, stack); in asan_pvalloc()
1056 instance.Allocate(size, PageSize, stack, FROM_MALLOC, true)); in asan_pvalloc()
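asan_pvalloc rounds the request up to whole pages and must reject sizes for which that round-up overflows, which is what ReportPvallocOverflow guards. A sketch assuming a power-of-two page size; PvallocSizeOk is an illustrative helper, not part of ASan:

#include <cstddef>
#include <cstdint>

// Assumes page_size is a power of two; GetPageSizeCached() supplies it in ASan.
static bool PvallocSizeOk(size_t size, size_t page_size, size_t *rounded) {
  if (size + page_size - 1 < size)
    return false;  // rounding up to the next page would overflow size_t
  *rounded = size ? ((size + page_size - 1) & ~(page_size - 1)) : page_size;
  return true;
}

int main() {
  size_t rounded = 0;
  bool ok = PvallocSizeOk(5000, 4096, &rounded);       // rounded becomes 8192
  bool bad = PvallocSizeOk(SIZE_MAX, 4096, &rounded);  // overflow, rejected
  return ok && !bad ? 0 : 1;
}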
1059 void *asan_memalign(uptr alignment, uptr size, BufferedStackTrace *stack, in asan_memalign() argument
1065 ReportInvalidAllocationAlignment(alignment, stack); in asan_memalign()
1068 instance.Allocate(size, alignment, stack, alloc_type, true)); in asan_memalign()
1071 void *asan_aligned_alloc(uptr alignment, uptr size, BufferedStackTrace *stack) { in asan_aligned_alloc() argument
1076 ReportInvalidAlignedAllocAlignment(size, alignment, stack); in asan_aligned_alloc()
1079 instance.Allocate(size, alignment, stack, FROM_MALLOC, true)); in asan_aligned_alloc()
1083 BufferedStackTrace *stack) { in asan_posix_memalign() argument
1087 ReportInvalidPosixMemalignAlignment(alignment, stack); in asan_posix_memalign()
1089 void *ptr = instance.Allocate(size, alignment, stack, FROM_MALLOC, true); in asan_posix_memalign()
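asan_posix_memalign validates the alignment before allocating: it must be a power of two and a multiple of sizeof(void*), which is what ReportInvalidPosixMemalignAlignment guards. A small sketch of that predicate; the helper name is assumed:

#include <cstddef>

static bool PosixMemalignAlignmentOk(size_t alignment) {
  bool power_of_two = alignment != 0 && (alignment & (alignment - 1)) == 0;
  return power_of_two && alignment % sizeof(void *) == 0;
}

int main() {
  // 64 is valid; 24 is a multiple of sizeof(void*) but not a power of two.
  return PosixMemalignAlignmentOk(64) && !PosixMemalignAlignmentOk(24) ? 0 : 1;
}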
1103 ReportMallocUsableSizeNotOwned((uptr)ptr, &stack); in asan_malloc_usable_size()
1193 u32 stack = 0; in stack_trace_id() local
1194 m->GetAllocContext(tid, stack); in stack_trace_id()
1195 return stack; in stack_trace_id()
1251 ReportSanitizerGetAllocatedSizeNotOwned(ptr, &stack); in __sanitizer_get_allocated_size()
1269 instance.Purge(&stack); in __sanitizer_purge_allocator()
1274 return instance.UpdateAllocationStack((uptr)addr, &stack); in __asan_update_allocation_context()