@@ -64,6 +64,10 @@ u32 HwasanChunkView::GetAllocStackId() const {
   return metadata_->GetAllocStackId();
 }
 
+u32 HwasanChunkView::GetAllocThreadId() const {
+  return metadata_->GetAllocThreadId();
+}
+
 uptr HwasanChunkView::ActualSize() const {
   return allocator.GetActuallyAllocatedSize(reinterpret_cast<void *>(block_));
 }
@@ -106,6 +110,12 @@ inline u32 Metadata::GetAllocStackId() const {
   return atomic_load(&alloc_context_id, memory_order_relaxed);
 }
 
+inline u32 Metadata::GetAllocThreadId() const {
+  u64 context = atomic_load(&alloc_context_id, memory_order_relaxed);
+  u32 tid = context >> 32;
+  return tid;
+}
+
 void GetAllocatorStats(AllocatorStatCounters s) {
   allocator.GetStats(s);
 }
@@ -296,6 +306,7 @@ static void HwasanDeallocate(StackTrace *stack, void *tagged_ptr) {
   uptr orig_size = meta->GetRequestedSize();
   u32 free_context_id = StackDepotPut(*stack);
   u32 alloc_context_id = meta->GetAllocStackId();
+  u32 alloc_thread_id = meta->GetAllocThreadId();
 
   // Check tail magic.
   uptr tagged_size = TaggedSize(orig_size);
@@ -347,8 +358,9 @@ static void HwasanDeallocate(StackTrace *stack, void *tagged_ptr) {
   if (t) {
     allocator.Deallocate(t->allocator_cache(), aligned_ptr);
     if (auto *ha = t->heap_allocations())
-      ha->push({reinterpret_cast<uptr>(tagged_ptr), alloc_context_id,
-                free_context_id, static_cast<u32>(orig_size)});
+      ha->push({reinterpret_cast<uptr>(tagged_ptr), alloc_thread_id,
+                alloc_context_id, free_context_id,
+                static_cast<u32>(orig_size)});
   } else {
     SpinMutexLock l(&fallback_mutex);
     AllocatorCache *cache = &fallback_allocator_cache;
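
For context: the new Metadata::GetAllocThreadId() returns the upper 32 bits of alloc_context_id, while GetAllocStackId() truncates the same word to 32 bits, so the change assumes the allocating thread id and the stack depot id are packed into that single 64-bit field. Below is a minimal standalone sketch of that packing; the helper names are hypothetical and not part of the patch.

// Hypothetical sketch (not the HWASan source): shows the assumed layout of the
// 64-bit context word, thread id in the upper 32 bits, stack depot id in the
// lower 32 bits, which is what GetAllocThreadId()'s `context >> 32` relies on.
#include <cassert>
#include <cstdint>

using u32 = uint32_t;
using u64 = uint64_t;

// Pack a thread id and a stack depot id into one 64-bit word.
static u64 PackContext(u32 thread_id, u32 stack_id) {
  return (static_cast<u64>(thread_id) << 32) | stack_id;
}

// Lower 32 bits: stack depot id (what GetAllocStackId() would return).
static u32 UnpackStackId(u64 context) { return static_cast<u32>(context); }

// Upper 32 bits: allocating thread id (what GetAllocThreadId() would return).
static u32 UnpackThreadId(u64 context) { return static_cast<u32>(context >> 32); }

int main() {
  u64 ctx = PackContext(/*thread_id=*/7, /*stack_id=*/0xDEADBEEF);
  assert(UnpackThreadId(ctx) == 7);
  assert(UnpackStackId(ctx) == 0xDEADBEEF);
  return 0;
}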