diff --git a/compiler-rt/lib/hwasan/hwasan_thread.h b/compiler-rt/lib/hwasan/hwasan_thread.h
--- a/compiler-rt/lib/hwasan/hwasan_thread.h
+++ b/compiler-rt/lib/hwasan/hwasan_thread.h
@@ -69,6 +69,9 @@
     Print("Thread: ");
   }
 
+  tid_t os_id() const { return os_id_; }
+  void set_os_id(tid_t os_id) { os_id_ = os_id; }
+
   uptr &vfork_spill() { return vfork_spill_; }
 
  private:
@@ -93,6 +96,8 @@
 
   u32 unique_id_;  // counting from zero.
 
+  tid_t os_id_;
+
   u32 tagging_disabled_;  // if non-zero, malloc uses zero tag in this thread.
 
   bool announced_;
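
Note on the header change: this hunk only adds storage and an accessor pair
for the OS thread id; the field still has to be populated somewhere on the
thread start path, which is outside this hunk. A minimal sketch of the
intended usage, with the helper name below being hypothetical (illustration
only, not part of the patch):

  // Illustrative only: record the kernel thread id on the HWASan Thread so
  // the LSan hooks added in hwasan_thread.cpp can later find the thread by
  // comparing os_id(). GetTid() is the existing sanitizer_common helper
  // returning the OS-level thread id.
  static void RecordOsId(__hwasan::Thread *t) { t->set_os_id(GetTid()); }
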
diff --git a/compiler-rt/lib/hwasan/hwasan_thread.cpp b/compiler-rt/lib/hwasan/hwasan_thread.cpp
--- a/compiler-rt/lib/hwasan/hwasan_thread.cpp
+++ b/compiler-rt/lib/hwasan/hwasan_thread.cpp
@@ -5,6 +5,7 @@
 #include "hwasan_interface_internal.h"
 #include "hwasan_mapping.h"
 #include "hwasan_poisoning.h"
+#include "hwasan_thread_list.h"
 #include "sanitizer_common/sanitizer_atomic.h"
 #include "sanitizer_common/sanitizer_file.h"
 #include "sanitizer_common/sanitizer_placement_new.h"
@@ -149,3 +150,54 @@
 }
 
 } // namespace __hwasan
+
+// --- Implementation of LSan-specific functions --- {{{1
+namespace __lsan {
+
+static __hwasan::HwasanThreadList *GetHwasanThreadListLocked() {
+  auto &tl = __hwasan::hwasanThreadList();
+  tl.CheckLocked();
+  return &tl;
+}
+
+static __hwasan::Thread *GetThreadByOsIDLocked(tid_t os_id) {
+  return GetHwasanThreadListLocked()->FindThreadLocked(
+      [os_id](__hwasan::Thread *t) { return t->os_id() == os_id; });
+}
+
+void LockThreadRegistry() { __hwasan::hwasanThreadList().Lock(); }
+
+void UnlockThreadRegistry() { __hwasan::hwasanThreadList().Unlock(); }
+
+void EnsureMainThreadIDIsCorrect() {
+  auto *t = __hwasan::GetCurrentThread();
+  if (t && t->IsMainThread())
+    t->set_os_id(GetTid());
+}
+
+bool GetThreadRangesLocked(tid_t os_id, uptr *stack_begin, uptr *stack_end,
+                           uptr *tls_begin, uptr *tls_end, uptr *cache_begin,
+                           uptr *cache_end, DTLS **dtls) {
+  auto *t = GetThreadByOsIDLocked(os_id);
+  if (!t)
+    return false;
+  *stack_begin = t->stack_bottom();
+  *stack_end = t->stack_top();
+  *tls_begin = t->tls_begin();
+  *tls_end = t->tls_end();
+  // FIXME: Is this correct for HWASan?
+  *cache_begin = 0;
+  *cache_end = 0;
+  *dtls = t->dtls();
+  return true;
+}
+
+void GetAllThreadAllocatorCachesLocked(InternalMmapVector<uptr> *caches) {}
+
+void ForEachExtraStackRange(tid_t os_id, RangeIteratorCallback callback,
+                            void *arg) {}
+
+void GetAdditionalThreadContextPtrsLocked(InternalMmapVector<uptr> *ptrs) {}
+void ReportUnsuspendedThreadsLocked(InternalMmapVector<tid_t> *threads) {}
+
+}  // namespace __lsan
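
For context, these entry points implement the interface that lsan_common
expects each sanitizer runtime to provide. A rough sketch of the calling
pattern, written here only for illustration (the actual driver lives in
lsan_common.cpp): the registry stays locked for the duration of the scan,
each suspended thread is queried by its OS id, and the returned ranges are
scanned as roots.

  // Sketch, assuming `os_id` identifies a suspended thread.
  void ScanThreadRootsExample(tid_t os_id) {
    __lsan::LockThreadRegistry();
    uptr stack_begin, stack_end, tls_begin, tls_end, cache_begin, cache_end;
    DTLS *dtls = nullptr;
    if (__lsan::GetThreadRangesLocked(os_id, &stack_begin, &stack_end,
                                      &tls_begin, &tls_end, &cache_begin,
                                      &cache_end, &dtls)) {
      // [stack_begin, stack_end) and [tls_begin, tls_end) would be scanned
      // for pointers into the heap; the cache range is empty for HWASan.
    }
    __lsan::UnlockThreadRegistry();
  }
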
diff --git a/compiler-rt/lib/hwasan/hwasan_thread_list.h b/compiler-rt/lib/hwasan/hwasan_thread_list.h
--- a/compiler-rt/lib/hwasan/hwasan_thread_list.h
+++ b/compiler-rt/lib/hwasan/hwasan_thread_list.h
@@ -71,7 +71,7 @@
   uptr total_stack_size;
 };
 
-class HwasanThreadList {
+class SANITIZER_MUTEX HwasanThreadList {
  public:
   HwasanThreadList(uptr storage, uptr size)
       : free_space_(storage), free_space_end_(storage + size) {
@@ -156,6 +156,15 @@
     for (Thread *t : live_list_) cb(t);
   }
 
+  template <class CB>
+  Thread *FindThreadLocked(CB cb) SANITIZER_CHECK_LOCKED(live_list_mutex_) {
+    CheckLocked();
+    for (Thread *t : live_list_)
+      if (cb(t))
+        return t;
+    return nullptr;
+  }
+
   void AddThreadStats(Thread *t) SANITIZER_EXCLUDES(stats_mutex_) {
     SpinMutexLock l(&stats_mutex_);
     stats_.n_live_threads++;
@@ -175,6 +184,14 @@
 
   uptr GetRingBufferSize() const { return ring_buffer_size_; }
 
+  void Lock() SANITIZER_ACQUIRE(live_list_mutex_) { live_list_mutex_.Lock(); }
+  void CheckLocked() const SANITIZER_CHECK_LOCKED(live_list_mutex_) {
+    live_list_mutex_.CheckLocked();
+  }
+  void Unlock() SANITIZER_RELEASE(live_list_mutex_) {
+    live_list_mutex_.Unlock();
+  }
+
  private:
   Thread *AllocThread() {
     SpinMutexLock l(&free_space_mutex_);
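
Usage note for the new locking API: Lock()/Unlock() are what the __lsan
entry points above call, and FindThreadLocked() must only be invoked with
the lock held; CheckLocked() enforces this at runtime, and the SANITIZER_*
annotations let Clang's thread-safety analysis enforce it at compile time
where the analysis is enabled. A small illustrative sequence (in the real
flow LSan holds the lock across the whole scan rather than per lookup):

  // Illustration only: look up a thread by OS id under the new lock API.
  __hwasan::Thread *FindByOsIdExample(tid_t os_id) {
    auto &tl = __hwasan::hwasanThreadList();
    tl.Lock();
    __hwasan::Thread *t = tl.FindThreadLocked(
        [os_id](__hwasan::Thread *thread) { return thread->os_id() == os_id; });
    tl.Unlock();
    // Caveat: once unlocked, the thread may exit; real callers keep the
    // registry locked while they use the result.
    return t;
  }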