diff --git a/compiler-rt/lib/sanitizer_common/sanitizer_stack_store.h b/compiler-rt/lib/sanitizer_common/sanitizer_stack_store.h
--- a/compiler-rt/lib/sanitizer_common/sanitizer_stack_store.h
+++ b/compiler-rt/lib/sanitizer_common/sanitizer_stack_store.h
@@ -19,16 +19,22 @@
 class StackStore {
  public:
+  enum class Compression : u8 {
+    None = 0,
+    Test,
+  };
+
   constexpr StackStore() = default;
 
   using Id = u32; // Enough for 2^32 * sizeof(uptr) bytes of traces.
 
   Id Store(const StackTrace &trace,
            uptr *pack /* number of blocks completed by this call */);
-  StackTrace Load(Id id) const;
+  StackTrace Load(Id id);
   uptr Allocated() const;
 
-  void Pack();
+  // Returns the number of released bytes.
+  uptr Pack(Compression type);
 
   void TestOnlyUnmap();
 
 private:
@@ -65,16 +71,28 @@
     // Counter to track store progress to know when we can Pack() the block.
     atomic_uint32_t stored_ = {};
     // Protects alloc of new blocks.
-    StaticSpinMutex mtx_ = {};
+    mutable StaticSpinMutex mtx_ = {};
+
+    enum class State : u8 {
+      Storing = 0,
+      Packed,
+      Unpacked,
+    };
+    State state GUARDED_BY(mtx_) = State::Storing;
 
     uptr *Create();
 
    public:
     uptr *Get() const;
     uptr *GetOrCreate();
+    uptr *GetOrUnpack();
+    uptr Pack(Compression type);
+    uptr Allocated() const;
     void TestOnlyUnmap();
     bool Stored(uptr n);
+    bool IsPacked() const;
   };
+
   BlockInfo blocks_[kBlockCount] = {};
   static_assert(ARRAY_SIZE(blocks_) == 0x100000000ull / kBlockSizeFrames, "");
 };
diff --git a/compiler-rt/lib/sanitizer_common/sanitizer_stack_store.cpp b/compiler-rt/lib/sanitizer_common/sanitizer_stack_store.cpp
--- a/compiler-rt/lib/sanitizer_common/sanitizer_stack_store.cpp
+++ b/compiler-rt/lib/sanitizer_common/sanitizer_stack_store.cpp
@@ -46,13 +46,13 @@
   return OffsetToId(idx);
 }
 
-StackTrace StackStore::Load(Id id) const {
+StackTrace StackStore::Load(Id id) {
   if (!id)
     return {};
   uptr idx = IdToOffset(id);
   uptr block_idx = GetBlockIdx(idx);
   CHECK_LT(block_idx, ARRAY_SIZE(blocks_));
-  const uptr *stack_trace = blocks_[block_idx].Get();
+  const uptr *stack_trace = blocks_[block_idx].GetOrUnpack();
   if (!stack_trace)
     return {};
   stack_trace += GetInBlockIdx(idx);
@@ -61,9 +61,11 @@
 }
 
 uptr StackStore::Allocated() const {
-  return RoundUpTo(atomic_load_relaxed(&total_frames_) * sizeof(uptr),
-                   GetPageSizeCached()) +
-         sizeof(*this);
+  uptr next_block = GetBlockIdx(
+      RoundUpTo(atomic_load_relaxed(&total_frames_), kBlockSizeFrames));
+  uptr res = 0;
+  for (uptr i = 0; i < next_block; ++i) res += blocks_[i].Allocated();
+  return res + sizeof(*this);
 }
 
 uptr *StackStore::Alloc(uptr count, uptr *idx, uptr *pack) {
@@ -90,8 +92,10 @@
   }
 }
 
-void StackStore::Pack() {
-  // TODO
+uptr StackStore::Pack(Compression type) {
+  uptr res = 0;
+  for (BlockInfo &b : blocks_) res += b.Pack(type);
+  return res;
 }
 
 void StackStore::TestOnlyUnmap() {
@@ -121,6 +125,60 @@
   return Create();
 }
 
+uptr *StackStore::BlockInfo::GetOrUnpack() {
+  SpinMutexLock l(&mtx_);
+  switch (state) {
+    case State::Storing:
+      state = State::Unpacked;
+      FALLTHROUGH;
+    case State::Unpacked:
+      return Get();
+    case State::Packed:
+      break;
+  }
+
+  uptr *ptr = Get();
+  CHECK_NE(nullptr, ptr);
+  // Fake unpacking.
+  for (uptr i = 0; i < kBlockSizeFrames; ++i) ptr[i] ^= uptr(0) - 1;
+  state = State::Unpacked;
+  return Get();
+}
+
+uptr StackStore::BlockInfo::Pack(Compression type) {
+  if (type == Compression::None)
+    return 0;
+
+  SpinMutexLock l(&mtx_);
+  switch (state) {
+    case State::Unpacked:
+    case State::Packed:
+      return 0;
+    case State::Storing:
+      break;
+  }
+
+  uptr *ptr = Get();
+  if (!ptr || !Stored(0))
+    return 0;
+
+  // Fake packing.
+  for (uptr i = 0; i < kBlockSizeFrames; ++i) ptr[i] ^= uptr(0) - 1;
+  state = State::Packed;
+  return kBlockSizeBytes - kBlockSizeBytes / 10;
+}
+
+uptr StackStore::BlockInfo::Allocated() const {
+  SpinMutexLock l(&mtx_);
+  switch (state) {
+    case State::Packed:
+      return kBlockSizeBytes / 10;
+    case State::Unpacked:
+    case State::Storing:
+      return kBlockSizeBytes;
+  }
+}
+
 void StackStore::BlockInfo::TestOnlyUnmap() {
   if (uptr *ptr = Get())
     UnmapOrDie(ptr, StackStore::kBlockSizeBytes);
@@ -131,4 +189,9 @@
          kBlockSizeFrames;
 }
 
+bool StackStore::BlockInfo::IsPacked() const {
+  SpinMutexLock l(&mtx_);
+  return state == State::Packed;
+}
+
 } // namespace __sanitizer
diff --git a/compiler-rt/lib/sanitizer_common/sanitizer_stackdepot.cpp b/compiler-rt/lib/sanitizer_common/sanitizer_stackdepot.cpp
--- a/compiler-rt/lib/sanitizer_common/sanitizer_stackdepot.cpp
+++ b/compiler-rt/lib/sanitizer_common/sanitizer_stackdepot.cpp
@@ -81,7 +81,9 @@
   return pack;
 }
 
-void StackDepotNode::post_process() { stackStore.Pack(); }
+void StackDepotNode::post_process() {
+  stackStore.Pack(StackStore::Compression::None);
+}
 
 StackDepotNode::args_type StackDepotNode::load(u32 id) const {
   if (!store_id)
diff --git a/compiler-rt/lib/sanitizer_common/tests/sanitizer_stack_store_test.cpp b/compiler-rt/lib/sanitizer_common/tests/sanitizer_stack_store_test.cpp
--- a/compiler-rt/lib/sanitizer_common/tests/sanitizer_stack_store_test.cpp
+++ b/compiler-rt/lib/sanitizer_common/tests/sanitizer_stack_store_test.cpp
@@ -48,6 +48,12 @@
     return res;
   }
 
+  uptr CountPackedBlocks() const {
+    uptr res = 0;
+    for (const StackStore::BlockInfo& b : store_.blocks_) res += b.IsPacked();
+    return res;
+  }
+
   uptr IdToOffset(StackStore::Id id) const { return store_.IdToOffset(id); }
 
   static constexpr uptr kBlockSizeFrames = StackStore::kBlockSizeFrames;
@@ -114,4 +120,46 @@
   EXPECT_EQ(12u, total_ready);
 }
 
+struct StackStorePackTest
+    : public StackStoreTest,
+      public ::testing::WithParamInterface<StackStore::Compression> {};
+
+INSTANTIATE_TEST_SUITE_P(PackUnpacks, StackStorePackTest,
+                         ::testing::ValuesIn({StackStore::Compression::Test}));
+
+TEST_P(StackStorePackTest, PackUnpack) {
+  std::vector<StackStore::Id> ids;
+  ForEachTrace([&](const StackTrace& s) {
+    uptr pack = 0;
+    ids.push_back(store_.Store(s, &pack));
+    if (pack) {
+      uptr before = store_.Allocated();
+      uptr diff = store_.Pack(GetParam());
+      uptr after = store_.Allocated();
+      EXPECT_EQ(before - after, diff);
+      EXPECT_LT(after, before);
+      EXPECT_GT(before - after, kBlockSizeFrames * sizeof(uptr) * 3 / 4);
+    }
+  });
+  EXPECT_EQ(122u, CountPackedBlocks());
+
+  // Unpack random block.
+  store_.Load(kBlockSizeFrames * 7 + 123);
+  EXPECT_EQ(121u, CountPackedBlocks());
+
+  // Unpack all blocks.
+  auto id = ids.begin();
+  ForEachTrace([&](const StackTrace& s) {
+    StackTrace trace = store_.Load(*(id++));
+    EXPECT_EQ(s.size, trace.size);
+    EXPECT_EQ(s.tag, trace.tag);
+    EXPECT_EQ(std::vector<uptr>(s.trace, s.trace + s.size),
+              std::vector<uptr>(trace.trace, trace.trace + trace.size));
+  });
+  EXPECT_EQ(0u, CountPackedBlocks());
+
+  EXPECT_EQ(0u, store_.Pack(GetParam()));
+  EXPECT_EQ(0u, CountPackedBlocks());
+}
+
 } // namespace __sanitizer
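
Reviewer note, not part of the patch: the BlockInfo state machine is easier to see outside the sanitizer runtime. Below is a minimal standalone C++ sketch of the same Storing -> Packed -> Unpacked lifecycle, using the in-place XOR flip that stands in for real compression under Compression::Test. MiniBlock, kFrames, and main are hypothetical names invented for illustration, and the sketch omits the Stored() "block fully written" check that the real Pack() performs.

#include <cassert>
#include <cstddef>
#include <cstdint>
#include <mutex>
#include <vector>

// Hypothetical stand-in for StackStore::BlockInfo.
class MiniBlock {
 public:
  enum class State : uint8_t { Storing, Packed, Unpacked };

  // Pack a block that is still in the Storing state; returns the number of
  // bytes notionally released. (The real code additionally requires the
  // block to be fully stored before packing.)
  size_t Pack() {
    std::lock_guard<std::mutex> l(mtx_);
    if (state_ != State::Storing) return 0;  // Already packed or unpacked.
    for (auto &v : data_) v = ~v;  // Fake compression, as in the patch.
    state_ = State::Packed;
    return data_.size() * sizeof(uintptr_t) * 9 / 10;  // Pretend 90% saved.
  }

  // Load path: transparently undo the fake packing on first access.
  const uintptr_t *GetOrUnpack() {
    std::lock_guard<std::mutex> l(mtx_);
    if (state_ == State::Packed)
      for (auto &v : data_) v = ~v;  // Fake decompression restores frames.
    state_ = State::Unpacked;
    return data_.data();
  }

  uintptr_t *MutableData() { return data_.data(); }

 private:
  static constexpr size_t kFrames = 8;
  std::vector<uintptr_t> data_ = std::vector<uintptr_t>(kFrames, 0);
  State state_ = State::Storing;
  std::mutex mtx_;
};

int main() {
  MiniBlock b;
  b.MutableData()[0] = 42;
  assert(b.Pack() > 0);              // First Pack compresses, reports savings.
  assert(b.Pack() == 0);             // Packing a packed block is a no-op.
  assert(b.GetOrUnpack()[0] == 42);  // Load restores the original frame.
  return 0;
}

This mirrors the two invariants the new PackUnpack test relies on: Pack() only transitions Storing -> Packed (so a second Pack() returns 0), and GetOrUnpack() restores the data exactly once before handing out the pointer.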