diff --git a/compiler-rt/lib/sanitizer_common/CMakeLists.txt b/compiler-rt/lib/sanitizer_common/CMakeLists.txt
--- a/compiler-rt/lib/sanitizer_common/CMakeLists.txt
+++ b/compiler-rt/lib/sanitizer_common/CMakeLists.txt
@@ -153,6 +153,7 @@
   sanitizer_local_address_space_view.h
   sanitizer_mac.h
   sanitizer_malloc_mac.inc
+  sanitizer_mutable_array_ref.h
   sanitizer_mutex.h
   sanitizer_persistent_allocator.h
   sanitizer_placement_new.h
diff --git a/compiler-rt/lib/sanitizer_common/sanitizer_mutable_array_ref.h b/compiler-rt/lib/sanitizer_common/sanitizer_mutable_array_ref.h
new file mode 100644
--- /dev/null
+++ b/compiler-rt/lib/sanitizer_common/sanitizer_mutable_array_ref.h
@@ -0,0 +1,93 @@
+//===-- sanitizer_mutable_array_ref.h ---------------------------*- C++ -*-===//
+//
+// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
+// See https://llvm.org/LICENSE.txt for license information.
+// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
+//
+//===----------------------------------------------------------------------===//
+//
+// A lightweight reference to a contiguous array of elements of type `T`. It
+// does not own the underlying storage, so care must be taken to ensure that a
+// MutableArrayRef does not outlive the storage it references.
+//
+//===----------------------------------------------------------------------===//
+#ifndef SANITIZER_MUTABLE_ARRAY_REF_H
+#define SANITIZER_MUTABLE_ARRAY_REF_H
+
+#include "sanitizer_common.h"
+#include "sanitizer_internal_defs.h"
+#include "sanitizer_libc.h"
+
+namespace __sanitizer {
+
+template <typename T>
+class MutableArrayRef {
+ public:
+  explicit MutableArrayRef(T *data, uptr size) : data_(data), size_(size) {}
+  template <uptr N>
+  explicit MutableArrayRef(T (&ptr)[N]) : data_(ptr), size_(N) {}
+
+  using elementTy = T;
+  // TODO(dliew): Consider implementing a smart iterator that tracks whether
+  // dereferencing is safe.
+  using iterator = T *;
+
+  // Accessors
+  T *data() const { return data_; }
+  iterator begin() const { return data_; }
+  // One past the last element, so [begin(), end()) is the usual half-open
+  // range and works with range-based for loops.
+  iterator end() const { return data_ + size_; }
+  T &operator[](uptr idx) {
+    CHECK_LT(idx, size_);
+    return data_[idx];
+  }
+
+  // Properties
+  bool containsPtr(T *ptr) const { return ptr >= begin() && ptr < end(); }
+  bool isValidIndex(uptr idx) const { return idx < size_; }
+  bool overlaps(const MutableArrayRef &other) const {
+    if (size_ == 0 || other.size_ == 0)
+      return false;
+    return begin() < other.end() && other.begin() < end();
+  }
+  uptr size() const { return size_; }
+
+  // Copy operations
+  void copyFrom(const MutableArrayRef &src) {
+    CHECK_GE(size_, src.size_);
+    CHECK(!overlaps(src));
+    internal_memcpy(data_, src.data_, sizeof(T) * src.size_);
+  }
+
+  uptr truncatedCopyFrom(const MutableArrayRef &src) {
+    CHECK(!overlaps(src));
+    uptr elts_to_copy = Min(size_, src.size_);
+    internal_memcpy(data_, src.data_, sizeof(T) * elts_to_copy);
+    return elts_to_copy;
+  }
+
+  // Return a `MutableArrayRef` that drops the first `n` elements.
+  MutableArrayRef slice(uptr n) const {
+    CHECK_LE(n, size());
+    T *slice_begin = data_ + n;
+    uptr slice_size = size() - n;
+    return MutableArrayRef(slice_begin, slice_size);
+  }
+
+ private:
+  T *data_;
+  uptr size_;  // Number of elements of type T in the buffer.
+};
+
+}  // namespace __sanitizer
+
+#endif  // SANITIZER_MUTABLE_ARRAY_REF_H
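For reviewers, a minimal usage sketch of the new container (illustrative only, not part of the patch). It assumes the code is built inside the sanitizer runtime so that `uptr`, the `CHECK_*` macros, and `internal_memcpy` are available; the helper names `FillSquares` and `UsageSketch` are made up for the example.

#include "sanitizer_common/sanitizer_mutable_array_ref.h"

namespace __sanitizer {

// Fill a caller-provided view; operator[] bounds-checks every access.
static void FillSquares(MutableArrayRef<int> out) {
  for (uptr i = 0; i < out.size(); ++i)
    out[i] = static_cast<int>(i * i);
}

static void UsageSketch() {
  int storage[8];
  MutableArrayRef<int> view(storage);         // Size 8 is deduced from the C array.
  FillSquares(view);
  MutableArrayRef<int> tail = view.slice(4);  // View of the last four elements.
  int small[2];
  MutableArrayRef<int> dst(small);
  dst.truncatedCopyFrom(tail);                // Copies only as much as fits (two elements).
}

}  // namespace __sanitizer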
diff --git a/compiler-rt/lib/sanitizer_common/tests/CMakeLists.txt b/compiler-rt/lib/sanitizer_common/tests/CMakeLists.txt
--- a/compiler-rt/lib/sanitizer_common/tests/CMakeLists.txt
+++ b/compiler-rt/lib/sanitizer_common/tests/CMakeLists.txt
@@ -21,6 +21,7 @@
   sanitizer_libc_test.cpp
   sanitizer_linux_test.cpp
   sanitizer_list_test.cpp
+  sanitizer_mutable_array_ref_test.cpp
   sanitizer_mutex_test.cpp
   sanitizer_nolibc_test.cpp
   sanitizer_posix_test.cpp
diff --git a/compiler-rt/lib/sanitizer_common/tests/sanitizer_mutable_array_ref_test.cpp b/compiler-rt/lib/sanitizer_common/tests/sanitizer_mutable_array_ref_test.cpp
new file mode 100644
--- /dev/null
+++ b/compiler-rt/lib/sanitizer_common/tests/sanitizer_mutable_array_ref_test.cpp
@@ -0,0 +1,241 @@
+//===-- sanitizer_mutable_array_ref_test.cpp ------------------------------===//
+//
+// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
+// See https://llvm.org/LICENSE.txt for license information.
+// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
+//
+//===----------------------------------------------------------------------===//
+//
+// This file is a part of the sanitizer runtime.
+// Tests for sanitizer_mutable_array_ref.h.
+//
+//===----------------------------------------------------------------------===//
+#include "sanitizer_common/sanitizer_mutable_array_ref.h"
+
+#include "gtest/gtest.h"
+#include "sanitizer_common/sanitizer_type_traits.h"
+#include "sanitizer_test_utils.h"
+
+using namespace __sanitizer;
+
+static const char kDeathRegex[] = ".*CHECK failed:.*";
+
+int data[] = {0, 1, 2, 3};
+MutableArrayRef<int> ref(data, ARRAY_SIZE(data));
+
+TEST(MutableArrayRef, CArrayConstructor) {
+  MutableArrayRef<int> other_ref(data);
+  EXPECT_EQ(other_ref.size(), ARRAY_SIZE(data));
+  for (unsigned index = 0; index < ARRAY_SIZE(data); ++index) {
+    EXPECT_EQ(other_ref[index], data[index]);
+  }
+}
+
+TEST(MutableArrayRef, elementTy) {
+  static_assert(
+      __sanitizer::is_same<MutableArrayRef<int>::elementTy, int>::value,
+      "elementTy must be int");
+  static_assert(
+      __sanitizer::is_same<MutableArrayRef<const int>::elementTy,
+                           const int>::value,
+      "elementTy must be const int");
+}
+
+TEST(MutableArrayRef, IndexOperatorSafeRead) {
+  for (unsigned index = 0; index < ARRAY_SIZE(data); ++index) {
+    EXPECT_EQ(ref[index], data[index]);
+  }
+}
+
+TEST(MutableArrayRef, IndexOperatorSafeWrite) {
+  for (unsigned index = 0; index < ARRAY_SIZE(data); ++index) {
+    ref[index] = static_cast<int>(index) * 2;
+  }
+  for (unsigned index = 0; index < ARRAY_SIZE(data); ++index) {
+    EXPECT_EQ(data[index], static_cast<int>(index) * 2);
+  }
+}
+
+TEST(MutableArrayRef, IndexOperatorBadRead) {
+  EXPECT_DEATH(ref[4], kDeathRegex);
+}
+
+TEST(MutableArrayRef, Size) {
+  EXPECT_EQ(ref.size(), static_cast<uptr>(4));
+}
+
+TEST(MutableArrayRef, Data) {
+  MutableArrayRef<int> ref(data, ARRAY_SIZE(data));
+  EXPECT_EQ(ref.data(), &(data[0]));
+}
+
+TEST(MutableArrayRef, Begin) {
+  MutableArrayRef<int> ref(data, ARRAY_SIZE(data));
+  EXPECT_EQ(ref.begin(), &(data[0]));
+}
+
+TEST(MutableArrayRef, End) {
+  MutableArrayRef<int> ref(data, ARRAY_SIZE(data));
+  // end() is one past the last element.
+  EXPECT_EQ(ref.end(), data + ARRAY_SIZE(data));
+}
+
+TEST(MutableArrayRef, LoopCXX98) {
+  // Loop like it's 1998!
+  unsigned index = 0;
+  for (MutableArrayRef<int>::iterator it = ref.begin(), end = ref.end();
+       it != end; ++it) {
+    EXPECT_EQ(*it, data[index]);
+    ++index;
+  }
+  EXPECT_EQ(index, ARRAY_SIZE(data));
+}
+
+TEST(MutableArrayRef, LoopCXX11) {
+  // Loop like it's 2011!
+  unsigned index = 0;
+  for (auto &elt : ref) {
+    EXPECT_EQ(elt, data[index]);
+    ++index;
+  }
+  EXPECT_EQ(index, ARRAY_SIZE(data));
+}
+
+TEST(MutableArrayRef, LoopCXX11const) {
+  // Loop like it's 2011!
+  unsigned index = 0;
+  for (const auto &elt : ref) {
+    EXPECT_EQ(elt, data[index]);
+    ++index;
+  }
+  EXPECT_EQ(index, ARRAY_SIZE(data));
+}
+
+TEST(MutableArrayRef, IsValidIndex) {
+  unsigned index = 0;
+  for (; index < ARRAY_SIZE(data); ++index) {
+    EXPECT_TRUE(ref.isValidIndex(index));
+  }
+  EXPECT_FALSE(ref.isValidIndex(index));
+}
+
+TEST(MutableArrayRef, OverlapDisjoint) {
+  MutableArrayRef<int> left_ref(data, 2);
+  MutableArrayRef<int> right_ref(&(data[2]), 2);
+  EXPECT_FALSE(left_ref.overlaps(right_ref));
+  EXPECT_FALSE(right_ref.overlaps(left_ref));
+}
+
+TEST(MutableArrayRef, OverlapOneEltOverlap) {
+  MutableArrayRef<int> left_ref(data, 3);
+  MutableArrayRef<int> right_ref(&(data[2]), 2);
+  EXPECT_TRUE(left_ref.overlaps(right_ref));
+  EXPECT_TRUE(right_ref.overlaps(left_ref));
+}
+
+TEST(MutableArrayRef, copyFrom) {
+  int other_data[5] = {0};
+  static_assert(sizeof(other_data) > sizeof(data),
+                "other_data should be larger than data");
+  MutableArrayRef<int> other_ref(other_data, ARRAY_SIZE(other_data));
+  for (unsigned index = 0; index < ARRAY_SIZE(other_data); ++index) {
+    EXPECT_EQ(other_data[index], 0);
+  }
+  other_ref.copyFrom(ref);
+  for (unsigned index = 0; index < ARRAY_SIZE(other_data); ++index) {
+    if (index < ARRAY_SIZE(data)) {
+      EXPECT_EQ(other_ref[index], data[index]);
+    } else {
+      EXPECT_EQ(other_ref[index], 0);
+    }
+  }
+}
+
+TEST(MutableArrayRef, copyFromUnsafeOverlap) {
+  MutableArrayRef<int> other_ref(data, ARRAY_SIZE(data));
+  EXPECT_DEATH(other_ref.copyFrom(ref), kDeathRegex);
+}
+
+TEST(MutableArrayRef, copyFromUnsafeOverlapOneElt) {
+  MutableArrayRef<int> left_ref(data, 3);
+  MutableArrayRef<int> right_ref(&(data[2]), 2);
+  EXPECT_DEATH(left_ref.copyFrom(right_ref), kDeathRegex);
+}
+
+TEST(MutableArrayRef, copyFromInsufficientSpace) {
+  int other_data[2] = {0};
+  static_assert(sizeof(other_data) < sizeof(data),
+                "other_data should be smaller than data");
+  MutableArrayRef<int> other_ref(other_data, ARRAY_SIZE(other_data));
+  EXPECT_DEATH(other_ref.copyFrom(ref), kDeathRegex);
+}
+
+TEST(MutableArrayRef, truncatedCopyFrom) {
+  int other_data[ARRAY_SIZE(data) - 2] = {0};
+  MutableArrayRef<int> other_ref(other_data, ARRAY_SIZE(other_data));
+  for (unsigned index = 0; index < ARRAY_SIZE(other_data); ++index) {
+    EXPECT_EQ(other_data[index], 0);
+  }
+  uptr elts = other_ref.truncatedCopyFrom(ref);
+  EXPECT_EQ(elts, ARRAY_SIZE(other_data));
+  for (unsigned index = 0; index < ARRAY_SIZE(other_data); ++index) {
+    EXPECT_EQ(other_ref[index], ref[index]);
+  }
+}
+
+TEST(MutableArrayRef, truncatedCopyFromNoTrunc) {
+  int other_data[ARRAY_SIZE(data)] = {0};
+  MutableArrayRef<int> other_ref(other_data, ARRAY_SIZE(other_data));
+  for (unsigned index = 0; index < ARRAY_SIZE(other_data); ++index) {
+    EXPECT_EQ(other_data[index], 0);
+  }
+  uptr elts = other_ref.truncatedCopyFrom(ref);
+  EXPECT_EQ(elts, ARRAY_SIZE(data));
+  for (unsigned index = 0; index < ARRAY_SIZE(other_data); ++index) {
+    EXPECT_EQ(other_ref[index], ref[index]);
+  }
+}
+
+TEST(MutableArrayRef, truncatedCopyFromUnsafeOverlap) {
+  MutableArrayRef<int> other_ref(data, ARRAY_SIZE(data));
+  EXPECT_DEATH(other_ref.truncatedCopyFrom(ref), kDeathRegex);
+}
+
+TEST(MutableArrayRef, containsPtr) {
+  for (unsigned index = 0; index < ARRAY_SIZE(data); ++index) {
+    EXPECT_TRUE(ref.containsPtr(&(data[index])));
+  }
+  EXPECT_FALSE(ref.containsPtr(&(data[ARRAY_SIZE(data)])));
+  EXPECT_FALSE(ref.containsPtr(nullptr));
+}
+
+TEST(MutableArrayRef, zeroSize) {
+  MutableArrayRef<int> null_ref(nullptr, 0);
+  EXPECT_EQ(null_ref.size(), static_cast<uptr>(0));
+  EXPECT_EQ(null_ref.begin(), null_ref.end());
+  EXPECT_EQ(null_ref.begin(), nullptr);
+}
+
+TEST(MutableArrayRef, zeroSizeBadAccess) {
+  MutableArrayRef<int> null_ref(nullptr, 0);
+  EXPECT_DEATH(null_ref[0], kDeathRegex);
+}
+
+TEST(MutableArrayRef, slice) {
+  auto slice = ref.slice(2);
+  EXPECT_EQ(slice.size(), ref.size() - 2);
+  for (unsigned index = 0; index < slice.size(); ++index) {
+    EXPECT_EQ(slice[index], ref[index + 2]);
+  }
+}
+
+TEST(MutableArrayRef, sliceToZero) {
+  auto slice = ref.slice(ref.size());
+  EXPECT_EQ(slice.size(), static_cast<uptr>(0));
+}
+
+TEST(MutableArrayRef, sliceTooLarge) {
+  EXPECT_DEATH(ref.slice(ref.size() + 1), kDeathRegex);
+}
+
+TEST(MutableArrayRef, assign) {
+  int other_data[2] = {0};
+  MutableArrayRef<int> other_ref(other_data);
+  EXPECT_EQ(other_ref.size(), ARRAY_SIZE(other_data));
+  other_ref = ref;
+  EXPECT_EQ(other_ref.size(), ref.size());
+  EXPECT_EQ(other_ref.data(), ref.data());
+}
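A note on the `assign` test above: MutableArrayRef relies on the implicitly defaulted copy constructor and copy assignment, so assigning one ref to another rebinds the view to the source's storage rather than copying elements; element copies go through `copyFrom`/`truncatedCopyFrom`. And because the class never owns storage, a view must not outlive the buffer it was constructed over. A minimal sketch of the rebind/copy distinction, under the same assumptions as the earlier sketch (the function name `RebindVsCopy` is made up):

#include "sanitizer_common/sanitizer_mutable_array_ref.h"

namespace __sanitizer {

static void RebindVsCopy() {
  int a[4] = {1, 2, 3, 4};
  int b[4] = {0, 0, 0, 0};
  MutableArrayRef<int> view_a(a);
  MutableArrayRef<int> view_b(b);

  view_b.copyFrom(view_a);  // Element copy: b now holds {1, 2, 3, 4}.
  view_b = view_a;          // Rebind: view_b now refers to a's storage, not b's.
  view_b[0] = 42;           // Writes a[0], not b[0].
}

}  // namespace __sanitizer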