sanitizer_flat_map.h 5.1 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173
  1. //===-- sanitizer_flat_map.h ------------------------------------*- C++ -*-===//
  2. //
  3. // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
  4. // See https://llvm.org/LICENSE.txt for license information.
  5. // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
  6. //
  7. //===----------------------------------------------------------------------===//
  8. //
  9. // Part of the Sanitizer Allocator.
  10. //
  11. //===----------------------------------------------------------------------===//
  12. #ifndef SANITIZER_FLAT_MAP_H
  13. #define SANITIZER_FLAT_MAP_H
  14. #include "sanitizer_atomic.h"
  15. #include "sanitizer_common.h"
  16. #include "sanitizer_internal_defs.h"
  17. #include "sanitizer_local_address_space_view.h"
  18. #include "sanitizer_mutex.h"
  19. namespace __sanitizer {
  20. // Call these callbacks on mmap/munmap.
  21. struct NoOpMapUnmapCallback {
  22. void OnMap(uptr p, uptr size) const {}
  23. void OnUnmap(uptr p, uptr size) const {}
  24. };
// Maps integers in range [0, kSize) to values.
  26. template <typename T, u64 kSize,
  27. typename AddressSpaceViewTy = LocalAddressSpaceView>
  28. class FlatMap {
  29. public:
  30. using AddressSpaceView = AddressSpaceViewTy;
  31. void Init() { internal_memset(map_, 0, sizeof(map_)); }
  32. constexpr uptr size() const { return kSize; }
  33. bool contains(uptr idx) const {
  34. CHECK_LT(idx, kSize);
  35. return true;
  36. }
  37. T &operator[](uptr idx) {
  38. DCHECK_LT(idx, kSize);
  39. return map_[idx];
  40. }
  41. const T &operator[](uptr idx) const {
  42. DCHECK_LT(idx, kSize);
  43. return map_[idx];
  44. }
  45. private:
  46. T map_[kSize];
  47. };
  48. // TwoLevelMap maps integers in range [0, kSize1*kSize2) to values.
  49. // It is implemented as a two-dimensional array: array of kSize1 pointers
  50. // to kSize2-byte arrays. The secondary arrays are mmaped on demand.
  51. // Each value is initially zero and can be set to something else only once.
  52. // Setting and getting values from multiple threads is safe w/o extra locking.
  53. template <typename T, u64 kSize1, u64 kSize2,
  54. typename AddressSpaceViewTy = LocalAddressSpaceView,
  55. class MapUnmapCallback = NoOpMapUnmapCallback>
  56. class TwoLevelMap {
  57. static_assert(IsPowerOfTwo(kSize2), "Use a power of two for performance.");
  58. public:
  59. using AddressSpaceView = AddressSpaceViewTy;
  60. void Init() {
  61. mu_.Init();
  62. internal_memset(map1_, 0, sizeof(map1_));
  63. }
  64. void TestOnlyUnmap() {
  65. for (uptr i = 0; i < kSize1; i++) {
  66. T *p = Get(i);
  67. if (!p)
  68. continue;
  69. MapUnmapCallback().OnUnmap(reinterpret_cast<uptr>(p), MmapSize());
  70. UnmapOrDie(p, kSize2);
  71. }
  72. Init();
  73. }
  74. uptr MemoryUsage() const {
  75. uptr res = 0;
  76. for (uptr i = 0; i < kSize1; i++) {
  77. T *p = Get(i);
  78. if (!p)
  79. continue;
  80. res += MmapSize();
  81. }
  82. return res;
  83. }
  84. constexpr uptr size() const { return kSize1 * kSize2; }
  85. constexpr uptr size1() const { return kSize1; }
  86. constexpr uptr size2() const { return kSize2; }
  87. bool contains(uptr idx) const {
  88. CHECK_LT(idx, kSize1 * kSize2);
  89. return Get(idx / kSize2);
  90. }
  91. const T &operator[](uptr idx) const {
  92. DCHECK_LT(idx, kSize1 * kSize2);
  93. T *map2 = GetOrCreate(idx / kSize2);
  94. return *AddressSpaceView::Load(&map2[idx % kSize2]);
  95. }
  96. T &operator[](uptr idx) {
  97. DCHECK_LT(idx, kSize1 * kSize2);
  98. T *map2 = GetOrCreate(idx / kSize2);
  99. return *AddressSpaceView::LoadWritable(&map2[idx % kSize2]);
  100. }
  101. private:
  102. constexpr uptr MmapSize() const {
  103. return RoundUpTo(kSize2 * sizeof(T), GetPageSizeCached());
  104. }
  105. T *Get(uptr idx) const {
  106. DCHECK_LT(idx, kSize1);
  107. return reinterpret_cast<T *>(
  108. atomic_load(&map1_[idx], memory_order_acquire));
  109. }
  110. T *GetOrCreate(uptr idx) const {
  111. DCHECK_LT(idx, kSize1);
  112. // This code needs to use memory_order_acquire/consume, but we use
  113. // memory_order_relaxed for performance reasons (matters for arm64). We
  114. // expect memory_order_relaxed to be effectively equivalent to
  115. // memory_order_consume in this case for all relevant architectures: all
  116. // dependent data is reachable only by dereferencing the resulting pointer.
  117. // If relaxed load fails to see stored ptr, the code will fall back to
  118. // Create() and reload the value again with locked mutex as a memory
  119. // barrier.
  120. T *res = reinterpret_cast<T *>(atomic_load_relaxed(&map1_[idx]));
  121. if (LIKELY(res))
  122. return res;
  123. return Create(idx);
  124. }
  125. NOINLINE T *Create(uptr idx) const {
  126. SpinMutexLock l(&mu_);
  127. T *res = Get(idx);
  128. if (!res) {
  129. res = reinterpret_cast<T *>(MmapOrDie(MmapSize(), "TwoLevelMap"));
  130. MapUnmapCallback().OnMap(reinterpret_cast<uptr>(res), kSize2);
  131. atomic_store(&map1_[idx], reinterpret_cast<uptr>(res),
  132. memory_order_release);
  133. }
  134. return res;
  135. }
  136. mutable StaticSpinMutex mu_;
  137. mutable atomic_uintptr_t map1_[kSize1];
  138. };
// Convenience alias: a fixed-size byte table indexed in [0, kSize).
template <u64 kSize, typename AddressSpaceViewTy = LocalAddressSpaceView>
using FlatByteMap = FlatMap<u8, kSize, AddressSpaceViewTy>;
// Convenience alias: a lazily-mmaped byte table indexed in
// [0, kSize1 * kSize2), with optional map/unmap callbacks.
template <u64 kSize1, u64 kSize2,
          typename AddressSpaceViewTy = LocalAddressSpaceView,
          class MapUnmapCallback = NoOpMapUnmapCallback>
using TwoLevelByteMap =
    TwoLevelMap<u8, kSize1, kSize2, AddressSpaceViewTy, MapUnmapCallback>;
  146. } // namespace __sanitizer
  147. #endif