sampling_heap_profiler_unittest.cc
// Copyright 2018 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "base/sampling_heap_profiler/sampling_heap_profiler.h"

#include <stdlib.h>

#include <cinttypes>
#include <map>
#include <vector>

#include "base/allocator/allocator_shim.h"
#include "base/debug/alias.h"
#include "base/memory/raw_ptr.h"
#include "base/rand_util.h"
#include "base/sampling_heap_profiler/poisson_allocation_sampler.h"
#include "base/synchronization/waitable_event.h"
#include "base/threading/simple_thread.h"
#include "base/time/time.h"
#include "build/build_config.h"
#include "testing/gtest/include/gtest/gtest.h"
namespace base {

using ScopedSuppressRandomnessForTesting =
    PoissonAllocationSampler::ScopedSuppressRandomnessForTesting;

class SamplingHeapProfilerTest : public ::testing::Test {
 public:
  void SetUp() override {
#if BUILDFLAG(IS_APPLE)
    allocator::InitializeAllocatorShim();
#endif
    SamplingHeapProfiler::Init();

    // Ensure the PoissonAllocationSampler starts in the default state.
    ASSERT_FALSE(PoissonAllocationSampler::AreHookedSamplesMuted());
    ASSERT_FALSE(PoissonAllocationSampler::ScopedMuteThreadSamples::IsMuted());
    ASSERT_FALSE(ScopedSuppressRandomnessForTesting::IsSuppressed());
  }

  size_t GetNextSample(size_t mean_interval) {
    return PoissonAllocationSampler::GetNextSampleInterval(mean_interval);
  }

  static int GetRunningSessionsCount() {
    return SamplingHeapProfiler::Get()->running_sessions_;
  }

  static void RunStartStopLoop(SamplingHeapProfiler* profiler) {
    for (int i = 0; i < 100000; ++i) {
      profiler->Start();
      EXPECT_LE(1, GetRunningSessionsCount());
      profiler->Stop();
    }
  }
};
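
// Observer that records whether a sample of exactly |watch_size_| bytes was
// added, and whether that same sample was later removed.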
class SamplesCollector : public PoissonAllocationSampler::SamplesObserver {
 public:
  explicit SamplesCollector(size_t watch_size) : watch_size_(watch_size) {}

  void SampleAdded(void* address,
                   size_t size,
                   size_t,
                   PoissonAllocationSampler::AllocatorType,
                   const char*) override {
    if (sample_added || size != watch_size_)
      return;
    sample_address_ = address;
    sample_added = true;
  }

  void SampleRemoved(void* address) override {
    if (address == sample_address_)
      sample_removed = true;
  }

  bool sample_added = false;
  bool sample_removed = false;

 private:
  size_t watch_size_;
  raw_ptr<void> sample_address_ = nullptr;
};
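
// With randomness suppressed, sample intervals are deterministic, so a
// 10000-byte allocation against a 1024-byte sampling interval should be
// sampled; the observer should see both the add and the matching remove.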
TEST_F(SamplingHeapProfilerTest, SampleObserver) {
  ScopedSuppressRandomnessForTesting suppress;
  SamplesCollector collector(10000);
  auto* sampler = PoissonAllocationSampler::Get();
  sampler->SetSamplingInterval(1024);
  sampler->AddSamplesObserver(&collector);
  void* volatile p = malloc(10000);
  free(p);
  sampler->RemoveSamplesObserver(&collector);
  EXPECT_TRUE(collector.sample_added);
  EXPECT_TRUE(collector.sample_removed);
}
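
// Same as above, but the allocation and free happen under
// ScopedMuteThreadSamples, so neither event should reach the observer.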
TEST_F(SamplingHeapProfilerTest, SampleObserverMuted) {
  ScopedSuppressRandomnessForTesting suppress;
  SamplesCollector collector(10000);
  auto* sampler = PoissonAllocationSampler::Get();
  sampler->SetSamplingInterval(1024);
  sampler->AddSamplesObserver(&collector);
  {
    PoissonAllocationSampler::ScopedMuteThreadSamples muted_scope;
    void* volatile p = malloc(10000);
    free(p);
  }
  sampler->RemoveSamplesObserver(&collector);
  EXPECT_FALSE(collector.sample_added);
  EXPECT_FALSE(collector.sample_removed);
}
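
// Statistical sanity check on the randomized intervals: drawing intervals
// with a mean of 10000 bytes until they cover a 10 MB range should take about
// 10000000 / 10000 = 1000 draws on average.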
TEST_F(SamplingHeapProfilerTest, IntervalRandomizationSanity) {
  ASSERT_FALSE(ScopedSuppressRandomnessForTesting::IsSuppressed());
  constexpr int iterations = 50;
  constexpr size_t target = 10000000;
  int sum = 0;
  for (int i = 0; i < iterations; ++i) {
    int samples = 0;
    for (size_t value = 0; value < target; value += GetNextSample(10000))
      ++samples;
    // There should be ~ target/10000 = 1000 samples.
    sum += samples;
  }
  int mean_samples = sum / iterations;
  EXPECT_NEAR(1000, mean_samples, 100);  // 10% tolerance.
}
#if BUILDFLAG(IS_IOS)
// iOS devices generally have ~4GB of RAM with no swap and therefore need a
// lower allocation limit here.
const int kNumberOfAllocations = 1000;
#else
const int kNumberOfAllocations = 10000;
#endif
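
// NOINLINE helpers that allocate three distinct, recognizable sizes (400, 700
// and 20480 bytes) so the manual precision tests below can bucket samples by
// size.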
NOINLINE void Allocate1() {
  void* p = malloc(400);
  base::debug::Alias(&p);
}

NOINLINE void Allocate2() {
  void* p = malloc(700);
  base::debug::Alias(&p);
}

NOINLINE void Allocate3() {
  void* p = malloc(20480);
  base::debug::Alias(&p);
}

class MyThread1 : public SimpleThread {
 public:
  MyThread1() : SimpleThread("MyThread1") {}
  void Run() override {
    for (int i = 0; i < kNumberOfAllocations; ++i)
      Allocate1();
  }
};

class MyThread2 : public SimpleThread {
 public:
  MyThread2() : SimpleThread("MyThread2") {}
  void Run() override {
    for (int i = 0; i < kNumberOfAllocations; ++i)
      Allocate2();
  }
};
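
// Runs |allocate_callback| under the profiler for a number of iterations and
// prints per-size sample totals together with their relative error, so
// sampling precision can be inspected manually.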
void CheckAllocationPattern(void (*allocate_callback)()) {
  ASSERT_FALSE(ScopedSuppressRandomnessForTesting::IsSuppressed());
  auto* profiler = SamplingHeapProfiler::Get();
  profiler->SetSamplingInterval(10240);
  base::TimeTicks t0 = base::TimeTicks::Now();
  std::map<size_t, size_t> sums;
  const int iterations = 40;
  for (int i = 0; i < iterations; ++i) {
    uint32_t id = profiler->Start();
    allocate_callback();
    std::vector<SamplingHeapProfiler::Sample> samples =
        profiler->GetSamples(id);
    profiler->Stop();
    std::map<size_t, size_t> buckets;
    for (auto& sample : samples) {
      buckets[sample.size] += sample.total;
    }
    for (auto& it : buckets) {
      if (it.first != 400 && it.first != 700 && it.first != 20480)
        continue;
      sums[it.first] += it.second;
      printf("%zu,", it.second);
    }
    printf("\n");
  }
  printf("Time taken %" PRId64 "ms\n",
         (base::TimeTicks::Now() - t0).InMilliseconds());
  for (auto sum : sums) {
    intptr_t expected = sum.first * kNumberOfAllocations;
    intptr_t actual = sum.second / iterations;
    printf("%zu:\tmean: %" PRIdPTR "\trelative error: %.2f%%\n", sum.first,
           actual, 100. * (actual - expected) / expected);
  }
}
// Manual tests to check precision of the sampling profiler.
// Yes, they do leak lots of memory.

TEST_F(SamplingHeapProfilerTest, DISABLED_ParallelLargeSmallStats) {
  CheckAllocationPattern([]() {
    MyThread1 t1;
    MyThread2 t2;
    t1.Start();
    t2.Start();
    for (int i = 0; i < kNumberOfAllocations; ++i)
      Allocate3();
    t1.Join();
    t2.Join();
  });
}

TEST_F(SamplingHeapProfilerTest, DISABLED_SequentialLargeSmallStats) {
  CheckAllocationPattern([]() {
    for (int i = 0; i < kNumberOfAllocations; ++i) {
      Allocate1();
      Allocate2();
      Allocate3();
    }
  });
}
// Platform TLS: alloc+free[ns]: 22.184 alloc[ns]: 8.910 free[ns]: 13.274
// thread_local: alloc+free[ns]: 18.353 alloc[ns]: 5.021 free[ns]: 13.331
// TODO(crbug.com/1117342): Disabled on Mac.
#if BUILDFLAG(IS_MAC)
#define MAYBE_MANUAL_SamplerMicroBenchmark DISABLED_MANUAL_SamplerMicroBenchmark
#else
#define MAYBE_MANUAL_SamplerMicroBenchmark MANUAL_SamplerMicroBenchmark
#endif
TEST_F(SamplingHeapProfilerTest, MAYBE_MANUAL_SamplerMicroBenchmark) {
  // With a sampling interval of 100KB the sampler happens to record roughly
  // every 450th allocation in the browser process. We model this pattern here.
  constexpr size_t sampling_interval = 100000;
  constexpr size_t allocation_size = sampling_interval / 450;
  SamplesCollector collector(0);
  auto* sampler = PoissonAllocationSampler::Get();
  sampler->SetSamplingInterval(sampling_interval);
  sampler->AddSamplesObserver(&collector);

  int kNumAllocations = 50000000;
  base::TimeTicks t0 = base::TimeTicks::Now();
  for (int i = 1; i <= kNumAllocations; ++i) {
    sampler->RecordAlloc(
        reinterpret_cast<void*>(static_cast<intptr_t>(i)), allocation_size,
        PoissonAllocationSampler::AllocatorType::kMalloc, nullptr);
  }
  base::TimeTicks t1 = base::TimeTicks::Now();
  for (int i = 1; i <= kNumAllocations; ++i)
    sampler->RecordFree(reinterpret_cast<void*>(static_cast<intptr_t>(i)));
  base::TimeTicks t2 = base::TimeTicks::Now();

  printf(
      "alloc+free[ns]: %.3f alloc[ns]: %.3f free[ns]: %.3f "
      "alloc+free[mln/s]: %.1f total[ms]: %.1f\n",
      (t2 - t0).InNanoseconds() * 1. / kNumAllocations,
      (t1 - t0).InNanoseconds() * 1. / kNumAllocations,
      (t2 - t1).InNanoseconds() * 1. / kNumAllocations,
      kNumAllocations / (t2 - t0).InMicrosecondsF(),
      (t2 - t0).InMillisecondsF());

  sampler->RemoveSamplesObserver(&collector);
}
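
// Thread that signals |event| once it is running and then hammers
// Start()/Stop() on the profiler, racing against the main thread in the
// ConcurrentStartStop test below.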
class StartStopThread : public SimpleThread {
 public:
  explicit StartStopThread(WaitableEvent* event)
      : SimpleThread("StartStopThread"), event_(event) {}
  void Run() override {
    auto* profiler = SamplingHeapProfiler::Get();
    event_->Signal();
    SamplingHeapProfilerTest::RunStartStopLoop(profiler);
  }

 private:
  raw_ptr<WaitableEvent> event_;
};
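
// Start() and Stop() calls nest: each Start() increments the running session
// count and each Stop() decrements it.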
TEST_F(SamplingHeapProfilerTest, StartStop) {
  auto* profiler = SamplingHeapProfiler::Get();
  EXPECT_EQ(0, GetRunningSessionsCount());
  profiler->Start();
  EXPECT_EQ(1, GetRunningSessionsCount());
  profiler->Start();
  EXPECT_EQ(2, GetRunningSessionsCount());
  profiler->Stop();
  EXPECT_EQ(1, GetRunningSessionsCount());
  profiler->Stop();
  EXPECT_EQ(0, GetRunningSessionsCount());
}

// TODO(crbug.com/1116543): Test is crashing on Mac.
#if BUILDFLAG(IS_MAC)
#define MAYBE_ConcurrentStartStop DISABLED_ConcurrentStartStop
#else
#define MAYBE_ConcurrentStartStop ConcurrentStartStop
#endif
TEST_F(SamplingHeapProfilerTest, MAYBE_ConcurrentStartStop) {
  auto* profiler = SamplingHeapProfiler::Get();
  WaitableEvent event;
  StartStopThread thread(&event);
  thread.Start();
  event.Wait();
  RunStartStopLoop(profiler);
  thread.Join();
  EXPECT_EQ(0, GetRunningSessionsCount());
}
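
// ScopedMuteHookedSamplesForTesting should suppress samples that come from
// the allocator shim hooks while still reporting manually recorded
// allocations; once the scope ends, hooked allocations are reported again.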
TEST_F(SamplingHeapProfilerTest, HookedAllocatorMuted) {
  ScopedSuppressRandomnessForTesting suppress;
  EXPECT_FALSE(PoissonAllocationSampler::AreHookedSamplesMuted());
  auto* sampler = PoissonAllocationSampler::Get();
  sampler->SetSamplingInterval(1024);

  {
    PoissonAllocationSampler::ScopedMuteHookedSamplesForTesting mute_hooks;
    EXPECT_TRUE(PoissonAllocationSampler::AreHookedSamplesMuted());

    SamplesCollector collector(10000);

    // A ScopedMuteHookedSamplesForTesting exists so hooked allocations should
    // be ignored.
    sampler->AddSamplesObserver(&collector);
    void* volatile p = malloc(10000);
    free(p);
    sampler->RemoveSamplesObserver(&collector);
    EXPECT_FALSE(collector.sample_added);
    EXPECT_FALSE(collector.sample_removed);

    // Manual allocations should be captured.
    sampler->AddSamplesObserver(&collector);
    void* const kAddress = reinterpret_cast<void*>(0x1234);
    sampler->RecordAlloc(kAddress, 10000,
                         PoissonAllocationSampler::kManualForTesting, nullptr);
    sampler->RecordFree(kAddress);
    sampler->RemoveSamplesObserver(&collector);
    EXPECT_TRUE(collector.sample_added);
    EXPECT_TRUE(collector.sample_removed);
  }

  EXPECT_FALSE(PoissonAllocationSampler::AreHookedSamplesMuted());

  // Hooked allocations should be captured again.
  SamplesCollector collector(10000);
  sampler->AddSamplesObserver(&collector);
  void* volatile p = malloc(10000);
  free(p);
  sampler->RemoveSamplesObserver(&collector);
  EXPECT_TRUE(collector.sample_added);
  EXPECT_TRUE(collector.sample_removed);
}

}  // namespace base