stack_sampling_profiler_unittest.cc

  1. // Copyright 2015 The Chromium Authors. All rights reserved.
  2. // Use of this source code is governed by a BSD-style license that can be
  3. // found in the LICENSE file.
  4. #include <stddef.h>
  5. #include <stdint.h>
  6. #include <cstdlib>
  7. #include <memory>
  8. #include <set>
  9. #include <utility>
  10. #include <vector>
  11. #include "base/bind.h"
  12. #include "base/callback.h"
  13. #include "base/compiler_specific.h"
  14. #include "base/files/file_util.h"
  15. #include "base/location.h"
  16. #include "base/memory/ptr_util.h"
  17. #include "base/memory/raw_ptr.h"
  18. #include "base/metrics/metrics_hashes.h"
  19. #include "base/profiler/profiler_buildflags.h"
  20. #include "base/profiler/sample_metadata.h"
  21. #include "base/profiler/stack_sampler.h"
  22. #include "base/profiler/stack_sampling_profiler.h"
  23. #include "base/profiler/stack_sampling_profiler_test_util.h"
  24. #include "base/profiler/unwinder.h"
  25. #include "base/ranges/algorithm.h"
  26. #include "base/run_loop.h"
  27. #include "base/scoped_native_library.h"
  28. #include "base/strings/utf_string_conversions.h"
  29. #include "base/synchronization/lock.h"
  30. #include "base/synchronization/waitable_event.h"
  31. #include "base/test/bind.h"
  32. #include "base/threading/simple_thread.h"
  33. #include "base/time/time.h"
  34. #include "build/build_config.h"
  35. #include "testing/gtest/include/gtest/gtest.h"
  36. #if BUILDFLAG(IS_WIN)
  37. #include <intrin.h>
  38. #include <malloc.h>
  39. #include <windows.h>
  40. #else
  41. #include <alloca.h>
  42. #endif
  43. // STACK_SAMPLING_PROFILER_SUPPORTED is used to conditionally enable the tests
  44. // below for supported platforms (currently Win x64, Mac x64, iOS 64-bit, some
  45. // Android, and ChromeOS x64).
  46. // ChromeOS: These don't run under MSan because parts of the stack aren't
  47. // initialized.
  48. #if (BUILDFLAG(IS_WIN) && defined(ARCH_CPU_X86_64)) || \
  49. (BUILDFLAG(IS_MAC) && defined(ARCH_CPU_X86_64)) || \
  50. (BUILDFLAG(IS_IOS) && defined(ARCH_CPU_64_BITS)) || \
  51. (BUILDFLAG(IS_ANDROID) && BUILDFLAG(ENABLE_ARM_CFI_TABLE)) || \
  52. (BUILDFLAG(IS_CHROMEOS) && defined(ARCH_CPU_X86_64) && \
  53. !defined(MEMORY_SANITIZER))
  54. #define STACK_SAMPLING_PROFILER_SUPPORTED 1
  55. #endif
  56. namespace base {
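// PROFILER_TEST_F registers a test normally on platforms where the profiler is
// supported and with the DISABLED_ prefix elsewhere, so the test code still
// compiles everywhere but gtest skips it by default on unsupported platforms.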
  57. #if defined(STACK_SAMPLING_PROFILER_SUPPORTED)
  58. #define PROFILER_TEST_F(TestClass, TestName) TEST_F(TestClass, TestName)
  59. #else
  60. #define PROFILER_TEST_F(TestClass, TestName) \
  61. TEST_F(TestClass, DISABLED_##TestName)
  62. #endif
  63. using SamplingParams = StackSamplingProfiler::SamplingParams;
  64. namespace {
  65. // State provided to the ProfileBuilder's ApplyMetadataRetrospectively function.
  66. struct RetrospectiveMetadata {
  67. TimeTicks period_start;
  68. TimeTicks period_end;
  69. MetadataRecorder::Item item;
  70. };
  71. // Profile consists of a set of samples and other sampling information.
  72. struct Profile {
  73. // The collected samples.
  74. std::vector<std::vector<Frame>> samples;
  75. // The number of invocations of RecordMetadata().
  76. int record_metadata_count;
  77. // The retrospective metadata requests.
  78. std::vector<RetrospectiveMetadata> retrospective_metadata;
  79. // Duration of this profile.
  80. TimeDelta profile_duration;
  81. // Time between samples.
  82. TimeDelta sampling_period;
  83. };
  84. // The callback type used to collect a profile. The passed Profile is move-only.
  85. // Other threads, including the UI thread, may block on callback completion so
  86. // this should run as quickly as possible.
  87. using ProfileCompletedCallback = OnceCallback<void(Profile)>;
  88. // TestProfileBuilder collects samples produced by the profiler.
  89. class TestProfileBuilder : public ProfileBuilder {
  90. public:
  91. TestProfileBuilder(ModuleCache* module_cache,
  92. ProfileCompletedCallback callback);
  93. TestProfileBuilder(const TestProfileBuilder&) = delete;
  94. TestProfileBuilder& operator=(const TestProfileBuilder&) = delete;
  95. ~TestProfileBuilder() override;
  96. // ProfileBuilder:
  97. ModuleCache* GetModuleCache() override;
  98. void RecordMetadata(
  99. const MetadataRecorder::MetadataProvider& metadata_provider) override;
  100. void ApplyMetadataRetrospectively(
  101. TimeTicks period_start,
  102. TimeTicks period_end,
  103. const MetadataRecorder::Item& item) override;
  104. void OnSampleCompleted(std::vector<Frame> sample,
  105. TimeTicks sample_timestamp) override;
  106. void OnProfileCompleted(TimeDelta profile_duration,
  107. TimeDelta sampling_period) override;
  108. private:
  109. raw_ptr<ModuleCache> module_cache_;
  110. // The set of recorded samples.
  111. std::vector<std::vector<Frame>> samples_;
  112. // The number of invocations of RecordMetadata().
  113. int record_metadata_count_ = 0;
  114. // The retrospective metadata requests.
  115. std::vector<RetrospectiveMetadata> retrospective_metadata_;
  116. // Callback made when sampling a profile completes.
  117. ProfileCompletedCallback callback_;
  118. };
  119. TestProfileBuilder::TestProfileBuilder(ModuleCache* module_cache,
  120. ProfileCompletedCallback callback)
  121. : module_cache_(module_cache), callback_(std::move(callback)) {}
  122. TestProfileBuilder::~TestProfileBuilder() = default;
  123. ModuleCache* TestProfileBuilder::GetModuleCache() {
  124. return module_cache_;
  125. }
  126. void TestProfileBuilder::RecordMetadata(
  127. const MetadataRecorder::MetadataProvider& metadata_provider) {
  128. ++record_metadata_count_;
  129. }
  130. void TestProfileBuilder::ApplyMetadataRetrospectively(
  131. TimeTicks period_start,
  132. TimeTicks period_end,
  133. const MetadataRecorder::Item& item) {
  134. retrospective_metadata_.push_back(
  135. RetrospectiveMetadata{period_start, period_end, item});
  136. }
  137. void TestProfileBuilder::OnSampleCompleted(std::vector<Frame> sample,
  138. TimeTicks sample_timestamp) {
  139. samples_.push_back(std::move(sample));
  140. }
  141. void TestProfileBuilder::OnProfileCompleted(TimeDelta profile_duration,
  142. TimeDelta sampling_period) {
  143. std::move(callback_).Run(Profile{samples_, record_metadata_count_,
  144. retrospective_metadata_, profile_duration,
  145. sampling_period});
  146. }
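// Note that |callback_| is a OnceCallback: it is consumed by
// OnProfileCompleted(), which therefore runs at most once per builder.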
  147. // Unloads |library| and returns when it has completed unloading. Unloading a
  148. // library is asynchronous on Windows, so simply calling UnloadNativeLibrary()
  149. // is insufficient to ensure it's been unloaded.
  150. void SynchronousUnloadNativeLibrary(NativeLibrary library) {
  151. UnloadNativeLibrary(library);
  152. #if BUILDFLAG(IS_WIN)
  153. // NativeLibrary is a typedef for HMODULE, which is actually the base address
  154. // of the module.
  155. uintptr_t module_base_address = reinterpret_cast<uintptr_t>(library);
  156. HMODULE module_handle;
  157. // Keep retrying until GetModuleHandleEx() fails with ERROR_MOD_NOT_FOUND,
  158. // which indicates the module has finished unloading.
  158. while (::GetModuleHandleEx(GET_MODULE_HANDLE_EX_FLAG_FROM_ADDRESS |
  159. GET_MODULE_HANDLE_EX_FLAG_UNCHANGED_REFCOUNT,
  160. reinterpret_cast<LPCTSTR>(module_base_address),
  161. &module_handle) ||
  162. ::GetLastError() != ERROR_MOD_NOT_FOUND) {
  163. PlatformThread::Sleep(Milliseconds(1));
  164. }
  165. #elif BUILDFLAG(IS_APPLE) || BUILDFLAG(IS_ANDROID) || BUILDFLAG(IS_CHROMEOS)
  166. // Unloading a library is synchronous on Apple platforms, Android, and ChromeOS.
  167. #else
  168. NOTIMPLEMENTED();
  169. #endif
  170. }
  171. void WithTargetThread(ProfileCallback profile_callback) {
  172. UnwindScenario scenario(BindRepeating(&CallWithPlainFunction));
  173. WithTargetThread(&scenario, std::move(profile_callback));
  174. }
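// Bundles a StackSamplingProfiler together with the Profile it produces and the
// WaitableEvent signaled when sampling completes, for use by the tests below.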
  175. struct TestProfilerInfo {
  176. TestProfilerInfo(SamplingProfilerThreadToken thread_token,
  177. const SamplingParams& params,
  178. ModuleCache* module_cache,
  179. StackSamplerTestDelegate* delegate = nullptr)
  180. : completed(WaitableEvent::ResetPolicy::MANUAL,
  181. WaitableEvent::InitialState::NOT_SIGNALED),
  182. profiler(thread_token,
  183. params,
  184. std::make_unique<TestProfileBuilder>(
  185. module_cache,
  186. BindLambdaForTesting([this](Profile result_profile) {
  187. profile = std::move(result_profile);
  188. completed.Signal();
  189. })),
  190. CreateCoreUnwindersFactoryForTesting(module_cache),
  191. RepeatingClosure(),
  192. delegate) {}
  193. TestProfilerInfo(const TestProfilerInfo&) = delete;
  194. TestProfilerInfo& operator=(const TestProfilerInfo&) = delete;
  195. // Member order matters: members are destroyed in reverse declaration order, so
  196. // |profiler| is destroyed before the |profile| and |completed| its callback references.
  197. Profile profile;
  198. WaitableEvent completed;
  199. StackSamplingProfiler profiler;
  200. };
  201. // Captures samples as specified by |params| on the TargetThread, and returns
  202. // them. Waits up to |profiler_wait_time| for the profiler to complete.
  203. std::vector<std::vector<Frame>> CaptureSamples(const SamplingParams& params,
  204. TimeDelta profiler_wait_time,
  205. ModuleCache* module_cache) {
  206. std::vector<std::vector<Frame>> samples;
  207. WithTargetThread(BindLambdaForTesting(
  208. [&](SamplingProfilerThreadToken target_thread_token) {
  209. TestProfilerInfo info(target_thread_token, params, module_cache);
  210. info.profiler.Start();
  211. info.completed.TimedWait(profiler_wait_time);
  212. info.profiler.Stop();
  213. info.completed.Wait();
  214. samples = std::move(info.profile.samples);
  215. }));
  216. return samples;
  217. }
  218. // Waits for one of multiple samplings to complete.
  219. size_t WaitForSamplingComplete(
  220. const std::vector<std::unique_ptr<TestProfilerInfo>>& infos) {
  221. // Map unique_ptrs to something that WaitMany can accept.
  222. std::vector<WaitableEvent*> sampling_completed_rawptrs(infos.size());
  223. ranges::transform(infos, sampling_completed_rawptrs.begin(),
  224. [](const std::unique_ptr<TestProfilerInfo>& info) {
  225. return &info.get()->completed;
  226. });
  227. // Wait for one profiler to finish.
  228. return WaitableEvent::WaitMany(sampling_completed_rawptrs.data(),
  229. sampling_completed_rawptrs.size());
  230. }
  231. // Returns a duration that is longer than the test timeout. We would use
  232. // TimeDelta::Max() but https://crbug.com/465948.
  233. TimeDelta AVeryLongTimeDelta() {
  234. return Days(1);
  235. }
  236. // Tests the scenario where the library is unloaded after copying the stack, but
  237. // before walking it. If |wait_until_unloaded| is true, ensures that the
  238. // asynchronous library unloading has completed before walking the stack. If
  239. // false, the unloading may still be occurring during the stack walk.
  240. void TestLibraryUnload(bool wait_until_unloaded, ModuleCache* module_cache) {
  241. // Test delegate that supports intervening between the copying of the stack
  242. // and the walking of the stack.
  243. class StackCopiedSignaler : public StackSamplerTestDelegate {
  244. public:
  245. StackCopiedSignaler(WaitableEvent* stack_copied,
  246. WaitableEvent* start_stack_walk,
  247. bool wait_to_walk_stack)
  248. : stack_copied_(stack_copied),
  249. start_stack_walk_(start_stack_walk),
  250. wait_to_walk_stack_(wait_to_walk_stack) {}
  251. void OnPreStackWalk() override {
  252. stack_copied_->Signal();
  253. if (wait_to_walk_stack_)
  254. start_stack_walk_->Wait();
  255. }
  256. private:
  257. const raw_ptr<WaitableEvent> stack_copied_;
  258. const raw_ptr<WaitableEvent> start_stack_walk_;
  259. const bool wait_to_walk_stack_;
  260. };
  261. SamplingParams params;
  262. params.sampling_interval = Milliseconds(0);
  263. params.samples_per_profile = 1;
  264. NativeLibrary other_library = LoadOtherLibrary();
  265. UnwindScenario scenario(
  266. BindRepeating(&CallThroughOtherLibrary, Unretained(other_library)));
  267. UnwindScenario::SampleEvents events;
  268. TargetThread target_thread(
  269. BindLambdaForTesting([&]() { scenario.Execute(&events); }));
  270. target_thread.Start();
  271. events.ready_for_sample.Wait();
  272. WaitableEvent sampling_thread_completed(
  273. WaitableEvent::ResetPolicy::MANUAL,
  274. WaitableEvent::InitialState::NOT_SIGNALED);
  275. Profile profile;
  276. WaitableEvent stack_copied(WaitableEvent::ResetPolicy::MANUAL,
  277. WaitableEvent::InitialState::NOT_SIGNALED);
  278. WaitableEvent start_stack_walk(WaitableEvent::ResetPolicy::MANUAL,
  279. WaitableEvent::InitialState::NOT_SIGNALED);
  280. StackCopiedSignaler test_delegate(&stack_copied, &start_stack_walk,
  281. wait_until_unloaded);
  282. StackSamplingProfiler profiler(
  283. target_thread.thread_token(), params,
  284. std::make_unique<TestProfileBuilder>(
  285. module_cache,
  286. BindLambdaForTesting(
  287. [&profile, &sampling_thread_completed](Profile result_profile) {
  288. profile = std::move(result_profile);
  289. sampling_thread_completed.Signal();
  290. })),
  291. CreateCoreUnwindersFactoryForTesting(module_cache), RepeatingClosure(),
  292. &test_delegate);
  293. profiler.Start();
  294. // Wait for the stack to be copied and the target thread to be resumed.
  295. stack_copied.Wait();
  296. // Cause the target thread to finish, so that it's no longer executing code in
  297. // the library we're about to unload.
  298. events.sample_finished.Signal();
  299. target_thread.Join();
  300. // Unload the library now that it's not being used.
  301. if (wait_until_unloaded)
  302. SynchronousUnloadNativeLibrary(other_library);
  303. else
  304. UnloadNativeLibrary(other_library);
  305. // Let the stack walk commence after unloading the library, if we're waiting
  306. // on that event.
  307. start_stack_walk.Signal();
  308. // Wait for the sampling thread to complete and fill out |profile|.
  309. sampling_thread_completed.Wait();
  310. // Look up the sample.
  311. ASSERT_EQ(1u, profile.samples.size());
  312. const std::vector<Frame>& sample = profile.samples[0];
  313. if (wait_until_unloaded) {
  314. // We expect the stack to look something like this, with the frame in the
  315. // now-unloaded library having a null module.
  316. //
  317. // ... WaitableEvent and system frames ...
  318. // WaitForSample()
  319. // TargetThread::OtherLibraryCallback
  320. // <frame in unloaded library>
  321. EXPECT_EQ(nullptr, sample.back().module)
  322. << "Stack:\n"
  323. << FormatSampleForDiagnosticOutput(sample);
  324. ExpectStackContains(sample, {scenario.GetWaitForSampleAddressRange()});
  325. ExpectStackDoesNotContain(sample,
  326. {scenario.GetSetupFunctionAddressRange(),
  327. scenario.GetOuterFunctionAddressRange()});
  328. } else {
  329. // We didn't wait for the asynchronous unloading to complete, so the results
  330. // are non-deterministic: if the library finished unloading we should have
  331. // the same stack as |wait_until_unloaded|, if not we should have the full
  332. // stack. The important thing is that we should not crash.
  333. if (!sample.back().module) {
  334. // This is the same case as |wait_until_unloaded|.
  335. ExpectStackContains(sample, {scenario.GetWaitForSampleAddressRange()});
  336. ExpectStackDoesNotContain(sample,
  337. {scenario.GetSetupFunctionAddressRange(),
  338. scenario.GetOuterFunctionAddressRange()});
  339. return;
  340. }
  341. ExpectStackContains(sample, {scenario.GetWaitForSampleAddressRange(),
  342. scenario.GetSetupFunctionAddressRange(),
  343. scenario.GetOuterFunctionAddressRange()});
  344. }
  345. }
  346. // Provides a suitable (and clean) environment for the tests below. All tests
  347. // must use this fixture so that proper clean-up is done and later tests start
  348. // from a clean state.
  349. class StackSamplingProfilerTest : public testing::Test {
  350. public:
  351. void SetUp() override {
  352. // The idle-shutdown time is too long for convenient (and accurate) testing.
  353. // That behavior is checked instead by artificially triggering it through
  354. // the TestPeer.
  355. StackSamplingProfiler::TestPeer::DisableIdleShutdown();
  356. }
  357. void TearDown() override {
  358. // Be a good citizen and clean up after ourselves. This also re-enables the
  359. // idle-shutdown behavior.
  360. StackSamplingProfiler::TestPeer::Reset();
  361. }
  362. protected:
  363. ModuleCache* module_cache() { return &module_cache_; }
  364. private:
  365. ModuleCache module_cache_;
  366. };
  367. } // namespace
  368. // Checks that the basic expected information is present in sampled frames.
  369. //
  370. // macOS ASAN is not yet supported - crbug.com/718628.
  371. //
  372. // TODO(https://crbug.com/1100175): Enable this test again for Android with
  373. // ASAN. This is now disabled because the android-asan bot fails.
  374. //
  375. // If we're running the ChromeOS unit tests on Linux, this test will never pass
  376. // because Ubuntu's libc isn't compiled with frame pointers. Skip if not a real
  377. // ChromeOS device.
  378. #if (defined(ADDRESS_SANITIZER) && BUILDFLAG(IS_APPLE)) || \
  379. (defined(ADDRESS_SANITIZER) && BUILDFLAG(IS_ANDROID)) || \
  380. (BUILDFLAG(IS_CHROMEOS) && !BUILDFLAG(IS_CHROMEOS_DEVICE))
  381. #define MAYBE_Basic DISABLED_Basic
  382. #else
  383. #define MAYBE_Basic Basic
  384. #endif
  385. PROFILER_TEST_F(StackSamplingProfilerTest, MAYBE_Basic) {
  386. UnwindScenario scenario(BindRepeating(&CallWithPlainFunction));
  387. const std::vector<Frame>& sample = SampleScenario(&scenario, module_cache());
  388. // Check that all the modules are valid.
  389. for (const auto& frame : sample)
  390. EXPECT_NE(nullptr, frame.module);
  391. // The stack should contain a full unwind.
  392. ExpectStackContains(sample, {scenario.GetWaitForSampleAddressRange(),
  393. scenario.GetSetupFunctionAddressRange(),
  394. scenario.GetOuterFunctionAddressRange()});
  395. }
  396. // A simple unwinder that always generates one frame then aborts the stack walk.
  397. class TestAuxUnwinder : public Unwinder {
  398. public:
  399. TestAuxUnwinder(const Frame& frame_to_report,
  400. base::RepeatingClosure add_initial_modules_callback)
  401. : frame_to_report_(frame_to_report),
  402. add_initial_modules_callback_(std::move(add_initial_modules_callback)) {
  403. }
  404. TestAuxUnwinder(const TestAuxUnwinder&) = delete;
  405. TestAuxUnwinder& operator=(const TestAuxUnwinder&) = delete;
  406. void InitializeModules() override {
  407. if (add_initial_modules_callback_)
  408. add_initial_modules_callback_.Run();
  409. }
  410. bool CanUnwindFrom(const Frame& current_frame) const override { return true; }
  411. UnwindResult TryUnwind(RegisterContext* thread_context,
  412. uintptr_t stack_top,
  413. std::vector<Frame>* stack) const override {
  414. stack->push_back(frame_to_report_);
  415. return UnwindResult::kAborted;
  416. }
  417. private:
  418. const Frame frame_to_report_;
  419. base::RepeatingClosure add_initial_modules_callback_;
  420. };
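// Because it always appends |frame_to_report_| and then aborts the walk, tests
// that install a TestAuxUnwinder can tell from the collected frames whether the
// aux unwinder actually ran.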
  421. // Checks that the profiler handles stacks containing dynamically-allocated
  422. // stack memory.
  423. // macOS ASAN is not yet supported - crbug.com/718628.
  424. // Android is not supported since Chrome unwind tables don't support dynamic
  425. // frames.
  426. // If we're running the ChromeOS unit tests on Linux, this test will never pass
  427. // because Ubuntu's libc isn't compiled with frame pointers. Skip if not a real
  428. // ChromeOS device.
  429. #if (defined(ADDRESS_SANITIZER) && BUILDFLAG(IS_APPLE)) || \
  430. BUILDFLAG(IS_ANDROID) || \
  431. (BUILDFLAG(IS_CHROMEOS) && !BUILDFLAG(IS_CHROMEOS_DEVICE))
  432. #define MAYBE_Alloca DISABLED_Alloca
  433. #else
  434. #define MAYBE_Alloca Alloca
  435. #endif
  436. PROFILER_TEST_F(StackSamplingProfilerTest, MAYBE_Alloca) {
  437. UnwindScenario scenario(BindRepeating(&CallWithAlloca));
  438. const std::vector<Frame>& sample = SampleScenario(&scenario, module_cache());
  439. // The stack should contain a full unwind.
  440. ExpectStackContains(sample, {scenario.GetWaitForSampleAddressRange(),
  441. scenario.GetSetupFunctionAddressRange(),
  442. scenario.GetOuterFunctionAddressRange()});
  443. }
  444. // Checks that a stack that runs through another library produces a stack with
  445. // the expected functions.
  446. // macOS ASAN is not yet supported - crbug.com/718628.
  447. // iOS Chrome doesn't support loading native libraries.
  448. // Android is not supported when EXCLUDE_UNWIND_TABLES is set because
  449. // |other_library| doesn't have unwind tables.
  450. // TODO(https://crbug.com/1100175): Enable this test again for Android with
  451. // ASAN. This is now disabled because the android-asan bot fails.
  452. // If we're running the ChromeOS unit tests on Linux, this test will never pass
  453. // because Ubuntu's libc isn't compiled with frame pointers. Skip if not a real
  454. // ChromeOS device.
  455. #if (defined(ADDRESS_SANITIZER) && BUILDFLAG(IS_APPLE)) || \
  456. BUILDFLAG(IS_IOS) || \
  457. (BUILDFLAG(IS_ANDROID) && BUILDFLAG(EXCLUDE_UNWIND_TABLES)) || \
  458. (BUILDFLAG(IS_ANDROID) && defined(ADDRESS_SANITIZER)) || \
  459. (BUILDFLAG(IS_CHROMEOS) && !BUILDFLAG(IS_CHROMEOS_DEVICE))
  460. #define MAYBE_OtherLibrary DISABLED_OtherLibrary
  461. #else
  462. #define MAYBE_OtherLibrary OtherLibrary
  463. #endif
  464. PROFILER_TEST_F(StackSamplingProfilerTest, MAYBE_OtherLibrary) {
  465. ScopedNativeLibrary other_library(LoadOtherLibrary());
  466. UnwindScenario scenario(
  467. BindRepeating(&CallThroughOtherLibrary, Unretained(other_library.get())));
  468. const std::vector<Frame>& sample = SampleScenario(&scenario, module_cache());
  469. // The stack should contain a full unwind.
  470. ExpectStackContains(sample, {scenario.GetWaitForSampleAddressRange(),
  471. scenario.GetSetupFunctionAddressRange(),
  472. scenario.GetOuterFunctionAddressRange()});
  473. }
  474. // Checks that a stack that runs through a library that is unloading produces a
  475. // stack, and doesn't crash.
  476. // Unloading is synchronous on the Mac, so this test is inapplicable.
  477. // Android is not supported when EXCLUDE_UNWIND_TABLES is set because
  478. // |other_library| doesn't have unwind tables.
  479. // TODO(https://crbug.com/1100175): Enable this test again for Android with
  480. // ASAN. This is now disabled because the android-asan bot fails.
  481. // If we're running the ChromeOS unit tests on Linux, this test will never pass
  482. // because Ubuntu's libc isn't compiled with frame pointers. Skip if not a real
  483. // ChromeOS device.
  484. #if BUILDFLAG(IS_APPLE) || \
  485. (BUILDFLAG(IS_ANDROID) && BUILDFLAG(EXCLUDE_UNWIND_TABLES)) || \
  486. (BUILDFLAG(IS_ANDROID) && defined(ADDRESS_SANITIZER)) || \
  487. (BUILDFLAG(IS_CHROMEOS) && !BUILDFLAG(IS_CHROMEOS_DEVICE))
  488. #define MAYBE_UnloadingLibrary DISABLED_UnloadingLibrary
  489. #else
  490. #define MAYBE_UnloadingLibrary UnloadingLibrary
  491. #endif
  492. PROFILER_TEST_F(StackSamplingProfilerTest, MAYBE_UnloadingLibrary) {
  493. TestLibraryUnload(false, module_cache());
  494. }
  495. // Checks that a stack that runs through a library that has been unloaded
  496. // produces a stack, and doesn't crash.
  497. // macOS ASAN is not yet supported - crbug.com/718628.
  498. // Android is not supported since modules are found before unwinding.
  499. // If we're running the ChromeOS unit tests on Linux, this test will never pass
  500. // because Ubuntu's libc isn't compiled with frame pointers. Skip if not a real
  501. // ChromeOS device.
  502. #if (defined(ADDRESS_SANITIZER) && BUILDFLAG(IS_APPLE)) || \
  503. BUILDFLAG(IS_ANDROID) || BUILDFLAG(IS_IOS) || \
  504. (BUILDFLAG(IS_CHROMEOS) && !BUILDFLAG(IS_CHROMEOS_DEVICE))
  505. #define MAYBE_UnloadedLibrary DISABLED_UnloadedLibrary
  506. #else
  507. #define MAYBE_UnloadedLibrary UnloadedLibrary
  508. #endif
  509. PROFILER_TEST_F(StackSamplingProfilerTest, MAYBE_UnloadedLibrary) {
  510. TestLibraryUnload(true, module_cache());
  511. }
  512. // Checks that a profiler can stop/destruct without ever having started.
  513. PROFILER_TEST_F(StackSamplingProfilerTest, StopWithoutStarting) {
  514. WithTargetThread(BindLambdaForTesting(
  515. [this](SamplingProfilerThreadToken target_thread_token) {
  516. SamplingParams params;
  517. params.sampling_interval = Milliseconds(0);
  518. params.samples_per_profile = 1;
  519. Profile profile;
  520. WaitableEvent sampling_completed(
  521. WaitableEvent::ResetPolicy::MANUAL,
  522. WaitableEvent::InitialState::NOT_SIGNALED);
  523. StackSamplingProfiler profiler(
  524. target_thread_token, params,
  525. std::make_unique<TestProfileBuilder>(
  526. module_cache(),
  527. BindLambdaForTesting(
  528. [&profile, &sampling_completed](Profile result_profile) {
  529. profile = std::move(result_profile);
  530. sampling_completed.Signal();
  531. })),
  532. CreateCoreUnwindersFactoryForTesting(module_cache()));
  533. profiler.Stop(); // Constructed but never started.
  534. EXPECT_FALSE(sampling_completed.IsSignaled());
  535. }));
  536. }
  537. // Checks that it's okay to stop a profiler before it finishes even when the
  538. // sampling thread continues to run.
  539. PROFILER_TEST_F(StackSamplingProfilerTest, StopSafely) {
  540. // Test delegate that counts samples.
  541. class SampleRecordedCounter : public StackSamplerTestDelegate {
  542. public:
  543. SampleRecordedCounter() = default;
  544. void OnPreStackWalk() override {
  545. AutoLock lock(lock_);
  546. ++count_;
  547. }
  548. size_t Get() {
  549. AutoLock lock(lock_);
  550. return count_;
  551. }
  552. private:
  553. Lock lock_;
  554. size_t count_ = 0;
  555. };
  556. WithTargetThread(
  557. BindLambdaForTesting([](SamplingProfilerThreadToken target_thread_token) {
  558. SamplingParams params[2];
  559. // Providing an initial delay makes it more likely that both will be
  560. // scheduled before either starts to run. Once started, samples will
  561. // run ordered by their scheduled, interleaved times regardless of the
  562. // interval at which the sampling thread wakes up.
  563. params[0].initial_delay = Milliseconds(10);
  564. params[0].sampling_interval = Milliseconds(1);
  565. params[0].samples_per_profile = 100000;
  566. params[1].initial_delay = Milliseconds(10);
  567. params[1].sampling_interval = Milliseconds(1);
  568. params[1].samples_per_profile = 100000;
  569. SampleRecordedCounter samples_recorded[std::size(params)];
  570. ModuleCache module_cache1, module_cache2;
  571. TestProfilerInfo profiler_info0(target_thread_token, params[0],
  572. &module_cache1, &samples_recorded[0]);
  573. TestProfilerInfo profiler_info1(target_thread_token, params[1],
  574. &module_cache2, &samples_recorded[1]);
  575. profiler_info0.profiler.Start();
  576. profiler_info1.profiler.Start();
  577. // Wait for both to start accumulating samples. Using a WaitableEvent is
  578. // possible but gets complicated later on because there's no way of
  579. // knowing if 0 or 1 additional sample will be taken after Stop() and
  580. // thus no way of knowing how many Wait() calls to make on it.
  581. while (samples_recorded[0].Get() == 0 || samples_recorded[1].Get() == 0)
  582. PlatformThread::Sleep(Milliseconds(1));
  583. // Ensure that the first sampler can be safely stopped while the second
  584. // continues to run. The stopped first profiler will still have a
  585. // RecordSampleTask pending that will do nothing when executed because
  586. // the collection will have been removed by Stop().
  587. profiler_info0.profiler.Stop();
  588. profiler_info0.completed.Wait();
  589. size_t count0 = samples_recorded[0].Get();
  590. size_t count1 = samples_recorded[1].Get();
  591. // Waiting for the second sampler to collect a couple samples ensures
  592. // that the pending RecordSampleTask for the first has executed because
  593. // tasks are always ordered by their next scheduled time.
  594. while (samples_recorded[1].Get() < count1 + 2)
  595. PlatformThread::Sleep(Milliseconds(1));
  596. // Ensure that the first profiler didn't do anything since it was
  597. // stopped.
  598. EXPECT_EQ(count0, samples_recorded[0].Get());
  599. }));
  600. }
  601. // Checks that no samples are captured if profiling is stopped during the
  602. // initial delay.
  603. PROFILER_TEST_F(StackSamplingProfilerTest, StopDuringInitialDelay) {
  604. SamplingParams params;
  605. params.initial_delay = Seconds(60);
  606. std::vector<std::vector<Frame>> samples =
  607. CaptureSamples(params, Milliseconds(0), module_cache());
  608. EXPECT_TRUE(samples.empty());
  609. }
  610. // Checks that a sampling task can be stopped before completing and that the
  611. // partial set of samples collected so far is captured.
  612. PROFILER_TEST_F(StackSamplingProfilerTest, StopDuringInterSampleInterval) {
  613. // Test delegate that signals an event when a sample is recorded.
  614. class SampleRecordedEvent : public StackSamplerTestDelegate {
  615. public:
  616. SampleRecordedEvent()
  617. : sample_recorded_(WaitableEvent::ResetPolicy::MANUAL,
  618. WaitableEvent::InitialState::NOT_SIGNALED) {}
  619. void OnPreStackWalk() override { sample_recorded_.Signal(); }
  620. void WaitForSample() { sample_recorded_.Wait(); }
  621. private:
  622. WaitableEvent sample_recorded_;
  623. };
  624. WithTargetThread(BindLambdaForTesting(
  625. [this](SamplingProfilerThreadToken target_thread_token) {
  626. SamplingParams params;
  627. params.sampling_interval = AVeryLongTimeDelta();
  628. params.samples_per_profile = 2;
  629. SampleRecordedEvent samples_recorded;
  630. TestProfilerInfo profiler_info(target_thread_token, params,
  631. module_cache(), &samples_recorded);
  632. profiler_info.profiler.Start();
  633. // Wait for profiler to start accumulating samples.
  634. samples_recorded.WaitForSample();
  635. // Ensure that it can stop safely.
  636. profiler_info.profiler.Stop();
  637. profiler_info.completed.Wait();
  638. EXPECT_EQ(1u, profiler_info.profile.samples.size());
  639. }));
  640. }
  641. PROFILER_TEST_F(StackSamplingProfilerTest, GetNextSampleTime_NormalExecution) {
  642. const auto& GetNextSampleTime =
  643. StackSamplingProfiler::TestPeer::GetNextSampleTime;
  644. const TimeTicks scheduled_current_sample_time = TimeTicks::UnixEpoch();
  645. const TimeDelta sampling_interval = Milliseconds(10);
  646. // When executing the sample at exactly the scheduled time the next sample
  647. // should be one interval later.
  648. EXPECT_EQ(scheduled_current_sample_time + sampling_interval,
  649. GetNextSampleTime(scheduled_current_sample_time, sampling_interval,
  650. scheduled_current_sample_time));
  651. // When executing the sample less than half an interval after the scheduled
  652. // time the next sample also should be one interval later.
  653. EXPECT_EQ(scheduled_current_sample_time + sampling_interval,
  654. GetNextSampleTime(
  655. scheduled_current_sample_time, sampling_interval,
  656. scheduled_current_sample_time + 0.4 * sampling_interval));
  657. // When executing the sample less than half an interval before the scheduled
  658. // time the next sample also should be one interval later. This is not
  659. // expected to occur in practice since delayed tasks never run early.
  660. EXPECT_EQ(scheduled_current_sample_time + sampling_interval,
  661. GetNextSampleTime(
  662. scheduled_current_sample_time, sampling_interval,
  663. scheduled_current_sample_time - 0.4 * sampling_interval));
  664. }
  665. PROFILER_TEST_F(StackSamplingProfilerTest, GetNextSampleTime_DelayedExecution) {
  666. const auto& GetNextSampleTime =
  667. StackSamplingProfiler::TestPeer::GetNextSampleTime;
  668. const TimeTicks scheduled_current_sample_time = TimeTicks::UnixEpoch();
  669. const TimeDelta sampling_interval = Milliseconds(10);
  670. // When executing the sample between 0.5 and 1.5 intervals after the scheduled
  671. // time the next sample should be two intervals later.
  672. EXPECT_EQ(scheduled_current_sample_time + 2 * sampling_interval,
  673. GetNextSampleTime(
  674. scheduled_current_sample_time, sampling_interval,
  675. scheduled_current_sample_time + 0.6 * sampling_interval));
  676. EXPECT_EQ(scheduled_current_sample_time + 2 * sampling_interval,
  677. GetNextSampleTime(
  678. scheduled_current_sample_time, sampling_interval,
  679. scheduled_current_sample_time + 1.0 * sampling_interval));
  680. EXPECT_EQ(scheduled_current_sample_time + 2 * sampling_interval,
  681. GetNextSampleTime(
  682. scheduled_current_sample_time, sampling_interval,
  683. scheduled_current_sample_time + 1.4 * sampling_interval));
  684. // Similarly when executing the sample between 9.5 and 10.5 intervals after
  685. // the scheduled time the next sample should be 11 intervals later.
  686. EXPECT_EQ(scheduled_current_sample_time + 11 * sampling_interval,
  687. GetNextSampleTime(
  688. scheduled_current_sample_time, sampling_interval,
  689. scheduled_current_sample_time + 9.6 * sampling_interval));
  690. EXPECT_EQ(scheduled_current_sample_time + 11 * sampling_interval,
  691. GetNextSampleTime(
  692. scheduled_current_sample_time, sampling_interval,
  693. scheduled_current_sample_time + 10.0 * sampling_interval));
  694. EXPECT_EQ(scheduled_current_sample_time + 11 * sampling_interval,
  695. GetNextSampleTime(
  696. scheduled_current_sample_time, sampling_interval,
  697. scheduled_current_sample_time + 10.4 * sampling_interval));
  698. }
  699. // Checks that we can destroy the profiler while profiling.
  700. PROFILER_TEST_F(StackSamplingProfilerTest, DestroyProfilerWhileProfiling) {
  701. SamplingParams params;
  702. params.sampling_interval = Milliseconds(10);
  703. Profile profile;
  704. WithTargetThread(BindLambdaForTesting([&, this](SamplingProfilerThreadToken
  705. target_thread_token) {
  706. std::unique_ptr<StackSamplingProfiler> profiler;
  707. auto profile_builder = std::make_unique<TestProfileBuilder>(
  708. module_cache(),
  709. BindLambdaForTesting([&profile](Profile result_profile) {
  710. profile = std::move(result_profile);
  711. }));
  712. profiler = std::make_unique<StackSamplingProfiler>(
  713. target_thread_token, params, std::move(profile_builder),
  714. CreateCoreUnwindersFactoryForTesting(module_cache()));
  715. profiler->Start();
  716. profiler.reset();
  717. // Wait longer than a sample interval to catch any use-after-free actions by
  718. // the profiler thread.
  719. PlatformThread::Sleep(Milliseconds(50));
  720. }));
  721. }
  722. // Checks that multiple profilers may be run, one after another.
  723. PROFILER_TEST_F(StackSamplingProfilerTest, CanRunMultipleProfilers) {
  724. SamplingParams params;
  725. params.sampling_interval = Milliseconds(0);
  726. params.samples_per_profile = 1;
  727. std::vector<std::vector<Frame>> samples =
  728. CaptureSamples(params, AVeryLongTimeDelta(), module_cache());
  729. ASSERT_EQ(1u, samples.size());
  730. samples = CaptureSamples(params, AVeryLongTimeDelta(), module_cache());
  731. ASSERT_EQ(1u, samples.size());
  732. }
  733. // Checks that a sampler can be started while another is running.
  734. PROFILER_TEST_F(StackSamplingProfilerTest, MultipleStart) {
  735. WithTargetThread(
  736. BindLambdaForTesting([](SamplingProfilerThreadToken target_thread_token) {
  737. SamplingParams params1;
  738. params1.initial_delay = AVeryLongTimeDelta();
  739. params1.samples_per_profile = 1;
  740. ModuleCache module_cache1;
  741. TestProfilerInfo profiler_info1(target_thread_token, params1,
  742. &module_cache1);
  743. SamplingParams params2;
  744. params2.sampling_interval = Milliseconds(1);
  745. params2.samples_per_profile = 1;
  746. ModuleCache module_cache2;
  747. TestProfilerInfo profiler_info2(target_thread_token, params2,
  748. &module_cache2);
  749. profiler_info1.profiler.Start();
  750. profiler_info2.profiler.Start();
  751. profiler_info2.completed.Wait();
  752. EXPECT_EQ(1u, profiler_info2.profile.samples.size());
  753. }));
  754. }
  755. // Checks that the profile duration and the sampling interval are calculated
  756. // correctly. Also checks that RecordMetadata() is invoked each time a sample
  757. // is recorded.
  758. PROFILER_TEST_F(StackSamplingProfilerTest, ProfileGeneralInfo) {
  759. WithTargetThread(BindLambdaForTesting(
  760. [this](SamplingProfilerThreadToken target_thread_token) {
  761. SamplingParams params;
  762. params.sampling_interval = Milliseconds(1);
  763. params.samples_per_profile = 3;
  764. TestProfilerInfo profiler_info(target_thread_token, params,
  765. module_cache());
  766. profiler_info.profiler.Start();
  767. profiler_info.completed.Wait();
  768. EXPECT_EQ(3u, profiler_info.profile.samples.size());
  769. // The profile duration should be greater than the total sampling
  770. // intervals.
  771. EXPECT_GT(profiler_info.profile.profile_duration,
  772. profiler_info.profile.sampling_period * 3);
  773. EXPECT_EQ(Milliseconds(1), profiler_info.profile.sampling_period);
  774. // The number of invocations of RecordMetadata() should be equal to the
  775. // number of samples recorded.
  776. EXPECT_EQ(3, profiler_info.profile.record_metadata_count);
  777. }));
  778. }
  779. // Checks that the sampling thread can shut down.
  780. PROFILER_TEST_F(StackSamplingProfilerTest, SamplerIdleShutdown) {
  781. SamplingParams params;
  782. params.sampling_interval = Milliseconds(0);
  783. params.samples_per_profile = 1;
  784. std::vector<std::vector<Frame>> samples =
  785. CaptureSamples(params, AVeryLongTimeDelta(), module_cache());
  786. ASSERT_EQ(1u, samples.size());
  787. // Capture thread should still be running at this point.
  788. ASSERT_TRUE(StackSamplingProfiler::TestPeer::IsSamplingThreadRunning());
  789. // Initiate an "idle" shutdown and ensure it happens. Idle-shutdown was
  790. // disabled by the test fixture so the test will fail due to a timeout if
  791. // it does not exit.
  792. StackSamplingProfiler::TestPeer::PerformSamplingThreadIdleShutdown(false);
  793. // While the shutdown has been initiated, the actual exit of the thread still
  794. // happens asynchronously. Watch until the thread actually exits. This test
  795. // will time-out in the case of failure.
  796. while (StackSamplingProfiler::TestPeer::IsSamplingThreadRunning())
  797. PlatformThread::Sleep(Milliseconds(1));
  798. }
  799. // Checks that additional requests will restart a stopped profiler.
  800. PROFILER_TEST_F(StackSamplingProfilerTest,
  801. WillRestartSamplerAfterIdleShutdown) {
  802. SamplingParams params;
  803. params.sampling_interval = Milliseconds(0);
  804. params.samples_per_profile = 1;
  805. std::vector<std::vector<Frame>> samples =
  806. CaptureSamples(params, AVeryLongTimeDelta(), module_cache());
  807. ASSERT_EQ(1u, samples.size());
  808. // Capture thread should still be running at this point.
  809. ASSERT_TRUE(StackSamplingProfiler::TestPeer::IsSamplingThreadRunning());
  810. // Post a ShutdownTask on the sampling thread which, when executed, will
  811. // mark the thread as EXITING and begin shut down of the thread.
  812. StackSamplingProfiler::TestPeer::PerformSamplingThreadIdleShutdown(false);
  813. // Ensure another capture will start the sampling thread and run.
  814. samples = CaptureSamples(params, AVeryLongTimeDelta(), module_cache());
  815. ASSERT_EQ(1u, samples.size());
  816. EXPECT_TRUE(StackSamplingProfiler::TestPeer::IsSamplingThreadRunning());
  817. }
  818. // Checks that it's safe to stop a task after it's completed and the sampling
  819. // thread has shut down for being idle.
  820. PROFILER_TEST_F(StackSamplingProfilerTest, StopAfterIdleShutdown) {
  821. WithTargetThread(BindLambdaForTesting(
  822. [this](SamplingProfilerThreadToken target_thread_token) {
  823. SamplingParams params;
  824. params.sampling_interval = Milliseconds(1);
  825. params.samples_per_profile = 1;
  826. TestProfilerInfo profiler_info(target_thread_token, params,
  827. module_cache());
  828. profiler_info.profiler.Start();
  829. profiler_info.completed.Wait();
  830. // Capture thread should still be running at this point.
  831. ASSERT_TRUE(StackSamplingProfiler::TestPeer::IsSamplingThreadRunning());
  832. // Perform an idle shutdown.
  833. StackSamplingProfiler::TestPeer::PerformSamplingThreadIdleShutdown(
  834. false);
  835. // Stop should be safe though it's impossible to know at this moment if
  836. // the sampling thread has completely exited or will just "stop soon".
  837. profiler_info.profiler.Stop();
  838. }));
  839. }
  840. // Checks that profilers can run both before and after the sampling thread has
  841. // started.
  842. PROFILER_TEST_F(StackSamplingProfilerTest,
  843. ProfileBeforeAndAfterSamplingThreadRunning) {
  844. WithTargetThread(
  845. BindLambdaForTesting([](SamplingProfilerThreadToken target_thread_token) {
  846. ModuleCache module_cache1;
  847. ModuleCache module_cache2;
  848. std::vector<std::unique_ptr<TestProfilerInfo>> profiler_infos;
  849. profiler_infos.push_back(std::make_unique<TestProfilerInfo>(
  850. target_thread_token,
  851. SamplingParams{/*initial_delay=*/AVeryLongTimeDelta(),
  852. /*samples_per_profile=*/1,
  853. /*sampling_interval=*/Milliseconds(1)},
  854. &module_cache1));
  855. profiler_infos.push_back(std::make_unique<TestProfilerInfo>(
  856. target_thread_token,
  857. SamplingParams{/*initial_delay=*/Milliseconds(0),
  858. /*samples_per_profile=*/1,
  859. /*sampling_interval=*/Milliseconds(1)},
  860. &module_cache2));
  861. // First profiler is started when there has never been a sampling
  862. // thread.
  863. EXPECT_FALSE(
  864. StackSamplingProfiler::TestPeer::IsSamplingThreadRunning());
  865. profiler_infos[0]->profiler.Start();
  866. // Second profiler is started when sampling thread is already running.
  867. EXPECT_TRUE(StackSamplingProfiler::TestPeer::IsSamplingThreadRunning());
  868. profiler_infos[1]->profiler.Start();
  869. // Only the second profiler should finish before the test times out.
  870. size_t completed_profiler = WaitForSamplingComplete(profiler_infos);
  871. EXPECT_EQ(1U, completed_profiler);
  872. }));
  873. }
  874. // Checks that an idle-shutdown task will abort if a new profiler starts
  875. // between when it was posted and when it runs.
  876. PROFILER_TEST_F(StackSamplingProfilerTest, IdleShutdownAbort) {
  877. WithTargetThread(BindLambdaForTesting(
  878. [this](SamplingProfilerThreadToken target_thread_token) {
  879. SamplingParams params;
  880. params.sampling_interval = Milliseconds(1);
  881. params.samples_per_profile = 1;
  882. TestProfilerInfo profiler_info(target_thread_token, params,
  883. module_cache());
  884. profiler_info.profiler.Start();
  885. profiler_info.completed.Wait();
  886. EXPECT_EQ(1u, profiler_info.profile.samples.size());
  887. // Perform an idle shutdown but simulate that a new capture is started
  888. // before it can actually run.
  889. StackSamplingProfiler::TestPeer::PerformSamplingThreadIdleShutdown(
  890. true);
  891. // Though the shutdown task has been executed, any actual exit of the
  892. // thread is asynchronous so there is no way to detect that it *didn't*
  893. // exit except to wait a reasonable amount of time and then check. Since
  894. // the thread was just running ("perform" blocked until it was), it
  895. // should finish almost immediately and without any waiting for tasks or
  896. // events.
  897. PlatformThread::Sleep(Milliseconds(200));
  898. EXPECT_TRUE(StackSamplingProfiler::TestPeer::IsSamplingThreadRunning());
  899. // Ensure that it's still possible to run another sampler.
  900. TestProfilerInfo another_info(target_thread_token, params,
  901. module_cache());
  902. another_info.profiler.Start();
  903. another_info.completed.Wait();
  904. EXPECT_EQ(1u, another_info.profile.samples.size());
  905. }));
  906. }
  907. // Checks that multiple synchronized sampling requests execute in parallel.
  908. PROFILER_TEST_F(StackSamplingProfilerTest, ConcurrentProfiling_InSync) {
  909. WithTargetThread(
  910. BindLambdaForTesting([](SamplingProfilerThreadToken target_thread_token) {
  911. std::vector<ModuleCache> module_caches(2);
  912. // Providing an initial delay makes it more likely that both will be
  913. // scheduled before either starts to run. Once started, samples will
  914. // run ordered by their scheduled, interleaved times regardless of the
  915. // interval at which the thread wakes up. Thus, total execution time
  916. // will be 10ms (delay) + 10x1ms (sampling) + 1/2 timer minimum
  917. // interval.
  918. std::vector<std::unique_ptr<TestProfilerInfo>> profiler_infos;
  919. profiler_infos.push_back(std::make_unique<TestProfilerInfo>(
  920. target_thread_token,
  921. SamplingParams{/*initial_delay=*/Milliseconds(10),
  922. /*samples_per_profile=*/9,
  923. /*sampling_interval=*/Milliseconds(1)},
  924. &module_caches[0]));
  925. profiler_infos.push_back(std::make_unique<TestProfilerInfo>(
  926. target_thread_token,
  927. SamplingParams{/*initial_delay=*/Milliseconds(11),
  928. /*samples_per_profile=*/8,
  929. /*sampling_interval=*/Milliseconds(1)},
  930. &module_caches[1]));
  931. profiler_infos[0]->profiler.Start();
  932. profiler_infos[1]->profiler.Start();
  933. // Wait for one profiler to finish.
  934. size_t completed_profiler = WaitForSamplingComplete(profiler_infos);
  935. size_t other_profiler = 1 - completed_profiler;
  936. // Wait for the other profiler to finish.
  937. profiler_infos[other_profiler]->completed.Wait();
  938. // Ensure each got the correct number of samples.
  939. EXPECT_EQ(9u, profiler_infos[0]->profile.samples.size());
  940. EXPECT_EQ(8u, profiler_infos[1]->profile.samples.size());
  941. }));
  942. }
  943. // Checks that several mixed sampling requests execute in parallel.
  944. PROFILER_TEST_F(StackSamplingProfilerTest, ConcurrentProfiling_Mixed) {
  945. WithTargetThread(BindLambdaForTesting([](SamplingProfilerThreadToken
  946. target_thread_token) {
  947. std::vector<ModuleCache> module_caches(3);
  948. std::vector<std::unique_ptr<TestProfilerInfo>> profiler_infos;
  949. profiler_infos.push_back(std::make_unique<TestProfilerInfo>(
  950. target_thread_token,
  951. SamplingParams{/*initial_delay=*/Milliseconds(8),
  952. /*samples_per_profile=*/10,
  953. /*sampling_interval=*/Milliseconds(4)},
  954. &module_caches[0]));
  955. profiler_infos.push_back(std::make_unique<TestProfilerInfo>(
  956. target_thread_token,
  957. SamplingParams{/*initial_delay=*/Milliseconds(9),
  958. /*samples_per_profile=*/10,
  959. /*sampling_interval=*/Milliseconds(3)},
  960. &module_caches[1]));
  961. profiler_infos.push_back(std::make_unique<TestProfilerInfo>(
  962. target_thread_token,
  963. SamplingParams{/*initial_delay=*/Milliseconds(10),
  964. /*samples_per_profile=*/10,
  965. /*sampling_interval=*/Milliseconds(2)},
  966. &module_caches[2]));
  967. for (auto& i : profiler_infos)
  968. i->profiler.Start();
  969. // Wait for one profiler to finish.
  970. size_t completed_profiler = WaitForSamplingComplete(profiler_infos);
  971. EXPECT_EQ(10u, profiler_infos[completed_profiler]->profile.samples.size());
  972. // Stop and destroy all profilers, always in the same order. Don't
  973. // crash.
  974. for (auto& i : profiler_infos)
  975. i->profiler.Stop();
  976. for (auto& i : profiler_infos)
  977. i.reset();
  978. }));
  979. }
  980. // Checks that different threads can be sampled in parallel.
  981. PROFILER_TEST_F(StackSamplingProfilerTest, MultipleSampledThreads) {
  982. UnwindScenario scenario1(BindRepeating(&CallWithPlainFunction));
  983. UnwindScenario::SampleEvents events1;
  984. TargetThread target_thread1(
  985. BindLambdaForTesting([&]() { scenario1.Execute(&events1); }));
  986. target_thread1.Start();
  987. events1.ready_for_sample.Wait();
  988. UnwindScenario scenario2(BindRepeating(&CallWithPlainFunction));
  989. UnwindScenario::SampleEvents events2;
  990. TargetThread target_thread2(
  991. BindLambdaForTesting([&]() { scenario2.Execute(&events2); }));
  992. target_thread2.Start();
  993. events2.ready_for_sample.Wait();
  994. // Providing an initial delay makes it more likely that both will be
  995. // scheduled before either starts to run. Once started, samples will
  996. // run ordered by their scheduled, interleaved times regardless of the
  997. // interval at which the sampling thread wakes up.
  998. SamplingParams params1, params2;
  999. params1.initial_delay = Milliseconds(10);
  1000. params1.sampling_interval = Milliseconds(1);
  1001. params1.samples_per_profile = 9;
  1002. params2.initial_delay = Milliseconds(10);
  1003. params2.sampling_interval = Milliseconds(1);
  1004. params2.samples_per_profile = 8;
  1005. Profile profile1, profile2;
  1006. ModuleCache module_cache1, module_cache2;
  1007. WaitableEvent sampling_thread_completed1(
  1008. WaitableEvent::ResetPolicy::MANUAL,
  1009. WaitableEvent::InitialState::NOT_SIGNALED);
  1010. StackSamplingProfiler profiler1(
  1011. target_thread1.thread_token(), params1,
  1012. std::make_unique<TestProfileBuilder>(
  1013. &module_cache1,
  1014. BindLambdaForTesting(
  1015. [&profile1, &sampling_thread_completed1](Profile result_profile) {
  1016. profile1 = std::move(result_profile);
  1017. sampling_thread_completed1.Signal();
  1018. })),
  1019. CreateCoreUnwindersFactoryForTesting(&module_cache1));
  1020. WaitableEvent sampling_thread_completed2(
  1021. WaitableEvent::ResetPolicy::MANUAL,
  1022. WaitableEvent::InitialState::NOT_SIGNALED);
  1023. StackSamplingProfiler profiler2(
  1024. target_thread2.thread_token(), params2,
  1025. std::make_unique<TestProfileBuilder>(
  1026. &module_cache2,
  1027. BindLambdaForTesting(
  1028. [&profile2, &sampling_thread_completed2](Profile result_profile) {
  1029. profile2 = std::move(result_profile);
  1030. sampling_thread_completed2.Signal();
  1031. })),
  1032. CreateCoreUnwindersFactoryForTesting(&module_cache2));
  1033. // Finally the real work.
  1034. profiler1.Start();
  1035. profiler2.Start();
  1036. sampling_thread_completed1.Wait();
  1037. sampling_thread_completed2.Wait();
  1038. EXPECT_EQ(9u, profile1.samples.size());
  1039. EXPECT_EQ(8u, profile2.samples.size());
  1040. events1.sample_finished.Signal();
  1041. events2.sample_finished.Signal();
  1042. target_thread1.Join();
  1043. target_thread2.Join();
  1044. }

// A simple thread that runs a profiler on another thread.
class ProfilerThread : public SimpleThread {
 public:
  ProfilerThread(const std::string& name,
                 SamplingProfilerThreadToken thread_token,
                 const SamplingParams& params,
                 ModuleCache* module_cache)
      : SimpleThread(name, Options()),
        run_(WaitableEvent::ResetPolicy::MANUAL,
             WaitableEvent::InitialState::NOT_SIGNALED),
        completed_(WaitableEvent::ResetPolicy::MANUAL,
                   WaitableEvent::InitialState::NOT_SIGNALED),
        profiler_(thread_token,
                  params,
                  std::make_unique<TestProfileBuilder>(
                      module_cache,
                      BindLambdaForTesting([this](Profile result_profile) {
                        profile_ = std::move(result_profile);
                        completed_.Signal();
                      })),
                  CreateCoreUnwindersFactoryForTesting(module_cache)) {}

  // Blocks until Go() is called, then starts the profiler. Gating the start on
  // an explicit signal lets tests release several ProfilerThreads at
  // (approximately) the same time.
  void Run() override {
    run_.Wait();
    profiler_.Start();
  }

  // Unblocks Run(), starting the profiler.
  void Go() { run_.Signal(); }

  // Blocks until the profile has been collected.
  void Wait() { completed_.Wait(); }

  Profile& profile() { return profile_; }

 private:
  WaitableEvent run_;
  Profile profile_;
  WaitableEvent completed_;
  StackSamplingProfiler profiler_;
};

// Checks that different threads can run samplers in parallel.
PROFILER_TEST_F(StackSamplingProfilerTest, MultipleProfilerThreads) {
  WithTargetThread(
      BindLambdaForTesting([](SamplingProfilerThreadToken target_thread_token) {
        // Providing an initial delay makes it more likely that both
        // collections will be scheduled before either starts to run. Once
        // started, samples will run ordered by their scheduled, interleaved
        // times regardless of the interval at which the sampling thread wakes
        // up.
        SamplingParams params1, params2;
        params1.initial_delay = Milliseconds(10);
        params1.sampling_interval = Milliseconds(1);
        params1.samples_per_profile = 9;
        params2.initial_delay = Milliseconds(10);
        params2.sampling_interval = Milliseconds(1);
        params2.samples_per_profile = 8;

        // Start the profiler threads and give them a moment to get going.
        ModuleCache module_cache1;
        ProfilerThread profiler_thread1("profiler1", target_thread_token,
                                        params1, &module_cache1);
        ModuleCache module_cache2;
        ProfilerThread profiler_thread2("profiler2", target_thread_token,
                                        params2, &module_cache2);
        profiler_thread1.Start();
        profiler_thread2.Start();
        PlatformThread::Sleep(Milliseconds(10));

        // This will (approximately) synchronize the two threads.
        profiler_thread1.Go();
        profiler_thread2.Go();

        // Wait for them both to finish and validate collection.
        profiler_thread1.Wait();
        profiler_thread2.Wait();
        EXPECT_EQ(9u, profiler_thread1.profile().samples.size());
        EXPECT_EQ(8u, profiler_thread2.profile().samples.size());

        profiler_thread1.Join();
        profiler_thread2.Join();
      }));
}
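
// Checks that an aux unwinder added before Start() has its initial-modules
// callback run exactly once and that the frame it supplies appears in the
// collected sample.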
PROFILER_TEST_F(StackSamplingProfilerTest, AddAuxUnwinder_BeforeStart) {
  SamplingParams params;
  params.sampling_interval = Milliseconds(0);
  params.samples_per_profile = 1;

  UnwindScenario scenario(BindRepeating(&CallWithPlainFunction));

  int add_initial_modules_invocation_count = 0;
  const auto add_initial_modules_callback =
      [&add_initial_modules_invocation_count]() {
        ++add_initial_modules_invocation_count;
      };

  Profile profile;
  WithTargetThread(
      &scenario,
      BindLambdaForTesting(
          [&](SamplingProfilerThreadToken target_thread_token) {
            WaitableEvent sampling_thread_completed(
                WaitableEvent::ResetPolicy::MANUAL,
                WaitableEvent::InitialState::NOT_SIGNALED);
            StackSamplingProfiler profiler(
                target_thread_token, params,
                std::make_unique<TestProfileBuilder>(
                    module_cache(),
                    BindLambdaForTesting([&profile, &sampling_thread_completed](
                                             Profile result_profile) {
                      profile = std::move(result_profile);
                      sampling_thread_completed.Signal();
                    })),
                CreateCoreUnwindersFactoryForTesting(module_cache()));
            profiler.AddAuxUnwinder(std::make_unique<TestAuxUnwinder>(
                Frame(23, nullptr),
                BindLambdaForTesting(add_initial_modules_callback)));
            profiler.Start();
            sampling_thread_completed.Wait();
          }));

  ASSERT_EQ(1, add_initial_modules_invocation_count);

  // The sample should have one frame from the context values and one from the
  // TestAuxUnwinder.
  ASSERT_EQ(1u, profile.samples.size());
  const std::vector<Frame>& frames = profile.samples[0];
  ASSERT_EQ(2u, frames.size());
  EXPECT_EQ(23u, frames[1].instruction_pointer);
  EXPECT_EQ(nullptr, frames[1].module);
}
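
// Checks that an aux unwinder added after Start() takes effect for samples
// collected once it is installed; the second sample is used since the first
// races with the unwinder's installation.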
PROFILER_TEST_F(StackSamplingProfilerTest, AddAuxUnwinder_AfterStart) {
  SamplingParams params;
  params.sampling_interval = Milliseconds(10);
  params.samples_per_profile = 2;

  UnwindScenario scenario(BindRepeating(&CallWithPlainFunction));

  int add_initial_modules_invocation_count = 0;
  const auto add_initial_modules_callback =
      [&add_initial_modules_invocation_count]() {
        ++add_initial_modules_invocation_count;
      };

  Profile profile;
  WithTargetThread(
      &scenario,
      BindLambdaForTesting(
          [&](SamplingProfilerThreadToken target_thread_token) {
            WaitableEvent sampling_thread_completed(
                WaitableEvent::ResetPolicy::MANUAL,
                WaitableEvent::InitialState::NOT_SIGNALED);
            StackSamplingProfiler profiler(
                target_thread_token, params,
                std::make_unique<TestProfileBuilder>(
                    module_cache(),
                    BindLambdaForTesting([&profile, &sampling_thread_completed](
                                             Profile result_profile) {
                      profile = std::move(result_profile);
                      sampling_thread_completed.Signal();
                    })),
                CreateCoreUnwindersFactoryForTesting(module_cache()));
            profiler.Start();
            profiler.AddAuxUnwinder(std::make_unique<TestAuxUnwinder>(
                Frame(23, nullptr),
                BindLambdaForTesting(add_initial_modules_callback)));
            sampling_thread_completed.Wait();
          }));

  ASSERT_EQ(1, add_initial_modules_invocation_count);

  // The sample should have one frame from the context values and one from the
  // TestAuxUnwinder.
  ASSERT_EQ(2u, profile.samples.size());

  // Whether the aux unwinder is available for the first sample is racy, so rely
  // on the second sample.
  const std::vector<Frame>& frames = profile.samples[1];
  ASSERT_EQ(2u, frames.size());
  EXPECT_EQ(23u, frames[1].instruction_pointer);
  EXPECT_EQ(nullptr, frames[1].module);
}
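
// Checks that adding an aux unwinder after Stop() is accepted without error,
// even though it can no longer affect the collection.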
PROFILER_TEST_F(StackSamplingProfilerTest, AddAuxUnwinder_AfterStop) {
  SamplingParams params;
  params.sampling_interval = Milliseconds(0);
  params.samples_per_profile = 1;

  UnwindScenario scenario(BindRepeating(&CallWithPlainFunction));

  Profile profile;
  WithTargetThread(
      &scenario,
      BindLambdaForTesting(
          [&](SamplingProfilerThreadToken target_thread_token) {
            WaitableEvent sampling_thread_completed(
                WaitableEvent::ResetPolicy::MANUAL,
                WaitableEvent::InitialState::NOT_SIGNALED);
            StackSamplingProfiler profiler(
                target_thread_token, params,
                std::make_unique<TestProfileBuilder>(
                    module_cache(),
                    BindLambdaForTesting([&profile, &sampling_thread_completed](
                                             Profile result_profile) {
                      profile = std::move(result_profile);
                      sampling_thread_completed.Signal();
                    })),
                CreateCoreUnwindersFactoryForTesting(module_cache()));
            profiler.Start();
            profiler.Stop();
            profiler.AddAuxUnwinder(std::make_unique<TestAuxUnwinder>(
                Frame(23, nullptr), base::RepeatingClosure()));
            sampling_thread_completed.Wait();
          }));

  // The AuxUnwinder should be accepted without error. It will have no effect
  // since the collection has stopped.
}

// Checks that requests to apply metadata to past samples are passed on to the
// profile builder.
PROFILER_TEST_F(StackSamplingProfilerTest,
                ApplyMetadataToPastSamples_PassedToProfileBuilder) {
  // Runs the passed closure on the profiler thread after a sample is taken.
  class PostSampleInvoker : public StackSamplerTestDelegate {
   public:
    explicit PostSampleInvoker(RepeatingClosure post_sample_closure)
        : post_sample_closure_(std::move(post_sample_closure)) {}

    void OnPreStackWalk() override { post_sample_closure_.Run(); }

   private:
    RepeatingClosure post_sample_closure_;
  };

  // Thread-safe representation of the times that samples were taken.
  class SynchronizedSampleTimes {
   public:
    void AddNow() {
      AutoLock lock(lock_);
      times_.push_back(TimeTicks::Now());
    }

    std::vector<TimeTicks> GetTimes() {
      AutoLock lock(lock_);
      return times_;
    }

   private:
    Lock lock_;
    std::vector<TimeTicks> times_;
  };

  SamplingParams params;
  params.sampling_interval = Milliseconds(10);
  // 10,000 samples ensures the profiler continues running until manually
  // stopped, after applying metadata.
  params.samples_per_profile = 10000;

  UnwindScenario scenario(BindRepeating(&CallWithPlainFunction));

  std::vector<TimeTicks> sample_times;
  Profile profile;
  WithTargetThread(
      &scenario,
      BindLambdaForTesting(
          [&](SamplingProfilerThreadToken target_thread_token) {
            SynchronizedSampleTimes synchronized_sample_times;
            WaitableEvent sample_seen(WaitableEvent::ResetPolicy::AUTOMATIC);
            PostSampleInvoker post_sample_invoker(BindLambdaForTesting([&]() {
              synchronized_sample_times.AddNow();
              sample_seen.Signal();
            }));

            StackSamplingProfiler profiler(
                target_thread_token, params,
                std::make_unique<TestProfileBuilder>(
                    module_cache(),
                    BindLambdaForTesting([&profile](Profile result_profile) {
                      profile = std::move(result_profile);
                    })),
                CreateCoreUnwindersFactoryForTesting(module_cache()),
                RepeatingClosure(), &post_sample_invoker);
            profiler.Start();

            // Wait for 5 samples to be collected.
            for (int i = 0; i < 5; ++i)
              sample_seen.Wait();

            sample_times = synchronized_sample_times.GetTimes();

            // Record metadata on past samples, with and without a key value.
            // The range [times[1], times[3]] is guaranteed to include only
            // samples 2 and 3, and likewise [times[2], times[4]] is guaranteed
            // to include only samples 3 and 4.
            ApplyMetadataToPastSamples(sample_times[1], sample_times[3],
                                       "TestMetadata1", 10,
                                       base::SampleMetadataScope::kProcess);
            ApplyMetadataToPastSamples(sample_times[2], sample_times[4],
                                       "TestMetadata2", 100, 11,
                                       base::SampleMetadataScope::kProcess);
            profiler.Stop();
          }));

  ASSERT_EQ(2u, profile.retrospective_metadata.size());

  const RetrospectiveMetadata& metadata1 = profile.retrospective_metadata[0];
  EXPECT_EQ(sample_times[1], metadata1.period_start);
  EXPECT_EQ(sample_times[3], metadata1.period_end);
  EXPECT_EQ(HashMetricName("TestMetadata1"), metadata1.item.name_hash);
  EXPECT_FALSE(metadata1.item.key.has_value());
  EXPECT_EQ(10, metadata1.item.value);

  const RetrospectiveMetadata& metadata2 = profile.retrospective_metadata[1];
  EXPECT_EQ(sample_times[2], metadata2.period_start);
  EXPECT_EQ(sample_times[4], metadata2.period_end);
  EXPECT_EQ(HashMetricName("TestMetadata2"), metadata2.item.name_hash);
  ASSERT_TRUE(metadata2.item.key.has_value());
  EXPECT_EQ(100, *metadata2.item.key);
  EXPECT_EQ(11, metadata2.item.value);
}
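
// Checks that thread-scoped metadata applied while multiple collections are in
// progress is passed only to the profile builder for the thread that applied
// it.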
PROFILER_TEST_F(
    StackSamplingProfilerTest,
    ApplyMetadataToPastSamples_PassedToProfileBuilder_MultipleCollections) {
  SamplingParams params;
  params.sampling_interval = Milliseconds(10);
  // 10,000 samples ensures the profiler continues running until manually
  // stopped, after applying metadata.
  params.samples_per_profile = 10000;

  ModuleCache module_cache1, module_cache2;

  WaitableEvent profiler1_started;
  WaitableEvent profiler2_started;
  WaitableEvent profiler1_metadata_applied;
  WaitableEvent profiler2_metadata_applied;

  Profile profile1;
  WaitableEvent sampling_completed1;
  TargetThread target_thread1(BindLambdaForTesting([&]() {
    StackSamplingProfiler profiler1(
        target_thread1.thread_token(), params,
        std::make_unique<TestProfileBuilder>(
            &module_cache1, BindLambdaForTesting([&](Profile result_profile) {
              profile1 = std::move(result_profile);
              sampling_completed1.Signal();
            })),
        CreateCoreUnwindersFactoryForTesting(&module_cache1),
        RepeatingClosure());
    profiler1.Start();
    profiler1_started.Signal();
    profiler2_started.Wait();

    // Record metadata on past samples only for this thread. The time range
    // shouldn't affect the outcome; it should always be passed to the
    // ProfileBuilder.
    ApplyMetadataToPastSamples(TimeTicks(), TimeTicks::Now(), "TestMetadata1",
                               10, 10, SampleMetadataScope::kThread);
    profiler1_metadata_applied.Signal();
    profiler2_metadata_applied.Wait();
    profiler1.Stop();
  }));
  target_thread1.Start();

  Profile profile2;
  WaitableEvent sampling_completed2;
  TargetThread target_thread2(BindLambdaForTesting([&]() {
    StackSamplingProfiler profiler2(
        target_thread2.thread_token(), params,
        std::make_unique<TestProfileBuilder>(
            &module_cache2, BindLambdaForTesting([&](Profile result_profile) {
              profile2 = std::move(result_profile);
              sampling_completed2.Signal();
            })),
        CreateCoreUnwindersFactoryForTesting(&module_cache2),
        RepeatingClosure());
    profiler2.Start();
    profiler2_started.Signal();
    profiler1_started.Wait();

    // Record metadata on past samples only for this thread.
    ApplyMetadataToPastSamples(TimeTicks(), TimeTicks::Now(), "TestMetadata2",
                               20, 20, SampleMetadataScope::kThread);
    profiler2_metadata_applied.Signal();
    profiler1_metadata_applied.Wait();
    profiler2.Stop();
  }));
  target_thread2.Start();

  target_thread1.Join();
  target_thread2.Join();

  // Wait for the profiles to be captured before checking expectations.
  sampling_completed1.Wait();
  sampling_completed2.Wait();

  ASSERT_EQ(1u, profile1.retrospective_metadata.size());
  ASSERT_EQ(1u, profile2.retrospective_metadata.size());

  {
    const RetrospectiveMetadata& metadata1 = profile1.retrospective_metadata[0];
    EXPECT_EQ(HashMetricName("TestMetadata1"), metadata1.item.name_hash);
    ASSERT_TRUE(metadata1.item.key.has_value());
    EXPECT_EQ(10, *metadata1.item.key);
    EXPECT_EQ(10, metadata1.item.value);
  }
  {
    const RetrospectiveMetadata& metadata2 = profile2.retrospective_metadata[0];
    EXPECT_EQ(HashMetricName("TestMetadata2"), metadata2.item.name_hash);
    ASSERT_TRUE(metadata2.item.key.has_value());
    EXPECT_EQ(20, *metadata2.item.key);
    EXPECT_EQ(20, metadata2.item.value);
  }
}

}  // namespace base