metrics_service_unittest.cc

// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "components/metrics/metrics_service.h"

#include <stdint.h>

#include <algorithm>
#include <memory>
#include <string>

#include "base/bind.h"
#include "base/containers/contains.h"
#include "base/files/file_path.h"
#include "base/files/file_util.h"
#include "base/files/scoped_temp_dir.h"
#include "base/memory/raw_ptr.h"
#include "base/metrics/field_trial.h"
#include "base/metrics/histogram_functions.h"
#include "base/metrics/metrics_hashes.h"
#include "base/metrics/statistics_recorder.h"
#include "base/metrics/user_metrics.h"
#include "base/test/bind.h"
#include "base/test/metrics/histogram_tester.h"
#include "base/test/scoped_feature_list.h"
#include "base/test/test_simple_task_runner.h"
#include "base/threading/platform_thread.h"
#include "base/threading/thread_task_runner_handle.h"
#include "build/build_config.h"
#include "build/chromeos_buildflags.h"
#include "components/metrics/clean_exit_beacon.h"
#include "components/metrics/client_info.h"
#include "components/metrics/environment_recorder.h"
#include "components/metrics/log_decoder.h"
#include "components/metrics/metrics_log.h"
#include "components/metrics/metrics_pref_names.h"
#include "components/metrics/metrics_state_manager.h"
#include "components/metrics/metrics_upload_scheduler.h"
#include "components/metrics/stability_metrics_helper.h"
#include "components/metrics/test/test_enabled_state_provider.h"
#include "components/metrics/test/test_metrics_provider.h"
#include "components/metrics/test/test_metrics_service_client.h"
#include "components/metrics/unsent_log_store_metrics_impl.h"
#include "components/prefs/testing_pref_service.h"
#include "components/variations/active_field_trials.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "third_party/abseil-cpp/absl/types/optional.h"
#include "third_party/metrics_proto/chrome_user_metrics_extension.pb.h"
#include "third_party/metrics_proto/system_profile.pb.h"
#include "third_party/zlib/google/compression_utils.h"

namespace metrics {
namespace {

const char kTestPrefName[] = "TestPref";

class TestUnsentLogStore : public UnsentLogStore {
 public:
  explicit TestUnsentLogStore(PrefService* service)
      : UnsentLogStore(std::make_unique<UnsentLogStoreMetricsImpl>(),
                       service,
                       kTestPrefName,
                       nullptr,
                       /* min_log_count= */ 3,
                       /* min_log_bytes= */ 1,
                       /* max_log_size= */ 0,
                       std::string()) {}
  ~TestUnsentLogStore() override = default;

  TestUnsentLogStore(const TestUnsentLogStore&) = delete;
  TestUnsentLogStore& operator=(const TestUnsentLogStore&) = delete;

  static void RegisterPrefs(PrefRegistrySimple* registry) {
    registry->RegisterListPref(kTestPrefName);
  }
};
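
// Busy-waits (yielding the current thread) until the wall clock has advanced
// past |when|. Used below to make sure timestamp-based prefs actually change.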
void YieldUntil(base::Time when) {
  while (base::Time::Now() <= when)
    base::PlatformThread::YieldCurrentThread();
}

// Returns true if |id| is present in |proto|'s collection of FieldTrials.
bool IsFieldTrialPresent(const SystemProfileProto& proto,
                         const std::string& trial_name,
                         const std::string& group_name) {
  const variations::ActiveGroupId id =
      variations::MakeActiveGroupId(trial_name, group_name);
  for (const auto& trial : proto.field_trial()) {
    if (trial.name_id() == id.name && trial.group_id() == id.group)
      return true;
  }
  return false;
}

class TestMetricsService : public MetricsService {
 public:
  TestMetricsService(MetricsStateManager* state_manager,
                     MetricsServiceClient* client,
                     PrefService* local_state)
      : MetricsService(state_manager, client, local_state) {}

  TestMetricsService(const TestMetricsService&) = delete;
  TestMetricsService& operator=(const TestMetricsService&) = delete;

  ~TestMetricsService() override = default;

  using MetricsService::INIT_TASK_SCHEDULED;
  using MetricsService::RecordCurrentEnvironmentHelper;
  using MetricsService::SENDING_LOGS;
  using MetricsService::state;

  // MetricsService:
  void SetPersistentSystemProfile(const std::string& serialized_proto,
                                  bool complete) override {
    persistent_system_profile_provided_ = true;
    persistent_system_profile_complete_ = complete;
  }

  bool persistent_system_profile_provided() const {
    return persistent_system_profile_provided_;
  }
  bool persistent_system_profile_complete() const {
    return persistent_system_profile_complete_;
  }

 private:
  bool persistent_system_profile_provided_ = false;
  bool persistent_system_profile_complete_ = false;
};

class TestMetricsLog : public MetricsLog {
 public:
  TestMetricsLog(const std::string& client_id,
                 int session_id,
                 MetricsServiceClient* client)
      : MetricsLog(client_id, session_id, MetricsLog::ONGOING_LOG, client) {}

  TestMetricsLog(const TestMetricsLog&) = delete;
  TestMetricsLog& operator=(const TestMetricsLog&) = delete;

  ~TestMetricsLog() override {}
};

const char kOnDidCreateMetricsLogHistogramName[] = "Test.OnDidCreateMetricsLog";

class TestMetricsProviderForOnDidCreateMetricsLog : public TestMetricsProvider {
 public:
  TestMetricsProviderForOnDidCreateMetricsLog() = default;
  ~TestMetricsProviderForOnDidCreateMetricsLog() override = default;

  void OnDidCreateMetricsLog() override {
    base::UmaHistogramBoolean(kOnDidCreateMetricsLogHistogramName, true);
  }
};

class MetricsServiceTest : public testing::Test {
 public:
  MetricsServiceTest()
      : task_runner_(new base::TestSimpleTaskRunner),
        task_runner_handle_(task_runner_),
        enabled_state_provider_(new TestEnabledStateProvider(false, false)) {
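    // User actions recorded via base::RecordAction() require a record-action
    // task runner to be registered; point it at the test task runner so that
    // actions recorded by the tests below are observed.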
    base::SetRecordActionTaskRunner(task_runner_);
    MetricsService::RegisterPrefs(testing_local_state_.registry());
  }

  MetricsServiceTest(const MetricsServiceTest&) = delete;
  MetricsServiceTest& operator=(const MetricsServiceTest&) = delete;

  ~MetricsServiceTest() override {}

  void SetUp() override { ASSERT_TRUE(temp_dir_.CreateUniqueTempDir()); }

  MetricsStateManager* GetMetricsStateManager(
      const base::FilePath& user_data_dir = base::FilePath(),
      StartupVisibility startup_visibility = StartupVisibility::kUnknown) {
    // Lazy-initialize the metrics_state_manager so that it correctly reads the
    // stability state from prefs after tests have a chance to initialize it.
    if (!metrics_state_manager_) {
      metrics_state_manager_ = MetricsStateManager::Create(
          GetLocalState(), enabled_state_provider_.get(), std::wstring(),
          user_data_dir, startup_visibility);
      metrics_state_manager_->InstantiateFieldTrialList();
    }
    return metrics_state_manager_.get();
  }

  std::unique_ptr<TestUnsentLogStore> InitializeTestLogStoreAndGet() {
    TestUnsentLogStore::RegisterPrefs(testing_local_state_.registry());
    return std::make_unique<TestUnsentLogStore>(GetLocalState());
  }

  PrefService* GetLocalState() { return &testing_local_state_; }

  // Sets metrics reporting as enabled for testing.
  void EnableMetricsReporting() { SetMetricsReporting(true); }

  // Sets metrics reporting for testing.
  void SetMetricsReporting(bool enabled) {
    enabled_state_provider_->set_consent(enabled);
    enabled_state_provider_->set_enabled(enabled);
  }

  // Finds a histogram with the specified |name_hash| in |histograms|.
  const base::HistogramBase* FindHistogram(
      const base::StatisticsRecorder::Histograms& histograms,
      uint64_t name_hash) {
    for (const base::HistogramBase* histogram : histograms) {
      if (name_hash == base::HashMetricName(histogram->histogram_name()))
        return histogram;
    }
    return nullptr;
  }

  // Checks whether |uma_log| contains any histograms that are not flagged
  // with kUmaStabilityHistogramFlag. Stability logs should only contain such
  // histograms.
  void CheckForNonStabilityHistograms(
      const ChromeUserMetricsExtension& uma_log) {
    const int kStabilityFlags = base::HistogramBase::kUmaStabilityHistogramFlag;
    const base::StatisticsRecorder::Histograms histograms =
        base::StatisticsRecorder::GetHistograms();
    for (int i = 0; i < uma_log.histogram_event_size(); ++i) {
      const uint64_t hash = uma_log.histogram_event(i).name_hash();
      const base::HistogramBase* histogram = FindHistogram(histograms, hash);
      EXPECT_TRUE(histogram) << hash;
      EXPECT_EQ(kStabilityFlags, histogram->flags() & kStabilityFlags) << hash;
    }
  }

  // Returns the number of samples logged to the specified histogram or 0 if
  // the histogram was not found.
  int GetHistogramSampleCount(const ChromeUserMetricsExtension& uma_log,
                              base::StringPiece histogram_name) {
    const auto histogram_name_hash = base::HashMetricName(histogram_name);
    int samples = 0;
    for (int i = 0; i < uma_log.histogram_event_size(); ++i) {
      const auto& histogram = uma_log.histogram_event(i);
      if (histogram.name_hash() == histogram_name_hash) {
        for (int j = 0; j < histogram.bucket_size(); ++j) {
          const auto& bucket = histogram.bucket(j);
          // Per proto comments, count field not being set means 1 sample.
          samples += (!bucket.has_count() ? 1 : bucket.count());
        }
      }
    }
    return samples;
  }

  // Returns the sample count of the |kOnDidCreateMetricsLogHistogramName|
  // histogram in the currently staged log in |test_log_store|.
  int GetSampleCountOfOnDidCreateLogHistogram(
      MetricsLogStore* test_log_store) {
    ChromeUserMetricsExtension log;
    EXPECT_TRUE(DecodeLogDataToProto(test_log_store->staged_log(), &log));
    return GetHistogramSampleCount(log, kOnDidCreateMetricsLogHistogramName);
  }

  int GetNumberOfUserActions(MetricsLogStore* test_log_store) {
    ChromeUserMetricsExtension log;
    EXPECT_TRUE(DecodeLogDataToProto(test_log_store->staged_log(), &log));
    return log.user_action_event_size();
  }

  const base::FilePath user_data_dir_path() { return temp_dir_.GetPath(); }

 protected:
  scoped_refptr<base::TestSimpleTaskRunner> task_runner_;
  base::ThreadTaskRunnerHandle task_runner_handle_;
  base::test::ScopedFeatureList feature_list_;

 private:
  std::unique_ptr<TestEnabledStateProvider> enabled_state_provider_;
  TestingPrefServiceSimple testing_local_state_;
  std::unique_ptr<MetricsStateManager> metrics_state_manager_;
  base::ScopedTempDir temp_dir_;
};

class MetricsServiceTestWithConsolidateInitialLogLogicFeature
    : public MetricsServiceTest,
      public ::testing::WithParamInterface<bool> {
 public:
  MetricsServiceTestWithConsolidateInitialLogLogicFeature() = default;
  ~MetricsServiceTestWithConsolidateInitialLogLogicFeature() override = default;

  bool ShouldConsolidateInitialLogLogic() { return GetParam(); }

  void SetUp() override {
    MetricsServiceTest::SetUp();
    if (ShouldConsolidateInitialLogLogic()) {
      feature_list_.InitWithFeatures(
          {kConsolidateMetricsServiceInitialLogLogic}, {});
    } else {
      feature_list_.InitWithFeatures(
          {}, {kConsolidateMetricsServiceInitialLogLogic});
    }
  }

 private:
  base::test::ScopedFeatureList feature_list_;
};

struct StartupVisibilityTestParams {
  const std::string test_name;
  metrics::StartupVisibility startup_visibility;
  bool consolidate_initial_log_logic;
  bool expected_beacon_value;
};

class MetricsServiceTestWithStartupVisibility
    : public MetricsServiceTest,
      public ::testing::WithParamInterface<StartupVisibilityTestParams> {
 public:
  MetricsServiceTestWithStartupVisibility() = default;
  ~MetricsServiceTestWithStartupVisibility() override = default;

  bool ShouldConsolidateInitialLogLogic() {
    return GetParam().consolidate_initial_log_logic;
  }

  void SetUp() override {
    MetricsServiceTest::SetUp();
    if (ShouldConsolidateInitialLogLogic()) {
      feature_list_.InitWithFeatures(
          {kConsolidateMetricsServiceInitialLogLogic}, {});
    } else {
      feature_list_.InitWithFeatures(
          {}, {kConsolidateMetricsServiceInitialLogLogic});
    }
  }

 private:
  base::test::ScopedFeatureList feature_list_;
};
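
// Activates |profile_metrics_trial_| while providing system profile metrics
// and |session_data_trial_| while providing current session data (querying a
// trial's group marks it as active). Used to verify that trials activated
// from within a provider are still reported in the uploaded log.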
class ExperimentTestMetricsProvider : public TestMetricsProvider {
 public:
  explicit ExperimentTestMetricsProvider(
      base::FieldTrial* profile_metrics_trial,
      base::FieldTrial* session_data_trial)
      : profile_metrics_trial_(profile_metrics_trial),
        session_data_trial_(session_data_trial) {}
  ~ExperimentTestMetricsProvider() override = default;

  void ProvideSystemProfileMetrics(
      SystemProfileProto* system_profile_proto) override {
    TestMetricsProvider::ProvideSystemProfileMetrics(system_profile_proto);
    profile_metrics_trial_->group();
  }

  void ProvideCurrentSessionData(
      ChromeUserMetricsExtension* uma_proto) override {
    TestMetricsProvider::ProvideCurrentSessionData(uma_proto);
    session_data_trial_->group();
  }

 private:
  raw_ptr<base::FieldTrial> profile_metrics_trial_;
  raw_ptr<base::FieldTrial> session_data_trial_;
};

bool HistogramExists(base::StringPiece name) {
  return base::StatisticsRecorder::FindHistogram(name) != nullptr;
}
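
// Returns the number of samples recorded to the |name| histogram since the
// last snapshot. Note that SnapshotDelta() marks the returned samples as
// reported, so each call consumes the pending delta.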
base::HistogramBase::Count GetHistogramDeltaTotalCount(base::StringPiece name) {
  return base::StatisticsRecorder::FindHistogram(name)
      ->SnapshotDelta()
      ->TotalCount();
}

}  // namespace

INSTANTIATE_TEST_SUITE_P(
    All,
    MetricsServiceTestWithConsolidateInitialLogLogicFeature,
    testing::Bool());

TEST_P(MetricsServiceTestWithConsolidateInitialLogLogicFeature,
       InitialStabilityLogAfterCleanShutDown) {
  base::HistogramTester histogram_tester;
  EnableMetricsReporting();

  // Write a beacon file indicating that Chrome exited cleanly. Note that the
  // crash streak value is arbitrary.
  const base::FilePath beacon_file_path =
      user_data_dir_path().Append(kCleanExitBeaconFilename);
  ASSERT_LT(0,
            base::WriteFile(beacon_file_path,
                            CleanExitBeacon::CreateBeaconFileContentsForTesting(
                                /*exited_cleanly=*/true, /*crash_streak=*/1)
                                .data()));

  TestMetricsServiceClient client;
  TestMetricsService service(GetMetricsStateManager(user_data_dir_path()),
                             &client, GetLocalState());

  TestMetricsProvider* test_provider = new TestMetricsProvider();
  service.RegisterMetricsProvider(
      std::unique_ptr<MetricsProvider>(test_provider));

  service.InitializeMetricsRecordingState();

  // No initial stability log should be generated.
  EXPECT_FALSE(service.has_unsent_logs());

  // Ensure that HasPreviousSessionData() is always called on providers,
  // for consistency, even if other conditions already indicate their presence.
  EXPECT_TRUE(test_provider->has_initial_stability_metrics_called());

  // The test provider should not have been called upon to provide initial
  // stability nor regular stability metrics.
  EXPECT_FALSE(test_provider->provide_initial_stability_metrics_called());
  EXPECT_FALSE(test_provider->provide_stability_metrics_called());

  // As there wasn't an unclean shutdown, no browser crash samples should have
  // been emitted.
  histogram_tester.ExpectBucketCount("Stability.Counts2",
                                     StabilityEventType::kBrowserCrash, 0);
}

TEST_P(MetricsServiceTestWithConsolidateInitialLogLogicFeature,
       InitialStabilityLogAtProviderRequest) {
  base::HistogramTester histogram_tester;
  EnableMetricsReporting();

  // Save an existing system profile to prefs, to correspond to what would be
  // saved from a previous session.
  TestMetricsServiceClient client;
  TestMetricsLog log("client", 1, &client);
  DelegatingProvider delegating_provider;
  TestMetricsService::RecordCurrentEnvironmentHelper(&log, GetLocalState(),
                                                     &delegating_provider);

  // Record stability build time and version from previous session, so that
  // stability metrics (including exited cleanly flag) won't be cleared.
  EnvironmentRecorder(GetLocalState())
      .SetBuildtimeAndVersion(MetricsLog::GetBuildTime(),
                              client.GetVersionString());

  // Write a beacon file indicating that Chrome exited cleanly. Note that the
  // crash streak value is arbitrary.
  const base::FilePath beacon_file_path =
      user_data_dir_path().Append(kCleanExitBeaconFilename);
  ASSERT_LT(0,
            base::WriteFile(beacon_file_path,
                            CleanExitBeacon::CreateBeaconFileContentsForTesting(
                                /*exited_cleanly=*/true, /*crash_streak=*/1)
                                .data()));

  TestMetricsService service(GetMetricsStateManager(user_data_dir_path()),
                             &client, GetLocalState());
  // Add a metrics provider that requests a stability log.
  TestMetricsProvider* test_provider = new TestMetricsProvider();
  test_provider->set_has_initial_stability_metrics(true);
  service.RegisterMetricsProvider(
      std::unique_ptr<MetricsProvider>(test_provider));

  service.InitializeMetricsRecordingState();

  // The initial stability log should be generated and persisted in unsent logs.
  MetricsLogStore* test_log_store = service.LogStoreForTest();
  EXPECT_TRUE(test_log_store->has_unsent_logs());
  EXPECT_FALSE(test_log_store->has_staged_log());

  // Ensure that HasPreviousSessionData() is always called on providers,
  // for consistency, even if other conditions already indicate their presence.
  EXPECT_TRUE(test_provider->has_initial_stability_metrics_called());

  // The test provider should have been called upon to provide initial
  // stability and regular stability metrics.
  EXPECT_TRUE(test_provider->provide_initial_stability_metrics_called());
  EXPECT_TRUE(test_provider->provide_stability_metrics_called());

  // Stage the log and retrieve it.
  test_log_store->StageNextLog();
  EXPECT_TRUE(test_log_store->has_staged_log());

  ChromeUserMetricsExtension uma_log;
  EXPECT_TRUE(DecodeLogDataToProto(test_log_store->staged_log(), &uma_log));

  EXPECT_TRUE(uma_log.has_client_id());
  EXPECT_TRUE(uma_log.has_session_id());
  EXPECT_TRUE(uma_log.has_system_profile());
  EXPECT_EQ(0, uma_log.user_action_event_size());
  EXPECT_EQ(0, uma_log.omnibox_event_size());
  EXPECT_EQ(0, uma_log.perf_data_size());
  CheckForNonStabilityHistograms(uma_log);

  // As there wasn't an unclean shutdown, no browser crash samples should have
  // been emitted.
  histogram_tester.ExpectBucketCount("Stability.Counts2",
                                     StabilityEventType::kBrowserCrash, 0);
}

INSTANTIATE_TEST_SUITE_P(
    All,
    MetricsServiceTestWithStartupVisibility,
    ::testing::Values(
        StartupVisibilityTestParams{
            .test_name = "UnknownVisibility",
            .startup_visibility = StartupVisibility::kUnknown,
            .consolidate_initial_log_logic = false,
            .expected_beacon_value = true},
        StartupVisibilityTestParams{
            .test_name = "BackgroundVisibility",
            .startup_visibility = StartupVisibility::kBackground,
            .consolidate_initial_log_logic = false,
            .expected_beacon_value = true},
        StartupVisibilityTestParams{
            .test_name = "ForegroundVisibility",
            .startup_visibility = StartupVisibility::kForeground,
            .consolidate_initial_log_logic = false,
            .expected_beacon_value = false},
        StartupVisibilityTestParams{
            .test_name = "UnknownVisibilityConsolidateInitialLogLogic",
            .startup_visibility = StartupVisibility::kUnknown,
            .consolidate_initial_log_logic = true,
            .expected_beacon_value = true},
        StartupVisibilityTestParams{
            .test_name = "BackgroundVisibilityConsolidateInitialLogLogic",
            .startup_visibility = StartupVisibility::kBackground,
            .consolidate_initial_log_logic = true,
            .expected_beacon_value = true},
        StartupVisibilityTestParams{
            .test_name = "ForegroundVisibilityConsolidateInitialLogLogic",
            .startup_visibility = StartupVisibility::kForeground,
            .consolidate_initial_log_logic = true,
            .expected_beacon_value = false}),
    [](const ::testing::TestParamInfo<StartupVisibilityTestParams>& params) {
      return params.param.test_name;
    });

TEST_P(MetricsServiceTestWithStartupVisibility, InitialStabilityLogAfterCrash) {
  base::HistogramTester histogram_tester;
  PrefService* local_state = GetLocalState();
  EnableMetricsReporting();

  // Write a beacon file indicating that Chrome exited uncleanly. Note that the
  // crash streak value is arbitrary.
  const base::FilePath beacon_file_path =
      user_data_dir_path().Append(kCleanExitBeaconFilename);
  ASSERT_LT(0,
            base::WriteFile(beacon_file_path,
                            CleanExitBeacon::CreateBeaconFileContentsForTesting(
                                /*exited_cleanly=*/false, /*crash_streak=*/1)
                                .data()));

  // Set up prefs to simulate restarting after a crash.

  // Save an existing system profile to prefs, to correspond to what would be
  // saved from a previous session.
  TestMetricsServiceClient client;
  const std::string kCrashedVersion = "4.0.321.0-64-devel";
  client.set_version_string(kCrashedVersion);
  TestMetricsLog log("client", 1, &client);
  DelegatingProvider delegating_provider;
  TestMetricsService::RecordCurrentEnvironmentHelper(&log, local_state,
                                                     &delegating_provider);

  // Record stability build time and version from previous session, so that
  // stability metrics (including exited cleanly flag) won't be cleared.
  EnvironmentRecorder(local_state)
      .SetBuildtimeAndVersion(MetricsLog::GetBuildTime(),
                              client.GetVersionString());

  const std::string kCurrentVersion = "5.0.322.0-64-devel";
  client.set_version_string(kCurrentVersion);

  StartupVisibilityTestParams params = GetParam();
  TestMetricsService service(
      GetMetricsStateManager(user_data_dir_path(), params.startup_visibility),
      &client, local_state);
  // Add a provider.
  TestMetricsProvider* test_provider = new TestMetricsProvider();
  service.RegisterMetricsProvider(
      std::unique_ptr<MetricsProvider>(test_provider));
  service.InitializeMetricsRecordingState();

  // Verify that Chrome is (or is not) watching for crashes by checking the
  // beacon value.
  std::string beacon_file_contents;
  ASSERT_TRUE(base::ReadFileToString(beacon_file_path, &beacon_file_contents));
  std::string partial_expected_contents;
#if BUILDFLAG(IS_ANDROID)
  // Whether Chrome is watching for crashes after
  // InitializeMetricsRecordingState() depends on the type of Android Chrome
  // session. See the comments in MetricsService::InitializeMetricsState() for
  // more details.
  const std::string beacon_value =
      params.expected_beacon_value ? "true" : "false";
  partial_expected_contents = "exited_cleanly\":" + beacon_value;
#else
  partial_expected_contents = "exited_cleanly\":false";
#endif  // BUILDFLAG(IS_ANDROID)
  EXPECT_TRUE(base::Contains(beacon_file_contents, partial_expected_contents));

  // The initial stability log should be generated and persisted in unsent logs.
  MetricsLogStore* test_log_store = service.LogStoreForTest();
  EXPECT_TRUE(test_log_store->has_unsent_logs());
  EXPECT_FALSE(test_log_store->has_staged_log());

  // Ensure that HasPreviousSessionData() is always called on providers,
  // for consistency, even if other conditions already indicate their presence.
  EXPECT_TRUE(test_provider->has_initial_stability_metrics_called());

  // The test provider should have been called upon to provide initial
  // stability and regular stability metrics.
  EXPECT_TRUE(test_provider->provide_initial_stability_metrics_called());
  EXPECT_TRUE(test_provider->provide_stability_metrics_called());

  // Stage the log and retrieve it.
  test_log_store->StageNextLog();
  EXPECT_TRUE(test_log_store->has_staged_log());

  ChromeUserMetricsExtension uma_log;
  EXPECT_TRUE(DecodeLogDataToProto(test_log_store->staged_log(), &uma_log));

  EXPECT_TRUE(uma_log.has_client_id());
  EXPECT_TRUE(uma_log.has_session_id());
  EXPECT_TRUE(uma_log.has_system_profile());
  EXPECT_EQ(0, uma_log.user_action_event_size());
  EXPECT_EQ(0, uma_log.omnibox_event_size());
  EXPECT_EQ(0, uma_log.perf_data_size());
  CheckForNonStabilityHistograms(uma_log);

  EXPECT_EQ(kCrashedVersion, uma_log.system_profile().app_version());
  EXPECT_EQ(kCurrentVersion,
            uma_log.system_profile().log_written_by_app_version());

  histogram_tester.ExpectBucketCount("Stability.Counts2",
                                     StabilityEventType::kBrowserCrash, 1);
}

TEST_P(MetricsServiceTestWithConsolidateInitialLogLogicFeature,
       InitialLogsHaveOnDidCreateMetricsLogHistograms) {
  EnableMetricsReporting();
  TestMetricsServiceClient client;
  TestMetricsService service(GetMetricsStateManager(), &client,
                             GetLocalState());

  // Create a provider that will log to |kOnDidCreateMetricsLogHistogramName|
  // in OnDidCreateMetricsLog().
  auto* test_provider = new TestMetricsProviderForOnDidCreateMetricsLog();
  service.RegisterMetricsProvider(
      std::unique_ptr<MetricsProvider>(test_provider));

  service.InitializeMetricsRecordingState();
  // Start() will create the first ongoing log.
  service.Start();
  ASSERT_EQ(TestMetricsService::INIT_TASK_SCHEDULED, service.state());

  // Run pending tasks to finish init task and complete the first ongoing log.
  task_runner_->RunPendingTasks();
  ASSERT_EQ(TestMetricsService::SENDING_LOGS, service.state());

  MetricsLogStore* test_log_store = service.LogStoreForTest();

  // Stage the next log, which should be the first ongoing log.
  // Check that it has one sample in |kOnDidCreateMetricsLogHistogramName|.
  test_log_store->StageNextLog();
  EXPECT_EQ(1, GetSampleCountOfOnDidCreateLogHistogram(test_log_store));

  // Discard the staged log and close and stage the next log, which is the
  // second "ongoing log".
  // Check that it has one sample in |kOnDidCreateMetricsLogHistogramName|.
  test_log_store->DiscardStagedLog();
  service.StageCurrentLogForTest();
  EXPECT_EQ(1, GetSampleCountOfOnDidCreateLogHistogram(test_log_store));

  // Check one more log for good measure.
  test_log_store->DiscardStagedLog();
  service.StageCurrentLogForTest();
  EXPECT_EQ(1, GetSampleCountOfOnDidCreateLogHistogram(test_log_store));
}

TEST_P(MetricsServiceTestWithConsolidateInitialLogLogicFeature,
       MarkCurrentHistogramsAsReported) {
  EnableMetricsReporting();
  TestMetricsServiceClient client;
  TestMetricsService service(GetMetricsStateManager(), &client,
                             GetLocalState());

  // Emit to histogram |Test.Before.Histogram|.
  ASSERT_FALSE(HistogramExists("Test.Before.Histogram"));
  base::UmaHistogramBoolean("Test.Before.Histogram", true);
  ASSERT_TRUE(HistogramExists("Test.Before.Histogram"));

  // Mark histogram data that has been collected until now (in particular, the
  // |Test.Before.Histogram| sample) as reported.
  service.MarkCurrentHistogramsAsReported();

  // Emit to histogram |Test.After.Histogram|.
  ASSERT_FALSE(HistogramExists("Test.After.Histogram"));
  base::UmaHistogramBoolean("Test.After.Histogram", true);
  ASSERT_TRUE(HistogramExists("Test.After.Histogram"));

  // Verify that the |Test.Before.Histogram| sample was marked as reported, and
  // is not included in the next snapshot.
  EXPECT_EQ(0, GetHistogramDeltaTotalCount("Test.Before.Histogram"));
  // Verify that the |Test.After.Histogram| sample was not marked as reported,
  // and is included in the next snapshot.
  EXPECT_EQ(1, GetHistogramDeltaTotalCount("Test.After.Histogram"));

  // Clean up histograms.
  base::StatisticsRecorder::ForgetHistogramForTesting("Test.Before.Histogram");
  base::StatisticsRecorder::ForgetHistogramForTesting("Test.After.Histogram");
}

TEST_P(MetricsServiceTestWithConsolidateInitialLogLogicFeature,
       LogHasUserActions) {
  // This test verifies that user actions are properly captured in UMA logs.
  // In particular, it checks that the first log has actions, a behavior that
  // was buggy in the past, plus additional checks for subsequent logs with
  // different numbers of actions. This behavior is only fixed after
  // consolidating the initial log logic.
  if (!ShouldConsolidateInitialLogLogic())
    return;

  EnableMetricsReporting();
  TestMetricsServiceClient client;
  TestMetricsService service(GetMetricsStateManager(), &client,
                             GetLocalState());

  service.InitializeMetricsRecordingState();

  // Start() will create an initial log.
  service.Start();
  ASSERT_EQ(TestMetricsService::INIT_TASK_SCHEDULED, service.state());

  base::RecordAction(base::UserMetricsAction("TestAction"));
  base::RecordAction(base::UserMetricsAction("TestAction"));
  base::RecordAction(base::UserMetricsAction("DifferentAction"));

  // Run pending tasks to finish init task and complete the first ongoing log.
  task_runner_->RunPendingTasks();
  ASSERT_EQ(TestMetricsService::SENDING_LOGS, service.state());

  MetricsLogStore* test_log_store = service.LogStoreForTest();

  // Stage the next log, which should be the initial metrics log.
  test_log_store->StageNextLog();
  EXPECT_EQ(3, GetNumberOfUserActions(test_log_store));

  // Log another action.
  base::RecordAction(base::UserMetricsAction("TestAction"));
  test_log_store->DiscardStagedLog();
  service.StageCurrentLogForTest();
  EXPECT_EQ(1, GetNumberOfUserActions(test_log_store));

  // Check a log with no actions.
  test_log_store->DiscardStagedLog();
  service.StageCurrentLogForTest();
  EXPECT_EQ(0, GetNumberOfUserActions(test_log_store));

  // And another one with a couple.
  base::RecordAction(base::UserMetricsAction("TestAction"));
  base::RecordAction(base::UserMetricsAction("TestAction"));
  test_log_store->DiscardStagedLog();
  service.StageCurrentLogForTest();
  EXPECT_EQ(2, GetNumberOfUserActions(test_log_store));
}

TEST_P(MetricsServiceTestWithConsolidateInitialLogLogicFeature,
       FirstLogCreatedBeforeUnsentLogsSent) {
  // This test checks that we will create and serialize the first ongoing log
  // before starting to send unsent logs from the past session. The latter is
  // simulated by injecting some fake ongoing logs into the MetricsLogStore.
  EnableMetricsReporting();
  TestMetricsServiceClient client;
  TestMetricsService service(GetMetricsStateManager(), &client,
                             GetLocalState());

  service.InitializeMetricsRecordingState();
  // Start() will create the first ongoing log.
  service.Start();
  ASSERT_EQ(TestMetricsService::INIT_TASK_SCHEDULED, service.state());

  MetricsLogStore* test_log_store = service.LogStoreForTest();

  // Set up the log store with an existing fake log entry. The string content
  // is never deserialized to proto, so we're just passing some dummy content.
  ASSERT_EQ(0u, test_log_store->initial_log_count());
  ASSERT_EQ(0u, test_log_store->ongoing_log_count());
  test_log_store->StoreLog("blah_blah", MetricsLog::ONGOING_LOG, LogMetadata());
  // Note: |initial_log_count()| refers to initial stability logs, so the above
  // log is counted as an ongoing log (per its type).
  ASSERT_EQ(0u, test_log_store->initial_log_count());
  ASSERT_EQ(1u, test_log_store->ongoing_log_count());

  // Run pending tasks to finish init task and complete the first ongoing log.
  task_runner_->RunPendingTasks();
  ASSERT_EQ(TestMetricsService::SENDING_LOGS, service.state());
  // When the init task is complete, the first ongoing log should be created
  // and added to the ongoing logs.
  EXPECT_EQ(0u, test_log_store->initial_log_count());
  EXPECT_EQ(2u, test_log_store->ongoing_log_count());
}

TEST_P(MetricsServiceTestWithConsolidateInitialLogLogicFeature,
       MetricsProviderOnRecordingDisabledCalledOnInitialStop) {
  TestMetricsServiceClient client;
  TestMetricsService service(GetMetricsStateManager(), &client,
                             GetLocalState());

  TestMetricsProvider* test_provider = new TestMetricsProvider();
  service.RegisterMetricsProvider(
      std::unique_ptr<MetricsProvider>(test_provider));

  service.InitializeMetricsRecordingState();
  service.Stop();

  EXPECT_TRUE(test_provider->on_recording_disabled_called());
}

TEST_P(MetricsServiceTestWithConsolidateInitialLogLogicFeature,
       MetricsProvidersInitialized) {
  TestMetricsServiceClient client;
  TestMetricsService service(GetMetricsStateManager(), &client,
                             GetLocalState());

  TestMetricsProvider* test_provider = new TestMetricsProvider();
  service.RegisterMetricsProvider(
      std::unique_ptr<MetricsProvider>(test_provider));

  service.InitializeMetricsRecordingState();

  EXPECT_TRUE(test_provider->init_called());
}

// Verify that FieldTrials activated by a MetricsProvider are reported by the
// FieldTrialsProvider.
TEST_P(MetricsServiceTestWithConsolidateInitialLogLogicFeature,
       ActiveFieldTrialsReported) {
  EnableMetricsReporting();
  TestMetricsServiceClient client;
  TestMetricsService service(GetMetricsStateManager(), &client,
                             GetLocalState());

  // Set up FieldTrials.
  const std::string trial_name1 = "CoffeeExperiment";
  const std::string group_name1 = "Free";
  base::FieldTrial* trial1 =
      base::FieldTrialList::CreateFieldTrial(trial_name1, group_name1);

  const std::string trial_name2 = "DonutExperiment";
  const std::string group_name2 = "MapleBacon";
  base::FieldTrial* trial2 =
      base::FieldTrialList::CreateFieldTrial(trial_name2, group_name2);

  service.RegisterMetricsProvider(
      std::make_unique<ExperimentTestMetricsProvider>(trial1, trial2));

  service.InitializeMetricsRecordingState();
  service.Start();
  service.StageCurrentLogForTest();

  MetricsLogStore* test_log_store = service.LogStoreForTest();
  ChromeUserMetricsExtension uma_log;
  EXPECT_TRUE(DecodeLogDataToProto(test_log_store->staged_log(), &uma_log));

  // Verify that the reported FieldTrial IDs are for the trials set up by this
  // test.
  EXPECT_TRUE(
      IsFieldTrialPresent(uma_log.system_profile(), trial_name1, group_name1));
  EXPECT_TRUE(
      IsFieldTrialPresent(uma_log.system_profile(), trial_name2, group_name2));
}

TEST_P(MetricsServiceTestWithConsolidateInitialLogLogicFeature,
       SystemProfileDataProvidedOnEnableRecording) {
  EnableMetricsReporting();
  TestMetricsServiceClient client;
  TestMetricsService service(GetMetricsStateManager(), &client,
                             GetLocalState());

  TestMetricsProvider* test_provider = new TestMetricsProvider();
  service.RegisterMetricsProvider(
      std::unique_ptr<MetricsProvider>(test_provider));

  service.InitializeMetricsRecordingState();

  // ProvideSystemProfileMetrics() shouldn't be called initially.
  EXPECT_FALSE(test_provider->provide_system_profile_metrics_called());
  EXPECT_FALSE(service.persistent_system_profile_provided());

  service.Start();

  // Start should call ProvideSystemProfileMetrics().
  EXPECT_TRUE(test_provider->provide_system_profile_metrics_called());
  EXPECT_TRUE(service.persistent_system_profile_provided());
  EXPECT_FALSE(service.persistent_system_profile_complete());
}

TEST_P(MetricsServiceTestWithConsolidateInitialLogLogicFeature, SplitRotation) {
  EnableMetricsReporting();
  TestMetricsServiceClient client;
  TestMetricsService service(GetMetricsStateManager(), &client,
                             GetLocalState());
  service.InitializeMetricsRecordingState();
  service.Start();
  // Rotation loop should create a log and mark state as idle.
  // Upload loop should start upload or be restarted.
  // The independent-metrics upload job will be started and always be a task.
  task_runner_->RunPendingTasks();
  // Rotation loop should terminate due to being idle.
  // Upload loop should start uploading if it isn't already.
  task_runner_->RunPendingTasks();
  EXPECT_TRUE(client.uploader()->is_uploading());
  EXPECT_EQ(1U, task_runner_->NumPendingTasks());
  service.OnApplicationNotIdle();
  EXPECT_TRUE(client.uploader()->is_uploading());
  EXPECT_EQ(2U, task_runner_->NumPendingTasks());
  // Log generation should be suppressed due to unsent log.
  // Idle state should not be reset.
  task_runner_->RunPendingTasks();
  EXPECT_TRUE(client.uploader()->is_uploading());
  EXPECT_EQ(2U, task_runner_->NumPendingTasks());
  // Make sure idle state was not reset.
  task_runner_->RunPendingTasks();
  EXPECT_TRUE(client.uploader()->is_uploading());
  EXPECT_EQ(2U, task_runner_->NumPendingTasks());
  // Upload should not be rescheduled, since there are no other logs.
  client.uploader()->CompleteUpload(200);
  EXPECT_FALSE(client.uploader()->is_uploading());
  EXPECT_EQ(2U, task_runner_->NumPendingTasks());
  // Running should generate a log, restart upload loop, and mark idle.
  task_runner_->RunPendingTasks();
  EXPECT_FALSE(client.uploader()->is_uploading());
  EXPECT_EQ(3U, task_runner_->NumPendingTasks());
  // Upload should start, and rotation loop should idle out.
  task_runner_->RunPendingTasks();
  EXPECT_TRUE(client.uploader()->is_uploading());
  EXPECT_EQ(1U, task_runner_->NumPendingTasks());
}

TEST_P(MetricsServiceTestWithConsolidateInitialLogLogicFeature,
       LastLiveTimestamp) {
  EnableMetricsReporting();
  TestMetricsServiceClient client;
  TestMetricsService service(GetMetricsStateManager(), &client,
                             GetLocalState());

  base::Time initial_last_live_time =
      GetLocalState()->GetTime(prefs::kStabilityBrowserLastLiveTimeStamp);

  service.InitializeMetricsRecordingState();
  service.Start();

  task_runner_->RunPendingTasks();
  size_t num_pending_tasks = task_runner_->NumPendingTasks();

  service.StartUpdatingLastLiveTimestamp();

  // Starting the update sequence should not write anything, but should
  // set up for a later write.
  EXPECT_EQ(
      initial_last_live_time,
      GetLocalState()->GetTime(prefs::kStabilityBrowserLastLiveTimeStamp));
  EXPECT_EQ(num_pending_tasks + 1, task_runner_->NumPendingTasks());

  // To avoid flakiness, yield until we're over a microsecond threshold.
  YieldUntil(initial_last_live_time + base::Microseconds(2));

  task_runner_->RunPendingTasks();

  // Verify that the time has updated in local state.
  base::Time updated_last_live_time =
      GetLocalState()->GetTime(prefs::kStabilityBrowserLastLiveTimeStamp);
  EXPECT_LT(initial_last_live_time, updated_last_live_time);

  // Double check that an update schedules again...
  YieldUntil(updated_last_live_time + base::Microseconds(2));

  task_runner_->RunPendingTasks();
  EXPECT_LT(
      updated_last_live_time,
      GetLocalState()->GetTime(prefs::kStabilityBrowserLastLiveTimeStamp));
}

TEST_P(MetricsServiceTestWithConsolidateInitialLogLogicFeature,
       EnablementObserverNotification) {
  EnableMetricsReporting();
  TestMetricsServiceClient client;
  TestMetricsService service(GetMetricsStateManager(), &client,
                             GetLocalState());
  service.InitializeMetricsRecordingState();

  absl::optional<bool> enabled;
  auto observer = [&enabled](bool notification) { enabled = notification; };
  auto subscription =
      service.AddEnablementObserver(base::BindLambdaForTesting(observer));

  service.Start();
  ASSERT_TRUE(enabled.has_value());
  EXPECT_TRUE(enabled.value());

  enabled.reset();

  service.Stop();
  ASSERT_TRUE(enabled.has_value());
  EXPECT_FALSE(enabled.value());
}

#if BUILDFLAG(IS_CHROMEOS_LACROS)
// ResetClientId is only enabled on certain targets.
TEST_P(MetricsServiceTestWithConsolidateInitialLogLogicFeature,
       SetClientIdToExternalId) {
  EnableMetricsReporting();
  TestMetricsServiceClient client;
  TestMetricsService service(GetMetricsStateManager(), &client,
                             GetLocalState());

  const std::string client_id = "d92ad666-a420-4c73-8718-94311ae2ff5f";

  EXPECT_NE(service.GetClientId(), client_id);
  service.SetExternalClientId(client_id);
  // Reset will cause the client id to be regenerated. If an external client id
  // is provided, it should defer to using that id instead of creating its own.
  service.ResetClientId();

  EXPECT_EQ(service.GetClientId(), client_id);
}
#endif  // BUILDFLAG(IS_CHROMEOS_LACROS)

#if BUILDFLAG(IS_CHROMEOS_ASH)
TEST_P(MetricsServiceTestWithConsolidateInitialLogLogicFeature,
       OngoingLogNotFlushedBeforeInitialLogWhenUserLogStoreSet) {
  EnableMetricsReporting();
  TestMetricsServiceClient client;
  TestMetricsService service(GetMetricsStateManager(), &client,
                             GetLocalState());

  service.InitializeMetricsRecordingState();
  // Start() will create the first ongoing log.
  service.Start();

  MetricsLogStore* test_log_store = service.LogStoreForTest();
  std::unique_ptr<TestUnsentLogStore> alternate_ongoing_log_store =
      InitializeTestLogStoreAndGet();
  TestUnsentLogStore* alternate_ongoing_log_store_ptr =
      alternate_ongoing_log_store.get();

  ASSERT_EQ(0u, test_log_store->initial_log_count());
  ASSERT_EQ(0u, test_log_store->ongoing_log_count());

  service.SetUserLogStore(std::move(alternate_ongoing_log_store));

  // Initial logs should not have been collected, so the ongoing log being
  // recorded should not be flushed when a user log store is mounted.
  ASSERT_EQ(0u, test_log_store->initial_log_count());
  ASSERT_EQ(0u, test_log_store->ongoing_log_count());

  // Run pending tasks to finish init task and complete the first ongoing log.
  task_runner_->RunPendingTasks();
  ASSERT_EQ(TestMetricsService::SENDING_LOGS, service.state());
  // When the init task is complete, the first ongoing log should be created
  // in the alternate ongoing log store.
  EXPECT_EQ(0u, test_log_store->initial_log_count());
  EXPECT_EQ(0u, test_log_store->ongoing_log_count());
  EXPECT_EQ(1u, alternate_ongoing_log_store_ptr->size());
}

TEST_P(MetricsServiceTestWithConsolidateInitialLogLogicFeature,
       OngoingLogFlushedAfterInitialLogWhenUserLogStoreSet) {
  EnableMetricsReporting();
  TestMetricsServiceClient client;
  TestMetricsService service(GetMetricsStateManager(), &client,
                             GetLocalState());

  service.InitializeMetricsRecordingState();
  // Start() will create the first ongoing log.
  service.Start();

  MetricsLogStore* test_log_store = service.LogStoreForTest();
  std::unique_ptr<TestUnsentLogStore> alternate_ongoing_log_store =
      InitializeTestLogStoreAndGet();

  // Init state.
  ASSERT_EQ(0u, test_log_store->initial_log_count());
  ASSERT_EQ(0u, test_log_store->ongoing_log_count());

  // Run pending tasks to finish init task and complete the first ongoing log.
  task_runner_->RunPendingTasks();
  ASSERT_EQ(TestMetricsService::SENDING_LOGS, service.state());
  ASSERT_EQ(0u, test_log_store->initial_log_count());
  ASSERT_EQ(1u, test_log_store->ongoing_log_count());

  // User log store set post-init.
  service.SetUserLogStore(std::move(alternate_ongoing_log_store));

  // Another log should have been flushed from setting the user log store.
  ASSERT_EQ(0u, test_log_store->initial_log_count());
  ASSERT_EQ(2u, test_log_store->ongoing_log_count());
}
#endif  // BUILDFLAG(IS_CHROMEOS_ASH)

}  // namespace metrics