1 // Copyright 2014 The Chromium Authors
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 #include "components/metrics/metrics_service.h"
6 
7 #include <stdint.h>
8 
9 #include <algorithm>
10 #include <memory>
11 #include <string>
12 #include <string_view>
13 
14 #include "base/containers/contains.h"
15 #include "base/files/file_path.h"
16 #include "base/files/file_util.h"
17 #include "base/files/scoped_temp_dir.h"
18 #include "base/functional/bind.h"
19 #include "base/memory/raw_ptr.h"
20 #include "base/metrics/field_trial.h"
21 #include "base/metrics/histogram_functions.h"
22 #include "base/metrics/histogram_snapshot_manager.h"
23 #include "base/metrics/metrics_hashes.h"
24 #include "base/metrics/statistics_recorder.h"
25 #include "base/metrics/user_metrics.h"
26 #include "base/task/single_thread_task_runner.h"
27 #include "base/test/bind.h"
28 #include "base/test/metrics/histogram_tester.h"
29 #include "base/test/scoped_feature_list.h"
30 #include "base/test/task_environment.h"
31 #include "base/threading/platform_thread.h"
32 #include "build/build_config.h"
33 #include "components/metrics/clean_exit_beacon.h"
34 #include "components/metrics/client_info.h"
35 #include "components/metrics/cloned_install_detector.h"
36 #include "components/metrics/environment_recorder.h"
37 #include "components/metrics/log_decoder.h"
38 #include "components/metrics/metrics_features.h"
39 #include "components/metrics/metrics_log.h"
40 #include "components/metrics/metrics_pref_names.h"
41 #include "components/metrics/metrics_scheduler.h"
42 #include "components/metrics/metrics_state_manager.h"
43 #include "components/metrics/metrics_upload_scheduler.h"
44 #include "components/metrics/stability_metrics_helper.h"
45 #include "components/metrics/test/test_enabled_state_provider.h"
46 #include "components/metrics/test/test_metrics_provider.h"
47 #include "components/metrics/test/test_metrics_service_client.h"
48 #include "components/metrics/unsent_log_store_metrics_impl.h"
49 #include "components/prefs/testing_pref_service.h"
50 #include "components/variations/active_field_trials.h"
51 #include "testing/gtest/include/gtest/gtest.h"
52 #include "third_party/metrics_proto/chrome_user_metrics_extension.pb.h"
53 #include "third_party/metrics_proto/system_profile.pb.h"
54 #include "third_party/zlib/google/compression_utils.h"
55 
56 namespace metrics {
57 namespace {
58 
59 const char kTestPrefName[] = "TestPref";
60 
61 class TestUnsentLogStore : public UnsentLogStore {
62  public:
63   explicit TestUnsentLogStore(PrefService* service)
64       : UnsentLogStore(std::make_unique<UnsentLogStoreMetricsImpl>(),
65                        service,
66                        kTestPrefName,
67                        nullptr,
68                        // Set to 3 so logs are not dropped in the test.
69                        UnsentLogStore::UnsentLogStoreLimits{
70                            .min_log_count = 3,
71                        },
72                        /*signing_key=*/std::string(),
73                        /*logs_event_manager=*/nullptr) {}
74   ~TestUnsentLogStore() override = default;
75 
76   TestUnsentLogStore(const TestUnsentLogStore&) = delete;
77   TestUnsentLogStore& operator=(const TestUnsentLogStore&) = delete;
78 
79   static void RegisterPrefs(PrefRegistrySimple* registry) {
80     registry->RegisterListPref(kTestPrefName);
81   }
82 };
83 
84 // Returns true if the trial/group pair is present in |proto|'s FieldTrials.
85 bool IsFieldTrialPresent(const SystemProfileProto& proto,
86                          const std::string& trial_name,
87                          const std::string& group_name) {
88   const variations::ActiveGroupId id =
89       variations::MakeActiveGroupId(trial_name, group_name);
90 
91   for (const auto& trial : proto.field_trial()) {
92     if (trial.name_id() == id.name && trial.group_id() == id.group) {
93       return true;
94     }
95   }
96   return false;
97 }
98 
99 class TestMetricsService : public MetricsService {
100  public:
101   TestMetricsService(MetricsStateManager* state_manager,
102                      MetricsServiceClient* client,
103                      PrefService* local_state)
104       : MetricsService(state_manager, client, local_state) {}
105 
106   TestMetricsService(const TestMetricsService&) = delete;
107   TestMetricsService& operator=(const TestMetricsService&) = delete;
108 
109   ~TestMetricsService() override = default;
110 
111   using MetricsService::INIT_TASK_DONE;
112   using MetricsService::INIT_TASK_SCHEDULED;
113   using MetricsService::RecordCurrentEnvironmentHelper;
114   using MetricsService::SENDING_LOGS;
115   using MetricsService::state;
116 
117   // MetricsService:
118   void SetPersistentSystemProfile(const std::string& serialized_proto,
119                                   bool complete) override {
120     persistent_system_profile_provided_ = true;
121     persistent_system_profile_complete_ = complete;
122   }
123 
124   bool persistent_system_profile_provided() const {
125     return persistent_system_profile_provided_;
126   }
127   bool persistent_system_profile_complete() const {
128     return persistent_system_profile_complete_;
129   }
130 
131  private:
132   bool persistent_system_profile_provided_ = false;
133   bool persistent_system_profile_complete_ = false;
134 };
135 
136 class TestMetricsLog : public MetricsLog {
137  public:
138   TestMetricsLog(const std::string& client_id,
139                  int session_id,
140                  MetricsServiceClient* client)
141       : MetricsLog(client_id, session_id, MetricsLog::ONGOING_LOG, client) {}
142 
143   TestMetricsLog(const TestMetricsLog&) = delete;
144   TestMetricsLog& operator=(const TestMetricsLog&) = delete;
145 
146   ~TestMetricsLog() override = default;
147 };
148 
149 const char kOnDidCreateMetricsLogHistogramName[] = "Test.OnDidCreateMetricsLog";
150 
151 class TestMetricsProviderForOnDidCreateMetricsLog : public TestMetricsProvider {
152  public:
153   TestMetricsProviderForOnDidCreateMetricsLog() = default;
154   ~TestMetricsProviderForOnDidCreateMetricsLog() override = default;
155 
156   void OnDidCreateMetricsLog() override {
157     base::UmaHistogramBoolean(kOnDidCreateMetricsLogHistogramName, true);
158   }
159 };
160 
161 const char kProvideHistogramsHistogramName[] = "Test.ProvideHistograms";
162 
163 class TestMetricsProviderForProvideHistograms : public TestMetricsProvider {
164  public:
165   TestMetricsProviderForProvideHistograms() = default;
166   ~TestMetricsProviderForProvideHistograms() override = default;
167 
168   bool ProvideHistograms() override {
169     base::UmaHistogramBoolean(kProvideHistogramsHistogramName, true);
170     return true;
171   }
172 
173   void ProvideCurrentSessionData(
174       ChromeUserMetricsExtension* uma_proto) override {
175     MetricsProvider::ProvideCurrentSessionData(uma_proto);
176   }
177 };
178 
179 class TestMetricsProviderForProvideHistogramsEarlyReturn
180     : public TestMetricsProviderForProvideHistograms {
181  public:
182   TestMetricsProviderForProvideHistogramsEarlyReturn() = default;
183   ~TestMetricsProviderForProvideHistogramsEarlyReturn() override = default;
184 
185   void OnDidCreateMetricsLog() override {}
186 };
187 
188 class TestIndependentMetricsProvider : public MetricsProvider {
189  public:
190   TestIndependentMetricsProvider() = default;
191   ~TestIndependentMetricsProvider() override = default;
192 
193   // MetricsProvider:
194   bool HasIndependentMetrics() override {
195     // Only return true the first time this is called (i.e., we only have one
196     // independent log to provide).
197     if (!has_independent_metrics_called_) {
198       has_independent_metrics_called_ = true;
199       return true;
200     }
201     return false;
202   }
203   void ProvideIndependentMetrics(
204       base::OnceClosure serialize_log_callback,
205       base::OnceCallback<void(bool)> done_callback,
206       ChromeUserMetricsExtension* uma_proto,
207       base::HistogramSnapshotManager* snapshot_manager) override {
208     provide_independent_metrics_called_ = true;
209     uma_proto->set_client_id(123);
210     std::move(done_callback).Run(true);
211   }
212 
213   bool has_independent_metrics_called() const {
214     return has_independent_metrics_called_;
215   }
216 
217   bool provide_independent_metrics_called() const {
218     return provide_independent_metrics_called_;
219   }
220 
221  private:
222   bool has_independent_metrics_called_ = false;
223   bool provide_independent_metrics_called_ = false;
224 };
225 
226 class MetricsServiceTest : public testing::Test {
227  public:
228   MetricsServiceTest()
229       : enabled_state_provider_(new TestEnabledStateProvider(false, false)) {
230     base::SetRecordActionTaskRunner(
231         task_environment_.GetMainThreadTaskRunner());
232     MetricsService::RegisterPrefs(testing_local_state_.registry());
233   }
234 
235   MetricsServiceTest(const MetricsServiceTest&) = delete;
236   MetricsServiceTest& operator=(const MetricsServiceTest&) = delete;
237 
238   ~MetricsServiceTest() override = default;
239 
240   void SetUp() override { ASSERT_TRUE(temp_dir_.CreateUniqueTempDir()); }
241 
242   MetricsStateManager* GetMetricsStateManager(
243       const base::FilePath& user_data_dir = base::FilePath(),
244       StartupVisibility startup_visibility = StartupVisibility::kUnknown) {
245     // Lazy-initialize the metrics_state_manager so that it correctly reads the
246     // stability state from prefs after tests have a chance to initialize it.
247     if (!metrics_state_manager_) {
248       metrics_state_manager_ = MetricsStateManager::Create(
249           GetLocalState(), enabled_state_provider_.get(), std::wstring(),
250           user_data_dir, startup_visibility);
251       metrics_state_manager_->InstantiateFieldTrialList();
252     }
253     return metrics_state_manager_.get();
254   }
255 
256   std::unique_ptr<TestUnsentLogStore> InitializeTestLogStoreAndGet() {
257     TestUnsentLogStore::RegisterPrefs(testing_local_state_.registry());
258     return std::make_unique<TestUnsentLogStore>(GetLocalState());
259   }
260 
261   PrefService* GetLocalState() { return &testing_local_state_; }
262 
263   // Sets metrics reporting as enabled for testing.
264   void EnableMetricsReporting() { SetMetricsReporting(true); }
265 
266   // Sets metrics reporting for testing.
267   void SetMetricsReporting(bool enabled) {
268     enabled_state_provider_->set_consent(enabled);
269     enabled_state_provider_->set_enabled(enabled);
270   }
271 
272   // Finds a histogram with the specified |name_hash| in |histograms|.
273   const base::HistogramBase* FindHistogram(
274       const base::StatisticsRecorder::Histograms& histograms,
275       uint64_t name_hash) {
276     for (const base::HistogramBase* histogram : histograms) {
277       if (name_hash == base::HashMetricName(histogram->histogram_name())) {
278         return histogram;
279       }
280     }
281     return nullptr;
282   }
283 
284   // Checks whether |uma_log| contains any histograms that are not flagged
285   // with kUmaStabilityHistogramFlag. Stability logs should only contain such
286   // histograms.
287   void CheckForNonStabilityHistograms(
288       const ChromeUserMetricsExtension& uma_log) {
289     const int kStabilityFlags = base::HistogramBase::kUmaStabilityHistogramFlag;
290     const base::StatisticsRecorder::Histograms histograms =
291         base::StatisticsRecorder::GetHistograms();
292     for (int i = 0; i < uma_log.histogram_event_size(); ++i) {
293       const uint64_t hash = uma_log.histogram_event(i).name_hash();
294 
295       const base::HistogramBase* histogram = FindHistogram(histograms, hash);
296       EXPECT_TRUE(histogram) << hash;
297 
298       EXPECT_TRUE(histogram->HasFlags(kStabilityFlags)) << hash;
299     }
300   }
301 
302   // Returns the number of samples logged to the specified histogram or 0 if
303   // the histogram was not found.
304   int GetHistogramSampleCount(const ChromeUserMetricsExtension& uma_log,
305                               std::string_view histogram_name) {
306     const auto histogram_name_hash = base::HashMetricName(histogram_name);
307     int samples = 0;
308     for (int i = 0; i < uma_log.histogram_event_size(); ++i) {
309       const auto& histogram = uma_log.histogram_event(i);
310       if (histogram.name_hash() == histogram_name_hash) {
311         for (int j = 0; j < histogram.bucket_size(); ++j) {
312           const auto& bucket = histogram.bucket(j);
313           // Per proto comments, count field not being set means 1 sample.
314           samples += (!bucket.has_count() ? 1 : bucket.count());
315         }
316       }
317     }
318     return samples;
319   }
320 
321   // Returns the sampled count of the |kOnDidCreateMetricsLogHistogramName|
322   // histogram in the currently staged log in |test_log_store|.
323   int GetSampleCountOfOnDidCreateLogHistogram(MetricsLogStore* test_log_store) {
324     ChromeUserMetricsExtension log;
325     EXPECT_TRUE(DecodeLogDataToProto(test_log_store->staged_log(), &log));
326     return GetHistogramSampleCount(log, kOnDidCreateMetricsLogHistogramName);
327   }
328 
329   int GetNumberOfUserActions(MetricsLogStore* test_log_store) {
330     ChromeUserMetricsExtension log;
331     EXPECT_TRUE(DecodeLogDataToProto(test_log_store->staged_log(), &log));
332     return log.user_action_event_size();
333   }
334 
335   const base::FilePath user_data_dir_path() { return temp_dir_.GetPath(); }
336 
337  protected:
338   base::test::TaskEnvironment task_environment_{
339       base::test::TaskEnvironment::TimeSource::MOCK_TIME};
340   base::test::ScopedFeatureList feature_list_;
341 
342  private:
343   std::unique_ptr<TestEnabledStateProvider> enabled_state_provider_;
344   TestingPrefServiceSimple testing_local_state_;
345   std::unique_ptr<MetricsStateManager> metrics_state_manager_;
346   base::ScopedTempDir temp_dir_;
347 };
348 
349 class MetricsServiceTestWithFeatures
350     : public MetricsServiceTest,
351       public ::testing::WithParamInterface<std::tuple<bool>> {
352  public:
353   MetricsServiceTestWithFeatures() = default;
354   ~MetricsServiceTestWithFeatures() override = default;
355 
356   bool ShouldSnapshotInBg() { return std::get<0>(GetParam()); }
357 
358   void SetUp() override {
359     MetricsServiceTest::SetUp();
360     std::vector<base::test::FeatureRefAndParams> enabled_features;
361     std::vector<base::test::FeatureRef> disabled_features;
362 
363     if (ShouldSnapshotInBg()) {
364       enabled_features.emplace_back(features::kMetricsServiceDeltaSnapshotInBg,
365                                     base::FieldTrialParams());
366     } else {
367       disabled_features.emplace_back(
368           features::kMetricsServiceDeltaSnapshotInBg);
369     }
370 
371     feature_list_.InitWithFeaturesAndParameters(enabled_features,
372                                                 disabled_features);
373   }
374 
375  private:
376   base::test::ScopedFeatureList feature_list_;
377 };
378 
379 struct StartupVisibilityTestParams {
380   metrics::StartupVisibility startup_visibility;
381   bool expected_beacon_value;
382 };
383 
384 class MetricsServiceTestWithStartupVisibility
385     : public MetricsServiceTest,
386       public ::testing::WithParamInterface<
387           std::tuple<StartupVisibilityTestParams, bool>> {
388  public:
389   MetricsServiceTestWithStartupVisibility() = default;
390   ~MetricsServiceTestWithStartupVisibility() override = default;
391 
ShouldSnapshotInBg()392   bool ShouldSnapshotInBg() { return std::get<1>(GetParam()); }
393 
394   void SetUp() override {
395     MetricsServiceTest::SetUp();
396     std::vector<base::test::FeatureRefAndParams> enabled_features;
397     std::vector<base::test::FeatureRef> disabled_features;
398 
399     if (ShouldSnapshotInBg()) {
400       enabled_features.emplace_back(features::kMetricsServiceDeltaSnapshotInBg,
401                                     base::FieldTrialParams());
402     } else {
403       disabled_features.emplace_back(
404           features::kMetricsServiceDeltaSnapshotInBg);
405     }
406 
407     feature_list_.InitWithFeaturesAndParameters(enabled_features,
408                                                 disabled_features);
409   }
410 
411  private:
412   base::test::ScopedFeatureList feature_list_;
413 };
414 
415 class ExperimentTestMetricsProvider : public TestMetricsProvider {
416  public:
417   explicit ExperimentTestMetricsProvider(
418       base::FieldTrial* profile_metrics_trial,
419       base::FieldTrial* session_data_trial)
420       : profile_metrics_trial_(profile_metrics_trial),
421         session_data_trial_(session_data_trial) {}
422 
423   ~ExperimentTestMetricsProvider() override = default;
424 
425   void ProvideSystemProfileMetrics(
426       SystemProfileProto* system_profile_proto) override {
427     TestMetricsProvider::ProvideSystemProfileMetrics(system_profile_proto);
428     profile_metrics_trial_->Activate();
429   }
430 
431   void ProvideCurrentSessionData(
432       ChromeUserMetricsExtension* uma_proto) override {
433     TestMetricsProvider::ProvideCurrentSessionData(uma_proto);
434     session_data_trial_->Activate();
435   }
436 
437  private:
438   raw_ptr<base::FieldTrial> profile_metrics_trial_;
439   raw_ptr<base::FieldTrial> session_data_trial_;
440 };
441 
442 bool HistogramExists(std::string_view name) {
443   return base::StatisticsRecorder::FindHistogram(name) != nullptr;
444 }
445 
446 base::HistogramBase::Count GetHistogramDeltaTotalCount(std::string_view name) {
447   return base::StatisticsRecorder::FindHistogram(name)
448       ->SnapshotDelta()
449       ->TotalCount();
450 }
451 
452 }  // namespace
453 
454 INSTANTIATE_TEST_SUITE_P(All,
455                          MetricsServiceTestWithFeatures,
456                          ::testing::Combine(::testing::Bool()));
457 
458 TEST_P(MetricsServiceTestWithFeatures, RecordId) {
459   EnableMetricsReporting();
460   GetMetricsStateManager(user_data_dir_path())->ForceClientIdCreation();
461 
462   // Set an initial value for the record-ids, to make them predictable.
463   GetLocalState()->SetInteger(prefs::kMetricsLogRecordId, 1000);
464 
465   TestMetricsServiceClient client;
466   TestMetricsService service(GetMetricsStateManager(user_data_dir_path()),
467                              &client, GetLocalState());
468 
469   auto log1 = service.CreateLogForTesting(MetricsLog::ONGOING_LOG);
470   auto log2 = service.CreateLogForTesting(MetricsLog::INITIAL_STABILITY_LOG);
471   auto log3 = service.CreateLogForTesting(MetricsLog::INDEPENDENT_LOG);
472 
473   EXPECT_EQ(1001, log1->uma_proto()->record_id());
474   EXPECT_EQ(1002, log2->uma_proto()->record_id());
475   EXPECT_EQ(1003, log3->uma_proto()->record_id());
476 }
477 
478 TEST_P(MetricsServiceTestWithFeatures, InitialStabilityLogAfterCleanShutDown) {
479   base::HistogramTester histogram_tester;
480   EnableMetricsReporting();
481   // Write a beacon file indicating that Chrome exited cleanly. Note that the
482   // crash streak value is arbitrary.
483   const base::FilePath beacon_file_path =
484       user_data_dir_path().Append(kCleanExitBeaconFilename);
485   ASSERT_TRUE(base::WriteFile(
486       beacon_file_path, CleanExitBeacon::CreateBeaconFileContentsForTesting(
487                             /*exited_cleanly=*/true, /*crash_streak=*/1)));
488 
489   TestMetricsServiceClient client;
490   TestMetricsService service(GetMetricsStateManager(user_data_dir_path()),
491                              &client, GetLocalState());
492 
493   TestMetricsProvider* test_provider = new TestMetricsProvider();
494   service.RegisterMetricsProvider(
495       std::unique_ptr<MetricsProvider>(test_provider));
496 
497   service.InitializeMetricsRecordingState();
498 
499   // No initial stability log should be generated.
500   EXPECT_FALSE(service.has_unsent_logs());
501 
502   // Ensure that HasPreviousSessionData() is always called on providers,
503   // for consistency, even if other conditions already indicate their presence.
504   EXPECT_TRUE(test_provider->has_initial_stability_metrics_called());
505 
506   // The test provider should not have been called upon to provide initial
507   // stability nor regular stability metrics.
508   EXPECT_FALSE(test_provider->provide_initial_stability_metrics_called());
509   EXPECT_FALSE(test_provider->provide_stability_metrics_called());
510 
511   // As there wasn't an unclean shutdown, no browser crash samples should have
512   // been emitted.
513   histogram_tester.ExpectBucketCount("Stability.Counts2",
514                                      StabilityEventType::kBrowserCrash, 0);
515 }
516 
517 TEST_P(MetricsServiceTestWithFeatures, InitialStabilityLogAtProviderRequest) {
518   base::HistogramTester histogram_tester;
519   EnableMetricsReporting();
520 
521   // Save an existing system profile to prefs, to correspond to what would be
522   // saved from a previous session.
523   TestMetricsServiceClient client;
524   TestMetricsLog log("0a94430b-18e5-43c8-a657-580f7e855ce1", 1, &client);
525   // Manually override the log's session hash to something else to verify that
526   // stability logs created later on using this environment will contain that
527   // session hash.
528   uint64_t modified_session_hash =
529       log.uma_proto()->system_profile().session_hash() + 1;
530   log.uma_proto()->mutable_system_profile()->set_session_hash(
531       modified_session_hash);
532   DelegatingProvider delegating_provider;
533   TestMetricsService::RecordCurrentEnvironmentHelper(&log, GetLocalState(),
534                                                      &delegating_provider);
535 
536   // Record stability build time and version from previous session, so that
537   // stability metrics (including exited cleanly flag) won't be cleared.
538   EnvironmentRecorder(GetLocalState())
539       .SetBuildtimeAndVersion(MetricsLog::GetBuildTime(),
540                               client.GetVersionString());
541 
542   // Write a beacon file indicating that Chrome exited cleanly. Note that the
543   // crash streak value is arbitrary.
544   const base::FilePath beacon_file_path =
545       user_data_dir_path().Append(kCleanExitBeaconFilename);
546   ASSERT_TRUE(base::WriteFile(
547       beacon_file_path, CleanExitBeacon::CreateBeaconFileContentsForTesting(
548                             /*exited_cleanly=*/true, /*crash_streak=*/1)));
549 
550   TestMetricsService service(GetMetricsStateManager(user_data_dir_path()),
551                              &client, GetLocalState());
552   // Add a metrics provider that requests a stability log.
553   TestMetricsProvider* test_provider = new TestMetricsProvider();
554   test_provider->set_has_initial_stability_metrics(true);
555   service.RegisterMetricsProvider(
556       std::unique_ptr<MetricsProvider>(test_provider));
557 
558   service.InitializeMetricsRecordingState();
559 
560   // The initial stability log should be generated and persisted in unsent logs.
561   MetricsLogStore* test_log_store = service.LogStoreForTest();
562   EXPECT_TRUE(test_log_store->has_unsent_logs());
563   EXPECT_FALSE(test_log_store->has_staged_log());
564 
565   // Ensure that HasPreviousSessionData() is always called on providers,
566   // for consistency, even if other conditions already indicate their presence.
567   EXPECT_TRUE(test_provider->has_initial_stability_metrics_called());
568 
569   // The test provider should have been called upon to provide initial
570   // stability and regular stability metrics.
571   EXPECT_TRUE(test_provider->provide_initial_stability_metrics_called());
572   EXPECT_TRUE(test_provider->provide_stability_metrics_called());
573 
574   // Stage the log and retrieve it.
575   test_log_store->StageNextLog();
576   EXPECT_TRUE(test_log_store->has_staged_log());
577 
578   ChromeUserMetricsExtension uma_log;
579   EXPECT_TRUE(DecodeLogDataToProto(test_log_store->staged_log(), &uma_log));
580 
581   EXPECT_TRUE(uma_log.has_client_id());
582   EXPECT_TRUE(uma_log.has_session_id());
583   EXPECT_TRUE(uma_log.has_system_profile());
584   EXPECT_TRUE(uma_log.system_profile().has_session_hash());
585   EXPECT_EQ(modified_session_hash, uma_log.system_profile().session_hash());
586   EXPECT_EQ(0, uma_log.user_action_event_size());
587   EXPECT_EQ(0, uma_log.omnibox_event_size());
588   CheckForNonStabilityHistograms(uma_log);
589   EXPECT_EQ(
590       1, GetHistogramSampleCount(uma_log, "UMA.InitialStabilityRecordBeacon"));
591 
592   // As there wasn't an unclean shutdown, no browser crash samples should have
593   // been emitted.
594   histogram_tester.ExpectBucketCount("Stability.Counts2",
595                                      StabilityEventType::kBrowserCrash, 0);
596 }
597 
598 TEST_P(MetricsServiceTestWithFeatures, IndependentLogAtProviderRequest) {
599   EnableMetricsReporting();
600   TestMetricsServiceClient client;
601   TestMetricsService service(GetMetricsStateManager(), &client,
602                              GetLocalState());
603 
604   // Create a provider that will have one independent log to provide.
605   auto* test_provider = new TestIndependentMetricsProvider();
606   service.RegisterMetricsProvider(
607       std::unique_ptr<MetricsProvider>(test_provider));
608 
609   service.InitializeMetricsRecordingState();
610   // Start() will create the first ongoing log.
611   service.Start();
612   ASSERT_EQ(TestMetricsService::INIT_TASK_SCHEDULED, service.state());
613 
614   // Verify that the independent log provider has not yet been called, and emit
615   // a histogram. This histogram should not be put into the independent log.
616   EXPECT_FALSE(test_provider->has_independent_metrics_called());
617   EXPECT_FALSE(test_provider->provide_independent_metrics_called());
618   const std::string test_histogram = "Test.Histogram";
619   base::UmaHistogramBoolean(test_histogram, true);
620 
621   // Fast forward the time by |initialization_delay|, which is when the pending
622   // init tasks will run.
623   base::TimeDelta initialization_delay = service.GetInitializationDelay();
624   task_environment_.FastForwardBy(initialization_delay);
625   EXPECT_EQ(TestMetricsService::INIT_TASK_DONE, service.state());
626 
627   // Fast forward the time by another |initialization_delay|, which is when
628   // metrics providers are called to provide independent logs.
629   task_environment_.FastForwardBy(initialization_delay);
630   EXPECT_TRUE(test_provider->has_independent_metrics_called());
631   EXPECT_TRUE(test_provider->provide_independent_metrics_called());
632 
633   // Fast forward the time until the MetricsRotationScheduler first runs, which
634   // should complete the first ongoing log.
635   // Note: The first log is only created after N = GetInitialIntervalSeconds()
636   // seconds since the start, and since we already fast forwarded by
637   // |initialization_delay| twice, we only need to fast forward by
638   // N - 2 * |initialization_delay|.
639   task_environment_.FastForwardBy(
640       base::Seconds(MetricsScheduler::GetInitialIntervalSeconds()) -
641       2 * initialization_delay);
642   EXPECT_EQ(TestMetricsService::SENDING_LOGS, service.state());
643 
644   MetricsLogStore* test_log_store = service.LogStoreForTest();
645 
646   // The currently staged log should be the independent log created by the
647   // independent log provider. The log should have a client id of 123. It should
648   // also not contain |test_histogram|.
649   ASSERT_TRUE(test_log_store->has_staged_log());
650   ChromeUserMetricsExtension uma_log;
651   EXPECT_TRUE(DecodeLogDataToProto(test_log_store->staged_log(), &uma_log));
652   EXPECT_EQ(uma_log.client_id(), 123UL);
653   EXPECT_EQ(GetHistogramSampleCount(uma_log, test_histogram), 0);
654 
655   // Discard the staged log and stage the next one. It should be the first
656   // ongoing log.
657   test_log_store->DiscardStagedLog();
658   ASSERT_TRUE(test_log_store->has_unsent_logs());
659   test_log_store->StageNextLog();
660   ASSERT_TRUE(test_log_store->has_staged_log());
661 
662   // Verify that the first ongoing log contains |test_histogram| (it should not
663   // have been put into the independent log).
664   EXPECT_TRUE(DecodeLogDataToProto(test_log_store->staged_log(), &uma_log));
665   EXPECT_EQ(GetHistogramSampleCount(uma_log, test_histogram), 1);
666 }
667 
668 TEST_P(MetricsServiceTestWithFeatures, OnDidCreateMetricsLogAtShutdown) {
669   base::HistogramTester histogram_tester;
670   EnableMetricsReporting();
671   TestMetricsServiceClient client;
672 
673   TestMetricsService service(GetMetricsStateManager(), &client,
674                              GetLocalState());
675 
676   // Create a provider that will log to |kOnDidCreateMetricsLogHistogramName|
677   // in OnDidCreateMetricsLog().
678   auto* test_provider = new TestMetricsProviderForOnDidCreateMetricsLog();
679   service.RegisterMetricsProvider(
680       std::unique_ptr<MetricsProvider>(test_provider));
681 
682   service.InitializeMetricsRecordingState();
683   // Start() will create the first ongoing log.
684   service.Start();
685 
686   // OnDidCreateMetricsLog() is called once when the first ongoing log is
687   // created.
688   histogram_tester.ExpectBucketCount(kOnDidCreateMetricsLogHistogramName, true,
689                                      1);
690   service.Stop();
691 
692   // OnDidCreateMetricsLog() will be called during shutdown to emit histograms.
693   histogram_tester.ExpectBucketCount(kOnDidCreateMetricsLogHistogramName, true,
694                                      2);
695 
696   // Clean up histograms.
697   base::StatisticsRecorder::ForgetHistogramForTesting(
698       kOnDidCreateMetricsLogHistogramName);
699 }
700 
701 TEST_P(MetricsServiceTestWithFeatures, ProvideHistograms) {
702   base::HistogramTester histogram_tester;
703   EnableMetricsReporting();
704   TestMetricsServiceClient client;
705 
706   TestMetricsService service(GetMetricsStateManager(), &client,
707                              GetLocalState());
708 
709   // Create a provider that will log to |kProvideHistogramsHistogramName|
710   // in ProvideHistograms().
711   auto* test_provider = new TestMetricsProviderForProvideHistograms();
712   service.RegisterMetricsProvider(
713       std::unique_ptr<MetricsProvider>(test_provider));
714 
715   service.InitializeMetricsRecordingState();
716   // Start() will create the first ongoing log.
717   service.Start();
718 
719   // ProvideHistograms() is called in OnDidCreateMetricsLog().
720   histogram_tester.ExpectBucketCount(kProvideHistogramsHistogramName, true, 1);
721 
722   service.StageCurrentLogForTest();
723 
724   histogram_tester.ExpectBucketCount(kProvideHistogramsHistogramName, true, 2);
725 
726   service.Stop();
727 
728   // Clean up histograms.
729   base::StatisticsRecorder::ForgetHistogramForTesting(
730       kProvideHistogramsHistogramName);
731 }
732 
733 TEST_P(MetricsServiceTestWithFeatures, ProvideHistogramsEarlyReturn) {
734   base::HistogramTester histogram_tester;
735   EnableMetricsReporting();
736   TestMetricsServiceClient client;
737 
738   TestMetricsService service(GetMetricsStateManager(), &client,
739                              GetLocalState());
740 
741   // Create a provider that will log to |kProvideHistogramsHistogramName| in
742   // ProvideHistograms(), but that does nothing in OnDidCreateMetricsLog().
743   auto* test_provider =
744       new TestMetricsProviderForProvideHistogramsEarlyReturn();
745   service.RegisterMetricsProvider(
746       std::unique_ptr<MetricsProvider>(test_provider));
747 
748   service.InitializeMetricsRecordingState();
749   // Start() will create the first ongoing log.
750   service.Start();
751 
752   // Make sure no histogram is emitted when having an early return.
753   histogram_tester.ExpectBucketCount(kProvideHistogramsHistogramName, true, 0);
754 
755   service.StageCurrentLogForTest();
756   // ProvideHistograms() should be called in ProvideCurrentSessionData() if
757   // histograms haven't been emitted.
758   histogram_tester.ExpectBucketCount(kProvideHistogramsHistogramName, true, 1);
759 
760   // Try another log to make sure emission status is reset between logs.
761   service.LogStoreForTest()->DiscardStagedLog();
762   service.StageCurrentLogForTest();
763   histogram_tester.ExpectBucketCount(kProvideHistogramsHistogramName, true, 2);
764 
765   service.Stop();
766 
767   // Clean up histograms.
768   base::StatisticsRecorder::ForgetHistogramForTesting(
769       kProvideHistogramsHistogramName);
770 }
771 
772 INSTANTIATE_TEST_SUITE_P(
773     All,
774     MetricsServiceTestWithStartupVisibility,
775     ::testing::Combine(
776         ::testing::Values(
777             StartupVisibilityTestParams{
778                 .startup_visibility = StartupVisibility::kUnknown,
779                 .expected_beacon_value = true},
780             StartupVisibilityTestParams{
781                 .startup_visibility = StartupVisibility::kBackground,
782                 .expected_beacon_value = true},
783             StartupVisibilityTestParams{
784                 .startup_visibility = StartupVisibility::kForeground,
785                 .expected_beacon_value = false}),
786         ::testing::Bool()));
787 
788 TEST_P(MetricsServiceTestWithStartupVisibility, InitialStabilityLogAfterCrash) {
789   base::HistogramTester histogram_tester;
790   PrefService* local_state = GetLocalState();
791   EnableMetricsReporting();
792 
793   // Write a beacon file indicating that Chrome exited uncleanly. Note that the
794   // crash streak value is arbitrary.
795   const base::FilePath beacon_file_path =
796       user_data_dir_path().Append(kCleanExitBeaconFilename);
797   ASSERT_TRUE(base::WriteFile(
798       beacon_file_path, CleanExitBeacon::CreateBeaconFileContentsForTesting(
799                             /*exited_cleanly=*/false, /*crash_streak=*/1)));
800 
801   // Set up prefs to simulate restarting after a crash.
802 
803   // Save an existing system profile to prefs, to correspond to what would be
804   // saved from a previous session.
805   TestMetricsServiceClient client;
806   const std::string kCrashedVersion = "4.0.321.0-64-devel";
807   client.set_version_string(kCrashedVersion);
808   TestMetricsLog log("0a94430b-18e5-43c8-a657-580f7e855ce1", 1, &client);
809   DelegatingProvider delegating_provider;
810   TestMetricsService::RecordCurrentEnvironmentHelper(&log, local_state,
811                                                      &delegating_provider);
812 
813   // Record stability build time and version from previous session, so that
814   // stability metrics (including exited cleanly flag) won't be cleared.
815   EnvironmentRecorder(local_state)
816       .SetBuildtimeAndVersion(MetricsLog::GetBuildTime(),
817                               client.GetVersionString());
818 
819   const std::string kCurrentVersion = "5.0.322.0-64-devel";
820   client.set_version_string(kCurrentVersion);
821 
822   StartupVisibilityTestParams params = std::get<0>(GetParam());
823   TestMetricsService service(
824       GetMetricsStateManager(user_data_dir_path(), params.startup_visibility),
825       &client, local_state);
826   // Add a provider.
827   TestMetricsProvider* test_provider = new TestMetricsProvider();
828   service.RegisterMetricsProvider(
829       std::unique_ptr<MetricsProvider>(test_provider));
830   service.InitializeMetricsRecordingState();
831 
832   // Verify that Chrome is (or is not) watching for crashes by checking the
833   // beacon value.
834   std::string beacon_file_contents;
835   ASSERT_TRUE(base::ReadFileToString(beacon_file_path, &beacon_file_contents));
836   std::string partial_expected_contents;
837 #if BUILDFLAG(IS_ANDROID)
838   // Whether Chrome is watching for crashes after
839   // InitializeMetricsRecordingState() depends on the type of Android Chrome
840   // session. See the comments in MetricsService::InitializeMetricsState() for
841   // more details.
842   const std::string beacon_value =
843       params.expected_beacon_value ? "true" : "false";
844   partial_expected_contents = "exited_cleanly\":" + beacon_value;
845 #else
846   partial_expected_contents = "exited_cleanly\":false";
847 #endif  // BUILDFLAG(IS_ANDROID)
848   EXPECT_TRUE(base::Contains(beacon_file_contents, partial_expected_contents));
849 
850   // The initial stability log should be generated and persisted in unsent logs.
851   MetricsLogStore* test_log_store = service.LogStoreForTest();
852   EXPECT_TRUE(test_log_store->has_unsent_logs());
853   EXPECT_FALSE(test_log_store->has_staged_log());
854 
855   // Ensure that HasPreviousSessionData() is always called on providers,
856   // for consistency, even if other conditions already indicate their presence.
857   EXPECT_TRUE(test_provider->has_initial_stability_metrics_called());
858 
859   // The test provider should have been called upon to provide initial
860   // stability and regular stability metrics.
861   EXPECT_TRUE(test_provider->provide_initial_stability_metrics_called());
862   EXPECT_TRUE(test_provider->provide_stability_metrics_called());
863 
864   // The test provider should have been called when the initial stability log
865   // was closed.
866   EXPECT_TRUE(test_provider->record_initial_histogram_snapshots_called());
867 
868   // Stage the log and retrieve it.
869   test_log_store->StageNextLog();
870   EXPECT_TRUE(test_log_store->has_staged_log());
871 
872   ChromeUserMetricsExtension uma_log;
873   EXPECT_TRUE(DecodeLogDataToProto(test_log_store->staged_log(), &uma_log));
874 
875   EXPECT_TRUE(uma_log.has_client_id());
876   EXPECT_TRUE(uma_log.has_session_id());
877   EXPECT_TRUE(uma_log.has_system_profile());
878   EXPECT_EQ(0, uma_log.user_action_event_size());
879   EXPECT_EQ(0, uma_log.omnibox_event_size());
880   CheckForNonStabilityHistograms(uma_log);
881   EXPECT_EQ(
882       1, GetHistogramSampleCount(uma_log, "UMA.InitialStabilityRecordBeacon"));
883 
884   // Verify that the histograms emitted by the test provider made it into the
885   // log.
886   EXPECT_EQ(GetHistogramSampleCount(uma_log, "TestMetricsProvider.Initial"), 1);
887   EXPECT_EQ(GetHistogramSampleCount(uma_log, "TestMetricsProvider.Regular"), 1);
888 
889   EXPECT_EQ(kCrashedVersion, uma_log.system_profile().app_version());
890   EXPECT_EQ(kCurrentVersion,
891             uma_log.system_profile().log_written_by_app_version());
892 
893   histogram_tester.ExpectBucketCount("Stability.Counts2",
894                                      StabilityEventType::kBrowserCrash, 1);
895 }
896 
897 TEST_P(MetricsServiceTestWithFeatures,
898        InitialLogsHaveOnDidCreateMetricsLogHistograms) {
899   EnableMetricsReporting();
900   TestMetricsServiceClient client;
901   TestMetricsService service(GetMetricsStateManager(), &client,
902                              GetLocalState());
903 
904   // Create a provider that will log to |kOnDidCreateMetricsLogHistogramName|
905   // in OnDidCreateMetricsLog()
906   auto* test_provider = new TestMetricsProviderForOnDidCreateMetricsLog();
907   service.RegisterMetricsProvider(
908       std::unique_ptr<MetricsProvider>(test_provider));
909 
910   service.InitializeMetricsRecordingState();
911   // Start() will create the first ongoing log.
912   service.Start();
913   ASSERT_EQ(TestMetricsService::INIT_TASK_SCHEDULED, service.state());
914 
915   // Fast forward the time by |initialization_delay|, which is when the pending
916   // init tasks will run.
917   base::TimeDelta initialization_delay = service.GetInitializationDelay();
918   task_environment_.FastForwardBy(initialization_delay);
919   EXPECT_EQ(TestMetricsService::INIT_TASK_DONE, service.state());
920 
921   // Fast forward the time until the MetricsRotationScheduler first runs, which
922   // should complete the first ongoing log. Also verify that the test provider
923   // was called when closing the log.
924   // Note: The first log is only created after N = GetInitialIntervalSeconds()
925   // seconds since the start, and since we already fast forwarded by
926   // |initialization_delay| once, we only need to fast forward by
927   // N - |initialization_delay|.
928   task_environment_.FastForwardBy(
929       base::Seconds(MetricsScheduler::GetInitialIntervalSeconds()) -
930       initialization_delay);
931   ASSERT_EQ(TestMetricsService::SENDING_LOGS, service.state());
932   EXPECT_TRUE(test_provider->record_histogram_snapshots_called());
933 
934   MetricsLogStore* test_log_store = service.LogStoreForTest();
935 
936   // Stage the next log, which should be the first ongoing log.
937   // Check that it has one sample in |kOnDidCreateMetricsLogHistogramName|.
938   test_log_store->StageNextLog();
939   EXPECT_EQ(1, GetSampleCountOfOnDidCreateLogHistogram(test_log_store));
940 
941   // Discard the staged log and close and stage the next log, which is the
942   // second "ongoing log".
943   // Check that it has one sample in |kOnDidCreateMetricsLogHistogramName|.
944   // Also verify that the test provider was called when closing the new log.
945   test_provider->set_record_histogram_snapshots_called(false);
946   test_log_store->DiscardStagedLog();
947   service.StageCurrentLogForTest();
948   EXPECT_EQ(1, GetSampleCountOfOnDidCreateLogHistogram(test_log_store));
949   EXPECT_TRUE(test_provider->record_histogram_snapshots_called());
950 
951   // Check one more log for good measure.
952   test_provider->set_record_histogram_snapshots_called(false);
953   test_log_store->DiscardStagedLog();
954   service.StageCurrentLogForTest();
955   EXPECT_EQ(1, GetSampleCountOfOnDidCreateLogHistogram(test_log_store));
956   EXPECT_TRUE(test_provider->record_histogram_snapshots_called());
957 
958   service.Stop();
959 
960   // Clean up histograms.
961   base::StatisticsRecorder::ForgetHistogramForTesting(
962       kOnDidCreateMetricsLogHistogramName);
963 }
964 
965 TEST_P(MetricsServiceTestWithFeatures, MarkCurrentHistogramsAsReported) {
966   EnableMetricsReporting();
967   TestMetricsServiceClient client;
968   TestMetricsService service(GetMetricsStateManager(), &client,
969                              GetLocalState());
970 
971   // Emit to histogram |Test.Before.Histogram|.
972   ASSERT_FALSE(HistogramExists("Test.Before.Histogram"));
973   base::UmaHistogramBoolean("Test.Before.Histogram", true);
974   ASSERT_TRUE(HistogramExists("Test.Before.Histogram"));
975 
976   // Mark histogram data that has been collected until now (in particular, the
977   // |Test.Before.Histogram| sample) as reported.
978   service.MarkCurrentHistogramsAsReported();
979 
980   // Emit to histogram |Test.After.Histogram|.
981   ASSERT_FALSE(HistogramExists("Test.After.Histogram"));
982   base::UmaHistogramBoolean("Test.After.Histogram", true);
983   ASSERT_TRUE(HistogramExists("Test.After.Histogram"));
984 
985   // Verify that the |Test.Before.Histogram| sample was marked as reported, and
986   // is not included in the next snapshot.
987   EXPECT_EQ(0, GetHistogramDeltaTotalCount("Test.Before.Histogram"));
988   // Verify that the |Test.After.Histogram| sample was not marked as reported,
989   // and is included in the next snapshot.
990   EXPECT_EQ(1, GetHistogramDeltaTotalCount("Test.After.Histogram"));
991 
992   // Clean up histograms.
993   base::StatisticsRecorder::ForgetHistogramForTesting("Test.Before.Histogram");
994   base::StatisticsRecorder::ForgetHistogramForTesting("Test.After.Histogram");
995 }
996 
997 TEST_P(MetricsServiceTestWithFeatures, LogHasUserActions) {
998   // This test verifies that user actions are properly captured in UMA logs.
999   // In particular, it checks that the first log has actions, a behavior that
1000   // was buggy in the past, plus additional checks for subsequent logs with
1001   // different numbers of actions.
1002   EnableMetricsReporting();
1003   TestMetricsServiceClient client;
1004   TestMetricsService service(GetMetricsStateManager(), &client,
1005                              GetLocalState());
1006 
1007   service.InitializeMetricsRecordingState();
1008 
1009   // Start() will create an initial log.
1010   service.Start();
1011   ASSERT_EQ(TestMetricsService::INIT_TASK_SCHEDULED, service.state());
1012 
1013   base::RecordAction(base::UserMetricsAction("TestAction"));
1014   base::RecordAction(base::UserMetricsAction("TestAction"));
1015   base::RecordAction(base::UserMetricsAction("DifferentAction"));
1016 
1017   // Fast forward the time by |initialization_delay|, which is when the pending
1018   // init tasks will run.
1019   base::TimeDelta initialization_delay = service.GetInitializationDelay();
1020   task_environment_.FastForwardBy(initialization_delay);
1021   EXPECT_EQ(TestMetricsService::INIT_TASK_DONE, service.state());
1022 
1023   // Fast forward the time until the MetricsRotationScheduler first runs, which
1024   // should complete the first ongoing log.
1025   // Note: The first log is only created after N = GetInitialIntervalSeconds()
1026   // seconds since the start, and since we already fast forwarded by
1027   // |initialization_delay| once, we only need to fast forward by
1028   // N - |initialization_delay|.
1029   task_environment_.FastForwardBy(
1030       base::Seconds(MetricsScheduler::GetInitialIntervalSeconds()) -
1031       initialization_delay);
1032   ASSERT_EQ(TestMetricsService::SENDING_LOGS, service.state());
1033 
1034   MetricsLogStore* test_log_store = service.LogStoreForTest();
1035 
1036   // Stage the next log, which should be the initial metrics log.
1037   test_log_store->StageNextLog();
1038   EXPECT_EQ(3, GetNumberOfUserActions(test_log_store));
1039 
1040   // Log another action.
1041   base::RecordAction(base::UserMetricsAction("TestAction"));
1042   test_log_store->DiscardStagedLog();
1043   service.StageCurrentLogForTest();
1044   EXPECT_EQ(1, GetNumberOfUserActions(test_log_store));
1045 
1046   // Check a log with no actions.
1047   test_log_store->DiscardStagedLog();
1048   service.StageCurrentLogForTest();
1049   EXPECT_EQ(0, GetNumberOfUserActions(test_log_store));
1050 
1051   // And another one with a couple.
1052   base::RecordAction(base::UserMetricsAction("TestAction"));
1053   base::RecordAction(base::UserMetricsAction("TestAction"));
1054   test_log_store->DiscardStagedLog();
1055   service.StageCurrentLogForTest();
1056   EXPECT_EQ(2, GetNumberOfUserActions(test_log_store));
1057 }
1058 
1059 TEST_P(MetricsServiceTestWithFeatures, FirstLogCreatedBeforeUnsentLogsSent) {
1060   // This test checks that we will create and serialize the first ongoing log
1061   // before starting to send unsent logs from the past session. The latter is
1062   // simulated by injecting some fake ongoing logs into the MetricsLogStore.
1063   EnableMetricsReporting();
1064   TestMetricsServiceClient client;
1065   TestMetricsService service(GetMetricsStateManager(), &client,
1066                              GetLocalState());
1067 
1068   service.InitializeMetricsRecordingState();
1069   // Start() will create the first ongoing log.
1070   service.Start();
1071   ASSERT_EQ(TestMetricsService::INIT_TASK_SCHEDULED, service.state());
1072 
1073   MetricsLogStore* test_log_store = service.LogStoreForTest();
1074 
1075   // Set up the log store with an existing fake log entry. The string content
1076   // is never deserialized to proto, so we're just passing some dummy content.
1077   ASSERT_EQ(0u, test_log_store->initial_log_count());
1078   ASSERT_EQ(0u, test_log_store->ongoing_log_count());
1079   test_log_store->StoreLog("blah_blah", MetricsLog::ONGOING_LOG, LogMetadata(),
1080                            MetricsLogsEventManager::CreateReason::kUnknown);
1081   // Note: |initial_log_count()| refers to initial stability logs, so the above
1082   // log is counted as an ongoing log (per its type).
1083   ASSERT_EQ(0u, test_log_store->initial_log_count());
1084   ASSERT_EQ(1u, test_log_store->ongoing_log_count());
1085 
1086   // Fast forward the time by |initialization_delay|, which is when the pending
1087   // init tasks will run.
1088   base::TimeDelta initialization_delay = service.GetInitializationDelay();
1089   task_environment_.FastForwardBy(initialization_delay);
1090   EXPECT_EQ(TestMetricsService::INIT_TASK_DONE, service.state());
1091 
1092   // Fast forward the time until the MetricsRotationScheduler first runs, which
1093   // should complete the first ongoing log.
1094   // Note: The first log is only created after N = GetInitialIntervalSeconds()
1095   // seconds since the start, and since we already fast forwarded by
1096   // |initialization_delay| once, we only need to fast forward by
1097   // N - |initialization_delay|.
1098   task_environment_.FastForwardBy(
1099       base::Seconds(MetricsScheduler::GetInitialIntervalSeconds()) -
1100       initialization_delay);
1101   ASSERT_EQ(TestMetricsService::SENDING_LOGS, service.state());
1102   // When the init task is complete, the first ongoing log should be created
1103   // and added to the ongoing logs.
1104   EXPECT_EQ(0u, test_log_store->initial_log_count());
1105   EXPECT_EQ(2u, test_log_store->ongoing_log_count());
1106 }
1107 
1108 TEST_P(MetricsServiceTestWithFeatures,
1109        MetricsProviderOnRecordingDisabledCalledOnInitialStop) {
1110   TestMetricsServiceClient client;
1111   TestMetricsService service(GetMetricsStateManager(), &client,
1112                              GetLocalState());
1113 
1114   TestMetricsProvider* test_provider = new TestMetricsProvider();
1115   service.RegisterMetricsProvider(
1116       std::unique_ptr<MetricsProvider>(test_provider));
1117 
1118   service.InitializeMetricsRecordingState();
1119   service.Stop();
1120 
1121   EXPECT_TRUE(test_provider->on_recording_disabled_called());
1122 }
1123 
1124 TEST_P(MetricsServiceTestWithFeatures, MetricsProvidersInitialized) {
1125   TestMetricsServiceClient client;
1126   TestMetricsService service(GetMetricsStateManager(), &client,
1127                              GetLocalState());
1128 
1129   TestMetricsProvider* test_provider = new TestMetricsProvider();
1130   service.RegisterMetricsProvider(
1131       std::unique_ptr<MetricsProvider>(test_provider));
1132 
1133   service.InitializeMetricsRecordingState();
1134 
1135   EXPECT_TRUE(test_provider->init_called());
1136 }
1137 
1138 // Verify that FieldTrials activated by a MetricsProvider are reported by the
1139 // FieldTrialsProvider.
1140 TEST_P(MetricsServiceTestWithFeatures, ActiveFieldTrialsReported) {
1141   EnableMetricsReporting();
1142   TestMetricsServiceClient client;
1143   TestMetricsService service(GetMetricsStateManager(), &client,
1144                              GetLocalState());
1145 
1146   // Set up FieldTrials.
1147   const std::string trial_name1 = "CoffeeExperiment";
1148   const std::string group_name1 = "Free";
1149   base::FieldTrial* trial1 =
1150       base::FieldTrialList::CreateFieldTrial(trial_name1, group_name1);
1151 
1152   const std::string trial_name2 = "DonutExperiment";
1153   const std::string group_name2 = "MapleBacon";
1154   base::FieldTrial* trial2 =
1155       base::FieldTrialList::CreateFieldTrial(trial_name2, group_name2);
1156 
1157   service.RegisterMetricsProvider(
1158       std::make_unique<ExperimentTestMetricsProvider>(trial1, trial2));
1159 
1160   service.InitializeMetricsRecordingState();
1161   service.Start();
1162   service.StageCurrentLogForTest();
1163 
1164   MetricsLogStore* test_log_store = service.LogStoreForTest();
1165   ChromeUserMetricsExtension uma_log;
1166   EXPECT_TRUE(DecodeLogDataToProto(test_log_store->staged_log(), &uma_log));
1167 
1168   // Verify that the reported FieldTrial IDs are for the trial set up by this
1169   // test.
1170   EXPECT_TRUE(
1171       IsFieldTrialPresent(uma_log.system_profile(), trial_name1, group_name1));
1172   EXPECT_TRUE(
1173       IsFieldTrialPresent(uma_log.system_profile(), trial_name2, group_name2));
1174 }
1175 
1176 TEST_P(MetricsServiceTestWithFeatures,
1177        SystemProfileDataProvidedOnEnableRecording) {
1178   EnableMetricsReporting();
1179   TestMetricsServiceClient client;
1180   TestMetricsService service(GetMetricsStateManager(), &client,
1181                              GetLocalState());
1182 
1183   TestMetricsProvider* test_provider = new TestMetricsProvider();
1184   service.RegisterMetricsProvider(
1185       std::unique_ptr<MetricsProvider>(test_provider));
1186 
1187   service.InitializeMetricsRecordingState();
1188 
1189   // ProvideSystemProfileMetrics() shouldn't be called initially.
1190   EXPECT_FALSE(test_provider->provide_system_profile_metrics_called());
1191   EXPECT_FALSE(service.persistent_system_profile_provided());
1192 
1193   service.Start();
1194 
1195   // Start should call ProvideSystemProfileMetrics().
1196   EXPECT_TRUE(test_provider->provide_system_profile_metrics_called());
1197   EXPECT_TRUE(service.persistent_system_profile_provided());
1198   EXPECT_FALSE(service.persistent_system_profile_complete());
1199 }
1200 
1201 // Verify that the two separate MetricsSchedulers (MetricsRotationScheduler and
1202 // MetricsUploadScheduler) function together properly.
1203 TEST_P(MetricsServiceTestWithFeatures, SplitRotation) {
1204   EnableMetricsReporting();
1205   TestMetricsServiceClient client;
1206   TestMetricsService service(GetMetricsStateManager(), &client,
1207                              GetLocalState());
1208   service.InitializeMetricsRecordingState();
1209   service.Start();
1210   ASSERT_EQ(TestMetricsService::INIT_TASK_SCHEDULED, service.state());
1211 
1212   // Fast forward the time by |initialization_delay|, which is when the pending
1213   // init tasks will run.
1214   base::TimeDelta initialization_delay = service.GetInitializationDelay();
1215   task_environment_.FastForwardBy(initialization_delay);
1216   EXPECT_EQ(TestMetricsService::INIT_TASK_DONE, service.state());
1217 
1218   // Fast forward the time until the MetricsRotationScheduler first runs, which
1219   // should complete the first ongoing log. The independent-metrics upload job
1220   // will be started and always be a task. This should also mark the rotation
1221   // scheduler as idle, so that the next time we attempt to create a log, we
1222   // return early (and don't create a log).
1223   // Note: The first log is only created after N = GetInitialIntervalSeconds()
1224   // seconds since the start, and since we already fast forwarded by
1225   // |initialization_delay| once, we only need to fast forward by
1226   // N - |initialization_delay|.
1227   MetricsLogStore* log_store = service.LogStoreForTest();
1228   EXPECT_FALSE(log_store->has_unsent_logs());
1229   task_environment_.FastForwardBy(
1230       base::Seconds(MetricsScheduler::GetInitialIntervalSeconds()) -
1231       initialization_delay);
1232   ASSERT_EQ(TestMetricsService::SENDING_LOGS, service.state());
1233   EXPECT_TRUE(log_store->has_unsent_logs());
1234   EXPECT_EQ(1U, log_store->ongoing_log_count());
1235 
1236   // There should be three (delayed) tasks: one for querying independent logs
1237   // from metrics providers, one for uploading the unsent log, and one for
1238   // creating the next log.
1239   EXPECT_EQ(3U, task_environment_.GetPendingMainThreadTaskCount());
1240 
1241   // Fast forward the time so that the upload loop starts uploading logs.
1242   base::TimeDelta unsent_log_interval =
1243       MetricsUploadScheduler::GetUnsentLogsInterval();
1244   task_environment_.FastForwardBy(unsent_log_interval);
1245   EXPECT_TRUE(client.uploader()->is_uploading());
1246   // There should be two (delayed) tasks: one for querying independent logs from
1247   // metrics providers, and one for creating the next log. I.e., the task to
1248   // upload a log should be running, and should not be in the task queue
1249   // anymore. The uploading of this log will only be completed later on in order
1250   // to simulate an edge case here.
1251   EXPECT_EQ(2U, task_environment_.GetPendingMainThreadTaskCount());
1252 
1253   // Fast forward the time so that the task to create another log is run. This
1254   // time, however, it should return early due to being idle (i.e., not create a
1255   // log), and it should not post another task to create another log. I.e.,
1256   // there should only be one (delayed) task: one for querying independent logs
1257   // from metrics providers.
1258   // Note: The log is only created after |rotation_scheduler_interval| seconds,
1259   // and since we already fast forwarded by |unsent_log_interval| once, we only
1260   // need to fast forward by
1261   // |rotation_scheduler_interval| - |unsent_log_interval|.
1262   base::TimeDelta rotation_scheduler_interval = client.GetUploadInterval();
1263   task_environment_.FastForwardBy(rotation_scheduler_interval -
1264                                   unsent_log_interval);
1265   EXPECT_EQ(1U, log_store->ongoing_log_count());
1266   EXPECT_EQ(1U, task_environment_.GetPendingMainThreadTaskCount());
1267 
1268   // Simulate completing the upload. Since there is no other log to be uploaded,
1269   // no task should be re-posted. I.e., there should only be one (delayed)
1270   // task: one for querying independent logs from metrics providers.
1271   client.uploader()->CompleteUpload(200);
1272   EXPECT_FALSE(client.uploader()->is_uploading());
1273   EXPECT_FALSE(log_store->has_unsent_logs());
1274   EXPECT_EQ(1U, task_environment_.GetPendingMainThreadTaskCount());
1275 
1276   // Simulate interacting with the browser, which should 1) set the rotation
1277   // scheduler to not idle, 2) queue a task to upload the next log (if there is
1278   // one), and 3) queue a task to create the next log. I.e., there should be
1279   // three (delayed) tasks: one for querying independent logs from metrics
1280   // providers, one for uploading an unsent log, and one for creating the next
1281   // log.
1282   service.OnApplicationNotIdle();
1283   EXPECT_EQ(3U, task_environment_.GetPendingMainThreadTaskCount());
1284 
1285   // We now simulate a more common scenario.
1286 
1287   // Fast forward the time so that the task to upload a log runs. Since there
1288   // should be no logs, it should return early, and not re-post a task. I.e.,
1289   // there should be two tasks: one for querying independent logs from metrics
1290   // providers, and one for creating the next log.
1291   task_environment_.FastForwardBy(unsent_log_interval);
1292   EXPECT_FALSE(client.uploader()->is_uploading());
1293   EXPECT_FALSE(log_store->has_unsent_logs());
1294   EXPECT_EQ(2U, task_environment_.GetPendingMainThreadTaskCount());
1295 
1296   // Fast forward the time so that the next log is created. It should re-post
1297   // a task to create a new log, and should also re-start the upload scheduler.
1298   // I.e., there should be three (delayed) tasks: one for querying independent
1299   // logs from metrics providers, one for uploading an unsent log, and one for
1300   // creating the next log.
1301   // Note: The log is only created after |rotation_scheduler_interval| seconds,
1302   // and since we already fast forwarded by |unsent_log_interval| once, we only
1303   // need to fast forward by
1304   // |rotation_scheduler_interval| - |unsent_log_interval|.
1305   task_environment_.FastForwardBy(rotation_scheduler_interval -
1306                                   unsent_log_interval);
1307   EXPECT_TRUE(log_store->has_unsent_logs());
1308   EXPECT_EQ(3U, task_environment_.GetPendingMainThreadTaskCount());
1309 
1310   // Fast forward the time so that the task to upload a log runs.
1311   task_environment_.FastForwardBy(unsent_log_interval);
1312   EXPECT_TRUE(client.uploader()->is_uploading());
1313   // There should be two (delayed) tasks: one for querying independent logs from
1314   // metrics providers, and one for creating the next log. I.e., the task to
1315   // upload a log should be running, and should not be in the task queue
1316   // anymore.
1317   EXPECT_EQ(2U, task_environment_.GetPendingMainThreadTaskCount());
1318 
1319   // Simulate completing the upload. However, before doing so, add a dummy log
1320   // in order to test that when the upload task completes, if it detects another
1321   // log, it will re-post a task to upload the next log. I.e., after uploading
1322   // the log, there should be three (delayed) tasks: one for querying
1323   // independent logs from metrics providers, one for uploading an unsent log,
1324   // and one for creating the next log.
1325   log_store->StoreLog("dummy log", MetricsLog::LogType::ONGOING_LOG,
1326                       LogMetadata(),
1327                       MetricsLogsEventManager::CreateReason::kUnknown);
1328   EXPECT_EQ(2U, log_store->ongoing_log_count());
1329   client.uploader()->CompleteUpload(200);
1330   EXPECT_FALSE(client.uploader()->is_uploading());
1331   EXPECT_EQ(1U, log_store->ongoing_log_count());
1332   EXPECT_EQ(3U, task_environment_.GetPendingMainThreadTaskCount());
1333 
1334   // Fast forward the time so that the task to upload a log runs.
1335   task_environment_.FastForwardBy(unsent_log_interval);
1336   EXPECT_TRUE(client.uploader()->is_uploading());
1337   // There should be two (delayed) tasks: one for querying independent logs from
1338   // metrics providers, and one for creating the next log. I.e., the task to
1339   // upload a log should be running, and should not be in the task queue
1340   // anymore.
1341   EXPECT_EQ(2U, task_environment_.GetPendingMainThreadTaskCount());
1342 
1343   // Simulate completing the upload. Since there is no other log to be uploaded,
1344   // no task should be posted. I.e., there should only be two (delayed) tasks:
1345   // one for querying independent logs from metrics providers, and one for creating the next log.
1346   client.uploader()->CompleteUpload(200);
1347   EXPECT_FALSE(client.uploader()->is_uploading());
1348   EXPECT_FALSE(log_store->has_unsent_logs());
1349   EXPECT_EQ(2U, task_environment_.GetPendingMainThreadTaskCount());
1350 
1351   // Fast forward the time so that the task to create another log is run. It
1352   // should return early due to being idle (i.e., not create a log), and it
1353   // should not post another task to create another log. I.e., there should only
1354   // be one (delayed) task: one for querying independent logs from metrics
1355   // providers.
1356   // Note: The log is only created after |rotation_scheduler_interval| seconds,
1357   // and since we already fast forwarded by |unsent_log_interval| twice, we only
1358   // need to fast forward by
1359   // |rotation_scheduler_interval| - 2 * |unsent_log_interval|.
1360   task_environment_.FastForwardBy(rotation_scheduler_interval -
1361                                   2 * unsent_log_interval);
1362   EXPECT_FALSE(log_store->has_unsent_logs());
1363   EXPECT_EQ(1U, task_environment_.GetPendingMainThreadTaskCount());
1364 }
1365 
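// Verify that StartUpdatingLastLiveTimestamp() periodically updates the
// "last live" browser timestamp stored in local state.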
1366 TEST_P(MetricsServiceTestWithFeatures, LastLiveTimestamp) {
1367   EnableMetricsReporting();
1368   TestMetricsServiceClient client;
1369   TestMetricsService service(GetMetricsStateManager(), &client,
1370                              GetLocalState());
1371 
1372   base::Time initial_last_live_time =
1373       GetLocalState()->GetTime(prefs::kStabilityBrowserLastLiveTimeStamp);
1374 
1375   service.InitializeMetricsRecordingState();
1376   service.Start();
1377   ASSERT_EQ(TestMetricsService::INIT_TASK_SCHEDULED, service.state());
1378 
1379   // Fast forward the time by |initialization_delay|, which is when the pending
1380   // init tasks will run.
1381   base::TimeDelta initialization_delay = service.GetInitializationDelay();
1382   task_environment_.FastForwardBy(initialization_delay);
1383   EXPECT_EQ(TestMetricsService::INIT_TASK_DONE, service.state());
1384 
1385   // Fast forward the time until the MetricsRotationScheduler first runs, which
1386   // should complete the first ongoing log. Also verify that the test provider
1387   // was called when closing the log.
1388   // Note: The first log is only created after N = GetInitialIntervalSeconds()
1389   // seconds since the start, and since we already fast forwarded by
1390   // |initialization_delay| once, we only need to fast forward by
1391   // N - |initialization_delay|.
1392   task_environment_.FastForwardBy(
1393       base::Seconds(MetricsScheduler::GetInitialIntervalSeconds()) -
1394       initialization_delay);
1395   ASSERT_EQ(TestMetricsService::SENDING_LOGS, service.state());
1396   size_t num_pending_tasks = task_environment_.GetPendingMainThreadTaskCount();
1397 
1398   service.StartUpdatingLastLiveTimestamp();
1399 
1400   // Starting the update sequence should not write anything, but should
1401   // set up for a later write.
1402   EXPECT_EQ(
1403       initial_last_live_time,
1404       GetLocalState()->GetTime(prefs::kStabilityBrowserLastLiveTimeStamp));
1405   EXPECT_EQ(num_pending_tasks + 1,
1406             task_environment_.GetPendingMainThreadTaskCount());
1407 
1408   // Fast forward the time so that the task to update the "last alive timestamp"
1409   // runs.
1410   task_environment_.FastForwardBy(service.GetUpdateLastAliveTimestampDelay());
1411 
1412   // Verify that the time has updated in local state.
1413   base::Time updated_last_live_time =
1414       GetLocalState()->GetTime(prefs::kStabilityBrowserLastLiveTimeStamp);
1415   EXPECT_LT(initial_last_live_time, updated_last_live_time);
1416 
1417   // Double check that an update was scheduled again.
1418   task_environment_.FastForwardBy(service.GetUpdateLastAliveTimestampDelay());
1419   EXPECT_LT(
1420       updated_last_live_time,
1421       GetLocalState()->GetTime(prefs::kStabilityBrowserLastLiveTimeStamp));
1422 }
1423 
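// Verify that observers added via AddEnablementObserver() are notified when
// metrics recording is enabled (Start()) and disabled (Stop()).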
1424 TEST_P(MetricsServiceTestWithFeatures, EnablementObserverNotification) {
1425   EnableMetricsReporting();
1426   TestMetricsServiceClient client;
1427   TestMetricsService service(GetMetricsStateManager(), &client,
1428                              GetLocalState());
1429   service.InitializeMetricsRecordingState();
1430 
1431   std::optional<bool> enabled;
1432   auto observer = [&enabled](bool notification) { enabled = notification; };
1433 
1434   auto subscription =
1435       service.AddEnablementObserver(base::BindLambdaForTesting(observer));
1436 
1437   service.Start();
1438   ASSERT_TRUE(enabled.has_value());
1439   EXPECT_TRUE(enabled.value());
1440 
1441   enabled.reset();
1442 
1443   service.Stop();
1444   ASSERT_TRUE(enabled.has_value());
1445   EXPECT_FALSE(enabled.value());
1446 }
1447 
1448 // Verifies that when a cloned install is detected, logs are purged.
1449 TEST_P(MetricsServiceTestWithFeatures, PurgeLogsOnClonedInstallDetected) {
1450   EnableMetricsReporting();
1451   TestMetricsServiceClient client;
1452   TestMetricsService service(GetMetricsStateManager(), &client,
1453                              GetLocalState());
1454   service.InitializeMetricsRecordingState();
1455 
1456   // Store various logs.
1457   MetricsLogStore* test_log_store = service.LogStoreForTest();
1458   test_log_store->StoreLog("dummy log data", MetricsLog::ONGOING_LOG,
1459                            LogMetadata(),
1460                            MetricsLogsEventManager::CreateReason::kUnknown);
1461   test_log_store->StageNextLog();
1462   test_log_store->StoreLog("more dummy log data", MetricsLog::ONGOING_LOG,
1463                            LogMetadata(),
1464                            MetricsLogsEventManager::CreateReason::kUnknown);
1465   test_log_store->StoreLog("dummy stability log",
1466                            MetricsLog::INITIAL_STABILITY_LOG, LogMetadata(),
1467                            MetricsLogsEventManager::CreateReason::kUnknown);
1468   test_log_store->SetAlternateOngoingLogStore(InitializeTestLogStoreAndGet());
1469   test_log_store->StoreLog("dummy log for alternate ongoing log store",
1470                            MetricsLog::ONGOING_LOG, LogMetadata(),
1471                            MetricsLogsEventManager::CreateReason::kUnknown);
1472   EXPECT_TRUE(test_log_store->has_staged_log());
1473   EXPECT_TRUE(test_log_store->has_unsent_logs());
1474 
1475   ClonedInstallDetector* cloned_install_detector =
1476       GetMetricsStateManager()->cloned_install_detector_for_testing();
1477 
1478   static constexpr char kTestRawId[] = "test";
1479   // Hashed machine id for |kTestRawId|.
1480   static constexpr int kTestHashedId = 2216819;
1481 
1482   // Save a machine id that will not cause a clone to be detected.
1483   GetLocalState()->SetInteger(prefs::kMetricsMachineId, kTestHashedId);
1484   cloned_install_detector->SaveMachineId(GetLocalState(), kTestRawId);
1485   // Verify that the logs are still present.
1486   EXPECT_TRUE(test_log_store->has_staged_log());
1487   EXPECT_TRUE(test_log_store->has_unsent_logs());
1488 
1489   // Save a machine id that will cause a clone to be detected.
1490   GetLocalState()->SetInteger(prefs::kMetricsMachineId, kTestHashedId + 1);
1491   cloned_install_detector->SaveMachineId(GetLocalState(), kTestRawId);
1492   // Verify that the logs were purged.
1493   EXPECT_FALSE(test_log_store->has_staged_log());
1494   EXPECT_FALSE(test_log_store->has_unsent_logs());
1495 }
1496 
1497 #if BUILDFLAG(IS_CHROMEOS_LACROS)
1498 // ResetClientId is only enabled on certain targets.
1499 TEST_P(MetricsServiceTestWithFeatures, SetClientIdToExternalId) {
1500   EnableMetricsReporting();
1501   TestMetricsServiceClient client;
1502   TestMetricsService service(GetMetricsStateManager(), &client,
1503                              GetLocalState());
1504 
1505   const std::string client_id = "d92ad666-a420-4c73-8718-94311ae2ff5f";
1506 
1507   EXPECT_NE(service.GetClientId(), client_id);
1508 
1509   service.SetExternalClientId(client_id);
1510   // Reset will cause the client id to be regenerated. If an external client id
1511   // is provided, it should defer to using that id instead of creating its own.
1512   service.ResetClientId();
1513 
1514   EXPECT_EQ(service.GetClientId(), client_id);
1515 }
1516 #endif  // BUILDFLAG(IS_CHROMEOS_LACROS)
1517 
1518 #if BUILDFLAG(IS_CHROMEOS_ASH)
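// Verify that setting a user log store before the first ongoing log has been
// completed does not flush the in-progress log to the main log store; the
// first ongoing log should instead be created in the user (alternate) store.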
1519 TEST_P(MetricsServiceTestWithFeatures,
1520        OngoingLogNotFlushedBeforeInitialLogWhenUserLogStoreSet) {
1521   EnableMetricsReporting();
1522   TestMetricsServiceClient client;
1523   TestMetricsService service(GetMetricsStateManager(), &client,
1524                              GetLocalState());
1525 
1526   service.InitializeMetricsRecordingState();
1527   // Start() will create the first ongoing log.
1528   service.Start();
1529   ASSERT_EQ(TestMetricsService::INIT_TASK_SCHEDULED, service.state());
1530 
1531   MetricsLogStore* test_log_store = service.LogStoreForTest();
1532   std::unique_ptr<TestUnsentLogStore> alternate_ongoing_log_store =
1533       InitializeTestLogStoreAndGet();
1534   TestUnsentLogStore* alternate_ongoing_log_store_ptr =
1535       alternate_ongoing_log_store.get();
1536 
1537   ASSERT_EQ(0u, test_log_store->initial_log_count());
1538   ASSERT_EQ(0u, test_log_store->ongoing_log_count());
1539 
1540   service.SetUserLogStore(std::move(alternate_ongoing_log_store));
1541 
1542   // Initial logs should not have been collected so the ongoing log being
1543   // recorded should not be flushed when a user log store is mounted.
1544   ASSERT_EQ(0u, test_log_store->initial_log_count());
1545   ASSERT_EQ(0u, test_log_store->ongoing_log_count());
1546 
1547   // Fast forward the time by |initialization_delay|, which is when the pending
1548   // init tasks will run.
1549   base::TimeDelta initialization_delay = service.GetInitializationDelay();
1550   task_environment_.FastForwardBy(initialization_delay);
1551   EXPECT_EQ(TestMetricsService::INIT_TASK_DONE, service.state());
1552 
1553   // Fast forward the time until the MetricsRotationScheduler first runs, which
1554   // should complete the first ongoing log.
1555   // Note: The first log is only created after N = GetInitialIntervalSeconds()
1556   // seconds since the start, and since we already fast forwarded by
1557   // |initialization_delay| once, we only need to fast forward by
1558   // N - |initialization_delay|.
1559   task_environment_.FastForwardBy(
1560       base::Seconds(MetricsScheduler::GetInitialIntervalSeconds()) -
1561       initialization_delay);
1562   ASSERT_EQ(TestMetricsService::SENDING_LOGS, service.state());
1563   // When the init task is complete, the first ongoing log should be created
1564   // in the alternate ongoing log store.
1565   EXPECT_EQ(0u, test_log_store->initial_log_count());
1566   EXPECT_EQ(0u, test_log_store->ongoing_log_count());
1567   EXPECT_EQ(1u, alternate_ongoing_log_store_ptr->size());
1568 }
1569 
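// Verify that setting a user log store after the first ongoing log has been
// completed flushes the current ongoing log to the main log store.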
1570 TEST_P(MetricsServiceTestWithFeatures,
1571        OngoingLogFlushedAfterInitialLogWhenUserLogStoreSet) {
1572   EnableMetricsReporting();
1573   TestMetricsServiceClient client;
1574   TestMetricsService service(GetMetricsStateManager(), &client,
1575                              GetLocalState());
1576 
1577   service.InitializeMetricsRecordingState();
1578   // Start() will create the first ongoing log.
1579   service.Start();
1580   ASSERT_EQ(TestMetricsService::INIT_TASK_SCHEDULED, service.state());
1581 
1582   MetricsLogStore* test_log_store = service.LogStoreForTest();
1583   std::unique_ptr<TestUnsentLogStore> alternate_ongoing_log_store =
1584       InitializeTestLogStoreAndGet();
1585 
1586   // Init state.
1587   ASSERT_EQ(0u, test_log_store->initial_log_count());
1588   ASSERT_EQ(0u, test_log_store->ongoing_log_count());
1589 
1590   // Fast forward the time by |initialization_delay|, which is when the pending
1591   // init tasks will run.
1592   base::TimeDelta initialization_delay = service.GetInitializationDelay();
1593   task_environment_.FastForwardBy(initialization_delay);
1594   EXPECT_EQ(TestMetricsService::INIT_TASK_DONE, service.state());
1595 
1596   // Fast forward the time until the MetricsRotationScheduler first runs, which
1597   // should complete the first ongoing log.
1598   // Note: The first log is only created after N = GetInitialIntervalSeconds()
1599   // seconds since the start, and since we already fast forwarded by
1600   // |initialization_delay| once, we only need to fast forward by
1601   // N - |initialization_delay|.
1602   task_environment_.FastForwardBy(
1603       base::Seconds(MetricsScheduler::GetInitialIntervalSeconds()) -
1604       initialization_delay);
1605   ASSERT_EQ(TestMetricsService::SENDING_LOGS, service.state());
1606   ASSERT_EQ(0u, test_log_store->initial_log_count());
1607   ASSERT_EQ(1u, test_log_store->ongoing_log_count());
1608 
1609   // User log store set post-init.
1610   service.SetUserLogStore(std::move(alternate_ongoing_log_store));
1611 
1612   // Another log should have been flushed from setting the user log store.
1613   ASSERT_EQ(0u, test_log_store->initial_log_count());
1614   ASSERT_EQ(2u, test_log_store->ongoing_log_count());
1615 }
1616 
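// Verify that unsetting the user log store before logs start being sent
// discards the current log, and that histograms recorded before the unset are
// flushed into that discarded log rather than carried over.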
1617 TEST_P(MetricsServiceTestWithFeatures,
1618        OngoingLogDiscardedAfterEarlyUnsetUserLogStore) {
1619   EnableMetricsReporting();
1620   TestMetricsServiceClient client;
1621   TestMetricsService service(GetMetricsStateManager(), &client,
1622                              GetLocalState());
1623 
1624   service.InitializeMetricsRecordingState();
1625   // Start() will create the first ongoing log.
1626   service.Start();
1627   ASSERT_EQ(TestMetricsService::INIT_TASK_SCHEDULED, service.state());
1628 
1629   MetricsLogStore* test_log_store = service.LogStoreForTest();
1630   std::unique_ptr<TestUnsentLogStore> alternate_ongoing_log_store =
1631       InitializeTestLogStoreAndGet();
1632 
1633   ASSERT_EQ(0u, test_log_store->initial_log_count());
1634   ASSERT_EQ(0u, test_log_store->ongoing_log_count());
1635 
1636   service.SetUserLogStore(std::move(alternate_ongoing_log_store));
1637 
1638   // Unset the user log store before we started sending logs.
1639   base::UmaHistogramBoolean("Test.Before.Histogram", true);
1640   service.UnsetUserLogStore();
1641   base::UmaHistogramBoolean("Test.After.Histogram", true);
1642 
1643   // Verify that the current log was discarded.
1644   EXPECT_FALSE(service.GetCurrentLogForTest());
1645 
1646   // Verify that histograms from before unsetting the user log store were
1647   // flushed.
1648   EXPECT_EQ(0, GetHistogramDeltaTotalCount("Test.Before.Histogram"));
1649   EXPECT_EQ(1, GetHistogramDeltaTotalCount("Test.After.Histogram"));
1650 
1651   // Clean up histograms.
1652   base::StatisticsRecorder::ForgetHistogramForTesting("Test.Before.Histogram");
1653   base::StatisticsRecorder::ForgetHistogramForTesting("Test.After.Histogram");
1654 }
1655 
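// Verify that unsetting the user log store stops metrics recording, and that
// recording a user action afterwards does not crash.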
1656 TEST_P(MetricsServiceTestWithFeatures,
1657        UnsettingLogStoreShouldDisableRecording) {
1658   EnableMetricsReporting();
1659   TestMetricsServiceClient client;
1660   TestMetricsService service(GetMetricsStateManager(), &client,
1661                              GetLocalState());
1662 
1663   service.InitializeMetricsRecordingState();
1664   // Start() will register the service to start recording.
1665   service.Start();
1666   ASSERT_TRUE(service.recording_active());
1667 
1668   // Register, set and unset a log store.
1669   // This will clear the log file and thus should also stop recording.
1670   std::unique_ptr<TestUnsentLogStore> alternate_ongoing_log_store =
1671       InitializeTestLogStoreAndGet();
1672   service.SetUserLogStore(std::move(alternate_ongoing_log_store));
1673   service.UnsetUserLogStore();
1674   ASSERT_FALSE(service.recording_active());
1675 
1676   // This should not crash.
1677   base::RecordAction(base::UserMetricsAction("TestAction"));
1678 }
1679 
1680 #endif  // BUILDFLAG(IS_CHROMEOS_ASH)
1681 
1682 }  // namespace metrics
1683