src/components/metrics/metrics_log_unittest.cc
// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "components/metrics/metrics_log.h"

#include <string>

#include "base/base64.h"
#include "base/basictypes.h"
#include "base/metrics/bucket_ranges.h"
#include "base/metrics/sample_vector.h"
#include "base/prefs/pref_service.h"
#include "base/prefs/testing_pref_service.h"
#include "base/strings/string_number_conversions.h"
#include "base/time/time.h"
#include "components/metrics/metrics_pref_names.h"
#include "components/metrics/metrics_state_manager.h"
#include "components/metrics/proto/chrome_user_metrics_extension.pb.h"
#include "components/metrics/test_metrics_service_client.h"
#include "components/variations/active_field_trials.h"
#include "testing/gtest/include/gtest/gtest.h"

namespace metrics {

namespace {

const char kClientId[] = "bogus client ID";
const int64 kInstallDate = 1373051956;
const int64 kInstallDateExpected = 1373050800;  // Computed from kInstallDate.
const int64 kEnabledDate = 1373001211;
const int64 kEnabledDateExpected = 1373000400;  // Computed from kEnabledDate.
const int kSessionId = 127;
const variations::ActiveGroupId kFieldTrialIds[] = {
  {37, 43},
  {13, 47},
  {23, 17}
};
const variations::ActiveGroupId kSyntheticTrials[] = {
  {55, 15},
  {66, 16}
};

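// MetricsLog subclass that exposes the underlying protos for inspection and
// substitutes the test data above: it seeds |kEnabledDate| via prefs and
// reports |kFieldTrialIds| instead of the real active field trials.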
class TestMetricsLog : public MetricsLog {
 public:
  TestMetricsLog(const std::string& client_id,
                 int session_id,
                 LogType log_type,
                 MetricsServiceClient* client,
                 TestingPrefServiceSimple* prefs)
      : MetricsLog(client_id, session_id, log_type, client, prefs),
        prefs_(prefs) {
    InitPrefs();
  }

  ~TestMetricsLog() override {}

  const ChromeUserMetricsExtension& uma_proto() const {
    return *MetricsLog::uma_proto();
  }

  const SystemProfileProto& system_profile() const {
    return uma_proto().system_profile();
  }

 private:
  void InitPrefs() {
    prefs_->SetString(prefs::kMetricsReportingEnabledTimestamp,
                      base::Int64ToString(kEnabledDate));
  }

  void GetFieldTrialIds(
      std::vector<variations::ActiveGroupId>* field_trial_ids) const override {
    ASSERT_TRUE(field_trial_ids->empty());

    for (size_t i = 0; i < arraysize(kFieldTrialIds); ++i) {
      field_trial_ids->push_back(kFieldTrialIds[i]);
    }
  }

  // Weak pointer to the PrefService used by this log.
  TestingPrefServiceSimple* prefs_;

  DISALLOW_COPY_AND_ASSIGN(TestMetricsLog);
};

}  // namespace

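// Test fixture that registers the prefs MetricsLog and MetricsStateManager
// depend on, and provides a helper to validate a recorded system profile.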
class MetricsLogTest : public testing::Test {
 public:
  MetricsLogTest() {
    MetricsLog::RegisterPrefs(prefs_.registry());
    MetricsStateManager::RegisterPrefs(prefs_.registry());
  }

  ~MetricsLogTest() override {}

 protected:
  // Check that the values in |system_profile| correspond to the test data
  // defined at the top of this file.
  void CheckSystemProfile(const SystemProfileProto& system_profile) {
    EXPECT_EQ(kInstallDateExpected, system_profile.install_date());
    EXPECT_EQ(kEnabledDateExpected, system_profile.uma_enabled_date());

    ASSERT_EQ(arraysize(kFieldTrialIds) + arraysize(kSyntheticTrials),
              static_cast<size_t>(system_profile.field_trial_size()));
    for (size_t i = 0; i < arraysize(kFieldTrialIds); ++i) {
      const SystemProfileProto::FieldTrial& field_trial =
          system_profile.field_trial(i);
      EXPECT_EQ(kFieldTrialIds[i].name, field_trial.name_id());
      EXPECT_EQ(kFieldTrialIds[i].group, field_trial.group_id());
    }
    // Verify the right data is present for the synthetic trials.
    for (size_t i = 0; i < arraysize(kSyntheticTrials); ++i) {
      const SystemProfileProto::FieldTrial& field_trial =
          system_profile.field_trial(i + arraysize(kFieldTrialIds));
      EXPECT_EQ(kSyntheticTrials[i].name, field_trial.name_id());
      EXPECT_EQ(kSyntheticTrials[i].group, field_trial.group_id());
    }

    EXPECT_EQ(TestMetricsServiceClient::kBrandForTesting,
              system_profile.brand_code());

    const SystemProfileProto::Hardware& hardware =
        system_profile.hardware();

    EXPECT_TRUE(hardware.has_cpu());
    EXPECT_TRUE(hardware.cpu().has_vendor_name());
    EXPECT_TRUE(hardware.cpu().has_signature());

    // TODO(isherman): Verify other data written into the protobuf as a result
    // of this call.
  }

 protected:
  TestingPrefServiceSimple prefs_;

 private:
  DISALLOW_COPY_AND_ASSIGN(MetricsLogTest);
};

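// Verifies that the log type passed to the constructor is reported back by
// log_type().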
TEST_F(MetricsLogTest, LogType) {
  TestMetricsServiceClient client;
  TestingPrefServiceSimple prefs;

  MetricsLog log1("id", 0, MetricsLog::ONGOING_LOG, &client, &prefs);
  EXPECT_EQ(MetricsLog::ONGOING_LOG, log1.log_type());

  MetricsLog log2("id", 0, MetricsLog::INITIAL_STABILITY_LOG, &client, &prefs);
  EXPECT_EQ(MetricsLog::INITIAL_STABILITY_LOG, log2.log_type());
}

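// Closes an essentially empty log and checks that only the always-present
// fields (client id, session id, build timestamp, app version, channel) end
// up in the serialized proto.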
TEST_F(MetricsLogTest, EmptyRecord) {
  TestMetricsServiceClient client;
  client.set_version_string("bogus version");
  TestingPrefServiceSimple prefs;
  MetricsLog log("totally bogus client ID", 137, MetricsLog::ONGOING_LOG,
                 &client, &prefs);
  log.CloseLog();

  std::string encoded;
  log.GetEncodedLog(&encoded);

  // A couple of fields are hard to mock, so these will be copied over directly
  // for the expected output.
  ChromeUserMetricsExtension parsed;
  ASSERT_TRUE(parsed.ParseFromString(encoded));

  ChromeUserMetricsExtension expected;
  expected.set_client_id(5217101509553811875);  // Hashed bogus client ID
  expected.set_session_id(137);
  expected.mutable_system_profile()->set_build_timestamp(
      parsed.system_profile().build_timestamp());
  expected.mutable_system_profile()->set_app_version("bogus version");
  expected.mutable_system_profile()->set_channel(client.GetChannel());

  EXPECT_EQ(expected.SerializeAsString(), encoded);
}

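// Verifies how bucket boundaries are encoded: redundant min/max fields are
// dropped from the histogram proto when they can be recomputed (max equal to
// the next bucket's min, or min equal to max - 1).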
TEST_F(MetricsLogTest, HistogramBucketFields) {
  // Create buckets: 1-5, 5-7, 7-8, 8-9, 9-10, 10-11, 11-12.
  base::BucketRanges ranges(8);
  ranges.set_range(0, 1);
  ranges.set_range(1, 5);
  ranges.set_range(2, 7);
  ranges.set_range(3, 8);
  ranges.set_range(4, 9);
  ranges.set_range(5, 10);
  ranges.set_range(6, 11);
  ranges.set_range(7, 12);

  base::SampleVector samples(&ranges);
  samples.Accumulate(3, 1);   // Bucket 1-5.
  samples.Accumulate(6, 1);   // Bucket 5-7.
  samples.Accumulate(8, 1);   // Bucket 8-9. (7-8 skipped)
  samples.Accumulate(10, 1);  // Bucket 10-11. (9-10 skipped)
  samples.Accumulate(11, 1);  // Bucket 11-12.

  TestMetricsServiceClient client;
  TestingPrefServiceSimple prefs;
  TestMetricsLog log(
      kClientId, kSessionId, MetricsLog::ONGOING_LOG, &client, &prefs_);
  log.RecordHistogramDelta("Test", samples);

  const ChromeUserMetricsExtension& uma_proto = log.uma_proto();
  const HistogramEventProto& histogram_proto =
      uma_proto.histogram_event(uma_proto.histogram_event_size() - 1);

  // Buckets with samples: 1-5, 5-7, 8-9, 10-11, 11-12.
  // Should become: 1-/, 5-7, /-9, 10-/, /-12.
  ASSERT_EQ(5, histogram_proto.bucket_size());

  // 1-5 becomes 1-/ (max is same as next min).
  EXPECT_TRUE(histogram_proto.bucket(0).has_min());
  EXPECT_FALSE(histogram_proto.bucket(0).has_max());
  EXPECT_EQ(1, histogram_proto.bucket(0).min());

  // 5-7 stays 5-7 (no optimization possible).
  EXPECT_TRUE(histogram_proto.bucket(1).has_min());
  EXPECT_TRUE(histogram_proto.bucket(1).has_max());
  EXPECT_EQ(5, histogram_proto.bucket(1).min());
  EXPECT_EQ(7, histogram_proto.bucket(1).max());

  // 8-9 becomes /-9 (min is same as max - 1).
  EXPECT_FALSE(histogram_proto.bucket(2).has_min());
  EXPECT_TRUE(histogram_proto.bucket(2).has_max());
  EXPECT_EQ(9, histogram_proto.bucket(2).max());

  // 10-11 becomes 10-/ (both optimizations apply; omitting the max wins).
  EXPECT_TRUE(histogram_proto.bucket(3).has_min());
  EXPECT_FALSE(histogram_proto.bucket(3).has_max());
  EXPECT_EQ(10, histogram_proto.bucket(3).min());

  // 11-12 becomes /-12 (last record must keep max, min is same as max - 1).
  EXPECT_FALSE(histogram_proto.bucket(4).has_min());
  EXPECT_TRUE(histogram_proto.bucket(4).has_max());
  EXPECT_EQ(12, histogram_proto.bucket(4).max());
}

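// Verifies that RecordEnvironment() fills in the system profile correctly and
// also persists a base64-encoded copy of it to prefs.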
TEST_F(MetricsLogTest, RecordEnvironment) {
  TestMetricsServiceClient client;
  TestMetricsLog log(
      kClientId, kSessionId, MetricsLog::ONGOING_LOG, &client, &prefs_);

  std::vector<variations::ActiveGroupId> synthetic_trials;
  // Add two synthetic trials.
  synthetic_trials.push_back(kSyntheticTrials[0]);
  synthetic_trials.push_back(kSyntheticTrials[1]);

  log.RecordEnvironment(std::vector<MetricsProvider*>(),
                        synthetic_trials,
                        kInstallDate);
  // Check that the system profile on the log has the correct values set.
  CheckSystemProfile(log.system_profile());

  // Check that the system profile has also been written to prefs.
  const std::string base64_system_profile =
      prefs_.GetString(prefs::kStabilitySavedSystemProfile);
  EXPECT_FALSE(base64_system_profile.empty());
  std::string serialized_system_profile;
  EXPECT_TRUE(base::Base64Decode(base64_system_profile,
                                 &serialized_system_profile));
  SystemProfileProto decoded_system_profile;
  EXPECT_TRUE(
      decoded_system_profile.ParseFromString(serialized_system_profile));
  CheckSystemProfile(decoded_system_profile);
}

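// Exercises LoadSavedEnvironmentFromPrefs(): loading fails when no environment
// was saved or when the saved hash does not match, and the prefs are cleared
// after every load attempt.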
TEST_F(MetricsLogTest, LoadSavedEnvironmentFromPrefs) {
  const char* kSystemProfilePref = prefs::kStabilitySavedSystemProfile;
  const char* kSystemProfileHashPref =
      prefs::kStabilitySavedSystemProfileHash;

  TestMetricsServiceClient client;

  // The pref value is empty, so loading it from prefs should fail.
  {
    TestMetricsLog log(
        kClientId, kSessionId, MetricsLog::ONGOING_LOG, &client, &prefs_);
    EXPECT_FALSE(log.LoadSavedEnvironmentFromPrefs());
  }

  // Do a RecordEnvironment() call and check whether the pref is recorded.
  {
    TestMetricsLog log(
        kClientId, kSessionId, MetricsLog::ONGOING_LOG, &client, &prefs_);
    log.RecordEnvironment(std::vector<MetricsProvider*>(),
                          std::vector<variations::ActiveGroupId>(),
                          kInstallDate);
    EXPECT_FALSE(prefs_.GetString(kSystemProfilePref).empty());
    EXPECT_FALSE(prefs_.GetString(kSystemProfileHashPref).empty());
  }

  {
    TestMetricsLog log(
        kClientId, kSessionId, MetricsLog::ONGOING_LOG, &client, &prefs_);
    EXPECT_TRUE(log.LoadSavedEnvironmentFromPrefs());
    // Check some values in the system profile.
    EXPECT_EQ(kInstallDateExpected, log.system_profile().install_date());
    EXPECT_EQ(kEnabledDateExpected, log.system_profile().uma_enabled_date());
    // Ensure that the call cleared the prefs.
    EXPECT_TRUE(prefs_.GetString(kSystemProfilePref).empty());
    EXPECT_TRUE(prefs_.GetString(kSystemProfileHashPref).empty());
  }

  // Ensure that a non-matching hash results in the pref being invalid.
  {
    TestMetricsLog log(
        kClientId, kSessionId, MetricsLog::ONGOING_LOG, &client, &prefs_);
    // Call RecordEnvironment() to record the pref again.
    log.RecordEnvironment(std::vector<MetricsProvider*>(),
                          std::vector<variations::ActiveGroupId>(),
                          kInstallDate);
  }

  {
    // Set the hash to a bad value.
    prefs_.SetString(kSystemProfileHashPref, "deadbeef");
    TestMetricsLog log(
        kClientId, kSessionId, MetricsLog::ONGOING_LOG, &client, &prefs_);
    EXPECT_FALSE(log.LoadSavedEnvironmentFromPrefs());
    // Ensure that the prefs are cleared, even if the call failed.
    EXPECT_TRUE(prefs_.GetString(kSystemProfilePref).empty());
    EXPECT_TRUE(prefs_.GetString(kSystemProfileHashPref).empty());
  }
}

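// Initial stability logs should include the shutdown, breakpad-registration,
// and debugger counters in addition to the always-present launch and crash
// counts.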
TEST_F(MetricsLogTest, InitialLogStabilityMetrics) {
  TestMetricsServiceClient client;
  TestMetricsLog log(kClientId,
                     kSessionId,
                     MetricsLog::INITIAL_STABILITY_LOG,
                     &client,
                     &prefs_);
  std::vector<MetricsProvider*> metrics_providers;
  log.RecordEnvironment(metrics_providers,
                        std::vector<variations::ActiveGroupId>(),
                        kInstallDate);
  log.RecordStabilityMetrics(metrics_providers, base::TimeDelta(),
                             base::TimeDelta());
  const SystemProfileProto_Stability& stability =
      log.system_profile().stability();
  // Required metrics:
  EXPECT_TRUE(stability.has_launch_count());
  EXPECT_TRUE(stability.has_crash_count());
  // Initial log metrics:
  EXPECT_TRUE(stability.has_incomplete_shutdown_count());
  EXPECT_TRUE(stability.has_breakpad_registration_success_count());
  EXPECT_TRUE(stability.has_breakpad_registration_failure_count());
  EXPECT_TRUE(stability.has_debugger_present_count());
  EXPECT_TRUE(stability.has_debugger_not_present_count());
}

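// Ongoing logs should carry only the required launch and crash counts, not the
// initial-log-only stability counters.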
TEST_F(MetricsLogTest, OngoingLogStabilityMetrics) {
  TestMetricsServiceClient client;
  TestMetricsLog log(
      kClientId, kSessionId, MetricsLog::ONGOING_LOG, &client, &prefs_);
  std::vector<MetricsProvider*> metrics_providers;
  log.RecordEnvironment(metrics_providers,
                        std::vector<variations::ActiveGroupId>(),
                        kInstallDate);
  log.RecordStabilityMetrics(metrics_providers, base::TimeDelta(),
                             base::TimeDelta());
  const SystemProfileProto_Stability& stability =
      log.system_profile().stability();
  // Required metrics:
  EXPECT_TRUE(stability.has_launch_count());
  EXPECT_TRUE(stability.has_crash_count());
  // Initial log metrics:
  EXPECT_FALSE(stability.has_incomplete_shutdown_count());
  EXPECT_FALSE(stability.has_breakpad_registration_success_count());
  EXPECT_FALSE(stability.has_breakpad_registration_failure_count());
  EXPECT_FALSE(stability.has_debugger_present_count());
  EXPECT_FALSE(stability.has_debugger_not_present_count());
}

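// The channel reported by the client should be recorded in the system profile.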
TEST_F(MetricsLogTest, ChromeChannelWrittenToProtobuf) {
  TestMetricsServiceClient client;
  TestMetricsLog log(
      kClientId, kSessionId, MetricsLog::ONGOING_LOG, &client, &prefs_);
  EXPECT_TRUE(log.uma_proto().system_profile().has_channel());
}

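// When the client reports the default product (Chrome), the product field
// should be left unset in the proto.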
TEST_F(MetricsLogTest, ProductNotSetIfDefault) {
  TestMetricsServiceClient client;
  EXPECT_EQ(ChromeUserMetricsExtension::CHROME, client.GetProduct());
  TestMetricsLog log(
      kClientId, kSessionId, MetricsLog::ONGOING_LOG, &client, &prefs_);
  // Check that the product isn't set, since it's the default, and also verify
  // that the default value is indeed Chrome.
  EXPECT_FALSE(log.uma_proto().has_product());
  EXPECT_EQ(ChromeUserMetricsExtension::CHROME, log.uma_proto().product());
}

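// When the client reports a non-default product, it should be written to the
// proto explicitly.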
TEST_F(MetricsLogTest, ProductSetIfNotDefault) {
  const int32_t kTestProduct = 100;
  EXPECT_NE(ChromeUserMetricsExtension::CHROME, kTestProduct);

  TestMetricsServiceClient client;
  client.set_product(kTestProduct);
  TestMetricsLog log(
      kClientId, kSessionId, MetricsLog::ONGOING_LOG, &client, &prefs_);
  // Check that the product is set to |kTestProduct|.
  EXPECT_TRUE(log.uma_proto().has_product());
  EXPECT_EQ(kTestProduct, log.uma_proto().product());
}

}  // namespace metrics