// Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "base/test/launcher/test_results_tracker.h"

#include "base/command_line.h"
#include "base/file_util.h"
#include "base/files/file_path.h"
#include "base/format_macros.h"
#include "base/json/json_file_value_serializer.h"
#include "base/logging.h"
#include "base/strings/stringprintf.h"
#include "base/test/launcher/test_launcher.h"
#include "base/values.h"

namespace base {

// See https://groups.google.com/a/chromium.org/d/msg/chromium-dev/nkdTP7sstSc/uT3FaE_sgkAJ .
using ::operator<<;

namespace {

// The default output file for XML output.
const FilePath::CharType kDefaultOutputFile[] = FILE_PATH_LITERAL(
    "test_detail.xml");

// Utility function to print a list of test names. Uses iterators to be
// compatible with different containers, like vector and set.
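//
// For example, with two failing tests this would print (test names are made
// up for illustration; the exact formatting comes from the fprintf calls
// below):
//
//   2 tests failed:
//       Foo.Bar
//       Foo.Baz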
template<typename InputIterator>
void PrintTests(InputIterator first,
                InputIterator last,
                const std::string& description) {
  size_t count = std::distance(first, last);
  if (count == 0)
    return;

  fprintf(stdout,
          "%" PRIuS " test%s %s:\n",
          count,
          count != 1 ? "s" : "",
          description.c_str());
  for (InputIterator i = first; i != last; ++i)
    fprintf(stdout, "    %s\n", (*i).c_str());
  fflush(stdout);
}

}  // namespace

TestResultsTracker::TestResultsTracker() : iteration_(-1), out_(NULL) {
}

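// If XML output was requested via Init(), the destructor writes a gtest-style
// XML report for the last iteration. A rough sketch of the output (attribute
// values that the code leaves blank stay blank; test names below are made up
// for illustration, and the <failure> element is emitted only for
// non-successful results):
//
//   <?xml version="1.0" encoding="UTF-8"?>
//   <testsuites name="AllTests" tests="" failures="" disabled="" errors="" time="">
//     <testsuite name="FooTest" tests="1" failures="" disabled="" errors="" time="">
//       <testcase name="Bar" status="run" time="0.123" classname="FooTest">
//         <failure message="" type=""></failure>
//       </testcase>
//     </testsuite>
//   </testsuites>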
TestResultsTracker::~TestResultsTracker() {
  DCHECK(thread_checker_.CalledOnValidThread());

  if (!out_)
    return;
  fprintf(out_, "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n");
  fprintf(out_, "<testsuites name=\"AllTests\" tests=\"\" failures=\"\""
          " disabled=\"\" errors=\"\" time=\"\">\n");

  // Maps test case names to test results.
  typedef std::map<std::string, std::vector<TestResult> > TestCaseMap;
  TestCaseMap test_case_map;

  for (PerIterationData::ResultsMap::iterator i =
           per_iteration_data_[iteration_].results.begin();
       i != per_iteration_data_[iteration_].results.end();
       ++i) {
    // Use the last test result as the final one.
    TestResult result = i->second.test_results.back();
    test_case_map[result.GetTestCaseName()].push_back(result);
  }
  for (TestCaseMap::iterator i = test_case_map.begin();
       i != test_case_map.end();
       ++i) {
    fprintf(out_, "  <testsuite name=\"%s\" tests=\"%" PRIuS "\" failures=\"\""
            " disabled=\"\" errors=\"\" time=\"\">\n",
            i->first.c_str(), i->second.size());
    for (size_t j = 0; j < i->second.size(); ++j) {
      const TestResult& result = i->second[j];
      fprintf(out_, "    <testcase name=\"%s\" status=\"run\" time=\"%.3f\""
              " classname=\"%s\">\n",
              result.GetTestName().c_str(),
              result.elapsed_time.InSecondsF(),
              result.GetTestCaseName().c_str());
      if (result.status != TestResult::TEST_SUCCESS)
        fprintf(out_, "      <failure message=\"\" type=\"\"></failure>\n");
      fprintf(out_, "    </testcase>\n");
    }
    fprintf(out_, "  </testsuite>\n");
  }
  fprintf(out_, "</testsuites>\n");
  fclose(out_);
}

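// Parses the gtest-style output flag and opens the XML output file. A sketch
// of the forms this code accepts, assuming kGTestOutputFlag corresponds to
// the standard --gtest_output switch (paths below are made up for
// illustration):
//
//   --gtest_output=xml:/tmp/results.xml   -> writes /tmp/results.xml
//   --gtest_output=xml:/tmp/out_dir/      -> writes /tmp/out_dir/<binary>.xml
//   --gtest_output=xml                    -> writes test_detail.xml (default)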
bool TestResultsTracker::Init(const CommandLine& command_line) {
  DCHECK(thread_checker_.CalledOnValidThread());

  // Prevent initializing twice.
  if (out_) {
    NOTREACHED();
    return false;
  }

  if (!command_line.HasSwitch(kGTestOutputFlag))
    return true;

  std::string flag = command_line.GetSwitchValueASCII(kGTestOutputFlag);
  size_t colon_pos = flag.find(':');
  FilePath path;
  if (colon_pos != std::string::npos) {
    FilePath flag_path =
        command_line.GetSwitchValuePath(kGTestOutputFlag);
    FilePath::StringType path_string = flag_path.value();
    path = FilePath(path_string.substr(colon_pos + 1));
    // If the given path ends with '/', treat it as a directory.
    // Note: This does NOT check that the directory (or file) actually exists
    // (the behavior is the same as gtest's).
    if (path.EndsWithSeparator()) {
      FilePath executable = command_line.GetProgram().BaseName();
      path = path.Append(executable.ReplaceExtension(
                             FilePath::StringType(FILE_PATH_LITERAL("xml"))));
    }
  }
  if (path.value().empty())
    path = FilePath(kDefaultOutputFile);
  FilePath dir_name = path.DirName();
  if (!DirectoryExists(dir_name)) {
    LOG(WARNING) << "The output directory does not exist. "
                 << "Creating the directory: " << dir_name.value();
    // Create the directory if necessary (because gtest does the same).
    if (!file_util::CreateDirectory(dir_name)) {
      LOG(ERROR) << "Failed to create directory " << dir_name.value();
      return false;
    }
  }
  out_ = file_util::OpenFile(path, "w");
  if (!out_) {
    LOG(ERROR) << "Cannot open output file: "
               << path.value() << ".";
    return false;
  }

  return true;
}

void TestResultsTracker::OnTestIterationStarting() {
  DCHECK(thread_checker_.CalledOnValidThread());

  // Start with a fresh state for a new iteration.
  iteration_++;
  per_iteration_data_.push_back(PerIterationData());
}

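// Records a single result for |result.full_name| in the current iteration.
// A test can appear more than once per iteration (e.g. after a retry); the
// summary and XML code above treat the last recorded result as the final one.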
void TestResultsTracker::AddTestResult(const TestResult& result) {
  DCHECK(thread_checker_.CalledOnValidThread());

  per_iteration_data_[iteration_].results[
      result.full_name].test_results.push_back(result);
}

void TestResultsTracker::PrintSummaryOfCurrentIteration() const {
  std::map<TestResult::Status, std::set<std::string> > tests_by_status;

  for (PerIterationData::ResultsMap::const_iterator j =
           per_iteration_data_[iteration_].results.begin();
       j != per_iteration_data_[iteration_].results.end();
       ++j) {
    // Use the last test result as the final one.
    TestResult result = j->second.test_results.back();
    tests_by_status[result.status].insert(result.full_name);
  }

  PrintTests(tests_by_status[TestResult::TEST_FAILURE].begin(),
             tests_by_status[TestResult::TEST_FAILURE].end(),
             "failed");
  PrintTests(tests_by_status[TestResult::TEST_TIMEOUT].begin(),
             tests_by_status[TestResult::TEST_TIMEOUT].end(),
             "timed out");
  PrintTests(tests_by_status[TestResult::TEST_CRASH].begin(),
             tests_by_status[TestResult::TEST_CRASH].end(),
             "crashed");
  PrintTests(tests_by_status[TestResult::TEST_SKIPPED].begin(),
             tests_by_status[TestResult::TEST_SKIPPED].end(),
             "skipped");
  PrintTests(tests_by_status[TestResult::TEST_UNKNOWN].begin(),
             tests_by_status[TestResult::TEST_UNKNOWN].end(),
             "had unknown result");
}

void TestResultsTracker::PrintSummaryOfAllIterations() const {
  DCHECK(thread_checker_.CalledOnValidThread());

  std::map<TestResult::Status, std::set<std::string> > tests_by_status;

  for (int i = 0; i <= iteration_; i++) {
    for (PerIterationData::ResultsMap::const_iterator j =
             per_iteration_data_[i].results.begin();
         j != per_iteration_data_[i].results.end();
         ++j) {
      // Use the last test result as the final one.
      TestResult result = j->second.test_results.back();
      tests_by_status[result.status].insert(result.full_name);
    }
  }

  fprintf(stdout, "Summary of all test iterations:\n");
  fflush(stdout);

  PrintTests(tests_by_status[TestResult::TEST_FAILURE].begin(),
             tests_by_status[TestResult::TEST_FAILURE].end(),
             "failed");
  PrintTests(tests_by_status[TestResult::TEST_TIMEOUT].begin(),
             tests_by_status[TestResult::TEST_TIMEOUT].end(),
             "timed out");
  PrintTests(tests_by_status[TestResult::TEST_CRASH].begin(),
             tests_by_status[TestResult::TEST_CRASH].end(),
             "crashed");
  PrintTests(tests_by_status[TestResult::TEST_SKIPPED].begin(),
             tests_by_status[TestResult::TEST_SKIPPED].end(),
             "skipped");
  PrintTests(tests_by_status[TestResult::TEST_UNKNOWN].begin(),
             tests_by_status[TestResult::TEST_UNKNOWN].end(),
             "had unknown result");

  fprintf(stdout, "End of the summary.\n");
  fflush(stdout);
}

void TestResultsTracker::AddGlobalTag(const std::string& tag) {
  global_tags_.insert(tag);
}

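// Writes a machine-readable summary of all iterations to |path|. A rough
// sketch of the resulting JSON, following the Set/Append calls below (key
// values are made up for illustration):
//
//   {
//     "global_tags": ["SOME_TAG"],
//     "per_iteration_data": [
//       {
//         "FooTest.Bar": [
//           {
//             "status": "SUCCESS",
//             "elapsed_time_ms": 123,
//             "output_snippet": "..."
//           }
//         ]
//       }
//     ]
//   }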
bool TestResultsTracker::SaveSummaryAsJSON(const FilePath& path) const {
  scoped_ptr<DictionaryValue> summary_root(new DictionaryValue);

  ListValue* global_tags = new ListValue;
  summary_root->Set("global_tags", global_tags);

  for (std::set<std::string>::const_iterator i = global_tags_.begin();
       i != global_tags_.end();
       ++i) {
    global_tags->AppendString(*i);
  }

  ListValue* per_iteration_data = new ListValue;
  summary_root->Set("per_iteration_data", per_iteration_data);

  for (int i = 0; i <= iteration_; i++) {
    DictionaryValue* current_iteration_data = new DictionaryValue;
    per_iteration_data->Append(current_iteration_data);

    for (PerIterationData::ResultsMap::const_iterator j =
             per_iteration_data_[i].results.begin();
         j != per_iteration_data_[i].results.end();
         ++j) {
      ListValue* test_results = new ListValue;
      current_iteration_data->SetWithoutPathExpansion(j->first, test_results);

      for (size_t k = 0; k < j->second.test_results.size(); k++) {
        const TestResult& test_result = j->second.test_results[k];

        DictionaryValue* test_result_value = new DictionaryValue;
        test_results->Append(test_result_value);

        test_result_value->SetString("status", test_result.StatusAsString());
        test_result_value->SetInteger(
            "elapsed_time_ms", test_result.elapsed_time.InMilliseconds());
        test_result_value->SetString("output_snippet",
                                     test_result.output_snippet);
      }
    }
  }

  JSONFileValueSerializer serializer(path);
  return serializer.Serialize(*summary_root);
}

TestResultsTracker::AggregateTestResult::AggregateTestResult() {
}

TestResultsTracker::AggregateTestResult::~AggregateTestResult() {
}

TestResultsTracker::PerIterationData::PerIterationData() {
}

TestResultsTracker::PerIterationData::~PerIterationData() {
}

}  // namespace base