src/chrome/browser/history/url_database_unittest.cc
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include <set>
#include <string>
#include <vector>

#include "base/files/file_path.h"
#include "base/files/scoped_temp_dir.h"
#include "base/path_service.h"
#include "base/strings/string_util.h"
#include "base/strings/utf_string_conversions.h"
#include "chrome/browser/history/url_database.h"
#include "sql/connection.h"
#include "testing/gtest/include/gtest/gtest.h"

using base::Time;
using base::TimeDelta;

namespace history {

namespace {

bool IsURLRowEqual(const URLRow& a,
                   const URLRow& b) {
  // TODO(brettw) when the database stores an actual Time value rather than
  // a time_t, do a real comparison. Instead, we have to do a more rough
  // comparison since the conversion reduces the precision.
  return a.title() == b.title() &&
      a.visit_count() == b.visit_count() &&
      a.typed_count() == b.typed_count() &&
      a.last_visit() - b.last_visit() <= TimeDelta::FromSeconds(1) &&
      a.hidden() == b.hidden();
}

}  // namespace

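// The fixture derives from URLDatabase itself (in addition to testing::Test),
// so the TEST_F bodies below can call URLDatabase methods such as AddURL()
// and GetRowForURL() directly; GetDB() supplies the backing sql::Connection.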
class URLDatabaseTest : public testing::Test,
                        public URLDatabase {
 public:
  URLDatabaseTest() {
  }

 protected:
  // Provided for URL/VisitDatabase.
  virtual sql::Connection& GetDB() OVERRIDE {
    return db_;
  }

 private:
  // Test setup.
  virtual void SetUp() {
    ASSERT_TRUE(temp_dir_.CreateUniqueTempDir());
    base::FilePath db_file = temp_dir_.path().AppendASCII("URLTest.db");

    EXPECT_TRUE(db_.Open(db_file));

    // Initialize the tables for this test.
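    // Note: the boolean argument presumably selects a temporary vs. permanent
    // table; false asks for the regular URL table used by these tests.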
    CreateURLTable(false);
    CreateMainURLIndex();
    InitKeywordSearchTermsTable();
    CreateKeywordSearchTermsIndices();
  }
  virtual void TearDown() {
    db_.Close();
  }

  base::ScopedTempDir temp_dir_;
  sql::Connection db_;
};

// Test add and query for the URL table in the HistoryDatabase.
TEST_F(URLDatabaseTest, AddURL) {
  // First, add two URLs.
  const GURL url1("http://www.google.com/");
  URLRow url_info1(url1);
  url_info1.set_title(UTF8ToUTF16("Google"));
  url_info1.set_visit_count(4);
  url_info1.set_typed_count(2);
  url_info1.set_last_visit(Time::Now() - TimeDelta::FromDays(1));
  url_info1.set_hidden(false);
  EXPECT_TRUE(AddURL(url_info1));

  const GURL url2("http://mail.google.com/");
  URLRow url_info2(url2);
  url_info2.set_title(UTF8ToUTF16("Google Mail"));
  url_info2.set_visit_count(3);
  url_info2.set_typed_count(0);
  url_info2.set_last_visit(Time::Now() - TimeDelta::FromDays(2));
  url_info2.set_hidden(true);
  EXPECT_TRUE(AddURL(url_info2));

  // Query both of them.
  URLRow info;
  EXPECT_TRUE(GetRowForURL(url1, &info));
  EXPECT_TRUE(IsURLRowEqual(url_info1, info));
  URLID id2 = GetRowForURL(url2, &info);
  EXPECT_TRUE(id2);
  EXPECT_TRUE(IsURLRowEqual(url_info2, info));

  // Update the second.
  url_info2.set_title(UTF8ToUTF16("Google Mail Too"));
  url_info2.set_visit_count(4);
  url_info2.set_typed_count(91011);
  url_info2.set_hidden(false);
  EXPECT_TRUE(UpdateURLRow(id2, url_info2));

  // Make sure it got updated.
  URLRow info2;
  EXPECT_TRUE(GetRowForURL(url2, &info2));
  EXPECT_TRUE(IsURLRowEqual(url_info2, info2));

  // Query a nonexistent URL.
  EXPECT_EQ(0, GetRowForURL(GURL("http://news.google.com/"), &info));

  // Delete all urls in the domain.
  // TODO(acw): test the new url based delete domain
  // EXPECT_TRUE(db.DeleteDomain(kDomainID));

  // Make sure the urls have been properly removed.
  // TODO(acw): commented out because remove no longer works.
  // EXPECT_TRUE(db.GetURLInfo(url1, NULL) == NULL);
  // EXPECT_TRUE(db.GetURLInfo(url2, NULL) == NULL);
}

// Tests adding, querying and deleting keyword visits.
TEST_F(URLDatabaseTest, KeywordSearchTermVisit) {
  URLRow url_info1(GURL("http://www.google.com/"));
  url_info1.set_title(UTF8ToUTF16("Google"));
  url_info1.set_visit_count(4);
  url_info1.set_typed_count(2);
  url_info1.set_last_visit(Time::Now() - TimeDelta::FromDays(1));
  url_info1.set_hidden(false);
  URLID url_id = AddURL(url_info1);
  ASSERT_NE(0, url_id);

  // Add a keyword visit.
  TemplateURLID keyword_id = 100;
  string16 keyword = UTF8ToUTF16("visit");
  ASSERT_TRUE(SetKeywordSearchTermsForURL(url_id, keyword_id, keyword));

  // Make sure we get it back.
  std::vector<KeywordSearchTermVisit> matches;
  GetMostRecentKeywordSearchTerms(keyword_id, keyword, 10, &matches);
  ASSERT_EQ(1U, matches.size());
  ASSERT_EQ(keyword, matches[0].term);

  KeywordSearchTermRow keyword_search_term_row;
  ASSERT_TRUE(GetKeywordSearchTermRow(url_id, &keyword_search_term_row));
  EXPECT_EQ(keyword_id, keyword_search_term_row.keyword_id);
  EXPECT_EQ(url_id, keyword_search_term_row.url_id);
  EXPECT_EQ(keyword, keyword_search_term_row.term);

  // Delete the keyword visit.
  DeleteAllSearchTermsForKeyword(keyword_id);

  // Make sure we don't get it back when querying.
  matches.clear();
  GetMostRecentKeywordSearchTerms(keyword_id, keyword, 10, &matches);
  ASSERT_EQ(0U, matches.size());

  ASSERT_FALSE(GetKeywordSearchTermRow(url_id, &keyword_search_term_row));
}

// Make sure deleting a URL also deletes a keyword visit.
TEST_F(URLDatabaseTest, DeleteURLDeletesKeywordSearchTermVisit) {
  URLRow url_info1(GURL("http://www.google.com/"));
  url_info1.set_title(UTF8ToUTF16("Google"));
  url_info1.set_visit_count(4);
  url_info1.set_typed_count(2);
  url_info1.set_last_visit(Time::Now() - TimeDelta::FromDays(1));
  url_info1.set_hidden(false);
  URLID url_id = AddURL(url_info1);
  ASSERT_NE(0, url_id);

  // Add a keyword visit.
  ASSERT_TRUE(SetKeywordSearchTermsForURL(url_id, 1, UTF8ToUTF16("visit")));

  // Delete the url.
  ASSERT_TRUE(DeleteURLRow(url_id));

  // Make sure the keyword visit was deleted.
  std::vector<KeywordSearchTermVisit> matches;
  GetMostRecentKeywordSearchTerms(1, UTF8ToUTF16("visit"), 10, &matches);
  ASSERT_EQ(0U, matches.size());
}

TEST_F(URLDatabaseTest, EnumeratorForSignificant) {
  std::set<std::string> good_urls;
  // Add URLs which do and don't meet the criteria.
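  // The kLowQualityMatch* constants used below come with URLDatabase and
  // presumably define the thresholds that InitURLEnumeratorForSignificant()
  // treats as "significant".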
  URLRow url_no_match(GURL("http://www.url_no_match.com/"));
  EXPECT_TRUE(AddURL(url_no_match));

  std::string url_string2("http://www.url_match_visit_count.com/");
  good_urls.insert(url_string2);
  URLRow url_match_visit_count(GURL(url_string2));
  url_match_visit_count.set_visit_count(kLowQualityMatchVisitLimit);
  EXPECT_TRUE(AddURL(url_match_visit_count));

  good_urls.insert("http://www.url_match_typed_count.com/");
  URLRow url_match_typed_count(GURL("http://www.url_match_typed_count.com/"));
  url_match_typed_count.set_typed_count(kLowQualityMatchTypedLimit);
  EXPECT_TRUE(AddURL(url_match_typed_count));

  good_urls.insert("http://www.url_match_last_visit.com/");
  URLRow url_match_last_visit(GURL("http://www.url_match_last_visit.com/"));
  url_match_last_visit.set_last_visit(Time::Now() - TimeDelta::FromDays(1));
  EXPECT_TRUE(AddURL(url_match_last_visit));

  URLRow url_no_match_last_visit(GURL(
      "http://www.url_no_match_last_visit.com/"));
  url_no_match_last_visit.set_last_visit(Time::Now() -
      TimeDelta::FromDays(kLowQualityMatchAgeLimitInDays + 1));
  EXPECT_TRUE(AddURL(url_no_match_last_visit));

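  // Only the three URLs recorded in |good_urls| meet at least one of the
  // significance criteria, so the enumerator should return exactly those rows.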
  URLDatabase::URLEnumerator history_enum;
  EXPECT_TRUE(InitURLEnumeratorForSignificant(&history_enum));
  URLRow row;
  int row_count = 0;
  for (; history_enum.GetNextURL(&row); ++row_count)
    EXPECT_EQ(1U, good_urls.count(row.url().spec()));
  EXPECT_EQ(3, row_count);
}

// Test GetKeywordSearchTermRows and DeleteKeywordSearchTerm.
TEST_F(URLDatabaseTest, GetAndDeleteKeywordSearchTermByTerm) {
  URLRow url_info1(GURL("http://www.google.com/"));
  url_info1.set_title(UTF8ToUTF16("Google"));
  url_info1.set_visit_count(4);
  url_info1.set_typed_count(2);
  url_info1.set_last_visit(Time::Now() - TimeDelta::FromDays(1));
  url_info1.set_hidden(false);
  URLID url_id1 = AddURL(url_info1);
  ASSERT_NE(0, url_id1);

  // Add a keyword visit.
  TemplateURLID keyword_id = 100;
  string16 keyword = UTF8ToUTF16("visit");
  ASSERT_TRUE(SetKeywordSearchTermsForURL(url_id1, keyword_id, keyword));

  URLRow url_info2(GURL("https://www.google.com/"));
  url_info2.set_title(UTF8ToUTF16("Google"));
  url_info2.set_visit_count(4);
  url_info2.set_typed_count(2);
  url_info2.set_last_visit(Time::Now() - TimeDelta::FromDays(1));
  url_info2.set_hidden(false);
  URLID url_id2 = AddURL(url_info2);
  ASSERT_NE(0, url_id2);
  // Add the same keyword for url_info2.
  ASSERT_TRUE(SetKeywordSearchTermsForURL(url_id2, keyword_id, keyword));

  // Add another URL for the same keyword, but with a different search term.
  URLRow url_info3(GURL("https://www.google.com/search"));
  url_info3.set_title(UTF8ToUTF16("Google"));
  url_info3.set_visit_count(4);
  url_info3.set_typed_count(2);
  url_info3.set_last_visit(Time::Now() - TimeDelta::FromDays(1));
  url_info3.set_hidden(false);
  URLID url_id3 = AddURL(url_info3);
  ASSERT_NE(0, url_id3);
  string16 keyword2 = UTF8ToUTF16("Search");

  ASSERT_TRUE(SetKeywordSearchTermsForURL(url_id3, keyword_id, keyword2));

  // We should get 2 rows for |keyword|.
  std::vector<KeywordSearchTermRow> rows;
  ASSERT_TRUE(GetKeywordSearchTermRows(keyword, &rows));
  ASSERT_EQ(2u, rows.size());
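  // The order of the returned rows is not guaranteed here, so accept either
  // permutation of the two URL ids.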
  if (rows[0].url_id == url_id1) {
    EXPECT_EQ(keyword, rows[0].term);
    EXPECT_EQ(keyword, rows[1].term);
    EXPECT_EQ(url_id2, rows[1].url_id);
  } else {
    EXPECT_EQ(keyword, rows[0].term);
    EXPECT_EQ(url_id1, rows[1].url_id);
    EXPECT_EQ(keyword, rows[1].term);
    EXPECT_EQ(url_id2, rows[0].url_id);
  }

  // We should get 1 row for |keyword2|.
  rows.clear();
  ASSERT_TRUE(GetKeywordSearchTermRows(keyword2, &rows));
  ASSERT_EQ(1u, rows.size());
  EXPECT_EQ(keyword2, rows[0].term);
  EXPECT_EQ(url_id3, rows[0].url_id);

  // Delete all rows that have |keyword|.
  ASSERT_TRUE(DeleteKeywordSearchTerm(keyword));
  rows.clear();
  // We should still find |keyword2|.
  ASSERT_TRUE(GetKeywordSearchTermRows(keyword2, &rows));
  ASSERT_EQ(1u, rows.size());
  EXPECT_EQ(keyword2, rows[0].term);
  EXPECT_EQ(url_id3, rows[0].url_id);
  rows.clear();
  // No rows remain for |keyword|.
  ASSERT_TRUE(GetKeywordSearchTermRows(keyword, &rows));
  EXPECT_TRUE(rows.empty());
}

}  // namespace history