// Copyright (c) 2011 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "base/basictypes.h"
#include "base/callback.h"
#include "base/file_path.h"
#include "base/file_util.h"
#include "base/memory/scoped_temp_dir.h"
#include "base/path_service.h"
#include "base/utf_string_conversions.h"
#include "chrome/browser/history/history.h"
#include "testing/gtest/include/gtest/gtest.h"

using base::Time;
using base::TimeDelta;

// Tests the history service for querying functionality.

namespace history {

namespace {

struct TestEntry {
  const char* url;
  const char* title;
  const int days_ago;
  const char* body;
  Time time;  // Filled by SetUp.
} test_entries[] = {
  // This one is visited super long ago so it will be in a different database
  // from the next appearance of it at the end.
  {"http://example.com/", "Other", 180, "Other"},

  // These are deliberately added out of chronological order. The history
  // service should sort them by visit time when returning query results.
  // The correct index sort order is 4 2 3 1 0.
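  // (A smaller days_ago value means a more recent visit, which is how the
  // most-recent-first order 4, 2, 3, 1, 0 above is derived.)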
38   {"http://www.google.com/1", "Title 1", 10,
39    "PAGEONE FOO some body text"},
40   {"http://www.google.com/3", "Title 3", 8,
41    "PAGETHREE BAR some hello world for you"},
42   {"http://www.google.com/2", "Title 2", 9,
43    "PAGETWO FOO some more blah blah blah"},
44 
45   // A more recent visit of the first one.
46   {"http://example.com/", "Other", 6, "Other"},
47 };
48 
49 // Returns true if the nth result in the given results set matches. It will
50 // return false on a non-match or if there aren't enough results.
NthResultIs(const QueryResults & results,int n,int test_entry_index)51 bool NthResultIs(const QueryResults& results,
52                  int n,  // Result index to check.
53                  int test_entry_index) {  // Index of test_entries to compare.
54   if (static_cast<int>(results.size()) <= n)
55     return false;
56 
57   const URLResult& result = results[n];
58 
59   // Check the visit time.
60   if (result.visit_time() != test_entries[test_entry_index].time)
61     return false;
62 
63   // Now check the URL & title.
64   return result.url() == GURL(test_entries[test_entry_index].url) &&
65          result.title() == UTF8ToUTF16(test_entries[test_entry_index].title);
66 }
67 
68 }  // namespace
69 
70 class HistoryQueryTest : public testing::Test {
71  public:
HistoryQueryTest()72   HistoryQueryTest() {
73   }
74 
75   // Acts like a synchronous call to history's QueryHistory.
QueryHistory(const std::string & text_query,const QueryOptions & options,QueryResults * results)76   void QueryHistory(const std::string& text_query,
77                     const QueryOptions& options,
78                     QueryResults* results) {
79     history_->QueryHistory(UTF8ToUTF16(text_query), options, &consumer_,
80         NewCallback(this, &HistoryQueryTest::QueryHistoryComplete));
81     MessageLoop::current()->Run();  // Will go until ...Complete calls Quit.
82     results->Swap(&last_query_results_);
83   }
84 
85  protected:
86   scoped_refptr<HistoryService> history_;
87 
88  private:
SetUp()89   virtual void SetUp() {
90     ASSERT_TRUE(temp_dir_.CreateUniqueTempDir());
91     history_dir_ = temp_dir_.path().AppendASCII("HistoryTest");
92     ASSERT_TRUE(file_util::CreateDirectory(history_dir_));
93 
94     history_ = new HistoryService;
95     if (!history_->Init(history_dir_, NULL)) {
96       history_ = NULL;  // Tests should notice this NULL ptr & fail.
97       return;
98     }
99 
100     // Fill the test data.
101     Time now = Time::Now().LocalMidnight();
102     for (size_t i = 0; i < arraysize(test_entries); i++) {
103       test_entries[i].time =
104           now - (test_entries[i].days_ago * TimeDelta::FromDays(1));
105 
106       // We need the ID scope and page ID so that the visit tracker can find it.
107       const void* id_scope = reinterpret_cast<void*>(1);
108       int32 page_id = i;
109       GURL url(test_entries[i].url);
110 
111       history_->AddPage(url, test_entries[i].time, id_scope, page_id, GURL(),
112                         PageTransition::LINK, history::RedirectList(),
113                         history::SOURCE_BROWSED, false);
114       history_->SetPageTitle(url, UTF8ToUTF16(test_entries[i].title));
115       history_->SetPageContents(url, UTF8ToUTF16(test_entries[i].body));
116     }
117   }
118 
TearDown()119   virtual void TearDown() {
120     if (history_.get()) {
121       history_->SetOnBackendDestroyTask(new MessageLoop::QuitTask);
122       history_->Cleanup();
123       history_ = NULL;
124       MessageLoop::current()->Run();  // Wait for the other thread.
125     }
126   }
127 
QueryHistoryComplete(HistoryService::Handle,QueryResults * results)128   void QueryHistoryComplete(HistoryService::Handle, QueryResults* results) {
129     results->Swap(&last_query_results_);
130     MessageLoop::current()->Quit();  // Will return out to QueryHistory.
131   }
132 
133   ScopedTempDir temp_dir_;
134 
135   MessageLoop message_loop_;
136 
137   FilePath history_dir_;
138 
139   CancelableRequestConsumer consumer_;
140 
141   // The QueryHistoryComplete callback will put the results here so QueryHistory
142   // can return them.
143   QueryResults last_query_results_;
144 
145   DISALLOW_COPY_AND_ASSIGN(HistoryQueryTest);
146 };
147 
TEST_F(HistoryQueryTest,Basic)148 TEST_F(HistoryQueryTest, Basic) {
149   ASSERT_TRUE(history_.get());
150 
151   QueryOptions options;
152   QueryResults results;
153 
154   // Test duplicate collapsing.
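  // The two visits to http://example.com/ (entries 0 and 4) should be
  // collapsed into a single result for the most recent visit (entry 4).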
  QueryHistory(std::string(), options, &results);
  EXPECT_EQ(4U, results.size());
  EXPECT_TRUE(NthResultIs(results, 0, 4));
  EXPECT_TRUE(NthResultIs(results, 1, 2));
  EXPECT_TRUE(NthResultIs(results, 2, 3));
  EXPECT_TRUE(NthResultIs(results, 3, 1));

  // Next query a time range. The beginning should be inclusive, the ending
  // should be exclusive.
  options.begin_time = test_entries[3].time;
  options.end_time = test_entries[2].time;
  QueryHistory(std::string(), options, &results);
  EXPECT_EQ(1U, results.size());
  EXPECT_TRUE(NthResultIs(results, 0, 3));
}

// Tests max_count feature for basic (non-Full Text Search) queries.
TEST_F(HistoryQueryTest, BasicCount) {
  ASSERT_TRUE(history_.get());

  QueryOptions options;
  QueryResults results;

  // Query all time but with a limit on the number of entries. We should
  // get the N most recent entries.
  options.max_count = 2;
  QueryHistory(std::string(), options, &results);
  EXPECT_EQ(2U, results.size());
  EXPECT_TRUE(NthResultIs(results, 0, 4));
  EXPECT_TRUE(NthResultIs(results, 1, 2));
}

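// Tests that reached_beginning() is set only when the queried range includes
// the oldest visit, i.e. begin_time is at or before test_entries[0].time.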
TEST_F(HistoryQueryTest, ReachedBeginning) {
  ASSERT_TRUE(history_.get());

  QueryOptions options;
  QueryResults results;

  QueryHistory(std::string(), options, &results);
  EXPECT_TRUE(results.reached_beginning());

  options.begin_time = test_entries[1].time;
  QueryHistory(std::string(), options, &results);
  EXPECT_FALSE(results.reached_beginning());

  options.begin_time = test_entries[0].time + TimeDelta::FromMicroseconds(1);
  QueryHistory(std::string(), options, &results);
  EXPECT_FALSE(results.reached_beginning());

  options.begin_time = test_entries[0].time;
  QueryHistory(std::string(), options, &results);
  EXPECT_TRUE(results.reached_beginning());

  options.begin_time = test_entries[0].time - TimeDelta::FromMicroseconds(1);
  QueryHistory(std::string(), options, &results);
  EXPECT_TRUE(results.reached_beginning());
}

// This does most of the same tests as above, but searches for an FTS string
// that will match the pages in question. This will trigger a different code
// path.
TEST_F(HistoryQueryTest, FTS) {
  ASSERT_TRUE(history_.get());

  QueryOptions options;
  QueryResults results;

  // Query all of them to make sure they are there and in most-recent-first
  // order. The word "some" appears in the bodies of entries 1, 2, and 3, so
  // all three should be returned.
  QueryHistory("some", options, &results);
  EXPECT_EQ(3U, results.size());
  EXPECT_TRUE(NthResultIs(results, 0, 2));
  EXPECT_TRUE(NthResultIs(results, 1, 3));
  EXPECT_TRUE(NthResultIs(results, 2, 1));

  // Do a query that should only match one of them.
  QueryHistory("PAGETWO", options, &results);
  EXPECT_EQ(1U, results.size());
  EXPECT_TRUE(NthResultIs(results, 0, 3));

  // Next query a time range. The beginning should be inclusive, the ending
  // should be exclusive.
  options.begin_time = test_entries[1].time;
  options.end_time = test_entries[3].time;
  QueryHistory("some", options, &results);
  EXPECT_EQ(1U, results.size());
  EXPECT_TRUE(NthResultIs(results, 0, 1));
}

// Searches titles.
TEST_F(HistoryQueryTest, FTSTitle) {
  ASSERT_TRUE(history_.get());

  QueryOptions options;
  QueryResults results;

  // Query by a word that appears only in the titles. All three Google pages
  // should be returned, most recent first.
  QueryHistory("title", options, &results);
  EXPECT_EQ(3U, results.size());
  EXPECT_TRUE(NthResultIs(results, 0, 2));
  EXPECT_TRUE(NthResultIs(results, 1, 3));
  EXPECT_TRUE(NthResultIs(results, 2, 1));
}

// Tests prefix searching for Full Text Search queries.
TEST_F(HistoryQueryTest, FTSPrefix) {
  ASSERT_TRUE(history_.get());

  QueryOptions options;
  QueryResults results;

  // Query with a prefix search.  Should return matches for "PAGETWO" and
  // "PAGETHREE".
  QueryHistory("PAGET", options, &results);
  EXPECT_EQ(2U, results.size());
  EXPECT_TRUE(NthResultIs(results, 0, 2));
  EXPECT_TRUE(NthResultIs(results, 1, 3));
}

// Tests max_count feature for Full Text Search queries.
TEST_F(HistoryQueryTest, FTSCount) {
  ASSERT_TRUE(history_.get());

  QueryOptions options;
  QueryResults results;

  // Query all time but with a limit on the number of entries. We should
  // get the N most recent entries.
  options.max_count = 2;
  QueryHistory("some", options, &results);
  EXPECT_EQ(2U, results.size());
  EXPECT_TRUE(NthResultIs(results, 0, 2));
  EXPECT_TRUE(NthResultIs(results, 1, 3));

  // Now query a subset of the pages and limit by N items. "FOO" should match
  // the 2nd & 3rd pages, but we should only get the 3rd one because of the one
  // page max restriction.
  options.max_count = 1;
  QueryHistory("FOO", options, &results);
  EXPECT_EQ(1U, results.size());
  EXPECT_TRUE(NthResultIs(results, 0, 3));
}

// Tests that FTS queries can find URLs when they exist only in the archived
// database. This also tests that imported URLs can be found, since we use
// AddPageWithDetails just like the importer.
TEST_F(HistoryQueryTest, FTSArchived) {
  ASSERT_TRUE(history_.get());

  std::vector<URLRow> urls_to_add;

  URLRow row1(GURL("http://foo.bar/"));
  row1.set_title(UTF8ToUTF16("archived title"));
  row1.set_last_visit(Time::Now() - TimeDelta::FromDays(365));
  urls_to_add.push_back(row1);

  URLRow row2(GURL("http://foo.bar/"));
  row2.set_title(UTF8ToUTF16("nonarchived title"));
  row2.set_last_visit(Time::Now());
  urls_to_add.push_back(row2);

  history_->AddPagesWithDetails(urls_to_add, history::SOURCE_BROWSED);

  QueryOptions options;
  QueryResults results;

  // Query all time. The title we get should be the one in the full text
  // database and not the most current title (since otherwise highlighting in
  // the title might be wrong).
  QueryHistory("archived", options, &results);
  ASSERT_EQ(1U, results.size());
  EXPECT_TRUE(row1.url() == results[0].url());
  EXPECT_TRUE(row1.title() == results[0].title());
}

/* TODO(brettw) re-enable this. It is commented out because the current history
   code prohibits adding more than one indexed page with the same URL. When we
   have tiered history, there could be a dupe in the archived history which
   won't get picked up by the deleter and it can happen again. When this is the
   case, we should fix this test to duplicate that situation.

// Tests duplicate collapsing in Full Text Search situations.
TEST_F(HistoryQueryTest, FTSDupes) {
  ASSERT_TRUE(history_.get());

  QueryOptions options;
  QueryResults results;

  QueryHistory("Other", options, &results);
  EXPECT_EQ(1, results.urls().size());
  EXPECT_TRUE(NthResultIs(results, 0, 4));
}
*/

}  // namespace history