// Copyright (c) 2006-2009 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "net/url_request/request_tracker.h"

#include "base/compiler_specific.h"
#include "base/format_macros.h"
#include "base/string_util.h"
#include "testing/gtest/include/gtest/gtest.h"

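// These tests exercise RequestTracker<T>: tracking of live requests, the
// bounded and unbounded graveyard of recently deceased requests, truncation
// of overly long graveyard URLs, and filtering of graveyard entries.
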
namespace {

static const int kMaxNumLoadLogEntries = 1;

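// Minimal request object that RequestTracker<TestRequest> can track: it holds
// the original URL and a LoadLog, and hands them to the tracker through
// GetInfoForTracker().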
class TestRequest {
 public:
  explicit TestRequest(const GURL& url)
      : url_(url),
        load_log_(new net::LoadLog(kMaxNumLoadLogEntries)),
        ALLOW_THIS_IN_INITIALIZER_LIST(request_tracker_node_(this)) {}
  ~TestRequest() {}

  // This method is used by the RequestTrackerTest.BasicBounded test.
  const GURL& original_url() const { return url_; }

 private:
  // RequestTracker<T> will access GetInfoForTracker() and
  // |request_tracker_node_|.
  friend class RequestTracker<TestRequest>;

  void GetInfoForTracker(
      RequestTracker<TestRequest>::RecentRequestInfo* info) const {
    info->original_url = url_;
    info->load_log = load_log_;
  }

  const GURL url_;
  scoped_refptr<net::LoadLog> load_log_;

  RequestTracker<TestRequest>::Node request_tracker_node_;

  DISALLOW_COPY_AND_ASSIGN(TestRequest);
};


TEST(RequestTrackerTest, BasicBounded) {
  RequestTracker<TestRequest> tracker;
  EXPECT_FALSE(tracker.IsUnbounded());
  EXPECT_EQ(0u, tracker.GetLiveRequests().size());
  EXPECT_EQ(0u, tracker.GetRecentlyDeceased().size());

  TestRequest req1(GURL("http://req1"));
  TestRequest req2(GURL("http://req2"));
  TestRequest req3(GURL("http://req3"));
  TestRequest req4(GURL("http://req4"));
  TestRequest req5(GURL("http://req5"));

  tracker.Add(&req1);
  tracker.Add(&req2);
  tracker.Add(&req3);
  tracker.Add(&req4);
  tracker.Add(&req5);

  std::vector<TestRequest*> live_reqs = tracker.GetLiveRequests();

  ASSERT_EQ(5u, live_reqs.size());
  EXPECT_EQ(GURL("http://req1"), live_reqs[0]->original_url());
  EXPECT_EQ(GURL("http://req2"), live_reqs[1]->original_url());
  EXPECT_EQ(GURL("http://req3"), live_reqs[2]->original_url());
  EXPECT_EQ(GURL("http://req4"), live_reqs[3]->original_url());
  EXPECT_EQ(GURL("http://req5"), live_reqs[4]->original_url());

  tracker.Remove(&req1);
  tracker.Remove(&req5);
  tracker.Remove(&req3);

  ASSERT_EQ(3u, tracker.GetRecentlyDeceased().size());

  live_reqs = tracker.GetLiveRequests();

  ASSERT_EQ(2u, live_reqs.size());
  EXPECT_EQ(GURL("http://req2"), live_reqs[0]->original_url());
  EXPECT_EQ(GURL("http://req4"), live_reqs[1]->original_url());
}

TEST(RequestTrackerTest, GraveyardBounded) {
  RequestTracker<TestRequest> tracker;
  EXPECT_FALSE(tracker.IsUnbounded());
  EXPECT_EQ(0u, tracker.GetLiveRequests().size());
  EXPECT_EQ(0u, tracker.GetRecentlyDeceased().size());

  // Add twice as many requests as will fit in the graveyard.
  for (size_t i = 0;
       i < RequestTracker<TestRequest>::kMaxGraveyardSize * 2;
       ++i) {
    TestRequest req(GURL(StringPrintf("http://req%" PRIuS, i).c_str()));
    tracker.Add(&req);
    tracker.Remove(&req);
  }

  // Check that only the last |kMaxGraveyardSize| requests are in memory.

  RequestTracker<TestRequest>::RecentRequestInfoList recent_reqs =
      tracker.GetRecentlyDeceased();

  ASSERT_EQ(RequestTracker<TestRequest>::kMaxGraveyardSize, recent_reqs.size());

  for (size_t i = 0; i < RequestTracker<TestRequest>::kMaxGraveyardSize; ++i) {
    size_t req_number = i + RequestTracker<TestRequest>::kMaxGraveyardSize;
    GURL url(StringPrintf("http://req%" PRIuS, req_number).c_str());
    EXPECT_EQ(url, recent_reqs[i].original_url);
  }
}

TEST(RequestTrackerTest, GraveyardUnbounded) {
  RequestTracker<TestRequest> tracker;
  EXPECT_FALSE(tracker.IsUnbounded());
  EXPECT_EQ(0u, tracker.GetLiveRequests().size());
  EXPECT_EQ(0u, tracker.GetRecentlyDeceased().size());

  tracker.SetUnbounded(true);

  EXPECT_TRUE(tracker.IsUnbounded());

  // Add twice as many requests as would fit in the bounded graveyard.

  size_t kMaxSize = RequestTracker<TestRequest>::kMaxGraveyardSize * 2;
  for (size_t i = 0; i < kMaxSize; ++i) {
    TestRequest req(GURL(StringPrintf("http://req%" PRIuS, i).c_str()));
    tracker.Add(&req);
    tracker.Remove(&req);
  }

  // Check that all of them got saved.

  RequestTracker<TestRequest>::RecentRequestInfoList recent_reqs =
      tracker.GetRecentlyDeceased();

  ASSERT_EQ(kMaxSize, recent_reqs.size());

  for (size_t i = 0; i < kMaxSize; ++i) {
    GURL url(StringPrintf("http://req%" PRIuS, i).c_str());
    EXPECT_EQ(url, recent_reqs[i].original_url);
  }
}

// Check that very long URLs are truncated.
TEST(RequestTrackerTest, GraveyardURLBounded) {
  RequestTracker<TestRequest> tracker;
  EXPECT_FALSE(tracker.IsUnbounded());

  std::string big_url_spec("http://");
  big_url_spec.resize(2 * RequestTracker<TestRequest>::kMaxGraveyardURLSize,
                      'x');
  GURL big_url(big_url_spec);
  TestRequest req(big_url);

  tracker.Add(&req);
  tracker.Remove(&req);

  ASSERT_EQ(1u, tracker.GetRecentlyDeceased().size());
  // The +1 is because GURL canonicalizes with a trailing '/' ... maybe
  // we should just save the std::string rather than the GURL.
  EXPECT_EQ(RequestTracker<TestRequest>::kMaxGraveyardURLSize + 1,
            tracker.GetRecentlyDeceased()[0].original_url.spec().size());
}

// Test that it doesn't fail if the URL was invalid. http://crbug.com/21423.
TEST(URLRequestTrackerTest, TrackingInvalidURL) {
  RequestTracker<TestRequest> tracker;
  EXPECT_FALSE(tracker.IsUnbounded());

  EXPECT_EQ(0u, tracker.GetLiveRequests().size());
  EXPECT_EQ(0u, tracker.GetRecentlyDeceased().size());

  GURL invalid_url("xabc");
  EXPECT_FALSE(invalid_url.is_valid());
  TestRequest req(invalid_url);

  tracker.Add(&req);
  tracker.Remove(&req);

  // Check that the invalid URL made it into the graveyard.
  ASSERT_EQ(1u, tracker.GetRecentlyDeceased().size());
  EXPECT_FALSE(tracker.GetRecentlyDeceased()[0].original_url.is_valid());
}

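// Graveyard filter used by the GraveyardCanBeFiltered test below: anything
// that is not a "chrome://" or "data:" URL is allowed into the graveyard.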
bool ShouldRequestBeAddedToGraveyard(const GURL& url) {
  return !url.SchemeIs("chrome") && !url.SchemeIs("data");
}

// Check that we can exclude "chrome://" URLs and "data:" URLs from being
// saved into the recent requests list (graveyard), by using a filter.
TEST(RequestTrackerTest, GraveyardCanBeFiltered) {
  RequestTracker<TestRequest> tracker;
  EXPECT_FALSE(tracker.IsUnbounded());

  tracker.SetGraveyardFilter(ShouldRequestBeAddedToGraveyard);

  // This will be excluded.
  TestRequest req1(GURL("chrome://dontcare"));
  tracker.Add(&req1);
  tracker.Remove(&req1);

  // This will be added to the graveyard.
  TestRequest req2(GURL("chrome2://dontcare"));
  tracker.Add(&req2);
  tracker.Remove(&req2);

  // This will be added to the graveyard.
  TestRequest req3(GURL("http://foo"));
  tracker.Add(&req3);
  tracker.Remove(&req3);

  // This will be excluded.
  TestRequest req4(GURL("data:sup"));
  tracker.Add(&req4);
  tracker.Remove(&req4);

  ASSERT_EQ(2u, tracker.GetRecentlyDeceased().size());
  EXPECT_EQ("chrome2://dontcare/",
            tracker.GetRecentlyDeceased()[0].original_url.spec());
  EXPECT_EQ("http://foo/",
            tracker.GetRecentlyDeceased()[1].original_url.spec());
}

// Convert an unbounded tracker back to being bounded.
TEST(RequestTrackerTest, ConvertUnboundedToBounded) {
  RequestTracker<TestRequest> tracker;
  EXPECT_FALSE(tracker.IsUnbounded());
  EXPECT_EQ(0u, tracker.GetLiveRequests().size());
  EXPECT_EQ(0u, tracker.GetRecentlyDeceased().size());

  tracker.SetUnbounded(true);
  EXPECT_TRUE(tracker.IsUnbounded());

  // Add twice as many requests as would fit in the bounded graveyard.

  size_t kMaxSize = RequestTracker<TestRequest>::kMaxGraveyardSize * 2;
  for (size_t i = 0; i < kMaxSize; ++i) {
    TestRequest req(GURL(StringPrintf("http://req%" PRIuS, i).c_str()));
    tracker.Add(&req);
    tracker.Remove(&req);
  }

  // Check that all of them got saved.
  ASSERT_EQ(kMaxSize, tracker.GetRecentlyDeceased().size());

  // Now make the tracker bounded, and add more entries to its graveyard.
  tracker.SetUnbounded(false);

  kMaxSize = RequestTracker<TestRequest>::kMaxGraveyardSize;
  for (size_t i = 0; i < kMaxSize; ++i) {
    TestRequest req(GURL(StringPrintf("http://req%" PRIuS, i).c_str()));
    tracker.Add(&req);
    tracker.Remove(&req);
  }

  // We should only have kMaxGraveyardSize entries now.
  ASSERT_EQ(kMaxSize, tracker.GetRecentlyDeceased().size());
}

}  // namespace