// Copyright (c) 2011 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "net/proxy/proxy_script_fetcher_impl.h"

#include <string>

#include "base/file_path.h"
#include "base/compiler_specific.h"
#include "base/path_service.h"
#include "base/utf_string_conversions.h"
#include "net/base/net_util.h"
#include "net/base/ssl_config_service_defaults.h"
#include "net/base/test_completion_callback.h"
#include "net/disk_cache/disk_cache.h"
#include "net/http/http_cache.h"
#include "net/http/http_network_session.h"
#include "net/url_request/url_request_test_util.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "testing/platform_test.h"

namespace net {

// TODO(eroman):
//   - Test canceling an outstanding request.
//   - Test deleting ProxyScriptFetcher while a request is in progress.
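//
// A minimal sketch of what the cancellation test might look like (not wired
// up here; it assumes the fetcher's Cancel() aborts the in-flight request
// without invoking the callback -- verify against the ProxyScriptFetcher
// interface before enabling):
//
//   TEST_F(ProxyScriptFetcherImplTest, Cancel) {
//     ASSERT_TRUE(test_server_.Start());
//     scoped_refptr<URLRequestContext> context(new RequestContext);
//     ProxyScriptFetcherImpl pac_fetcher(context);
//     string16 text;
//     TestCompletionCallback callback;
//     int result = pac_fetcher.Fetch(
//         test_server_.GetURL("slow/proxy.pac?1.2"), &text, &callback);
//     EXPECT_EQ(ERR_IO_PENDING, result);
//     pac_fetcher.Cancel();
//     // The callback should never fire; pumping the message loop and then
//     // checking callback.have_result() would be one way to assert that.
//   }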

namespace {

const FilePath::CharType kDocRoot[] =
    FILE_PATH_LITERAL("net/data/proxy_script_fetcher_unittest");

struct FetchResult {
  int code;
  string16 text;
};

// A non-mock URL request context which can access http:// and file:// urls.
class RequestContext : public URLRequestContext {
 public:
  RequestContext() {
    ProxyConfig no_proxy;
    set_host_resolver(
        CreateSystemHostResolver(HostResolver::kDefaultParallelism,
                                 NULL, NULL));
    set_cert_verifier(new CertVerifier);
    set_proxy_service(ProxyService::CreateFixed(no_proxy));
    set_ssl_config_service(new SSLConfigServiceDefaults);

    HttpNetworkSession::Params params;
    params.host_resolver = host_resolver();
    params.cert_verifier = cert_verifier();
    params.proxy_service = proxy_service();
    params.ssl_config_service = ssl_config_service();
    scoped_refptr<HttpNetworkSession> network_session(
        new HttpNetworkSession(params));
    set_http_transaction_factory(new HttpCache(
        network_session,
        HttpCache::DefaultBackend::InMemory(0)));
  }

 private:
  ~RequestContext() {
    delete http_transaction_factory();
    delete cert_verifier();
    delete host_resolver();
  }
};

// Get a file:// url relative to net/data/proxy_script_fetcher_unittest.
GURL GetTestFileUrl(const std::string& relpath) {
  FilePath path;
  PathService::Get(base::DIR_SOURCE_ROOT, &path);
  path = path.AppendASCII("net");
  path = path.AppendASCII("data");
  path = path.AppendASCII("proxy_script_fetcher_unittest");
  GURL base_url = FilePathToFileURL(path);
  return GURL(base_url.spec() + "/" + relpath);
}

}  // namespace

class ProxyScriptFetcherImplTest : public PlatformTest {
 public:
  ProxyScriptFetcherImplTest()
      : test_server_(TestServer::TYPE_HTTP, FilePath(kDocRoot)) {
  }

  static void SetUpTestCase() {
    URLRequest::AllowFileAccess();
  }

 protected:
  TestServer test_server_;
};

TEST_F(ProxyScriptFetcherImplTest, FileUrl) {
  scoped_refptr<URLRequestContext> context(new RequestContext);
  ProxyScriptFetcherImpl pac_fetcher(context);

  { // Fetch a non-existent file.
    string16 text;
    TestCompletionCallback callback;
    int result = pac_fetcher.Fetch(GetTestFileUrl("does-not-exist"),
                                   &text, &callback);
    EXPECT_EQ(ERR_IO_PENDING, result);
    EXPECT_EQ(ERR_FILE_NOT_FOUND, callback.WaitForResult());
    EXPECT_TRUE(text.empty());
  }
  { // Fetch a file that exists.
    string16 text;
    TestCompletionCallback callback;
    int result = pac_fetcher.Fetch(GetTestFileUrl("pac.txt"),
                                   &text, &callback);
    EXPECT_EQ(ERR_IO_PENDING, result);
    EXPECT_EQ(OK, callback.WaitForResult());
    EXPECT_EQ(ASCIIToUTF16("-pac.txt-\n"), text);
  }
}

// Note that all mime types are allowed for PAC files, to be consistent
// with other browsers.
TEST_F(ProxyScriptFetcherImplTest, HttpMimeType) {
  ASSERT_TRUE(test_server_.Start());

  scoped_refptr<URLRequestContext> context(new RequestContext);
  ProxyScriptFetcherImpl pac_fetcher(context);

  { // Fetch a PAC with mime type "text/plain"
    GURL url(test_server_.GetURL("files/pac.txt"));
    string16 text;
    TestCompletionCallback callback;
    int result = pac_fetcher.Fetch(url, &text, &callback);
    EXPECT_EQ(ERR_IO_PENDING, result);
    EXPECT_EQ(OK, callback.WaitForResult());
    EXPECT_EQ(ASCIIToUTF16("-pac.txt-\n"), text);
  }
  { // Fetch a PAC with mime type "text/html"
    GURL url(test_server_.GetURL("files/pac.html"));
    string16 text;
    TestCompletionCallback callback;
    int result = pac_fetcher.Fetch(url, &text, &callback);
    EXPECT_EQ(ERR_IO_PENDING, result);
    EXPECT_EQ(OK, callback.WaitForResult());
    EXPECT_EQ(ASCIIToUTF16("-pac.html-\n"), text);
  }
  { // Fetch a PAC with mime type "application/x-ns-proxy-autoconfig"
    GURL url(test_server_.GetURL("files/pac.nsproxy"));
    string16 text;
    TestCompletionCallback callback;
    int result = pac_fetcher.Fetch(url, &text, &callback);
    EXPECT_EQ(ERR_IO_PENDING, result);
    EXPECT_EQ(OK, callback.WaitForResult());
    EXPECT_EQ(ASCIIToUTF16("-pac.nsproxy-\n"), text);
  }
}

TEST_F(ProxyScriptFetcherImplTest, HttpStatusCode) {
  ASSERT_TRUE(test_server_.Start());

  scoped_refptr<URLRequestContext> context(new RequestContext);
  ProxyScriptFetcherImpl pac_fetcher(context);

  { // Fetch a PAC which gives a 500 -- FAIL
    GURL url(test_server_.GetURL("files/500.pac"));
    string16 text;
    TestCompletionCallback callback;
    int result = pac_fetcher.Fetch(url, &text, &callback);
    EXPECT_EQ(ERR_IO_PENDING, result);
    EXPECT_EQ(ERR_PAC_STATUS_NOT_OK, callback.WaitForResult());
    EXPECT_TRUE(text.empty());
  }
  { // Fetch a PAC which gives a 404 -- FAIL
    GURL url(test_server_.GetURL("files/404.pac"));
    string16 text;
    TestCompletionCallback callback;
    int result = pac_fetcher.Fetch(url, &text, &callback);
    EXPECT_EQ(ERR_IO_PENDING, result);
    EXPECT_EQ(ERR_PAC_STATUS_NOT_OK, callback.WaitForResult());
    EXPECT_TRUE(text.empty());
  }
}

TEST_F(ProxyScriptFetcherImplTest, ContentDisposition) {
  ASSERT_TRUE(test_server_.Start());

  scoped_refptr<URLRequestContext> context(new RequestContext);
  ProxyScriptFetcherImpl pac_fetcher(context);

  // Fetch PAC scripts via HTTP with a Content-Disposition header -- should
  // have no effect.
  GURL url(test_server_.GetURL("files/downloadable.pac"));
  string16 text;
  TestCompletionCallback callback;
  int result = pac_fetcher.Fetch(url, &text, &callback);
  EXPECT_EQ(ERR_IO_PENDING, result);
  EXPECT_EQ(OK, callback.WaitForResult());
  EXPECT_EQ(ASCIIToUTF16("-downloadable.pac-\n"), text);
}

TEST_F(ProxyScriptFetcherImplTest, NoCache) {
  ASSERT_TRUE(test_server_.Start());

  scoped_refptr<URLRequestContext> context(new RequestContext);
  ProxyScriptFetcherImpl pac_fetcher(context);

  // Fetch a PAC script whose HTTP headers make it cacheable for 1 hour.
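  // (For illustration: "cacheable for 1 hour" presumably means the test
  // server pairs cacheable_1hr.pac with mock response headers along the
  // lines of
  //
  //   HTTP/1.1 200 OK
  //   Cache-Control: max-age=3600
  //
  // The exact mock header file is an assumption; this test only checks the
  // observable behavior below.)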
  GURL url(test_server_.GetURL("files/cacheable_1hr.pac"));
  {
    string16 text;
    TestCompletionCallback callback;
    int result = pac_fetcher.Fetch(url, &text, &callback);
    EXPECT_EQ(ERR_IO_PENDING, result);
    EXPECT_EQ(OK, callback.WaitForResult());
    EXPECT_EQ(ASCIIToUTF16("-cacheable_1hr.pac-\n"), text);
  }

  // Now kill the HTTP server.
  ASSERT_TRUE(test_server_.Stop());

  // Try to fetch the file again -- it should fail, since the server is no
  // longer running. (If it were instead being loaded from the cache, the
  // fetch would succeed.)
  {
    string16 text;
    TestCompletionCallback callback;
    int result = pac_fetcher.Fetch(url, &text, &callback);
    EXPECT_EQ(ERR_IO_PENDING, result);
    EXPECT_EQ(ERR_CONNECTION_REFUSED, callback.WaitForResult());
  }
}

TEST_F(ProxyScriptFetcherImplTest, TooLarge) {
  ASSERT_TRUE(test_server_.Start());

  scoped_refptr<URLRequestContext> context(new RequestContext);
  ProxyScriptFetcherImpl pac_fetcher(context);

  // Set the maximum response size to 50 bytes.
  int prev_size = pac_fetcher.SetSizeConstraint(50);

  // These two URLs are the same file, but are http:// vs file://
  GURL urls[] = {
    test_server_.GetURL("files/large-pac.nsproxy"),
    GetTestFileUrl("large-pac.nsproxy")
  };

  // Try fetching URLs that are 101 bytes large. We should abort the request
  // after 50 bytes have been read, and fail with a too large error.
  for (size_t i = 0; i < arraysize(urls); ++i) {
    const GURL& url = urls[i];
    string16 text;
    TestCompletionCallback callback;
    int result = pac_fetcher.Fetch(url, &text, &callback);
    EXPECT_EQ(ERR_IO_PENDING, result);
    EXPECT_EQ(ERR_FILE_TOO_BIG, callback.WaitForResult());
    EXPECT_TRUE(text.empty());
  }

  // Restore the original size bound.
  pac_fetcher.SetSizeConstraint(prev_size);

  { // Make sure we can still fetch regular URLs.
    GURL url(test_server_.GetURL("files/pac.nsproxy"));
    string16 text;
    TestCompletionCallback callback;
    int result = pac_fetcher.Fetch(url, &text, &callback);
    EXPECT_EQ(ERR_IO_PENDING, result);
    EXPECT_EQ(OK, callback.WaitForResult());
    EXPECT_EQ(ASCIIToUTF16("-pac.nsproxy-\n"), text);
  }
}

TEST_F(ProxyScriptFetcherImplTest, Hang) {
  ASSERT_TRUE(test_server_.Start());

  scoped_refptr<URLRequestContext> context(new RequestContext);
  ProxyScriptFetcherImpl pac_fetcher(context);

  // Set the timeout period to 0.5 seconds.
  base::TimeDelta prev_timeout = pac_fetcher.SetTimeoutConstraint(
      base::TimeDelta::FromMilliseconds(500));

  // Try fetching a URL which takes 1.2 seconds. We should abort the request
  // after 500 ms, and fail with a timeout error.
  { GURL url(test_server_.GetURL("slow/proxy.pac?1.2"));
    string16 text;
    TestCompletionCallback callback;
    int result = pac_fetcher.Fetch(url, &text, &callback);
    EXPECT_EQ(ERR_IO_PENDING, result);
    EXPECT_EQ(ERR_TIMED_OUT, callback.WaitForResult());
    EXPECT_TRUE(text.empty());
  }

  // Restore the original timeout period.
  pac_fetcher.SetTimeoutConstraint(prev_timeout);

  { // Make sure we can still fetch regular URLs.
    GURL url(test_server_.GetURL("files/pac.nsproxy"));
    string16 text;
    TestCompletionCallback callback;
    int result = pac_fetcher.Fetch(url, &text, &callback);
    EXPECT_EQ(ERR_IO_PENDING, result);
    EXPECT_EQ(OK, callback.WaitForResult());
    EXPECT_EQ(ASCIIToUTF16("-pac.nsproxy-\n"), text);
  }
}

// The ProxyScriptFetcher should decode any content-codings
// (like gzip, bzip, etc.), and apply any charset conversions to yield
// UTF-16 text (the fetched script is returned as a string16).
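//
// For illustration: the fixtures below presumably rely on the test server's
// mock HTTP headers, e.g. a "Content-Encoding: gzip" header for
// files/gzipped_pac, and a charset declaration (a Content-Type charset
// parameter or a byte-order mark) for files/utf16be_pac. The exact header
// files are assumptions; only the decoded output is checked here.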
TEST_F(ProxyScriptFetcherImplTest, Encodings) {
  ASSERT_TRUE(test_server_.Start());

  scoped_refptr<URLRequestContext> context(new RequestContext);
  ProxyScriptFetcherImpl pac_fetcher(context);

  // Test a response that is gzip-encoded -- should get inflated.
  {
    GURL url(test_server_.GetURL("files/gzipped_pac"));
    string16 text;
    TestCompletionCallback callback;
    int result = pac_fetcher.Fetch(url, &text, &callback);
    EXPECT_EQ(ERR_IO_PENDING, result);
    EXPECT_EQ(OK, callback.WaitForResult());
    EXPECT_EQ(ASCIIToUTF16("This data was gzipped.\n"), text);
  }

  // Test a response that was served as UTF-16 (BE). It should
  // be decoded and returned as UTF-16 text (string16).
  {
    GURL url(test_server_.GetURL("files/utf16be_pac"));
    string16 text;
    TestCompletionCallback callback;
    int result = pac_fetcher.Fetch(url, &text, &callback);
    EXPECT_EQ(ERR_IO_PENDING, result);
    EXPECT_EQ(OK, callback.WaitForResult());
    EXPECT_EQ(ASCIIToUTF16("This was encoded as UTF-16BE.\n"), text);
  }
}

TEST_F(ProxyScriptFetcherImplTest, DataURLs) {
  scoped_refptr<URLRequestContext> context(new RequestContext);
  ProxyScriptFetcherImpl pac_fetcher(context);

  const char kEncodedUrl[] =
      "data:application/x-ns-proxy-autoconfig;base64,ZnVuY3Rpb24gRmluZFByb3h5R"
      "m9yVVJMKHVybCwgaG9zdCkgewogIGlmIChob3N0ID09ICdmb29iYXIuY29tJykKICAgIHJl"
      "dHVybiAnUFJPWFkgYmxhY2tob2xlOjgwJzsKICByZXR1cm4gJ0RJUkVDVCc7Cn0=";
  const char kPacScript[] =
      "function FindProxyForURL(url, host) {\n"
      "  if (host == 'foobar.com')\n"
      "    return 'PROXY blackhole:80';\n"
      "  return 'DIRECT';\n"
      "}";
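
  // kEncodedUrl is simply kPacScript run through base64. A minimal sketch of
  // how it could be regenerated, assuming base::Base64Encode from
  // "base/base64.h" (shown only as a comment; the constants above remain the
  // source of truth):
  //
  //   std::string base64;
  //   base::Base64Encode(kPacScript, &base64);
  //   std::string encoded_url =
  //       "data:application/x-ns-proxy-autoconfig;base64," + base64;
  //   // encoded_url should now equal kEncodedUrl.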

  // Test fetching a "data:"-url containing a base64 encoded PAC script.
  {
    GURL url(kEncodedUrl);
    string16 text;
    TestCompletionCallback callback;
    int result = pac_fetcher.Fetch(url, &text, &callback);
    EXPECT_EQ(OK, result);
    EXPECT_EQ(ASCIIToUTF16(kPacScript), text);
  }

  const char kEncodedUrlBroken[] =
      "data:application/x-ns-proxy-autoconfig;base64,ZnVuY3Rpb24gRmluZFByb3h5R";

  // Test a broken "data:"-url containing a base64 encoded PAC script.
  {
    GURL url(kEncodedUrlBroken);
    string16 text;
    TestCompletionCallback callback;
    int result = pac_fetcher.Fetch(url, &text, &callback);
    EXPECT_EQ(ERR_FAILED, result);
  }
}

}  // namespace net