
Searched refs:urllib (Results 1 – 25 of 400) sorted by relevance


/external/python/cpython3/Lib/test/
test_urllib.py
3 import urllib.parse
4 import urllib.request
5 import urllib.error
40 opener = urllib.request.FancyURLopener(proxies=proxies)
56 return urllib.request.FancyURLopener()
129 self._ftpwrapper_class = urllib.request.ftpwrapper
130 urllib.request.ftpwrapper = FakeFtpWrapper
133 urllib.request.ftpwrapper = self._ftpwrapper_class
154 self.quoted_pathname = urllib.parse.quote(self.pathname)
226 self.assertRaises(ValueError,urllib.request.urlopen,'./' + self.pathname)
[all …]
test_urlparse.py
4 import urllib.parse
86 result = urllib.parse.urlparse(url)
92 result2 = urllib.parse.urlunparse(result)
98 result3 = urllib.parse.urlparse(result.geturl())
113 result = urllib.parse.urlsplit(url)
118 result2 = urllib.parse.urlunsplit(result)
123 result3 = urllib.parse.urlsplit(result.geturl())
138 result = urllib.parse.parse_qsl(orig, keep_blank_values=True)
141 result = urllib.parse.parse_qsl(orig, keep_blank_values=False)
147 result = urllib.parse.parse_qs(orig, keep_blank_values=True)
[all …]
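
The test_urlparse.py matches above exercise the round-trip behaviour of urllib.parse. A minimal sketch of the same pattern (the URL is an arbitrary example, not one taken from the test):

    import urllib.parse

    url = "https://example.com/path;params?a=1&b=&c=3#frag"  # arbitrary example URL

    # urlparse()/urlunparse() round-trip: the reassembled URL equals the original
    parts = urllib.parse.urlparse(url)
    assert urllib.parse.urlunparse(parts) == url

    # urlsplit()/urlunsplit() do the same but leave ;params inside the path
    split = urllib.parse.urlsplit(url)
    assert urllib.parse.urlunsplit(split) == url

    # parse_qsl() keeps or drops empty values depending on keep_blank_values
    print(urllib.parse.parse_qsl(parts.query, keep_blank_values=True))   # [('a', '1'), ('b', ''), ('c', '3')]
    print(urllib.parse.parse_qsl(parts.query, keep_blank_values=False))  # [('a', '1'), ('c', '3')]
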
test_urllib2net.py
8 import urllib.error
9 import urllib.request
38 _urlopen_with_retry = _wrap_with_retry_thrice(urllib.request.urlopen,
39 urllib.error.URLError)
85 self.addCleanup(urllib.request.urlcleanup)
112 None, urllib.error.URLError),
125 urllib.error.URLError),
131 self.assertRaises(ValueError, urllib.request.urlopen,'./relative_path/to/file')
164 req = urllib.request.Request(urlwith_frag)
165 res = urllib.request.urlopen(req)
[all …]
test_urllib2_localnet.py
4 import urllib.parse
5 import urllib.request
265 (scm, netloc, path, params, query, fragment) = urllib.parse.urlparse(
304 ah = urllib.request.HTTPBasicAuthHandler()
306 urllib.request.install_opener(urllib.request.build_opener(ah))
308 self.assertTrue(urllib.request.urlopen(self.server_url))
309 except urllib.error.HTTPError:
313 ah = urllib.request.HTTPBasicAuthHandler()
315 urllib.request.install_opener(urllib.request.build_opener(ah))
316 self.assertRaises(urllib.error.HTTPError, urllib.request.urlopen, self.server_url)
[all …]
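
test_urllib2_localnet.py drives the standard opener-based basic-auth flow. A hedged sketch of that flow (the URL, realm, and credentials here are placeholders, not values from the test):

    import urllib.error
    import urllib.request

    server_url = "http://127.0.0.1:8080/protected/"   # placeholder
    realm = "testrealm"                                # placeholder

    # Register credentials with a basic-auth handler and install it globally,
    # mirroring the build_opener()/install_opener() calls quoted above.
    auth_handler = urllib.request.HTTPBasicAuthHandler()
    auth_handler.add_password(realm=realm, uri=server_url, user="user", passwd="secret")
    urllib.request.install_opener(urllib.request.build_opener(auth_handler))

    try:
        with urllib.request.urlopen(server_url) as resp:
            print(resp.status)
    except urllib.error.HTTPError as err:
        # Wrong or missing credentials surface here as an HTTP 401
        print("request failed:", err.code)
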
test_urllib2.py
14 import urllib.request
17 from urllib.request import (Request, OpenerDirector, HTTPBasicAuthHandler,
21 from urllib.parse import urlparse
22 import urllib.error
52 self.addCleanup(urllib.request.urlcleanup)
54 self.assertRaises(ValueError, urllib.request.urlopen, 'bogus url')
57 fname = os.path.abspath(urllib.request.__file__).replace(os.sep, '/')
64 with urllib.request.urlopen(file_url) as f:
76 self.assertEqual(urllib.request.parse_http_list(string), list)
79 err = urllib.error.URLError('reason')
[all …]
test_urllibnet.py
7 import urllib.parse
8 import urllib.request
28 self.addCleanup(urllib.request.urlcleanup)
30 domain = urllib.parse.urlparse(support.TEST_HTTP_URL).netloc
32 f = urllib.request.urlopen(support.TEST_HTTP_URL)
55 self.addCleanup(urllib.request.urlcleanup)
61 r = urllib.request.urlopen(*args, **kwargs)
104 open_url = urllib.request.FancyURLopener().open(URL)
147 urllib.request.urlopen("http://{}/".format(bogus_domain))
155 self.addCleanup(urllib.request.urlcleanup)
[all …]
test_http_cookiejar.py
8 import urllib.request
318 req = urllib.request.Request(url)
470 request = urllib.request.Request(url)
604 req = urllib.request.Request("http://www.coyote.com/")
731 req = urllib.request.Request(
737 req = urllib.request.Request(
742 req = urllib.request.Request("http://www.example.com")
760 req = urllib.request.Request(url)
770 req = urllib.request.Request("http://www.acme.com:1234/",
773 req = urllib.request.Request("http://www.acme.com/",
[all …]
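
The test_http_cookiejar.py matches construct many urllib.request.Request objects for the cookie jar to inspect. The usual way to wire the two together in application code is an HTTPCookieProcessor; a hedged sketch (placeholder URL, not code taken from the test):

    import http.cookiejar
    import urllib.request

    jar = http.cookiejar.CookieJar()
    opener = urllib.request.build_opener(urllib.request.HTTPCookieProcessor(jar))

    req = urllib.request.Request("http://www.example.com/")   # placeholder URL
    # with opener.open(req) as resp:         # network call, left commented out
    #     print(len(jar), "cookies stored")
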
/external/python/cpython2/Lib/test/
test_urllib.py
4 import urllib
84 self.returned_obj = urllib.urlopen("file:%s" % self.pathname)
145 self.assertRaises(ValueError,urllib.urlopen,'./' + self.pathname)
164 proxies = urllib.getproxies_environment()
169 self.assertTrue(urllib.proxy_bypass_environment('anotherdomain.com'))
170 self.assertTrue(urllib.proxy_bypass_environment('anotherdomain.com:8888'))
171 self.assertTrue(urllib.proxy_bypass_environment('newdomain.com:1234'))
176 proxies = urllib.getproxies_environment()
179 proxies = urllib.getproxies_environment()
186 bypass = urllib.proxy_bypass_environment
[all …]
/external/python/cpython3/Doc/library/
urllib.rst
1 :mod:`urllib` --- URL handling modules
4 .. module:: urllib
6 **Source code:** :source:`Lib/urllib/`
10 ``urllib`` is a package that collects several modules for working with URLs:
12 * :mod:`urllib.request` for opening and reading URLs
13 * :mod:`urllib.error` containing the exceptions raised by :mod:`urllib.request`
14 * :mod:`urllib.parse` for parsing URLs
15 * :mod:`urllib.robotparser` for parsing ``robots.txt`` files
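
As a quick orientation to the four submodules urllib.rst lists, a minimal hedged sketch (the URL is a placeholder):

    import urllib.error
    import urllib.parse
    import urllib.request
    import urllib.robotparser

    url = "https://example.com/index.html"              # placeholder

    print(urllib.parse.urlparse(url).netloc)             # urllib.parse: parsing URLs

    rp = urllib.robotparser.RobotFileParser()            # urllib.robotparser: robots.txt
    rp.set_url(urllib.parse.urljoin(url, "/robots.txt"))

    try:
        with urllib.request.urlopen(url) as resp:        # urllib.request: opening URLs
            data = resp.read()
    except urllib.error.URLError as exc:                 # urllib.error: its exceptions
        print("fetch failed:", exc.reason)
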
urllib.error.rst
1 :mod:`urllib.error` --- Exception classes raised by urllib.request
4 .. module:: urllib.error
5 :synopsis: Exception classes raised by urllib.request.
10 **Source code:** :source:`Lib/urllib/error.py`
14 The :mod:`urllib.error` module defines the exception classes for exceptions
15 raised by :mod:`urllib.request`. The base exception class is :exc:`URLError`.
17 The following exceptions are raised by :mod:`urllib.error` as appropriate:
38 value (the same thing that :func:`~urllib.request.urlopen` returns). This
61 This exception is raised when the :func:`~urllib.request.urlretrieve`
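
The urllib.error.rst excerpt describes the exception hierarchy rooted at URLError; a short hedged example of handling it (placeholder URL):

    import urllib.error
    import urllib.request

    try:
        with urllib.request.urlopen("https://example.com/missing") as resp:   # placeholder URL
            body = resp.read()
    except urllib.error.HTTPError as err:
        # As the doc notes, HTTPError can also act as a response object:
        # it carries a status code and headers and can be read like the
        # value urlopen() returns.
        print(err.code, err.reason)
    except urllib.error.URLError as err:
        # Base class: anything that prevented the request from completing
        print("failed to reach server:", err.reason)
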
/external/autotest/client/common_lib/cros/fake_device_server/client_lib/
commands.py
9 from six.moves import urllib
29 request = urllib.request.Request(self.get_url([command_id]),
31 url_h = urllib.request.urlopen(request)
40 request = urllib.request.Request(
43 url_h = urllib.request.urlopen(request)
59 request = urllib.request.Request(
67 url_h = urllib.request.urlopen(request)
79 request = urllib.request.Request(self.get_url(),
82 url_h = urllib.request.urlopen(request)
oauth_helpers.py
9 from six.moves import urllib
24 return '%s?%s' % (auth_url, urllib.parse.urlencode(params))
37 request = urllib.request.Request(token_url,
38 data=urllib.parse.urlencode(data),
40 url_h = urllib.request.urlopen(request)
56 request = urllib.request.Request(token_url,
57 data=urllib.parse.urlencode(data),
59 url_h = urllib.request.urlopen(request)
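
oauth_helpers.py POSTs a form-encoded body through six.moves.urllib. A hedged sketch of that pattern (the endpoint and payload are placeholders; under Python 3 the request body must be bytes, hence the explicit encode()):

    from six.moves import urllib

    token_url = "https://accounts.example.com/o/oauth2/token"   # placeholder endpoint
    payload = {"grant_type": "refresh_token", "refresh_token": "PLACEHOLDER"}

    body = urllib.parse.urlencode(payload).encode("utf-8")
    request = urllib.request.Request(
        token_url,
        data=body,
        headers={"Content-Type": "application/x-www-form-urlencoded"},
    )
    # url_h = urllib.request.urlopen(request)   # network call, left commented out
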
devices.py
8 from six.moves import urllib
28 request = urllib.request.Request(self.get_url([device_id]),
30 url_h = urllib.request.urlopen(request)
36 request = urllib.request.Request(self.get_url(),
38 url_h = urllib.request.urlopen(request)
53 request = urllib.request.Request(self.get_url(), json.dumps(data),
55 url_h = urllib.request.urlopen(request)
registration.py
9 from six.moves import urllib
29 url_h = urllib.request.urlopen(self.get_url([ticket_id]))
51 request = urllib.request.Request(self.get_url([ticket_id]),
58 url_h = urllib.request.urlopen(request)
73 request = urllib.request.Request(self.get_url(),
76 url_h = urllib.request.urlopen(request)
85 request = urllib.request.Request(self.get_url([ticket_id, 'finalize']),
87 url_h = urllib.request.urlopen(request)
fail_control.py
8 from six.moves import urllib
26 request = urllib.request.Request(
29 url_h = urllib.request.urlopen(request)
36 request = urllib.request.Request(
39 url_h = urllib.request.urlopen(request)
oauth.py
8 from six.moves import urllib
26 request = urllib.request.Request(
30 url_h = urllib.request.urlopen(request)
37 request = urllib.request.Request(
40 url_h = urllib.request.urlopen(request)
/external/python/cpython2/Lib/
nturl2path.py
12 import string, urllib
24 return urllib.unquote('\\'.join(components))
34 path = path + '\\' + urllib.unquote(comp)
47 import urllib
56 return urllib.quote('/'.join(components))
62 drive = urllib.quote(comp[0].upper())
67 path = path + '/' + urllib.quote(comp)
/external/python/cpython3/Lib/
nturl2path.py
17 import string, urllib.parse
29 return urllib.parse.unquote('\\'.join(components))
39 path = path + '\\' + urllib.parse.unquote(comp)
52 import urllib.parse
61 return urllib.parse.quote('/'.join(components))
67 drive = urllib.parse.quote(comp[0].upper())
72 path = path + '/' + urllib.parse.quote(comp)
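
Both copies of nturl2path implement the Windows path/URL conversion behind urllib's url2pathname()/pathname2url(). A hedged round-trip sketch (the path is a made-up Windows example, so the exact output only matters for Windows-style paths):

    import nturl2path

    url = nturl2path.pathname2url(r"C:\Program Files\App\read me.txt")
    print(url)                           # ///C:/Program%20Files/App/read%20me.txt
    print(nturl2path.url2pathname(url))  # C:\Program Files\App\read me.txt
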
/external/python/cpython3/Doc/howto/
urllib2.rst
4 HOWTO Fetch Internet Resources Using The urllib Package
29 **urllib.request** is a Python module for fetching URLs
36 urllib.request supports fetching URLs for many "URL schemes" (identified by the string
45 not intended to be easy to read. This HOWTO aims to illustrate using *urllib*,
47 the :mod:`urllib.request` docs, but is supplementary to them.
53 The simplest way to use urllib.request is as follows::
55 import urllib.request
56 with urllib.request.urlopen('http://python.org/') as response:
65 import urllib.request
67 with urllib.request.urlopen('http://python.org/') as response:
[all …]
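
Filling the HOWTO's "simplest way" snippet out into a complete, runnable form (same URL as in the excerpt; this performs a real network fetch):

    import urllib.request

    with urllib.request.urlopen('http://python.org/') as response:
        html = response.read()

    print(len(html), "bytes fetched")
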
/external/python/cpython3/Lib/urllib/
robotparser.py
14 import urllib.parse
15 import urllib.request
57 self.host, self.path = urllib.parse.urlparse(url)[1:3]
62 f = urllib.request.urlopen(self.url)
63 except urllib.error.HTTPError as err:
114 line[1] = urllib.parse.unquote(line[1].strip())
168 parsed_url = urllib.parse.urlparse(urllib.parse.unquote(url))
169 url = urllib.parse.urlunparse(('','',parsed_url.path,
171 url = urllib.parse.quote(url)
222 path = urllib.parse.urlunparse(urllib.parse.urlparse(path))
[all …]
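
The robotparser.py internals above fetch and normalize robots.txt entries; from the caller's side the module is used roughly like this (site URL and user agent are placeholders):

    import urllib.robotparser

    rp = urllib.robotparser.RobotFileParser()
    rp.set_url("https://example.com/robots.txt")   # placeholder site
    rp.read()                                       # urlopen()s the file, as in the snippet above

    # can_fetch() quotes and normalizes the path much like the quoted lines show
    print(rp.can_fetch("MyCrawler/1.0", "https://example.com/private/page.html"))
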
/external/python/httplib2/python3/
httplib2test.py
21 import urllib.parse
323 uri = urllib.parse.urljoin(
333 uri = urllib.parse.urljoin(base, "methods/method_reflector.cgi")
339 uri = urllib.parse.urljoin(base, "methods/method_reflector.cgi")
357 uri = urllib.parse.urljoin(base, "304/test_etag.txt")
364 uri = urllib.parse.urljoin(base, "304/test_etag.txt")
374 uri = urllib.parse.urljoin(base, "304/test_etag.txt")
387 uri = urllib.parse.urljoin(base, "304/test_etag.txt")
396 uri = urllib.parse.urljoin(base, "user-agent/test.cgi")
404 uri = urllib.parse.urljoin(base, "user-agent/test.cgi")
[all …]
/external/python/six/documentation/
index.rst
523 The :mod:`py2:urllib`, :mod:`py2:urllib2`, and :mod:`py2:urlparse` modules have
524 been combined in the :mod:`py3:urllib` package in Python 3. The
525 :mod:`six.moves.urllib` package is a version-independent location for this
527 :mod:`py3:urllib` package.
660 | ``urllib.parse`` | See :mod:`six.moves.urllib.parse` | :mod:`py3:urllib.parse` …
662 | ``urllib.error`` | See :mod:`six.moves.urllib.error` | :mod:`py3:urllib.error` …
664 | ``urllib.request`` | See :mod:`six.moves.urllib.request` | :mod:`py3:urllib.request` …
666 | ``urllib.response`` | See :mod:`six.moves.urllib.response`| :mod:`py3:urllib.response` …
668 | ``urllib.robotparser`` | :mod:`py2:robotparser` | :mod:`py3:urllib.robotparser…
670 | ``urllib_robotparser`` | :mod:`py2:robotparser` | :mod:`py3:urllib.robotparser…
[all …]
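
A hedged illustration of the version-independent access path the six documentation describes (the URL is a placeholder; the same names resolve to urlparse/urllib/urllib2 on Python 2 and to the urllib package on Python 3):

    from six.moves import urllib

    parts = urllib.parse.urlparse("https://example.com/a?b=1")
    print(parts.netloc, urllib.parse.parse_qs(parts.query))

    req = urllib.request.Request("https://example.com/a")
    # urllib.request.urlopen(req)   # network call, left commented out
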
/external/autotest/client/common_lib/
file_utils.py
10 from six.moves import urllib
142 proxy_handler = urllib.request.ProxyHandler(proxies)
143 opener = urllib.request.build_opener(proxy_handler)
144 urllib.request.install_opener(opener)
151 remote_file = urllib.request.urlopen(remote_path)
157 except urllib.error.HTTPError as e:
164 except urllib.error.URLError as e:
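
file_utils.py wires a ProxyHandler into a global opener before downloading. A hedged sketch of the same flow (proxy address and URL are placeholders):

    from six.moves import urllib

    proxies = {"http": "http://proxy.example.com:3128"}   # placeholder proxy map
    remote_path = "http://example.com/payload.bin"        # placeholder URL

    # Install a proxy-aware opener globally, as the quoted lines do.
    proxy_handler = urllib.request.ProxyHandler(proxies)
    opener = urllib.request.build_opener(proxy_handler)
    urllib.request.install_opener(opener)

    try:
        payload = urllib.request.urlopen(remote_path).read()   # network call
    except urllib.error.HTTPError as e:
        print("server returned an error:", e.code)
    except urllib.error.URLError as e:
        print("could not reach the server:", e.reason)
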
/external/python/setuptools/setuptools/
package_index.py
13 from setuptools.extern.six.moves import urllib, http_client, configparser, map
84 parts = urllib.parse.urlparse(url)
86 base = urllib.parse.unquote(path.split('/')[-1])
88 base = urllib.parse.unquote(path.split('/')[-2])
228 yield urllib.parse.urljoin(url, htmldecode(match.group(1)))
235 yield urllib.parse.urljoin(url, htmldecode(match.group(1)))
277 fragment = urllib.parse.urlparse(url)[-1]
318 self.opener = urllib.request.urlopen
358 if isinstance(f, urllib.error.HTTPError):
366 link = urllib.parse.urljoin(base, htmldecode(match.group(1)))
[all …]
/external/autotest/server/
afe_urls.py
18 from six.moves import urllib
19 import six.moves.urllib.parse
38 self._root_url_parts = six.moves.urllib.parse.urlsplit(root_url)
75 fragment = urllib.parse.urlencode(params)
76 return six.moves.urllib.parse.SplitResult(
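
afe_urls.py splits a root URL once and then rebuilds page URLs with the query parameters encoded into the fragment. A hedged sketch of that trick (root URL and parameters are placeholders):

    from six.moves import urllib

    root_url = "http://autotest.example.com/afe/"   # placeholder AFE root
    parts = urllib.parse.urlsplit(root_url)

    # Encode view parameters into the URL fragment, as the quoted lines do.
    fragment = urllib.parse.urlencode({"tab_id": "view_job", "object_id": 1234})
    page_url = urllib.parse.SplitResult(
        parts.scheme, parts.netloc, parts.path, parts.query, fragment).geturl()
    print(page_url)   # e.g. http://autotest.example.com/afe/#tab_id=view_job&object_id=1234
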
