/device/linaro/bootloader/edk2/AppPkg/Applications/Python/Python-2.7.2/Lib/ |
D | urlparse.py |
      128  def urlparse(url, scheme='', allow_fragments=True):    argument
      134  tuple = urlsplit(url, scheme, allow_fragments)
      135  scheme, netloc, url, query, fragment = tuple
      136  if scheme in uses_params and ';' in url:
      137  url, params = _splitparams(url)
      140  return ParseResult(scheme, netloc, url, params, query, fragment)
      142  def _splitparams(url):    argument
      143  if '/' in url:
      144  i = url.find(';', url.rfind('/'))
      146  return url, ''
      [all …]
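For context, urlparse() above splits a URL into a six-field ParseResult and moves ';'-parameters out of the path via _splitparams(). A minimal Python 2.7 usage sketch (the example URL is made up):

    import urlparse  # Python 2.7 module; urllib.parse in Python 3

    # Hypothetical URL chosen to populate every ParseResult field.
    parts = urlparse.urlparse('http://example.com/a/b;type=x?q=1#frag')
    print parts.scheme, parts.netloc, parts.path      # http example.com /a/b
    print parts.params, parts.query, parts.fragment   # type=x q=1 frag
    # urlunparse() reassembles the original URL from the tuple.
    print urlparse.urlunparse(parts)                  # http://example.com/a/b;type=x?q=1#frag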
|
D | nturl2path.py |
      3   def url2pathname(url):    argument
      12  url = url.replace(':', '|')
      13  if not '|' in url:
      15  if url[:4] == '////':
      19  url = url[2:]
      20  components = url.split('/')
      23  comp = url.split('|')
      25  error = 'Bad URL: ' + url
      34  if path.endswith(':') and url.endswith('/'):
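url2pathname() above maps a file: URL path onto a Windows path, treating '|' (or ':') as the drive-letter separator. A small Python 2.7 illustration (the path is hypothetical):

    import nturl2path

    # The drive letter is recovered from the 'C|' component, as handled above.
    print nturl2path.url2pathname('///C|/foo/bar/spam.txt')   # C:\foo\bar\spam.txt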
|
D | urllib.py |
      69   def urlopen(url, data=None, proxies=None):    argument
      84   return opener.open(url)
      86   return opener.open(url, data)
      87   def urlretrieve(url, filename=None, reporthook=None, data=None):    argument
      91   return _urlopener.retrieve(url, filename, reporthook, data)
      185  urltype, url = splittype(fullurl)
      192  url = (host, fullurl) # Signal special case to open_*()
      205  return getattr(self, name)(url)
      207  return getattr(self, name)(url, data)
      213  type, url = splittype(fullurl)
      [all …]
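urlopen() and urlretrieve() above are the module-level entry points that delegate to a shared opener. A hedged Python 2.7 sketch of typical use (example.com is a placeholder host and a working network connection is assumed):

    import urllib

    # Fetch a page into memory; urlopen() returns a file-like object.
    f = urllib.urlopen('http://example.com/')
    try:
        print f.getcode(), len(f.read())
    finally:
        f.close()

    # Save a resource to disk; returns (local filename, headers).
    filename, headers = urllib.urlretrieve('http://example.com/', 'index.html')
    print filename, headers.gettype()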
|
D | robotparser.py |
      24   def __init__(self, url=''):    argument
      29   self.set_url(url)
      49   def set_url(self, url):    argument
      51   self.url = url
      52   self.host, self.path = urlparse.urlparse(url)[1:3]
      57   f = opener.open(self.url)
      128  def can_fetch(self, useragent, url):    argument
      136  parsed_url = urlparse.urlparse(urllib.unquote(url))
      137  url = urlparse.urlunparse(('', '', parsed_url.path,
      139  url = urllib.quote(url)
      [all …]
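can_fetch() above answers whether a user agent may retrieve a URL under the site's robots.txt rules. A short Python 2.7 usage sketch (the URLs are placeholders and a network connection is assumed):

    import robotparser  # urllib.robotparser in Python 3

    rp = robotparser.RobotFileParser()
    rp.set_url('http://example.com/robots.txt')
    rp.read()   # fetch and parse robots.txt, as opened on line 57 above
    # Result depends on the rules the site actually serves.
    print rp.can_fetch('*', 'http://example.com/private/page.html')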
|
D | webbrowser.py |
      58   def open(url, new=0, autoraise=True):    argument
      61   if browser.open(url, new, autoraise):
      65   def open_new(url):    argument
      66   return open(url, 1)
      68   def open_new_tab(url):    argument
      69   return open(url, 2)
      147  def open(self, url, new=0, autoraise=True):    argument
      150  def open_new(self, url):    argument
      151  return self.open(url, 1)
      153  def open_new_tab(self, url):    argument
      [all …]
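The three module-level helpers above all funnel into a single open(url, new, ...) call, with new selecting how the browser window is reused. A brief Python 2.7 sketch (the URL is a placeholder and a configured browser is assumed):

    import webbrowser

    webbrowser.open('http://example.com/')          # new=0: reuse a window if possible
    webbrowser.open_new('http://example.com/')      # new=1: open a new window
    webbrowser.open_new_tab('http://example.com/')  # new=2: open a new tab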
|
D | mimetypes.py |
      95   def guess_type(self, url, strict=True):    argument
      114  scheme, url = urllib.splittype(url)
      122  comma = url.find(',')
      126  semi = url.find(';', 0, comma)
      128  type = url[:semi]
      130  type = url[:comma]
      134  base, ext = posixpath.splitext(url)
      275  def guess_type(url, strict=True):    argument
      295  return _db.guess_type(url, strict)
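guess_type() above keys off the file extension and special-cases data: URLs, whose type is read straight out of the URL body. A quick Python 2.7 illustration:

    import mimetypes

    print mimetypes.guess_type('report.html')         # ('text/html', None)
    print mimetypes.guess_type('archive.tar.gz')      # ('application/x-tar', 'gzip')
    print mimetypes.guess_type('data:text/plain,hi')  # ('text/plain', None)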
|
D | DocXMLRPCServer.py |
      49  url = escape(all).replace('"', '&quot;')
      50  results.append('<a href="%s">%s</a>' % (url, url))
      52  url = 'http://www.rfc-editor.org/rfc/rfc%d.txt' % int(rfc)
      53  results.append('<a href="%s">%s</a>' % (url, escape(all)))
      55  url = 'http://www.python.org/dev/peps/pep-%04d/' % int(pep)
      56  results.append('<a href="%s">%s</a>' % (url, escape(all)))
|
/device/linaro/bootloader/edk2/AppPkg/Applications/Python/Python-2.7.10/Lib/ |
D | urlparse.py |
      137  def urlparse(url, scheme='', allow_fragments=True):    argument
      143  tuple = urlsplit(url, scheme, allow_fragments)
      144  scheme, netloc, url, query, fragment = tuple
      145  if scheme in uses_params and ';' in url:
      146  url, params = _splitparams(url)
      149  return ParseResult(scheme, netloc, url, params, query, fragment)
      151  def _splitparams(url):    argument
      152  if '/' in url:
      153  i = url.find(';', url.rfind('/'))
      155  return url, ''
      [all …]
|
/device/linaro/bootloader/edk2/AppPkg/Applications/Python/Python-2.7.2/Tools/webchecker/ |
D | webchecker.py |
      310  for url in self.bad.keys():
      311  self.markerror(url)
      331  url = urlparse.urljoin(root, "/robots.txt")
      333  self.note(2, "Parsing %s", url)
      335  rp.set_url(url)
      339  self.note(1, "I/O error parsing %s: %s", url, msg)
      348  for url in urls:
      349  self.dopage(url)
      383  for url, rawlink, msg in triples:
      384  if rawlink != self.format_url(url): s = " (%s)" % rawlink
      [all …]
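The urljoin() call on line 331 derives each crawl root's robots.txt location before handing it to the robot parser. A tiny Python 2.7 sketch of that step (the root URL is hypothetical):

    import urlparse

    root = 'http://example.com/docs/index.html'   # hypothetical crawl root
    print urlparse.urljoin(root, '/robots.txt')   # http://example.com/robots.txt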
|
D | wcgui.py |
      241  url = self.__todo.items[i]
      242  self.__checking.config(text="Checking "+self.format_url(url))
      244  self.dopage(url)
      250  def showinfo(self, url):    argument
      253  d.put("URL: %s\n" % self.format_url(url))
      254  if self.bad.has_key(url):
      255  d.put("Error: %s\n" % str(self.bad[url]))
      256  if url in self.roots:
      258  if self.done.has_key(url):
      260  o = self.done[url]
      [all …]
|
D | websucker.py |
      54  url = url_pair[0]
      56  path = self.savefilename(url)
      64  if nurl != url:
      65  url = nurl
      66  path = self.savefilename(url)
      70  if not self.checkforhtml(info, url):
      73  if self.checkforhtml({}, url):
      76  return text, url
      89  def savefilename(self, url):    argument
      90  type, rest = urllib.splittype(url)
|
D | wsgui.py |
      46   def run1(self, url):    argument
      50   self.addroot(url)
      64   def getpage(self, url):    argument
      67   return websucker.Sucker.getpage(self, url)
      69   def savefilename(self, url):    argument
      70   path = websucker.Sucker.savefilename(self, url)
      149  url = self.url_entry.get()
      150  url = url.strip()
      151  if not url:
      155  self.rooturl = url
      [all …]
|
/device/linaro/bootloader/edk2/AppPkg/Applications/Python/Python-2.7.2/Lib/test/ |
D | test_robotparser.py |
      5   def __init__(self, index, parser, url, good, agent):    argument
      8   self.str = "RobotTest(%d, good, %s)" % (index, url)
      10  self.str = "RobotTest(%d, bad, %s)" % (index, url)
      12  self.url = url
      17  if isinstance(self.url, tuple):
      18  agent, url = self.url
      20  url = self.url
      23  self.assertTrue(self.parser.can_fetch(agent, url))
      25  self.assertFalse(self.parser.can_fetch(agent, url))
      38  for url in good_urls:
      [all …]
|
D | test_urllib2net.py |
      177  url = "http://www.example.com"
      178  with test_support.transient_internet(url):
      180  request = urllib2.Request(url)
      198  for url in urls:
      199  if isinstance(url, tuple):
      200  url, req, expected_err = url
      203  with test_support.transient_internet(url):
      204  debug(url)
      206  f = urlopen(url, req, TIMEOUT)
      211  (expected_err, url, req, type(err), err))
      [all …]
|
D | test_urlparse.py |
      31   def checkRoundtrips(self, url, parsed, split):    argument
      32   result = urlparse.urlparse(url)
      39   self.assertEqual(result2, url)
      59   result = urlparse.urlsplit(url)
      65   self.assertEqual(result2, url)
      114  for url, parsed, split in testcases:
      115  self.checkRoundtrips(url, parsed, split)
      139  for url, parsed, split in testcases:
      140  url = scheme + url
      143  self.checkRoundtrips(url, parsed, split)
      [all …]
|
D | test_cookielib.py |
      221  def __init__(self, headers=[], url=None):    argument
      228  self._url = url
      231  def interact_2965(cookiejar, url, *set_cookie_hdrs):    argument
      232  return _interact(cookiejar, url, set_cookie_hdrs, "Set-Cookie2")
      234  def interact_netscape(cookiejar, url, *set_cookie_hdrs):    argument
      235  return _interact(cookiejar, url, set_cookie_hdrs, "Set-Cookie")
      237  def _interact(cookiejar, url, set_cookie_hdrs, hdr_name):    argument
      240  req = Request(url)
      246  res = FakeResponse(headers, url)
      349  for url, domain, ok in [
      [all …]
|
D | test_urllib2.py |
      243  def __init__(self, code, msg, headers, data, url=None):    argument
      245  self.code, self.msg, self.headers, self.url = code, msg, headers, url
      249  return self.url
      296  def request(self, method, url, body=None, headers=None):    argument
      298  self.selector = url
      431  self.url = uri
      625  for url, host, port, user, passwd, type_, dirs, filename, mimetype in [
      645  req = Request(url)
      680  for url in urls:
      688  r = h.file_open(Request(url))
      [all …]
|
/device/linaro/bootloader/edk2/AppPkg/Applications/Python/Python-2.7.2/Lib/wsgiref/ |
D | util.py |
      45  url = environ['wsgi.url_scheme']+'://'
      49  url += environ['HTTP_HOST']
      51  url += environ['SERVER_NAME']
      55  url += ':' + environ['SERVER_PORT']
      58  url += ':' + environ['SERVER_PORT']
      60  url += quote(environ.get('SCRIPT_NAME') or '/')
      61  return url
      65  url = application_uri(environ)
      69  url += path_info[1:]
      71  url += path_info
      [all …]
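The lines above come from wsgiref.util's application_uri() and request_uri(), which rebuild the request URL from a WSGI environ dict. A minimal Python 2.7 sketch with a hand-built environ (the values are illustrative):

    from wsgiref.util import application_uri, request_uri

    environ = {
        'wsgi.url_scheme': 'http',
        'HTTP_HOST': 'example.com:8080',
        'SCRIPT_NAME': '/app',
        'PATH_INFO': '/page',
        'QUERY_STRING': 'q=1',
    }
    print application_uri(environ)   # http://example.com:8080/app
    print request_uri(environ)       # http://example.com:8080/app/page?q=1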
|
/device/google/cuttlefish/host/commands/fetcher/ |
D | curl_wrapper.cc |
      65   bool CurlWrapper::DownloadToFile(const std::string& url, const std::string& path) {    in DownloadToFile() argument
      66   return CurlWrapper::DownloadToFile(url, path, {});    in DownloadToFile()
      69   bool CurlWrapper::DownloadToFile(const std::string& url, const std::string& path,    in DownloadToFile() argument
      71   LOG(INFO) << "Attempting to save \"" << url << "\" to \"" << path << "\"";    in DownloadToFile()
      80   curl_easy_setopt(curl, CURLOPT_URL, url.c_str());    in DownloadToFile()
      105  std::string CurlWrapper::DownloadToString(const std::string& url) {    in DownloadToString() argument
      106  return DownloadToString(url, {});    in DownloadToString()
      109  std::string CurlWrapper::DownloadToString(const std::string& url,    in DownloadToString() argument
      111  LOG(INFO) << "Attempting to download \"" << url << "\"";    in DownloadToString()
      120  curl_easy_setopt(curl, CURLOPT_URL, url.c_str());    in DownloadToString()
      [all …]
|
D | curl_wrapper.h |
      32  bool DownloadToFile(const std::string& url, const std::string& path);
      33  bool DownloadToFile(const std::string& url, const std::string& path,
      35  std::string DownloadToString(const std::string& url);
      36  std::string DownloadToString(const std::string& url,
      38  Json::Value DownloadToJson(const std::string& url);
      39  Json::Value DownloadToJson(const std::string& url,
|
D | build_api.cc |
      88   std::string url = BUILD_API + "/builds?branch=" + branch    in LatestBuildId() local
      91   auto response = curl.DownloadToJson(url, Headers());    in LatestBuildId()
      105  std::string url = BUILD_API + "/builds/" + build.id + "/" + build.target;    in BuildStatus() local
      106  auto response_json = curl.DownloadToJson(url, Headers());    in BuildStatus()
      114  std::string url = BUILD_API + "/builds/" + build.id + "/" + build.target    in Artifacts() local
      116  auto artifacts_json = curl.DownloadToJson(url, Headers());    in Artifacts()
      150  std::string url = BUILD_API + "/builds/" + build.id + "/" + build.target    in ArtifactToFile() local
      152  return curl.DownloadToFile(url, path, Headers());    in ArtifactToFile()
|
/device/linaro/bootloader/edk2/AppPkg/Applications/Python/Python-2.7.2/Tools/versioncheck/ |
D | pyversioncheck.py |
      17  def versioncheck(package, url, version, verbose=0):    argument
      18  ok, newversion, fp = checkonly(package, url, version, verbose)
      35  def checkonly(package, url, version, verbose=0):    argument
      38  if type(url) == types.StringType:
      39  ok, newversion, fp = _check1version(package, url, version, verbose)
      41  for u in url:
      47  def _check1version(package, url, version, verbose=0):    argument
      49  print ' Checking %s'%url
      51  fp = urllib.urlopen(url)
|
/device/linaro/bootloader/edk2/AppPkg/Applications/Python/Python-2.7.2/Lib/distutils/tests/ |
D | test_upload.py |
      44  def __init__(self, url):    argument
      45  self.url = url
      46  if not isinstance(url, str):
      47  self.req = url
      68  def _urlopen(self, url):    argument
      69  self.last_open = FakeOpen(url)
|
/device/linaro/bootloader/edk2/AppPkg/Applications/Python/Python-2.7.2/Tools/scripts/ |
D | google.py |
      19  url = "http://www.google.com/search?q=%s" % s
      20  webbrowser.open(url)
|
/device/linaro/bootloader/edk2/AppPkg/Applications/Python/Python-2.7.2/Tools/faqwiz/ |
D | faqwiz.py |
      83  url = m.group(0)
      84  while url[-1] in '();:,.?\'"<>':
      85  url = url[:-1]
      86  i = i + len(url)
      87  url = escape(url)
      89  if ':' in url:
      90  repl = '<A HREF="%s">%s</A>' % (url, url)
      92  repl = '<A HREF="mailto:%s">%s</A>' % (url, url)
      94  repl = url
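The loop on lines 84-85 trims punctuation that trails a matched URL before it is wrapped in an anchor tag. A standalone Python 2.7 sketch of the same idea (the helper name and sample text are made up, and the '@' check for the mailto branch is an assumption):

    from cgi import escape

    def linkify(url):
        # Trim punctuation that typically trails a URL in running text.
        while url[-1] in '();:,.?\'"<>':
            url = url[:-1]
        url = escape(url)
        if ':' in url:      # looks like scheme:... -> plain hyperlink
            return '<A HREF="%s">%s</A>' % (url, url)
        elif '@' in url:    # looks like an email address -> mailto link
            return '<A HREF="mailto:%s">%s</A>' % (url, url)
        return url

    print linkify('http://example.com/page).')
    # <A HREF="http://example.com/page">http://example.com/page</A>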
|