• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 #!/usr/bin/env python3
2 
3 #
4 # Copyright (C) 2018 The Android Open Source Project
5 #
6 # Licensed under the Apache License, Version 2.0 (the "License");
7 # you may not use this file except in compliance with the License.
8 # You may obtain a copy of the License at
9 #
10 #      http://www.apache.org/licenses/LICENSE-2.0
11 #
12 # Unless required by applicable law or agreed to in writing, software
13 # distributed under the License is distributed on an "AS IS" BASIS,
14 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 # See the License for the specific language governing permissions and
16 # limitations under the License.
17 #
18 
19 """Gerrit Restful API client library."""
20 
21 from __future__ import print_function
22 
23 import argparse
24 import base64
25 import json
26 import os
27 import sys
28 import xml.dom.minidom
29 
30 try:
31     import ssl
32     _HAS_SSL = True
33 except ImportError:
34     _HAS_SSL = False
35 
36 try:
37     # PY3
38     from urllib.error import HTTPError
39     from urllib.parse import urlencode, urlparse
40     from urllib.request import (
41         HTTPBasicAuthHandler, HTTPHandler, OpenerDirector, Request,
42         build_opener
43     )
44     if _HAS_SSL:
45         from urllib.request import HTTPSHandler
46 except ImportError:
47     # PY2
48     from urllib import urlencode
49     from urllib2 import (
50         HTTPBasicAuthHandler, HTTPError, HTTPHandler, OpenerDirector, Request,
51         build_opener
52     )
53     if _HAS_SSL:
54         from urllib2 import HTTPSHandler
55     from urlparse import urlparse
56 
57 try:
58     from http.client import HTTPResponse
59 except ImportError:
60     from httplib import HTTPResponse
61 
62 try:
63     from urllib import addinfourl
64     _HAS_ADD_INFO_URL = True
65 except ImportError:
66     _HAS_ADD_INFO_URL = False
67 
68 try:
69     from io import BytesIO
70 except ImportError:
71     from StringIO import StringIO as BytesIO
72 
73 try:
74     # PY3.5
75     from subprocess import PIPE, run
76 except ImportError:
77     from subprocess import CalledProcessError, PIPE, Popen
78 
79     class CompletedProcess(object):
80         """Process execution result returned by subprocess.run()."""
81         # pylint: disable=too-few-public-methods
82 
83         def __init__(self, args, returncode, stdout, stderr):
84             self.args = args
85             self.returncode = returncode
86             self.stdout = stdout
87             self.stderr = stderr
88 
    def run(*args, **kwargs):
        """Run a command with subprocess.Popen() and redirect input/output.

        Python 2 fallback for `subprocess.run()` (added in Python 3.5).

        Args:
            *args: Positional arguments forwarded to `subprocess.Popen()`
                (typically just the command argument list).
            **kwargs: Keyword arguments forwarded to `subprocess.Popen()`,
                plus the `run()`-only keywords handled here:
                check: If true, raise CalledProcessError on non-zero exit.
                input: Bytes to feed to the child's stdin (implies
                    `stdin=PIPE`).

        Returns:
            A CompletedProcess carrying args, returncode, stdout and stderr.

        Raises:
            CalledProcessError: If `check` is true and the child exits with
                a non-zero status.
        """

        check = kwargs.pop('check', False)

        # EAFP: `input` is optional; when present it must take over stdin.
        try:
            stdin = kwargs.pop('input')
            assert 'stdin' not in kwargs
            kwargs['stdin'] = PIPE
        except KeyError:
            stdin = None

        proc = Popen(*args, **kwargs)
        # Bare except is deliberate: like subprocess.run(), kill the child
        # even on KeyboardInterrupt/SystemExit before re-raising.
        try:
            stdout, stderr = proc.communicate(stdin)
        except:
            proc.kill()
            proc.wait()
            raise
        returncode = proc.wait()

        if check and returncode:
            # NOTE(review): the real run() passes the command (args[0]) as
            # the `cmd` argument here; this shim passes the whole tuple.
            raise CalledProcessError(returncode, args, stdout)
        return CompletedProcess(args, returncode, stdout, stderr)
113 
114 
class CurlSocket(object):
    """A mock socket object that loads the response from a curl output file."""

    def __init__(self, file_obj):
        self._response_file = file_obj

    def makefile(self, *args):
        # HTTPResponse calls makefile() to get a readable stream; the mode
        # arguments are irrelevant for an in-memory file.
        return self._response_file

    def close(self):
        # Drop the reference only; the caller owns the underlying file.
        self._response_file = None
126 
127 
128 def _build_curl_command_for_request(curl_command_name, req):
129     """Build the curl command line for an HTTP/HTTPS request."""
130 
131     cmd = [curl_command_name]
132 
133     # Adds `--no-progress-meter` to hide the progress bar.
134     cmd.append('--no-progress-meter')
135 
136     # Adds `-i` to print the HTTP response headers to stdout.
137     cmd.append('-i')
138 
139     # Uses HTTP 1.1.  The `http.client` module can only parse HTTP 1.1 headers.
140     cmd.append('--http1.1')
141 
142     # Specifies the request method.
143     cmd.append('-X')
144     cmd.append(req.get_method())
145 
146     # Adds the request headers.
147     for name, value in req.headers.items():
148         cmd.append('-H')
149         cmd.append(name + ': ' + value)
150 
151     # Adds the request data.
152     if req.data:
153         cmd.append('-d')
154         cmd.append('@-')
155 
156     # Adds the request full URL.
157     cmd.append(req.get_full_url())
158     return cmd
159 
160 
def _handle_open_with_curl(curl_command_name, req):
    """Send the HTTP request with CURL and return a response object that can be
    handled by urllib.

    Args:
        curl_command_name: The curl executable to run (e.g. 'curl').
        req: A urllib request object (urllib.request.Request on PY3 or
            urllib2.Request on PY2).

    Returns:
        An `http.client.HTTPResponse` on PY3, or an `addinfourl` wrapper on
        PY2, matching what each urllib version expects from an opener.

    Raises:
        CalledProcessError: If the curl command exits with a non-zero status
            (because of `check=True` below).
    """

    # Runs the curl command.
    cmd = _build_curl_command_for_request(curl_command_name, req)
    proc = run(cmd, stdout=PIPE, input=req.data, check=True)

    # Wraps the curl output with a socket-like object so HTTPResponse can
    # parse it as if it had come off the wire.
    outfile = BytesIO(proc.stdout)
    socket = CurlSocket(outfile)

    response = HTTPResponse(socket)
    try:
        # Parses the response header.  The bare except below makes sure the
        # response is closed before any error (even KeyboardInterrupt)
        # propagates.
        response.begin()
    except:
        response.close()
        raise

    # Overrides `Transfer-Encoding: chunked` because curl combines chunks.
    # (`chunked`/`chunk_left` are http.client internals; curl has already
    # de-chunked the body it printed.)
    response.chunked = False
    response.chunk_left = None

    if _HAS_ADD_INFO_URL:
        # PY2 urllib2 expects a different return object.
        result = addinfourl(outfile, response.msg, req.get_full_url())
        result.code = response.status
        result.msg = response.reason
        return result

    return response  # PY3
193 
194 
class CurlHTTPHandler(HTTPHandler):
    """CURL HTTP handler."""

    def __init__(self, curl_command_name):
        # Initialize the base handler first; HTTPHandler.__init__ sets
        # `_debuglevel`, which the original code left undefined.  The
        # explicit base-class call (instead of super()) keeps PY2
        # compatibility with urllib2's old-style classes.
        HTTPHandler.__init__(self)
        self._curl_command_name = curl_command_name

    def http_open(self, req):
        return _handle_open_with_curl(self._curl_command_name, req)
203 
204 
if _HAS_SSL:
    class CurlHTTPSHandler(HTTPSHandler):
        """CURL HTTPS handler."""

        def __init__(self, curl_command_name):
            # Initialize the base handler first; HTTPSHandler.__init__ sets
            # `_debuglevel`, which the original code left undefined.  The
            # explicit base-class call (instead of super()) keeps PY2
            # compatibility with urllib2's old-style classes.
            HTTPSHandler.__init__(self)
            self._curl_command_name = curl_command_name

        def https_open(self, req):
            return _handle_open_with_curl(self._curl_command_name, req)
214 
215 
def load_auth_credentials_from_file(cookie_file):
    """Load credentials from an opened .gitcookies file.

    Args:
        cookie_file: An iterable of Netscape cookie-jar lines, e.g. an opened
            `.gitcookies` file.

    Returns:
        A dict mapping each cookie domain pattern to a (username, password)
        tuple extracted from the cookie's `name=value` field.
    """
    credentials = {}
    for line in cookie_file:
        # Drop the line terminator first; otherwise it would survive in the
        # last tab-separated field and corrupt the extracted password.
        line = line.rstrip('\r\n')

        # Cookies marked HttpOnly still carry valid credentials.
        if line.startswith('#HttpOnly_'):
            line = line[len('#HttpOnly_'):]

        # Skip blank lines and comments.
        if not line or line[0] == '#':
            continue

        # A cookie-jar record has exactly 7 tab-separated fields.
        row = line.split('\t')
        if len(row) != 7:
            continue

        domain = row[0]
        cookie = row[6]

        # The cookie field must look like `username=password`.
        sep = cookie.find('=')
        if sep == -1:
            continue
        username = cookie[0:sep]
        password = cookie[sep + 1:]

        credentials[domain] = (username, password)
    return credentials
241 
242 
def load_auth_credentials(cookie_file_path):
    """Parse the .gitcookies file at `cookie_file_path` into a credentials
    dict (domain pattern -> (username, password))."""
    with open(cookie_file_path, 'r') as opened_cookie_file:
        return load_auth_credentials_from_file(opened_cookie_file)
247 
248 
249 def _domain_matches(domain_name, domain_pattern):
250     """Returns whether `domain_name` matches `domain_pattern` under the
251     definition of RFC 6265 (Section 4.1.2.3 and 5.1.3).
252 
253     Pattern matching rule defined by Section 5.1.3:
254 
255         >>> _domain_matches('example.com', 'example.com')
256         True
257         >>> _domain_matches('a.example.com', 'example.com')
258         True
259         >>> _domain_matches('aaaexample.com', 'example.com')
260         False
261 
262     If the domain pattern starts with '.', '.' is ignored (Section 4.1.2.3):
263 
264         >>> _domain_matches('a.example.com', '.example.com')
265         True
266         >>> _domain_matches('example.com', '.example.com')
267         True
268 
269     See also:
270         https://datatracker.ietf.org/doc/html/rfc6265#section-4.1.2.3
271         https://datatracker.ietf.org/doc/html/rfc6265#section-5.1.3
272     """
273     domain_pattern = domain_pattern.removeprefix('.')
274     return (domain_name == domain_pattern or
275             (domain_name.endswith(domain_pattern) and
276              domain_name[-len(domain_pattern) - 1] == '.'))
277 
278 
def _find_auth_credentials(credentials, domain):
    """Return the first (username, password) entry whose cookie domain
    pattern matches `domain`.

    Raises:
        KeyError: If no stored pattern matches `domain`.
    """
    matches = (login for domain_pattern, login in credentials.items()
               if _domain_matches(domain, domain_pattern))
    try:
        return next(matches)
    except StopIteration:
        raise KeyError('Domain {} not found'.format(domain))
287 
288 
def create_url_opener(cookie_file_path, domain):
    """Load username and password from .gitcookies and return a URL opener with
    an authentication handler."""

    # Look up the login that matches the Gerrit domain.
    all_credentials = load_auth_credentials(cookie_file_path)
    username, password = _find_auth_credentials(all_credentials, domain)

    # Register the login with a basic-auth handler scoped to that domain.
    auth_handler = HTTPBasicAuthHandler()
    auth_handler.add_password(domain, domain, username, password)
    return build_opener(auth_handler)
301 
302 
def create_url_opener_from_args(args):
    """Create URL opener from command line arguments."""

    # When a curl command is given, delegate all transfers to curl.
    if args.use_curl:
        curl_handlers = [CurlHTTPHandler(args.use_curl)]
        if _HAS_SSL:
            curl_handlers.append(CurlHTTPSHandler(args.use_curl))
        return build_opener(*curl_handlers)

    domain = urlparse(args.gerrit).netloc

    try:
        return create_url_opener(args.gitcookies, domain)
    except KeyError:
        print('error: Cannot find the domain "{}" in "{}". '
              .format(domain, args.gitcookies), file=sys.stderr)
        print('error: Please check the Gerrit Code Review URL or follow the '
              'instructions in '
              'https://android.googlesource.com/platform/development/'
              '+/master/tools/repo_pull#installation', file=sys.stderr)
        sys.exit(1)
327 
328 
329 def _decode_xssi_json(data):
330     """Trim XSSI protector and decode JSON objects.
331 
332     Returns:
333         An object returned by json.loads().
334 
335     Raises:
336         ValueError: If data doesn't start with a XSSI token.
337         json.JSONDecodeError: If data failed to decode.
338     """
339 
340     # Decode UTF-8
341     data = data.decode('utf-8')
342 
343     # Trim cross site script inclusion (XSSI) protector
344     if data[0:4] != ')]}\'':
345         raise ValueError('unexpected responsed content: ' + data)
346     data = data[4:]
347 
348     # Parse JSON objects
349     return json.loads(data)
350 
351 
def _query_change_lists(url_opener, gerrit, query_string, start, count):
    """Query change lists from the Gerrit server with a single request.

    Issues exactly one query against the Gerrit REST API.  The server may
    return fewer changes than requested; callers should check the last
    record for the `_more_changes` attribute and issue further queries with
    an adjusted start index to page through the remainder.

    Args:
        url_opener:  URL opener for request
        gerrit: Gerrit server URL
        query_string: Gerrit query string to select changes
        start: Number of changes to be skipped from the beginning
        count: Maximum number of changes to return

    Returns:
        List of changes
    """
    query_params = urlencode([
        ('q', query_string),
        ('o', 'CURRENT_REVISION'),
        ('o', 'CURRENT_COMMIT'),
        ('start', str(start)),
        ('n', str(count)),
    ])
    url = gerrit + '/a/changes/?' + query_params

    response_file = url_opener.open(url)
    try:
        return _decode_xssi_json(response_file.read())
    finally:
        response_file.close()
385 
def query_change_lists(url_opener, gerrit, query_string, start, count):
    """Query change lists from the Gerrit server.

    Queries the Gerrit server for up to `count` changes matching
    `query_string`, transparently issuing follow-up requests when the server
    truncates a response, and returns the combined result.

    Args:
        url_opener:  URL opener for request
        gerrit: Gerrit server URL
        query_string: Gerrit query string to select changes
        start: Number of changes to be skipped from the beginning
        count: Maximum number of changes to return

    Returns:
        List of changes
    """
    results = []
    while len(results) < count:
        num_fetched = len(results)
        chunk = _query_change_lists(url_opener, gerrit, query_string,
                                    start + num_fetched,
                                    count - num_fetched)
        if not chunk:
            break

        results.extend(chunk)

        # Gerrit sets `_more_changes` on the last record when the result was
        # truncated by the query parameter or an internal server limit.
        # Stop iterating as soon as that marker is absent.
        if '_more_changes' not in chunk[-1]:
            break

    return results
419 
420 
def _make_json_post_request(url_opener, url, data, method='POST'):
    """Open an URL request and decode its response.

    Returns a 3-tuple of (code, body, json).
        code: A numerical value, the HTTP status code of the response.
        body: A bytes, the response body.
        json: An object, the parsed JSON response.
    """

    encoded_body = json.dumps(data).encode('utf-8')
    request = Request(
        url, encoded_body,
        {'Content-Type': 'application/json; charset=UTF-8'})
    # urllib picks the method from the data by default; force the one asked
    # for (PUT/DELETE/...).
    request.get_method = lambda: method

    try:
        response_file = url_opener.open(request)
    except HTTPError as error:
        # HTTPError doubles as a file-like response; read the error body.
        response_file = error

    with response_file:
        res_code = response_file.getcode()
        res_body = response_file.read()
        try:
            res_json = _decode_xssi_json(res_body)
        except ValueError:
            # The response isn't JSON if it doesn't start with a XSSI token.
            # Possibly a plain text error message or empty body.
            res_json = None
        return (res_code, res_body, res_json)
453 
454 
def set_review(url_opener, gerrit_url, change_id, labels, message):
    """Set review votes to a change list."""

    url = '{}/a/changes/{}/revisions/current/review'.format(
        gerrit_url, change_id)

    # Include only the fields that were actually provided.
    payload = {key: value
               for key, value in (('labels', labels), ('message', message))
               if value}

    return _make_json_post_request(url_opener, url, payload)
468 
469 
def submit(url_opener, gerrit_url, change_id):
    """Submit a change list."""

    endpoint = '{}/a/changes/{}/submit'.format(gerrit_url, change_id)
    return _make_json_post_request(url_opener, endpoint, {})
476 
477 
def abandon(url_opener, gerrit_url, change_id, message):
    """Abandon a change list."""

    endpoint = '{}/a/changes/{}/abandon'.format(gerrit_url, change_id)

    # Attach the abandon message only when one was given.
    payload = {'message': message} if message else {}

    return _make_json_post_request(url_opener, endpoint, payload)
488 
489 
def restore(url_opener, gerrit_url, change_id):
    """Restore a change list."""

    endpoint = '{}/a/changes/{}/restore'.format(gerrit_url, change_id)
    return _make_json_post_request(url_opener, endpoint, {})
496 
497 
def set_topic(url_opener, gerrit_url, change_id, name):
    """Set the topic name."""

    endpoint = '{}/a/changes/{}/topic'.format(gerrit_url, change_id)
    return _make_json_post_request(url_opener, endpoint, {'topic': name},
                                   method='PUT')
504 
505 
def delete_topic(url_opener, gerrit_url, change_id):
    """Delete the topic name."""

    endpoint = '{}/a/changes/{}/topic'.format(gerrit_url, change_id)
    return _make_json_post_request(url_opener, endpoint, {}, method='DELETE')
512 
513 
def set_hashtags(url_opener, gerrit_url, change_id, add_tags=None,
                 remove_tags=None):
    """Add or remove hash tags."""

    endpoint = '{}/a/changes/{}/hashtags'.format(gerrit_url, change_id)

    # Send only the operations that were requested.
    payload = {op: tags
               for op, tags in (('add', add_tags), ('remove', remove_tags))
               if tags}

    return _make_json_post_request(url_opener, endpoint, payload)
527 
528 
def add_reviewers(url_opener, gerrit_url, change_id, reviewers):
    """Add reviewers."""

    endpoint = '{}/a/changes/{}/revisions/current/review'.format(
        gerrit_url, change_id)

    # Send the reviewers list only when one was given.
    payload = {'reviewers': reviewers} if reviewers else {}

    return _make_json_post_request(url_opener, endpoint, payload)
540 
541 
def delete_reviewer(url_opener, gerrit_url, change_id, name):
    """Delete reviewer."""

    endpoint = '{}/a/changes/{}/reviewers/{}/delete'.format(
        gerrit_url, change_id, name)
    return _make_json_post_request(url_opener, endpoint, {})
549 
550 
def get_patch(url_opener, gerrit_url, change_id, revision_id='current'):
    """Download the patch file (Gerrit serves it base64-encoded)."""

    url = '{}/a/changes/{}/revisions/{}/patch'.format(
        gerrit_url, change_id, revision_id)

    response_file = url_opener.open(url)
    try:
        encoded_patch = response_file.read()
    finally:
        response_file.close()
    return base64.b64decode(encoded_patch)
562 
def find_gerrit_name():
    """Find the gerrit instance specified in the default remote.

    Runs `repo manifest` and returns the `review` URL of the remote that the
    manifest declares as its default.

    Raises:
        ValueError: If the manifest has no matching remote with a review URL.
    """
    raw_manifest_xml = run(['repo', 'manifest'], stdout=PIPE,
                           check=True).stdout

    manifest = xml.dom.minidom.parseString(raw_manifest_xml)
    default_remote_name = (
        manifest.getElementsByTagName('default')[0].getAttribute('remote'))
    for remote in manifest.getElementsByTagName('remote'):
        review_url = remote.getAttribute('review')
        if review_url and remote.getAttribute('name') == default_remote_name:
            return review_url.rstrip('/')

    raise ValueError('cannot find gerrit URL from manifest')
578 
def normalize_gerrit_name(gerrit):
    """Return `gerrit` without trailing slashes.

    Gerrit responds with 404 when a URL carries redundant trailing slashes,
    so they are stripped up front."""
    return gerrit.rstrip('/')
583 
def add_common_parse_args(parser):
    """Register the command line flags shared by the repo_pull tools."""
    parser.add_argument(
        'query', help='Change list query string')
    parser.add_argument(
        '-g', '--gerrit', help='Gerrit review URL')
    parser.add_argument(
        '--gitcookies', default=os.path.expanduser('~/.gitcookies'),
        help='Gerrit cookie file')
    parser.add_argument(
        '--limits', default=1000, type=int,
        help='Max number of change lists')
    parser.add_argument(
        '--start', default=0, type=int,
        help='Skip first N changes in query')
    parser.add_argument(
        '--use-curl',
        help='Send requests with the specified curl command (e.g. `curl`)')
597 
def _parse_args():
    """Parse command line options."""
    parser = argparse.ArgumentParser()
    add_common_parse_args(parser)
    parser.add_argument('--format', choices=['json', 'oneline'],
                        default='json', help='Print format')
    return parser.parse_args()
606 
def main():
    """Main function"""
    args = _parse_args()

    # Resolve the Gerrit URL, falling back to the repo manifest.
    if args.gerrit:
        args.gerrit = normalize_gerrit_name(args.gerrit)
    else:
        try:
            args.gerrit = find_gerrit_name()
        # pylint: disable=bare-except
        except:
            print('gerrit instance not found, use [-g GERRIT]')
            sys.exit(1)

    # Query change lists
    url_opener = create_url_opener_from_args(args)
    change_lists = query_change_lists(
        url_opener, args.gerrit, args.query, args.start, args.limits)

    # Print the result
    if args.format == 'json':
        json.dump(change_lists, sys.stdout, indent=4, separators=(', ', ': '))
        print()  # Print the end-of-line
    elif args.format == 'oneline':
        row_format = ('{i:<8} {number:<16} {status:<20} '
                      '{change_id:<60} {project:<120} '
                      '{subject}')
        for index, change in enumerate(change_lists):
            print(row_format.format(i=index,
                                    project=change['project'],
                                    change_id=change['change_id'],
                                    status=change['status'],
                                    number=change['_number'],
                                    subject=change['subject']))
640 
641 
# Script entry point: run only when executed directly, not when imported as
# a library.
if __name__ == '__main__':
    main()
644