#!/usr/bin/env python3

#
# Copyright (C) 2018 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

"""Gerrit RESTful API client library."""

from __future__ import print_function

import argparse
import base64
import json
import os
import sys
import xml.dom.minidom

try:
    import ssl
    _HAS_SSL = True
except ImportError:
    _HAS_SSL = False

try:
    # PY3
    from urllib.error import HTTPError
    from urllib.parse import urlencode, urlparse
    from urllib.request import (
        HTTPBasicAuthHandler, HTTPHandler, OpenerDirector, Request,
        build_opener
    )
    if _HAS_SSL:
        from urllib.request import HTTPSHandler
except ImportError:
    # PY2
    from urllib import urlencode
    from urllib2 import (
        HTTPBasicAuthHandler, HTTPError, HTTPHandler, OpenerDirector, Request,
        build_opener
    )
    if _HAS_SSL:
        from urllib2 import HTTPSHandler
    from urlparse import urlparse

try:
    from http.client import HTTPResponse
except ImportError:
    from httplib import HTTPResponse

try:
    from urllib import addinfourl
    _HAS_ADD_INFO_URL = True
except ImportError:
    _HAS_ADD_INFO_URL = False

try:
    from io import BytesIO
except ImportError:
    from StringIO import StringIO as BytesIO

try:
    # PY3.5
    from subprocess import PIPE, run
except ImportError:
    from subprocess import CalledProcessError, PIPE, Popen

    class CompletedProcess(object):
        """Process execution result returned by subprocess.run()."""
        # pylint: disable=too-few-public-methods

        def __init__(self, args, returncode, stdout, stderr):
            self.args = args
            self.returncode = returncode
            self.stdout = stdout
            self.stderr = stderr

    def run(*args, **kwargs):
        """Run a command with subprocess.Popen() and redirect input/output."""

        check = kwargs.pop('check', False)

        try:
            stdin = kwargs.pop('input')
            assert 'stdin' not in kwargs
            kwargs['stdin'] = PIPE
        except KeyError:
            stdin = None

        proc = Popen(*args, **kwargs)
        try:
            stdout, stderr = proc.communicate(stdin)
        except:
            proc.kill()
            proc.wait()
            raise
        returncode = proc.wait()

        if check and returncode:
            raise CalledProcessError(returncode, args, stdout)
        return CompletedProcess(args, returncode, stdout, stderr)


class CurlSocket(object):
    """A mock socket object that loads the response from a curl output file."""

    def __init__(self, file_obj):
        self._file_obj = file_obj

    def makefile(self, *args):
        return self._file_obj

    def close(self):
        self._file_obj = None


def _build_curl_command_for_request(curl_command_name, req):
    """Build the curl command line for an HTTP/HTTPS request."""

    cmd = [curl_command_name]

    # Adds `--no-progress-meter` to hide the progress bar.
    cmd.append('--no-progress-meter')

    # Adds `-i` to print the HTTP response headers to stdout.
    cmd.append('-i')

    # Uses HTTP 1.1.  The `http.client` module can only parse HTTP 1.1 headers.
    cmd.append('--http1.1')

    # Specifies the request method.
    cmd.append('-X')
    cmd.append(req.get_method())

    # Adds the request headers.
    for name, value in req.headers.items():
        cmd.append('-H')
        cmd.append(name + ': ' + value)

    # Adds the request data.
    if req.data:
        cmd.append('-d')
        cmd.append('@-')

    # Adds the request full URL.
    cmd.append(req.get_full_url())
    return cmd

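# For illustration only (hypothetical host and change number): a POST request
# to `https://gerrit.example.com/a/changes/12345/abandon` carrying a JSON body
# and a `Content-Type: application/json; charset=UTF-8` header is translated
# by the function above into roughly:
#
#   curl --no-progress-meter -i --http1.1 -X POST \
#       -H 'Content-Type: application/json; charset=UTF-8' \
#       -d @- \
#       https://gerrit.example.com/a/changes/12345/abandon
#
# The request body itself is piped to curl on stdin by the caller below.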

def _handle_open_with_curl(curl_command_name, req):
    """Send the HTTP request with CURL and return a response object that can be
    handled by urllib."""

    # Runs the curl command.
    cmd = _build_curl_command_for_request(curl_command_name, req)
    proc = run(cmd, stdout=PIPE, input=req.data, check=True)

    # Wraps the curl output with a socket-like object.
    outfile = BytesIO(proc.stdout)
    socket = CurlSocket(outfile)

    response = HTTPResponse(socket)
    try:
        # Parses the response header.
        response.begin()
    except:
        response.close()
        raise

    # Overrides `Transfer-Encoding: chunked` because curl combines chunks.
    response.chunked = False
    response.chunk_left = None

    if _HAS_ADD_INFO_URL:
        # PY2 urllib2 expects a different return object.
        result = addinfourl(outfile, response.msg, req.get_full_url())
        result.code = response.status
        result.msg = response.reason
        return result

    return response  # PY3


class CurlHTTPHandler(HTTPHandler):
    """CURL HTTP handler."""

    def __init__(self, curl_command_name):
        self._curl_command_name = curl_command_name

    def http_open(self, req):
        return _handle_open_with_curl(self._curl_command_name, req)


if _HAS_SSL:
    class CurlHTTPSHandler(HTTPSHandler):
        """CURL HTTPS handler."""

        def __init__(self, curl_command_name):
            self._curl_command_name = curl_command_name

        def https_open(self, req):
            return _handle_open_with_curl(self._curl_command_name, req)


def load_auth_credentials_from_file(cookie_file):
    """Load credentials from an opened .gitcookies file."""
    credentials = {}
    for line in cookie_file:
        # Strip the trailing newline so that it does not leak into the
        # password extracted below.
        line = line.rstrip('\r\n')

        if line.startswith('#HttpOnly_'):
            line = line[len('#HttpOnly_'):]

        if not line or line[0] == '#':
            continue

        row = line.split('\t')
        if len(row) != 7:
            continue

        domain = row[0]
        cookie = row[6]

        sep = cookie.find('=')
        if sep == -1:
            continue
        username = cookie[0:sep]
        password = cookie[sep + 1:]

        credentials[domain] = (username, password)
    return credentials

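# For illustration only (hypothetical domain and credentials): a .gitcookies
# entry is a line in the Netscape cookie file format, with fields separated by
# tabs (shown here with spaces), e.g.
#
#   gerrit.example.com  FALSE  /  TRUE  2147483647  o  git-alice=1//secret
#
# The parser above keeps field 0 (the domain) and field 6 (the cookie), and
# splits the cookie on its first '=', so the line yields
# {'gerrit.example.com': ('git-alice', '1//secret')}.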

def load_auth_credentials(cookie_file_path):
    """Load credentials from a .gitcookies file path."""
    with open(cookie_file_path, 'r') as cookie_file:
        return load_auth_credentials_from_file(cookie_file)


def _domain_matches(domain_name, domain_pattern):
    """Returns whether `domain_name` matches `domain_pattern` under the
    definition of RFC 6265 (Section 4.1.2.3 and 5.1.3).

    Pattern matching rule defined by Section 5.1.3:

        >>> _domain_matches('example.com', 'example.com')
        True
        >>> _domain_matches('a.example.com', 'example.com')
        True
        >>> _domain_matches('aaaexample.com', 'example.com')
        False

    If the domain pattern starts with '.', '.' is ignored (Section 4.1.2.3):

        >>> _domain_matches('a.example.com', '.example.com')
        True
        >>> _domain_matches('example.com', '.example.com')
        True

    See also:
        https://datatracker.ietf.org/doc/html/rfc6265#section-4.1.2.3
        https://datatracker.ietf.org/doc/html/rfc6265#section-5.1.3
    """
    # Strip the leading '.' without str.removeprefix() so that this module
    # keeps working on interpreters older than Python 3.9.
    if domain_pattern.startswith('.'):
        domain_pattern = domain_pattern[1:]
    return (domain_name == domain_pattern or
            (domain_name.endswith(domain_pattern) and
             domain_name[-len(domain_pattern) - 1] == '.'))


def _find_auth_credentials(credentials, domain):
    """Find the first set of login credentials (username, password)
    that `domain` matches.
    """
    for domain_pattern, login in credentials.items():
        if _domain_matches(domain, domain_pattern):
            return login
    raise KeyError('Domain {} not found'.format(domain))


def create_url_opener(cookie_file_path, domain):
    """Load username and password from .gitcookies and return a URL opener with
    an authentication handler."""

    # Load authentication credentials
    credentials = load_auth_credentials(cookie_file_path)
    username, password = _find_auth_credentials(credentials, domain)

    # Create URL opener with authentication handler
    auth_handler = HTTPBasicAuthHandler()
    auth_handler.add_password(domain, domain, username, password)
    return build_opener(auth_handler)

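# Minimal usage sketch (hypothetical host; assumes ~/.gitcookies has an entry
# for the domain):
#
#   opener = create_url_opener(os.path.expanduser('~/.gitcookies'),
#                              'gerrit.example.com')
#   response = opener.open('https://gerrit.example.com/a/changes/?q=status:open')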

def create_url_opener_from_args(args):
    """Create URL opener from command line arguments."""

    if args.use_curl:
        handlers = []
        handlers.append(CurlHTTPHandler(args.use_curl))
        if _HAS_SSL:
            handlers.append(CurlHTTPSHandler(args.use_curl))

        opener = build_opener(*handlers)
        return opener

    domain = urlparse(args.gerrit).netloc

    try:
        return create_url_opener(args.gitcookies, domain)
    except KeyError:
        print('error: Cannot find the domain "{}" in "{}". '
              .format(domain, args.gitcookies), file=sys.stderr)
        print('error: Please check the Gerrit Code Review URL or follow the '
              'instructions in '
              'https://android.googlesource.com/platform/development/'
              '+/master/tools/repo_pull#installation', file=sys.stderr)
        sys.exit(1)


def _decode_xssi_json(data):
    """Trim XSSI protector and decode JSON objects.

    Returns:
        An object returned by json.loads().

    Raises:
        ValueError: If data doesn't start with an XSSI token.
        json.JSONDecodeError: If data failed to decode.
    """

    # Decode UTF-8
    data = data.decode('utf-8')

    # Trim cross site script inclusion (XSSI) protector
    if data[0:4] != ')]}\'':
        raise ValueError('unexpected response content: ' + data)
    data = data[4:]

    # Parse JSON objects
    return json.loads(data)

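# For illustration (hypothetical payload): Gerrit prefixes JSON responses with
# the XSSI protector `)]}'`, so a raw response body such as
#
#   )]}'
#   {"username": "alice"}
#
# is decoded by the function above into {'username': 'alice'}.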

def _query_change_lists(url_opener, gerrit, query_string, start, count):
    """Query change lists from the Gerrit server with a single request.

    This function performs a single query of the Gerrit server based on the
    input parameters for a list of changes.  The server may return fewer
    changes than requested.  The caller should check the last record returned
    for the _more_changes attribute to determine whether more changes are
    available and perform additional queries adjusting the start index.

    Args:
        url_opener: URL opener for the request
        gerrit: Gerrit server URL
        query_string: Gerrit query string to select changes
        start: Number of changes to be skipped from the beginning
        count: Maximum number of changes to return

    Returns:
        List of changes
    """
    data = [
        ('q', query_string),
        ('o', 'CURRENT_REVISION'),
        ('o', 'CURRENT_COMMIT'),
        ('start', str(start)),
        ('n', str(count)),
    ]
    url = gerrit + '/a/changes/?' + urlencode(data)

    response_file = url_opener.open(url)
    try:
        return _decode_xssi_json(response_file.read())
    finally:
        response_file.close()

def query_change_lists(url_opener, gerrit, query_string, start, count):
    """Query change lists from the Gerrit server.

    This function queries the Gerrit server based on the input parameters for a
    list of changes.  This function handles querying the server multiple times
    if necessary and combining the results that are returned to the caller.

    Args:
        url_opener: URL opener for the request
        gerrit: Gerrit server URL
        query_string: Gerrit query string to select changes
        start: Number of changes to be skipped from the beginning
        count: Maximum number of changes to return

    Returns:
        List of changes
    """
    changes = []
    while len(changes) < count:
        chunk = _query_change_lists(url_opener, gerrit, query_string,
                                    start + len(changes), count - len(changes))
        if not chunk:
            break

        changes += chunk

        # The last change object contains a _more_changes attribute if the
        # number of changes exceeds the query parameter or the internal server
        # limit.  Stop iteration if the `_more_changes` attribute doesn't
        # exist.
        if '_more_changes' not in chunk[-1]:
            break

    return changes

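# Minimal usage sketch (hypothetical host and query), assuming `url_opener`
# came from create_url_opener_from_args():
#
#   changes = query_change_lists(url_opener, 'https://gerrit.example.com',
#                                'status:open project:platform/build', 0, 100)
#
# The loop above keeps issuing paginated requests until either `count` changes
# have been collected or the last chunk lacks the `_more_changes` marker.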

def _make_json_post_request(url_opener, url, data, method='POST'):
    """Open a URL request and decode its response.

    Returns a 3-tuple of (code, body, json).
        code: A numerical value, the HTTP status code of the response.
        body: A bytes object, the raw response body.
        json: An object, the parsed JSON response, or None if the body is not
            JSON.
    """

    data = json.dumps(data).encode('utf-8')
    headers = {
        'Content-Type': 'application/json; charset=UTF-8',
    }

    request = Request(url, data, headers)
    request.get_method = lambda: method

    try:
        response_file = url_opener.open(request)
    except HTTPError as error:
        response_file = error

    with response_file:
        res_code = response_file.getcode()
        res_body = response_file.read()
        try:
            res_json = _decode_xssi_json(res_body)
        except ValueError:
            # The response isn't JSON if it doesn't start with an XSSI token.
            # Possibly a plain text error message or empty body.
            res_json = None
        return (res_code, res_body, res_json)


def set_review(url_opener, gerrit_url, change_id, labels, message):
    """Set review votes to a change list."""

    url = '{}/a/changes/{}/revisions/current/review'.format(
        gerrit_url, change_id)

    data = {}
    if labels:
        data['labels'] = labels
    if message:
        data['message'] = message

    return _make_json_post_request(url_opener, url, data)

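# Minimal usage sketch (hypothetical host, change number, and votes); `labels`
# maps Gerrit label names to numeric votes, as in the Gerrit "set review" API:
#
#   set_review(url_opener, 'https://gerrit.example.com', '12345',
#              {'Code-Review': 2, 'Verified': 1}, 'LGTM')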

def submit(url_opener, gerrit_url, change_id):
    """Submit a change list."""

    url = '{}/a/changes/{}/submit'.format(gerrit_url, change_id)

    return _make_json_post_request(url_opener, url, {})


def abandon(url_opener, gerrit_url, change_id, message):
    """Abandon a change list."""

    url = '{}/a/changes/{}/abandon'.format(gerrit_url, change_id)

    data = {}
    if message:
        data['message'] = message

    return _make_json_post_request(url_opener, url, data)


def restore(url_opener, gerrit_url, change_id):
    """Restore a change list."""

    url = '{}/a/changes/{}/restore'.format(gerrit_url, change_id)

    return _make_json_post_request(url_opener, url, {})


def delete(url_opener, gerrit_url, change_id):
    """Delete a change list."""

    url = '{}/a/changes/{}'.format(gerrit_url, change_id)

    return _make_json_post_request(url_opener, url, {}, method='DELETE')


def set_topic(url_opener, gerrit_url, change_id, name):
    """Set the topic name."""

    url = '{}/a/changes/{}/topic'.format(gerrit_url, change_id)
    data = {'topic': name}
    return _make_json_post_request(url_opener, url, data, method='PUT')


def delete_topic(url_opener, gerrit_url, change_id):
    """Delete the topic name."""

    url = '{}/a/changes/{}/topic'.format(gerrit_url, change_id)

    return _make_json_post_request(url_opener, url, {}, method='DELETE')


def set_hashtags(url_opener, gerrit_url, change_id, add_tags=None,
                 remove_tags=None):
    """Add or remove hash tags."""

    url = '{}/a/changes/{}/hashtags'.format(gerrit_url, change_id)

    data = {}
    if add_tags:
        data['add'] = add_tags
    if remove_tags:
        data['remove'] = remove_tags

    return _make_json_post_request(url_opener, url, data)

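# Minimal usage sketch (hypothetical host, change number, and tags); both
# arguments take lists of hashtag strings:
#
#   set_hashtags(url_opener, 'https://gerrit.example.com', '12345',
#                add_tags=['urgent'], remove_tags=['wip'])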

def add_reviewers(url_opener, gerrit_url, change_id, reviewers):
    """Add reviewers."""

    url = '{}/a/changes/{}/revisions/current/review'.format(
        gerrit_url, change_id)

    data = {}
    if reviewers:
        data['reviewers'] = reviewers

    return _make_json_post_request(url_opener, url, data)


def delete_reviewer(url_opener, gerrit_url, change_id, name):
    """Delete reviewer."""

    url = '{}/a/changes/{}/reviewers/{}/delete'.format(
        gerrit_url, change_id, name)

    return _make_json_post_request(url_opener, url, {})


def get_patch(url_opener, gerrit_url, change_id, revision_id='current'):
    """Download the patch file."""

    url = '{}/a/changes/{}/revisions/{}/patch'.format(
        gerrit_url, change_id, revision_id)

    response_file = url_opener.open(url)
    try:
        return base64.b64decode(response_file.read())
    finally:
        response_file.close()

def find_gerrit_name():
    """Find the gerrit instance specified in the default remote."""
    manifest_cmd = ['repo', 'manifest']
    raw_manifest_xml = run(manifest_cmd, stdout=PIPE, check=True).stdout

    manifest_xml = xml.dom.minidom.parseString(raw_manifest_xml)
    default_remote = manifest_xml.getElementsByTagName('default')[0]
    default_remote_name = default_remote.getAttribute('remote')
    for remote in manifest_xml.getElementsByTagName('remote'):
        name = remote.getAttribute('name')
        review = remote.getAttribute('review')
        if review and name == default_remote_name:
            return review.rstrip('/')

    raise ValueError('cannot find gerrit URL from manifest')

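# For illustration of find_gerrit_name() above (hypothetical remote): given a
# `repo manifest` output containing
#
#   <remote name="aosp" fetch=".." review="https://android-review.googlesource.com/" />
#   <default remote="aosp" revision="main" />
#
# the function returns 'https://android-review.googlesource.com'.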
def normalize_gerrit_name(gerrit):
    """Strip the trailing slashes because Gerrit will return 404 when there are
    redundant trailing slashes."""
    return gerrit.rstrip('/')

def add_common_parse_args(parser):
    parser.add_argument('query', help='Change list query string')
    parser.add_argument('-g', '--gerrit', help='Gerrit review URL')
    parser.add_argument('--gitcookies',
                        default=os.path.expanduser('~/.gitcookies'),
                        help='Gerrit cookie file')
    parser.add_argument('--limits', default=1000, type=int,
                        help='Max number of change lists')
    parser.add_argument('--start', default=0, type=int,
                        help='Skip first N changes in query')
    parser.add_argument(
        '--use-curl',
        help='Send requests with the specified curl command (e.g. `curl`)')

def _parse_args():
    """Parse command line options."""
    parser = argparse.ArgumentParser()
    add_common_parse_args(parser)
    parser.add_argument('--format', default='json',
                        choices=['json', 'oneline', 'porcelain'],
                        help='Print format')
    return parser.parse_args()

def main():
    """Main function"""
    args = _parse_args()

    if args.gerrit:
        args.gerrit = normalize_gerrit_name(args.gerrit)
    else:
        try:
            args.gerrit = find_gerrit_name()
        # pylint: disable=bare-except
        except:
            print('gerrit instance not found, use [-g GERRIT]',
                  file=sys.stderr)
            sys.exit(1)

    # Query change lists
    url_opener = create_url_opener_from_args(args)
    change_lists = query_change_lists(
        url_opener, args.gerrit, args.query, args.start, args.limits)

    # Print the result
    if args.format == 'json':
        json.dump(change_lists, sys.stdout, indent=4, separators=(', ', ': '))
        print()  # Print the end-of-line
    else:
        if args.format == 'oneline':
            format_str = ('{i:<8} {number:<16} {status:<20} '
                          '{change_id:<60} {project:<120} '
                          '{subject}')
        else:
            format_str = ('{i}\t{number}\t{status}\t'
                          '{change_id}\t{project}\t{subject}')

        for i, change in enumerate(change_lists):
            print(format_str.format(i=i,
                                    project=change['project'],
                                    change_id=change['change_id'],
                                    status=change['status'],
                                    number=change['_number'],
                                    subject=change['subject']))


if __name__ == '__main__':
    main()

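# Example invocation (hypothetical host, query, and script name):
#
#   python3 gerrit.py 'status:open project:platform/build' \
#       -g https://gerrit.example.com --limits 50 --format oneline
#
# Without -g, the Gerrit URL is looked up from the default remote in the repo
# manifest; without --use-curl, requests go through urllib with credentials
# loaded from ~/.gitcookies.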