#!/usr/bin/env python3

#
# Copyright (C) 2018 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

"""Gerrit Restful API client library."""

from __future__ import print_function

import argparse
import base64
import json
import os
import sys

try:
    from urllib.request import (
        HTTPBasicAuthHandler, Request, build_opener)  # PY3
except ImportError:
    from urllib2 import (
        HTTPBasicAuthHandler, Request, build_opener)  # PY2

try:
    # pylint: disable=ungrouped-imports
    from urllib.parse import urlencode, urlparse  # PY3
except ImportError:
    # pylint: disable=ungrouped-imports
    from urllib import urlencode  # PY2
    from urlparse import urlparse  # PY2


def load_auth_credentials_from_file(cookie_file):
    """Load credentials from an opened .gitcookies file."""
    credentials = {}
    for line in cookie_file:
        if line.startswith('#HttpOnly_'):
            line = line[len('#HttpOnly_'):]

        if not line or line[0] == '#':
            continue

        row = line.split('\t')
        if len(row) != 7:
            continue

        domain = row[0]
        cookie = row[6]

        sep = cookie.find('=')
        if sep == -1:
            continue
        username = cookie[0:sep]
        password = cookie[sep + 1:]

        credentials[domain] = (username, password)
    return credentials


def load_auth_credentials(cookie_file_path):
    """Load credentials from a .gitcookies file path."""
    with open(cookie_file_path, 'r') as cookie_file:
        return load_auth_credentials_from_file(cookie_file)


def create_url_opener(cookie_file_path, domain):
    """Load username and password from .gitcookies and return a URL opener with
    an authentication handler."""

    # Load authentication credentials
    credentials = load_auth_credentials(cookie_file_path)
    username, password = credentials[domain]

    # Create URL opener with authentication handler
    auth_handler = HTTPBasicAuthHandler()
    auth_handler.add_password(domain, domain, username, password)
    return build_opener(auth_handler)


def create_url_opener_from_args(args):
    """Create URL opener from command line arguments."""

    domain = urlparse(args.gerrit).netloc

    try:
        return create_url_opener(args.gitcookies, domain)
    except KeyError:
        print('error: Cannot find the domain "{}" in "{}".'
              .format(domain, args.gitcookies), file=sys.stderr)
        print('error: Please check the Gerrit Code Review URL or follow the '
              'instructions in '
              'https://android.googlesource.com/platform/development/'
              '+/master/tools/repo_pull#installation', file=sys.stderr)
        sys.exit(1)
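

# For reference, the credential helpers above expect ~/.gitcookies entries in
# the Netscape cookie file format: seven TAB-separated fields per line
# (domain, subdomain flag, path, secure flag, expiry, cookie name, cookie
# value).  A hypothetical entry (made-up values, <TAB> marks a real tab) is:
#
#   .googlesource.com<TAB>TRUE<TAB>/<TAB>TRUE<TAB>2147483647<TAB>o<TAB>git-user.example.com=1//0secret-token
#
# load_auth_credentials_from_file() keeps field 0 as the domain and splits
# field 6 on its first '=' into the (username, password) pair that
# create_url_opener() registers for HTTP basic authentication; the pair is
# looked up by the exact host name taken from the --gerrit URL.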


def _decode_xssi_json(data):
    """Trim XSSI protector and decode JSON objects."""

    # Decode UTF-8
    data = data.decode('utf-8')

    # Trim cross site script inclusion (XSSI) protector
    if data[0:4] != ')]}\'':
        raise ValueError('unexpected response content: ' + data)
    data = data[4:]

    # Parse JSON objects
    return json.loads(data)


def query_change_lists(url_opener, gerrit, query_string, limits):
    """Query change lists."""
    data = [
        ('q', query_string),
        ('o', 'CURRENT_REVISION'),
        ('o', 'CURRENT_COMMIT'),
        ('n', str(limits)),
    ]
    url = gerrit + '/a/changes/?' + urlencode(data)

    response_file = url_opener.open(url)
    try:
        return _decode_xssi_json(response_file.read())
    finally:
        response_file.close()


def _make_json_post_request(url_opener, url, data, method='POST'):
    """Send a JSON request body and return the (HTTP status, decoded JSON) pair."""
    data = json.dumps(data).encode('utf-8')
    headers = {
        'Content-Type': 'application/json; charset=UTF-8',
    }

    request = Request(url, data, headers)
    request.get_method = lambda: method
    response_file = url_opener.open(request)
    try:
        res_code = response_file.getcode()
        res_json = _decode_xssi_json(response_file.read())
        return (res_code, res_json)
    finally:
        response_file.close()


def set_review(url_opener, gerrit_url, change_id, labels, message):
    """Set review votes to a change list."""

    url = '{}/a/changes/{}/revisions/current/review'.format(
        gerrit_url, change_id)

    data = {}
    if labels:
        data['labels'] = labels
    if message:
        data['message'] = message

    return _make_json_post_request(url_opener, url, data)


def abandon(url_opener, gerrit_url, change_id, message):
    """Abandon a change list."""

    url = '{}/a/changes/{}/abandon'.format(gerrit_url, change_id)

    data = {}
    if message:
        data['message'] = message

    return _make_json_post_request(url_opener, url, data)


def set_topic(url_opener, gerrit_url, change_id, name):
    """Set the topic name."""

    url = '{}/a/changes/{}/topic'.format(gerrit_url, change_id)
    data = {'topic': name}
    return _make_json_post_request(url_opener, url, data, method='PUT')


def delete_topic(url_opener, gerrit_url, change_id):
    """Delete the topic name."""

    url = '{}/a/changes/{}/topic'.format(gerrit_url, change_id)
    request = Request(url)
    request.get_method = lambda: 'DELETE'
    response_file = url_opener.open(request)
    try:
        return (response_file.getcode(), response_file.read())
    finally:
        response_file.close()


def set_hashtags(url_opener, gerrit_url, change_id, add_tags=None,
                 remove_tags=None):
    """Add or remove hash tags."""

    url = '{}/a/changes/{}/hashtags'.format(gerrit_url, change_id)

    data = {}
    if add_tags:
        data['add'] = add_tags
    if remove_tags:
        data['remove'] = remove_tags

    return _make_json_post_request(url_opener, url, data)
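

# Illustrative use of the write helpers above (the host and change number are
# made-up examples): voting Code-Review +2 with a comment on a change,
# assuming `opener` came from create_url_opener_from_args():
#
#   set_review(opener, 'https://android-review.googlesource.com', '123456',
#              {'Code-Review': 2}, 'Looks good to me')
#
# This issues POST /a/changes/123456/revisions/current/review with the JSON
# body {"labels": {"Code-Review": 2}, "message": "Looks good to me"} and
# returns the (HTTP status, decoded JSON) pair from _make_json_post_request().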


def get_patch(url_opener, gerrit_url, change_id, revision_id='current'):
    """Download the patch file."""

    url = '{}/a/changes/{}/revisions/{}/patch'.format(
        gerrit_url, change_id, revision_id)

    response_file = url_opener.open(url)
    try:
        return base64.b64decode(response_file.read())
    finally:
        response_file.close()


def _parse_args():
    """Parse command line options."""
    parser = argparse.ArgumentParser()

    parser.add_argument('query', help='Change list query string')
    parser.add_argument('-g', '--gerrit', required=True,
                        help='Gerrit review URL')

    parser.add_argument('--gitcookies',
                        default=os.path.expanduser('~/.gitcookies'),
                        help='Gerrit cookie file')
    parser.add_argument('--limits', default=1000,
                        help='Max number of change lists')

    return parser.parse_args()


def main():
    """Main function."""
    args = _parse_args()

    # Query change lists
    url_opener = create_url_opener_from_args(args)
    change_lists = query_change_lists(
        url_opener, args.gerrit, args.query, args.limits)

    # Print the result
    json.dump(change_lists, sys.stdout, indent=4, separators=(', ', ': '))
    print()  # Print the end-of-line


if __name__ == '__main__':
    main()
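
# Example invocation (illustrative; assumes this script is saved as gerrit.py
# and that ~/.gitcookies holds credentials for the chosen host):
#
#   ./gerrit.py --gerrit https://android-review.googlesource.com \
#       --limits 10 'status:open owner:self'
#
# The matching change lists are printed to stdout as indented JSON.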