#!/usr/bin/env python2.7
# Copyright 2017 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Run tests using docker images in Google Container Registry per matrix."""

from __future__ import print_function

import argparse
import atexit
import json
import multiprocessing
import os
import re
import subprocess
import sys
import uuid

# Language Runtime Matrix
import client_matrix

python_util_dir = os.path.abspath(
    os.path.join(os.path.dirname(__file__), '../run_tests/python_utils'))
sys.path.append(python_util_dir)
import dockerjob
import jobset
import report_utils
import upload_test_results

_LANGUAGES = client_matrix.LANG_RUNTIME_MATRIX.keys()
# All gRPC release tags, flattened, deduped and sorted.
_RELEASES = sorted(
    list(
        set(
            client_matrix.get_release_tag_name(info)
            for lang in client_matrix.LANG_RELEASE_MATRIX.values()
            for info in lang)))
_TEST_TIMEOUT = 60

argp = argparse.ArgumentParser(description='Run interop tests.')
argp.add_argument('-j', '--jobs', default=multiprocessing.cpu_count(), type=int)
argp.add_argument(
    '--gcr_path',
    default='gcr.io/grpc-testing',
    help='Path of docker images in Google Container Registry')
argp.add_argument(
    '--release',
    default='all',
    choices=['all', 'master'] + _RELEASES,
    help='Release tags to test. When testing all '
    'releases defined in client_matrix.py, use "all".')
argp.add_argument(
    '-l',
    '--language',
    choices=['all'] + sorted(_LANGUAGES),
    nargs='+',
    default=['all'],
    help='Languages to test')
argp.add_argument(
    '--keep',
    action='store_true',
    help='Keep the created local images after finishing the tests.')
argp.add_argument(
    '--report_file', default='report.xml', help='The result file to create.')
argp.add_argument(
    '--allow_flakes',
    default=False,
    action='store_const',
    const=True,
    help=('Allow flaky tests to show as passing (re-runs failed '
          'tests up to five times)'))
argp.add_argument(
    '--bq_result_table',
    default='',
    type=str,
    nargs='?',
    help='Upload test results to a specified BQ table.')
argp.add_argument(
    '--server_host',
    default='74.125.206.210',
    type=str,
    nargs='?',
    help='The gateway to backend services.')

args = argp.parse_args()

print(str(args))


def find_all_images_for_lang(lang):
    """Find docker images for a language across releases and runtimes.

    Returns a dictionary of lists of (<tag>, <image-full-path>) tuples,
    keyed by runtime.
    """
    # Find all defined releases.
    if args.release == 'all':
        releases = ['master'] + client_matrix.get_release_tags(lang)
    else:
        # Look for a particular release.
        if args.release not in (['master'] +
                                client_matrix.get_release_tags(lang)):
            jobset.message(
                'SKIPPED',
                '%s for %s is not defined' % (args.release, lang),
                do_newline=True)
            return {}
        releases = [args.release]

    # TODO(jtattermusch): why do we need to query the existing images/tags?
    # From LANG_RUNTIME_MATRIX and LANG_RELEASE_MATRIX it should be obvious
    # which tags we want to test - and it should be an error if they are
    # missing.
    # Image tuples keyed by runtime.
    images = {}
    for runtime in client_matrix.LANG_RUNTIME_MATRIX[lang]:
        image_path = '%s/grpc_interop_%s' % (args.gcr_path, runtime)
        output = subprocess.check_output([
            'gcloud', 'beta', 'container', 'images', 'list-tags',
            '--format=json', image_path
        ])
        docker_image_list = json.loads(output)
        # All images should have a single tag or no tag.
        # TODO(adelez): Remove tagless images.
        tags = [i['tags'][0] for i in docker_image_list if i['tags']]
        jobset.message(
            'START',
            'Found images for %s: %s' % (image_path, tags),
            do_newline=True)
        skipped = len(docker_image_list) - len(tags)
        jobset.message(
            'SKIPPED',
            'Skipped images (no-tag/unknown-tag): %d' % skipped,
            do_newline=True)
        # Filter tags based on the releases.
        images[runtime] = [(tag, '%s:%s' % (image_path, tag))
                           for tag in tags
                           if tag in releases]
    return images


# Test cases (lists of JobSpec) are loaded from the pre-generated files under
# testcases/, selected by lang/runtime and release.
def find_test_cases(lang, runtime, release, suite_name):
    """Returns the list of test cases from testcase files per lang/release."""
    testcase_dir = os.path.join(os.path.dirname(__file__), 'testcases')
    filename_prefix = lang
    if lang == 'csharp':
        filename_prefix = runtime
    # Check to see if we need to use a particular version of test cases.
    lang_version = '%s_%s' % (filename_prefix, release)
    if lang_version in client_matrix.TESTCASES_VERSION_MATRIX:
        testcases = os.path.join(
            testcase_dir, client_matrix.TESTCASES_VERSION_MATRIX[lang_version])
    else:
        testcases = os.path.join(testcase_dir, '%s__master' % filename_prefix)

    job_spec_list = []
    try:
        with open(testcases) as f:
            # Only lines starting with 'docker run' are test cases.
            for line in f.readlines():
                if line.startswith('docker run'):
                    m = re.search('--test_case=(.*)"', line)
                    shortname = m.group(1) if m else 'unknown_test'
                    m = re.search(
                        '--server_host_override=(.*).sandbox.googleapis.com',
                        line)
                    server = m.group(1) if m else 'unknown_server'

                    # If the server_host arg is set, replace the original
                    # server_host with the one provided, or append it to the
                    # end of the command if server_host does not appear
                    # originally.
                    if args.server_host:
                        if line.find('--server_host=') > -1:
                            line = re.sub(
                                '--server_host=[^ ]*',
                                '--server_host=%s' % args.server_host, line)
                        else:
                            line = '%s --server_host=%s"' % (line[:-1],
                                                             args.server_host)
                        print(line)

                    spec = jobset.JobSpec(
                        cmdline=line,
                        shortname='%s:%s:%s:%s' % (suite_name, lang, server,
                                                   shortname),
                        timeout_seconds=_TEST_TIMEOUT,
                        shell=True,
                        flake_retries=5 if args.allow_flakes else 0)
                    job_spec_list.append(spec)
        jobset.message(
            'START',
            'Loaded %s tests from %s' % (len(job_spec_list), testcases),
            do_newline=True)
    except IOError as err:
        jobset.message('FAILED', err, do_newline=True)
    return job_spec_list


_xml_report_tree = report_utils.new_junit_xml_tree()


def run_tests_for_lang(lang, runtime, images):
    """Find and run all test cases for a language.

    images is a list of (<release-tag>, <image-full-path>) tuples.
    """
    total_num_failures = 0
    for image_tuple in images:
        release, image = image_tuple
        jobset.message('START', 'Testing %s' % image, do_newline=True)
        # Download the docker image before running each test case.
        subprocess.check_call(['gcloud', 'docker', '--', 'pull', image])
        suite_name = '%s__%s_%s' % (lang, runtime, release)
        job_spec_list = find_test_cases(lang, runtime, release, suite_name)

        if not job_spec_list:
            jobset.message(
                'FAILED', 'No test cases were found.', do_newline=True)
            return 1

        num_failures, resultset = jobset.run(
            job_spec_list,
            newline_on_success=True,
            add_env={'docker_image': image},
            maxjobs=args.jobs)
        if args.bq_result_table and resultset:
            upload_test_results.upload_interop_results_to_bq(
                resultset, args.bq_result_table)
        if num_failures:
            jobset.message('FAILED', 'Some tests failed', do_newline=True)
            total_num_failures += num_failures
        else:
            jobset.message('SUCCESS', 'All tests passed', do_newline=True)

        report_utils.append_junit_xml_results(_xml_report_tree, resultset,
                                              'grpc_interop_matrix',
                                              suite_name, str(uuid.uuid4()))

        if not args.keep:
            cleanup(image)

    return total_num_failures


def cleanup(image):
    jobset.message('START', 'Cleanup docker image %s' % image, do_newline=True)
    dockerjob.remove_image(image, skip_nonexistent=True)


languages = args.language if args.language != ['all'] else _LANGUAGES
total_num_failures = 0
for lang in languages:
    docker_images = find_all_images_for_lang(lang)
    for runtime in sorted(docker_images.keys()):
        total_num_failures += run_tests_for_lang(lang, runtime,
                                                 docker_images[runtime])

report_utils.create_xml_report_file(_xml_report_tree, args.report_file)

if total_num_failures:
    sys.exit(1)
sys.exit(0)
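
# Example invocation (a sketch, not part of the original tooling): the flags
# are the ones defined by the argparse configuration above, while the script
# name, language list, and report path are illustrative placeholders.
#
#   $ python run_interop_matrix_tests.py \
#       --gcr_path=gcr.io/grpc-testing \
#       --language go python \
#       --release=master \
#       --report_file=interop_matrix_report.xml
#
# With the default --release=all, every release tag defined in
# client_matrix.py is tested in addition to master.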