#!/usr/bin/env python
# Copyright 2017 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Helper to upload Jenkins test results to BQ"""

from __future__ import print_function

import os
import six
import sys
import time
import uuid

gcp_utils_dir = os.path.abspath(
    os.path.join(os.path.dirname(__file__), '../../gcp/utils'))
sys.path.append(gcp_utils_dir)
import big_query_utils

_DATASET_ID = 'jenkins_test_results'
_DESCRIPTION = 'Test results from master job run on Jenkins'
# 365 days in milliseconds
_EXPIRATION_MS = 365 * 24 * 60 * 60 * 1000
_PARTITION_TYPE = 'DAY'
_PROJECT_ID = 'grpc-testing'
_RESULTS_SCHEMA = [
    ('job_name', 'STRING', 'Name of Jenkins job'),
    ('build_id', 'INTEGER', 'Build ID of Jenkins job'),
    ('build_url', 'STRING', 'URL of Jenkins job'),
    ('test_name', 'STRING', 'Individual test name'),
    ('language', 'STRING', 'Language of test'),
    ('platform', 'STRING', 'Platform used for test'),
    ('config', 'STRING', 'Config used for test'),
    ('compiler', 'STRING', 'Compiler used for test'),
    ('iomgr_platform', 'STRING', 'Iomgr used for test'),
    ('result', 'STRING', 'Test result: PASSED, TIMEOUT, FAILED, or SKIPPED'),
    ('timestamp', 'TIMESTAMP', 'Timestamp of test run'),
    ('elapsed_time', 'FLOAT', 'How long test took to run'),
    ('cpu_estimated', 'FLOAT', 'Estimated CPU usage of test'),
    ('cpu_measured', 'FLOAT', 'Actual CPU usage of test'),
    ('return_code', 'INTEGER', 'Exit code of test'),
]
_INTEROP_RESULTS_SCHEMA = [
    ('job_name', 'STRING', 'Name of Jenkins/Kokoro job'),
    ('build_id', 'INTEGER', 'Build ID of Jenkins/Kokoro job'),
    ('build_url', 'STRING', 'URL of Jenkins/Kokoro job'),
    ('test_name', 'STRING',
     'Unique test name combining client, server, and test_name'),
    ('suite', 'STRING',
     'Test suite: cloud_to_cloud, cloud_to_prod, or cloud_to_prod_auth'),
    ('client', 'STRING', 'Client language'),
    ('server', 'STRING', 'Server host name'),
    ('test_case', 'STRING', 'Name of test case'),
    ('result', 'STRING', 'Test result: PASSED, TIMEOUT, FAILED, or SKIPPED'),
    ('timestamp', 'TIMESTAMP', 'Timestamp of test run'),
    ('elapsed_time', 'FLOAT', 'How long test took to run'),
]
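
# Each schema entry is a (name, type, description) tuple; big_query_utils
# uses these to build the field definitions when the partitioned tables are
# created below.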


def _get_build_metadata(test_results):
    """Add Kokoro build metadata to test_results based on environment
    variables set by Kokoro.
    """
    build_id = os.getenv('KOKORO_BUILD_NUMBER')
    # Only construct build_url when KOKORO_BUILD_ID is set; otherwise the
    # URL would end in the literal string 'None' and the guard below would
    # never skip it.
    kokoro_build_id = os.getenv('KOKORO_BUILD_ID')
    build_url = ('https://source.cloud.google.com/results/invocations/%s' %
                 kokoro_build_id) if kokoro_build_id else None
    job_name = os.getenv('KOKORO_JOB_NAME')

    if build_id:
        test_results['build_id'] = build_id
    if build_url:
        test_results['build_url'] = build_url
    if job_name:
        test_results['job_name'] = job_name


def _insert_rows_with_retries(bq, bq_table, bq_rows):
    """Insert rows into a BQ table, retrying on error."""
    # BigQuery sometimes fails with large uploads, so batch 1,000 rows at a time.
    # Use floor division so the batch count is an int on both Python 2 and 3.
    for i in range((len(bq_rows) // 1000) + 1):
        max_retries = 3
        for attempt in range(max_retries):
            if big_query_utils.insert_rows(bq, _PROJECT_ID, _DATASET_ID,
                                           bq_table,
                                           bq_rows[i * 1000:(i + 1) * 1000]):
                break
            else:
                if attempt < max_retries - 1:
                    print('Error uploading result to bigquery, will retry.')
                else:
                    print(
                        'Error uploading result to bigquery, all attempts failed.'
                    )
                    sys.exit(1)

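
# Example of the batching above: with 2,500 rows the loop issues three insert
# calls over slices [0:1000], [1000:2000], and [2000:3000] (the last stopping
# at row 2,499). When len(bq_rows) is an exact multiple of 1,000, the final
# slice is empty.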


def upload_results_to_bq(resultset, bq_table, extra_fields):
    """Upload test results to a BQ table.

    Args:
        resultset: dictionary generated by jobset.run
        bq_table: string name of table to create/upload results to in BQ
        extra_fields: dict with extra values that will be uploaded along with the results
    """
    bq = big_query_utils.create_big_query()
    big_query_utils.create_partitioned_table(bq,
                                             _PROJECT_ID,
                                             _DATASET_ID,
                                             bq_table,
                                             _RESULTS_SCHEMA,
                                             _DESCRIPTION,
                                             partition_type=_PARTITION_TYPE,
                                             expiration_ms=_EXPIRATION_MS)

    bq_rows = []
    for shortname, results in six.iteritems(resultset):
        for result in results:
            test_results = {}
            _get_build_metadata(test_results)
            test_results['cpu_estimated'] = result.cpu_estimated
            test_results['cpu_measured'] = result.cpu_measured
            test_results['elapsed_time'] = '%.2f' % result.elapsed_time
            test_results['result'] = result.state
            test_results['return_code'] = result.returncode
            test_results['test_name'] = shortname
            test_results['timestamp'] = time.strftime('%Y-%m-%d %H:%M:%S')
            for field_name, field_value in six.iteritems(extra_fields):
                test_results[field_name] = field_value
            row = big_query_utils.make_row(str(uuid.uuid4()), test_results)
            bq_rows.append(row)
    _insert_rows_with_retries(bq, bq_table, bq_rows)


def upload_interop_results_to_bq(resultset, bq_table):
    """Upload interop test results to a BQ table.

    Args:
        resultset: dictionary generated by jobset.run
        bq_table: string name of table to create/upload results to in BQ
    """
    bq = big_query_utils.create_big_query()
    big_query_utils.create_partitioned_table(bq,
                                             _PROJECT_ID,
                                             _DATASET_ID,
                                             bq_table,
                                             _INTEROP_RESULTS_SCHEMA,
                                             _DESCRIPTION,
                                             partition_type=_PARTITION_TYPE,
                                             expiration_ms=_EXPIRATION_MS)

    bq_rows = []
    for shortname, results in six.iteritems(resultset):
        for result in results:
            test_results = {}
            _get_build_metadata(test_results)
            test_results['elapsed_time'] = '%.2f' % result.elapsed_time
            test_results['result'] = result.state
            test_results['test_name'] = shortname
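            # shortname is expected to look like
            # 'suite:client:server:test_case' (cf. the test_name column
            # description in _INTEROP_RESULTS_SCHEMA).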
            test_results['suite'] = shortname.split(':')[0]
            test_results['client'] = shortname.split(':')[1]
            test_results['server'] = shortname.split(':')[2]
            test_results['test_case'] = shortname.split(':')[3]
            test_results['timestamp'] = time.strftime('%Y-%m-%d %H:%M:%S')
            row = big_query_utils.make_row(str(uuid.uuid4()), test_results)
            bq_rows.append(row)
    _insert_rows_with_retries(bq, bq_table, bq_rows)
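

# Minimal usage sketch. FakeResult and the 'demo_results' table name are
# hypothetical stand-ins for the result objects produced by jobset.run; only
# the attributes read by upload_results_to_bq above are modeled, and running
# this requires credentials for the grpc-testing GCP project.
if __name__ == '__main__':
    import collections

    FakeResult = collections.namedtuple(
        'FakeResult',
        ['cpu_estimated', 'cpu_measured', 'elapsed_time', 'state',
         'returncode'])
    demo_resultset = {
        'sanity_check': [FakeResult(1.0, 0.8, 2.34, 'PASSED', 0)],
    }
    # extra_fields fills the schema columns not derived from the result
    # object (hypothetical values).
    upload_results_to_bq(
        demo_resultset, 'demo_results', {
            'language': 'python',
            'platform': 'linux',
            'config': 'opt',
            'compiler': 'default',
            'iomgr_platform': 'native',
        })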