upload_test_results.py
#!/usr/bin/env python3
# Copyright 2017 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Helper to upload Jenkins test results to BQ"""

from __future__ import print_function

import os
import sys
import time
import uuid

import six

gcp_utils_dir = os.path.abspath(
    os.path.join(os.path.dirname(__file__), '../../gcp/utils'))
sys.path.append(gcp_utils_dir)
import big_query_utils

_DATASET_ID = 'jenkins_test_results'
_DESCRIPTION = 'Test results from master job run on Jenkins'
# 365 days in milliseconds
_EXPIRATION_MS = 365 * 24 * 60 * 60 * 1000
_PARTITION_TYPE = 'DAY'
_PROJECT_ID = 'grpc-testing'
_RESULTS_SCHEMA = [
    ('job_name', 'STRING', 'Name of Jenkins job'),
    ('build_id', 'INTEGER', 'Build ID of Jenkins job'),
    ('build_url', 'STRING', 'URL of Jenkins job'),
    ('test_name', 'STRING', 'Individual test name'),
    ('language', 'STRING', 'Language of test'),
    ('platform', 'STRING', 'Platform used for test'),
    ('config', 'STRING', 'Config used for test'),
    ('compiler', 'STRING', 'Compiler used for test'),
    ('iomgr_platform', 'STRING', 'Iomgr used for test'),
    ('result', 'STRING', 'Test result: PASSED, TIMEOUT, FAILED, or SKIPPED'),
    ('timestamp', 'TIMESTAMP', 'Timestamp of test run'),
    ('elapsed_time', 'FLOAT', 'How long test took to run'),
    ('cpu_estimated', 'FLOAT', 'Estimated CPU usage of test'),
    ('cpu_measured', 'FLOAT', 'Actual CPU usage of test'),
    ('return_code', 'INTEGER', 'Exit code of test'),
]
_INTEROP_RESULTS_SCHEMA = [
    ('job_name', 'STRING', 'Name of Jenkins/Kokoro job'),
    ('build_id', 'INTEGER', 'Build ID of Jenkins/Kokoro job'),
    ('build_url', 'STRING', 'URL of Jenkins/Kokoro job'),
    ('test_name', 'STRING',
     'Unique test name combining client, server, and test_name'),
    ('suite', 'STRING',
     'Test suite: cloud_to_cloud, cloud_to_prod, or cloud_to_prod_auth'),
    ('client', 'STRING', 'Client language'),
    ('server', 'STRING', 'Server host name'),
    ('test_case', 'STRING', 'Name of test case'),
    ('result', 'STRING', 'Test result: PASSED, TIMEOUT, FAILED, or SKIPPED'),
    ('timestamp', 'TIMESTAMP', 'Timestamp of test run'),
    ('elapsed_time', 'FLOAT', 'How long test took to run'),
]
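
# Note: each entry in the two schema lists above is a (field_name, field_type,
# description) tuple; big_query_utils expands these into the BigQuery table
# schema when the table is created.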


def _get_build_metadata(test_results):
    """Add Kokoro build metadata to test_results based on environment
    variables set by Kokoro.
    """
    build_id = os.getenv('KOKORO_BUILD_NUMBER')
    build_url = 'https://source.cloud.google.com/results/invocations/%s' % os.getenv(
        'KOKORO_BUILD_ID')
    job_name = os.getenv('KOKORO_JOB_NAME')

    if build_id:
        test_results['build_id'] = build_id
    if build_url:
        test_results['build_url'] = build_url
    if job_name:
        test_results['job_name'] = job_name
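
# Example (hypothetical values): with KOKORO_BUILD_NUMBER=123,
# KOKORO_BUILD_ID=ab12-cd34 and KOKORO_JOB_NAME=grpc/core/master set by
# Kokoro, a call like
#
#   metadata = {}
#   _get_build_metadata(metadata)
#
# leaves metadata == {
#     'build_id': '123',
#     'build_url': 'https://source.cloud.google.com/results/invocations/ab12-cd34',
#     'job_name': 'grpc/core/master',
# }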


def _insert_rows_with_retries(bq, bq_table, bq_rows):
    """Insert rows to bq table. Retry on error."""
    # BigQuery sometimes fails with large uploads, so batch 1,000 rows at a time.
    for i in range((len(bq_rows) // 1000) + 1):
        max_retries = 3
        for attempt in range(max_retries):
            if big_query_utils.insert_rows(bq, _PROJECT_ID, _DATASET_ID,
                                           bq_table,
                                           bq_rows[i * 1000:(i + 1) * 1000]):
                break
            else:
                if attempt < max_retries - 1:
                    print('Error uploading result to bigquery, will retry.')
                else:
                    print(
                        'Error uploading result to bigquery, all attempts failed.'
                    )
                    sys.exit(1)
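
# Batching sketch: for 2,500 rows the loop above makes three insert_rows
# calls with slices [0:1000], [1000:2000] and [2000:2500]; each slice is
# retried up to three times before the script exits with status 1.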


def upload_results_to_bq(resultset, bq_table, extra_fields):
    """Upload test results to a BQ table.

    Args:
      resultset: dictionary generated by jobset.run
      bq_table: string name of table to create/upload results to in BQ
      extra_fields: dict with extra values that will be uploaded along with the results
    """
    bq = big_query_utils.create_big_query()
    big_query_utils.create_partitioned_table(bq,
                                             _PROJECT_ID,
                                             _DATASET_ID,
                                             bq_table,
                                             _RESULTS_SCHEMA,
                                             _DESCRIPTION,
                                             partition_type=_PARTITION_TYPE,
                                             expiration_ms=_EXPIRATION_MS)

    bq_rows = []
    for shortname, results in six.iteritems(resultset):
        for result in results:
            test_results = {}
            _get_build_metadata(test_results)
            test_results['cpu_estimated'] = result.cpu_estimated
            test_results['cpu_measured'] = result.cpu_measured
            test_results['elapsed_time'] = '%.2f' % result.elapsed_time
            test_results['result'] = result.state
            test_results['return_code'] = result.returncode
            test_results['test_name'] = shortname
            test_results['timestamp'] = time.strftime('%Y-%m-%d %H:%M:%S')
            for field_name, field_value in six.iteritems(extra_fields):
                test_results[field_name] = field_value
            row = big_query_utils.make_row(str(uuid.uuid4()), test_results)
            bq_rows.append(row)
    _insert_rows_with_retries(bq, bq_table, bq_rows)
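
# Usage sketch (table name and extra fields are illustrative): after a
# jobset.run invocation produces `resultset`, something like
#
#   upload_results_to_bq(resultset, 'aggregate_results',
#                        {'language': 'python', 'platform': 'linux'})
#
# creates (or reuses) the day-partitioned table and appends one row per
# individual test result.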


def upload_interop_results_to_bq(resultset, bq_table):
    """Upload interop test results to a BQ table.

    Args:
      resultset: dictionary generated by jobset.run
      bq_table: string name of table to create/upload results to in BQ
    """
    bq = big_query_utils.create_big_query()
    big_query_utils.create_partitioned_table(bq,
                                             _PROJECT_ID,
                                             _DATASET_ID,
                                             bq_table,
                                             _INTEROP_RESULTS_SCHEMA,
                                             _DESCRIPTION,
                                             partition_type=_PARTITION_TYPE,
                                             expiration_ms=_EXPIRATION_MS)

    bq_rows = []
    for shortname, results in six.iteritems(resultset):
        for result in results:
            test_results = {}
            _get_build_metadata(test_results)
            test_results['elapsed_time'] = '%.2f' % result.elapsed_time
            test_results['result'] = result.state
            test_results['test_name'] = shortname
            test_results['suite'] = shortname.split(':')[0]
            test_results['client'] = shortname.split(':')[1]
            test_results['server'] = shortname.split(':')[2]
            test_results['test_case'] = shortname.split(':')[3]
            test_results['timestamp'] = time.strftime('%Y-%m-%d %H:%M:%S')
            row = big_query_utils.make_row(str(uuid.uuid4()), test_results)
            bq_rows.append(row)
    _insert_rows_with_retries(bq, bq_table, bq_rows)
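
# The split(':') calls above assume interop shortnames of the form
# 'suite:client:server:test_case'; a hypothetical shortname such as
# 'cloud_to_prod:python:default:large_unary' yields suite='cloud_to_prod',
# client='python', server='default', test_case='large_unary'.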