15 """Uploads RBE results to BigQuery"""
27 gcp_utils_dir = os.path.abspath(
28 os.path.join(os.path.dirname(__file__),
'../../gcp/utils'))
29 sys.path.append(gcp_utils_dir)
30 import big_query_utils
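# BigQuery destination for the uploaded results: a day-partitioned table whose
# rows expire after _EXPIRATION_MS (one year).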
_DATASET_ID = 'jenkins_test_results'
_DESCRIPTION = 'Test results from master RBE builds on Kokoro'
# 365 days in milliseconds
_EXPIRATION_MS = 365 * 24 * 60 * 60 * 1000
_PARTITION_TYPE = 'DAY'
_PROJECT_ID = 'grpc-testing'
_RESULTS_SCHEMA = [
    ('job_name', 'STRING', 'Name of Kokoro job'),
    ('build_id', 'INTEGER', 'Build ID of Kokoro job'),
    ('build_url', 'STRING', 'URL of Kokoro build'),
    ('test_target', 'STRING', 'Bazel target path'),
    ('test_class_name', 'STRING', 'Name of test class'),
    ('test_case', 'STRING', 'Name of test case'),
    ('result', 'STRING', 'Test or build result'),
    ('timestamp', 'TIMESTAMP', 'Timestamp of test run'),
    ('duration', 'FLOAT', 'Duration of the test run'),
]
_TABLE_ID = 'rbe_test_results'
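# The helpers below read Kokoro-provided files: the ResultStore API key from
# KOKORO_GFILE_DIR and the Bazel invocation ID written by the RBE setup script
# under KOKORO_ARTIFACTS_DIR.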
53 """Returns string with API key to access ResultStore.
54 Intended to be used in Kokoro environment."""
55 api_key_directory = os.getenv(
'KOKORO_GFILE_DIR')
56 api_key_file = os.path.join(api_key_directory,
'resultstore_api_key')
57 assert os.path.isfile(api_key_file),
'Must add --api_key arg if not on ' \
58 'Kokoro or Kokoro environment is not set up properly.'
59 with open(api_key_file,
'r')
as f:
60 return f.read().replace(
'\n',
'')
64 """Returns String of Bazel invocation ID. Intended to be used in
65 Kokoro environment."""
66 bazel_id_directory = os.getenv(
'KOKORO_ARTIFACTS_DIR')
67 bazel_id_file = os.path.join(bazel_id_directory,
'bazel_invocation_ids')
68 assert os.path.isfile(bazel_id_file),
'bazel_invocation_ids file, written ' \
69 'by RBE initialization script, expected but not found.'
70 with open(bazel_id_file,
'r')
as f:
71 return f.read().replace(
'\n',
'')
75 """Parse test duration string in '123.567s' format"""
77 if duration_str.endswith(
's'):
78 duration_str = duration_str[:-1]
79 return float(duration_str)
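# For example, _parse_test_duration('12.345s') yields 12.345; strings that do
# not parse return None so a malformed duration does not abort the upload.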
85 """Upload test results to a BQ table.
88 rows: A list of dictionaries containing data for each row to insert
97 partition_type=_PARTITION_TYPE,
98 expiration_ms=_EXPIRATION_MS)
101 for attempt
in range(max_retries):
106 if attempt < max_retries - 1:
107 print(
'Error uploading result to bigquery, will retry.')
110 'Error uploading result to bigquery, all attempts failed.')
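# ResultStore v2 exposes per-action results for an invocation; only the fields
# needed to build BigQuery rows (id, status attributes, timing, test_action)
# are requested below.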
115 """Returns dictionary of test results by querying ResultStore API.
117 api_key: String of ResultStore API key
118 invocation_id: String of ResultStore invocation ID to results from
126 req = urllib.request.Request(
128 'https://resultstore.googleapis.com/v2/invocations/%s/targets/-/configuredTargets/-/actions?key=%s&pageToken=%s&fields=next_page_token,actions.id,actions.status_attributes,actions.timing,actions.test_action'
129 % (invocation_id, api_key, page_token),
130 headers={
'Content-Type':
'application/json'})
132 if os.getenv(
"PYTHONHTTPSVERIFY") ==
"0":
133 ctx = ssl.create_default_context()
134 ctx.check_hostname =
False
135 ctx.verify_mode = ssl.CERT_NONE
136 ctx_dict = {
"context": ctx}
137 raw_resp = urllib.request.urlopen(req, **ctx_dict).
read()
138 decoded_resp = raw_resp
if isinstance(
139 raw_resp, str)
else raw_resp.decode(
'utf-8',
'ignore')
140 results = json.loads(decoded_resp)
141 all_actions.extend(results[
'actions'])
142 if 'nextPageToken' not in results:
144 page_token = results[
'nextPageToken']
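# When run on Kokoro, the ResultStore API key and Bazel invocation ID are
# discovered from the environment; the flags below exist mainly for running
# or debugging this script outside of Kokoro.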
if __name__ == "__main__":
    argp = argparse.ArgumentParser(
        description=
        'Fetches results for given RBE invocation and uploads them to BigQuery table.'
    )
    argp.add_argument('--api_key',
                      default='',
                      type=str,
                      help='The API key to read from ResultStore API')
    argp.add_argument('--invocation_id',
                      default='',
                      type=str,
                      help='UUID of bazel invocation to fetch.')
    argp.add_argument('--bq_dump_file',
                      default=None,
                      type=str,
                      help='Dump JSON data to file just before uploading')
    argp.add_argument('--resultstore_dump_file',
                      default=None,
                      type=str,
                      help='Dump JSON data as received from ResultStore API')
    argp.add_argument('--skip_upload',
                      default=False,
                      action='store_const',
                      const=True,
                      help='Skip uploading to bigquery')
    args = argp.parse_args()

    api_key = args.api_key or _get_api_key()
    invocation_id = args.invocation_id or _get_invocation_id()
    resultstore_actions = _get_resultstore_data(api_key, invocation_id)
    if args.resultstore_dump_file:
        with open(args.resultstore_dump_file, 'w') as f:
            json.dump(resultstore_actions, f, indent=4, sort_keys=True)
        print('Dumped resultstore data to file %s' % args.resultstore_dump_file)
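    # Each entry in resultstore_actions follows the ResultStore v2 Action
    # schema (google.devtools.resultstore.v2.Action in the googleapis protos).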
    bq_rows = []
    for index, action in enumerate(resultstore_actions):
        # Filter out non-test data, such as build results.
        if 'testAction' not in action:
            continue
        # Some test results contain the fileProcessingErrors field, which
        # indicates an issue with parsing results of individual test cases.
        if 'fileProcessingErrors' in action:
            test_cases = [{
                'testCase': {
                    'caseName': str(action['id']['actionId']),
                }
            }]
        # Test timeouts have a different dictionary structure than pass/fail
        # results, so a synthetic test case is built for them.
        elif action['statusAttributes']['status'] == 'TIMED_OUT':
            test_cases = [{
                'testCase': {
                    'caseName': str(action['id']['actionId']),
                    'timedOut': True
                }
            }]
        # When RBE believes its infrastructure is failing, it aborts and marks
        # running tests as UNKNOWN.
        elif action['statusAttributes']['status'] == 'UNKNOWN':
            test_cases = [{
                'testCase': {
                    'caseName': str(action['id']['actionId']),
                    'unknown': True
                }
            }]
            # Take the timestamp from the previous action, which should be a
            # close approximation.
            action['timing'] = {
                'startTime':
                    resultstore_actions[index - 1]['timing']['startTime']
            }
        elif 'testSuite' not in action['testAction']:
            continue
        elif 'tests' not in action['testAction']['testSuite']:
            continue
        else:
            test_cases = []
            for tests_item in action['testAction']['testSuite']['tests']:
                test_cases += tests_item['testSuite']['tests']
        for test_case in test_cases:
            if any(s in test_case['testCase'] for s in ['errors', 'failures']):
                result = 'FAILED'
            elif 'timedOut' in test_case['testCase']:
                result = 'TIMEOUT'
            elif 'unknown' in test_case['testCase']:
                result = 'UNKNOWN'
            else:
                result = 'PASSED'
            try:
                bq_rows.append({
                    'insertId': str(uuid.uuid4()),
                    'json': {
                        'job_name': os.getenv('KOKORO_JOB_NAME'),
                        'build_id': os.getenv('KOKORO_BUILD_NUMBER'),
                        'build_url':
                            'https://source.cloud.google.com/results/invocations/%s'
                            % invocation_id,
                        'test_target': action['id']['targetId'],
                        'test_class_name': test_case['testCase'].get('className', ''),
                        'test_case': test_case['testCase']['caseName'],
                        'result': result,
                        'timestamp': action['timing']['startTime'],
                        'duration': _parse_test_duration(action['timing']['duration']),
                    }
                })
            except Exception as e:
                print('Failed to parse test result. Error: %s' % str(e))
                print(json.dumps(test_case, indent=4))
                # Insert a placeholder row so the unparseable result is still
                # recorded against the build.
                bq_rows.append({
                    'insertId': str(uuid.uuid4()),
                    'json': {
                        'job_name': os.getenv('KOKORO_JOB_NAME'),
                        'build_id': os.getenv('KOKORO_BUILD_NUMBER'),
                        'build_url':
                            'https://source.cloud.google.com/results/invocations/%s'
                            % invocation_id,
                        'test_target': action['id']['targetId'],
                        'test_class_name': 'N/A',
                        'test_case': 'N/A',
                        'result': 'UNPARSEABLE',
                        'timestamp': 'N/A',
                    }
                })
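    # At this point bq_rows holds one insert payload per test case, with fields
    # matching the _RESULTS_SCHEMA columns defined above.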
    if args.bq_dump_file:
        with open(args.bq_dump_file, 'w') as f:
            json.dump(bq_rows, f, indent=4, sort_keys=True)
        print('Dumped BQ data to file %s' % args.bq_dump_file)
    if not args.skip_upload:
        # BigQuery inserts can fail for large payloads, so upload in batches.
        MAX_ROWS = 1000
        for i in range(0, len(bq_rows), MAX_ROWS):
            _upload_results_to_bq(bq_rows[i:i + MAX_ROWS])
    else:
        print('Skipped upload to bigquery.')