from __future__ import print_function

import argparse
import calendar
import json
import os
import sys
import time
import uuid

sys.path.append(os.path.dirname(os.path.abspath(__file__)))
import massage_qps_stats

gcp_utils_dir = os.path.abspath(
    os.path.join(os.path.dirname(__file__), '../../gcp/utils'))
sys.path.append(gcp_utils_dir)
import big_query_utils

_PROJECT_ID = 'grpc-testing'


def _upload_netperf_latency_csv_to_bigquery(dataset_id, table_id, result_file):
    with open(result_file, 'r') as f:
        (col1, col2, col3) = f.read().split(',')
        latency50 = float(col1.strip()) * 1000
        latency90 = float(col2.strip()) * 1000
        latency99 = float(col3.strip()) * 1000

        scenario_result = {
            'scenario': {
                'name': 'netperf_tcp_rr'
            },
            'summary': {
                'latency50': latency50,
                'latency90': latency90,
                'latency99': latency99
            }
        }
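
    # Example (illustrative values): a result_file containing "0.5, 1.2, 3.4"
    # yields latency50=500.0, latency90=1200.0 and latency99=3400.0, since
    # each netperf column is scaled by 1000 before landing in 'summary'.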
    bq = big_query_utils.create_big_query()
    _create_results_table(bq, dataset_id, table_id)

    if not _insert_result(bq, dataset_id, table_id, scenario_result,
                          flatten=False):
        print('Error uploading result to bigquery.')
        sys.exit(1)


def _upload_scenario_result_to_bigquery(dataset_id, table_id, result_file,
                                        metadata_file, node_info_file,
                                        prometheus_query_results_file):
    with open(result_file, 'r') as f:
        scenario_result = json.loads(f.read())

    bq = big_query_utils.create_big_query()
    _create_results_table(bq, dataset_id, table_id)

    if not _insert_scenario_result(bq, dataset_id, table_id, scenario_result,
                                   metadata_file, node_info_file,
                                   prometheus_query_results_file):
        print('Error uploading result to bigquery.')
        sys.exit(1)


def _insert_result(bq, dataset_id, table_id, scenario_result, flatten=True):
    if flatten:
        _flatten_result_inplace(scenario_result)
    _populate_metadata_inplace(scenario_result)
    row = big_query_utils.make_row(str(uuid.uuid4()), scenario_result)
    return big_query_utils.insert_rows(bq, _PROJECT_ID, dataset_id, table_id,
                                       [row])


def _insert_scenario_result(bq,
                            dataset_id,
                            table_id,
                            scenario_result,
                            test_metadata_file,
                            node_info_file,
                            prometheus_query_results_file,
                            flatten=True):
    if flatten:
        _flatten_result_inplace(scenario_result)
    _populate_metadata_from_file(scenario_result, test_metadata_file)
    _populate_node_metadata_from_file(scenario_result, node_info_file)
    _populate_prometheus_query_results_from_file(scenario_result,
                                                 prometheus_query_results_file)
    row = big_query_utils.make_row(str(uuid.uuid4()), scenario_result)
    return big_query_utils.insert_rows(bq, _PROJECT_ID, dataset_id, table_id,
                                       [row])


def _create_results_table(bq, dataset_id, table_id):
    with open(os.path.dirname(__file__) + '/scenario_result_schema.json',
              'r') as f:
        table_schema = json.loads(f.read())
    desc = 'Results of performance benchmarks.'
    return big_query_utils.create_table2(bq, _PROJECT_ID, dataset_id, table_id,
                                         table_schema, desc)
120 """Bigquery is not really great for handling deeply nested data
121 and repeated fields. To maintain values of some fields while keeping
122 the schema relatively simple, we artificially leave some of the fields
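    # For example (illustrative): a nested config such as
    #   {'rpcType': 'UNARY'} under 'clientConfig'
    # is stored in the BigQuery row as the string '{"rpcType": "UNARY"}'.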
    scenario_result['scenario']['clientConfig'] = json.dumps(
        scenario_result['scenario']['clientConfig'])
    scenario_result['scenario']['serverConfig'] = json.dumps(
        scenario_result['scenario']['serverConfig'])
    scenario_result['latencies'] = json.dumps(scenario_result['latencies'])
    scenario_result['serverCpuStats'] = []
    for stats in scenario_result['serverStats']:
        scenario_result['serverCpuStats'].append(dict())
        scenario_result['serverCpuStats'][-1]['totalCpuTime'] = stats.pop(
            'totalCpuTime', None)
        scenario_result['serverCpuStats'][-1]['idleCpuTime'] = stats.pop(
            'idleCpuTime', None)
    for stats in scenario_result['clientStats']:
        stats['latencies'] = json.dumps(stats['latencies'])
        stats.pop('requestResults', None)
    scenario_result['serverCores'] = json.dumps(scenario_result['serverCores'])
    scenario_result['clientSuccess'] = json.dumps(
        scenario_result['clientSuccess'])
    scenario_result['serverSuccess'] = json.dumps(
        scenario_result['serverSuccess'])
    scenario_result['requestResults'] = json.dumps(
        scenario_result.get('requestResults', []))
    scenario_result['serverCpuUsage'] = scenario_result['summary'].pop(
        'serverCpuUsage', None)
    scenario_result['summary'].pop('successfulRequestsPerSecond', None)
    scenario_result['summary'].pop('failedRequestsPerSecond', None)
    massage_qps_stats.massage_qps_stats(scenario_result)
155 """Populates metadata based on environment variables set by Jenkins."""
    build_number = os.getenv('KOKORO_BUILD_NUMBER')
    build_url = 'https://source.cloud.google.com/results/invocations/%s' % os.getenv(
        'KOKORO_BUILD_ID')
    job_name = os.getenv('KOKORO_JOB_NAME')
    git_commit = os.getenv('KOKORO_GIT_COMMIT')
    # The "actual commit" is the head of the pull request being tested.
    git_actual_commit = os.getenv('ghprbActualCommit')

    utc_timestamp = str(calendar.timegm(time.gmtime()))
    metadata = {'created': utc_timestamp}

    if build_number:
        metadata['buildNumber'] = build_number
    if build_url:
        metadata['buildUrl'] = build_url
    if job_name:
        metadata['jobName'] = job_name
    if git_commit:
        metadata['gitCommit'] = git_commit
    if git_actual_commit:
        metadata['gitActualCommit'] = git_actual_commit

    scenario_result['metadata'] = metadata


def _populate_metadata_from_file(scenario_result, test_metadata_file):
    utc_timestamp = str(calendar.timegm(time.gmtime()))
    metadata = {'created': utc_timestamp}

    # Maps 'ci_*' pod annotations to the BigQuery metadata keys used above.
    _annotation_to_bq_metadata_key_map = {
        'ci_' + key: key for key in (
            'buildNumber',
            'buildUrl',
            'jobName',
            'gitCommit',
            'gitActualCommit',
        )
    }
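    # The comprehension expands to, e.g.:
    #   {'ci_buildNumber': 'buildNumber', 'ci_gitCommit': 'gitCommit', ...}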

    if os.access(test_metadata_file, os.R_OK):
        with open(test_metadata_file, 'r') as f:
            test_metadata = json.loads(f.read())

        # Strip the verbose 'managedFields' section from the pod metadata.
        if 'managedFields' in test_metadata:
            del test_metadata['managedFields']

        annotations = test_metadata.get('annotations', {})

        # When 'kubectl apply' is used, kubectl keeps a full copy of the
        # applied YAML in this annotation; drop it to keep the row small.
        if 'kubectl.kubernetes.io/last-applied-configuration' in annotations:
            del annotations['kubectl.kubernetes.io/last-applied-configuration']

        # Dump the remaining test metadata as a JSON string.
        scenario_result['testMetadata'] = json.dumps(test_metadata)
        for key, value in _annotation_to_bq_metadata_key_map.items():
            if key in annotations:
                metadata[value] = annotations[key]

    scenario_result['metadata'] = metadata


def _populate_node_metadata_from_file(scenario_result, node_info_file):
    node_metadata = {'driver': {}, 'servers': [], 'clients': []}
    _node_info_to_bq_node_metadata_key_map = {
        'Name': 'name',
        'PodIP': 'podIP',
        'NodeName': 'nodeName',
    }
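    # Expected node_info.json shape (illustrative):
    #   {"Driver": {"Name": ..., "PodIP": ..., "NodeName": ...},
    #    "Clients": [{...}, ...], "Servers": [{...}, ...]}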

    if os.access(node_info_file, os.R_OK):
        with open(node_info_file, 'r') as f:
            file_metadata = json.loads(f.read())
        for key, value in _node_info_to_bq_node_metadata_key_map.items():
            node_metadata['driver'][value] = file_metadata['Driver'][key]
        for clientNodeInfo in file_metadata['Clients']:
            node_metadata['clients'].append({
                value: clientNodeInfo[key]
                for key, value in _node_info_to_bq_node_metadata_key_map.items()
            })
        for serverNodeInfo in file_metadata['Servers']:
            node_metadata['servers'].append({
                value: serverNodeInfo[key]
                for key, value in _node_info_to_bq_node_metadata_key_map.items()
            })

    scenario_result['nodeMetadata'] = node_metadata


def _populate_prometheus_query_results_from_file(scenario_result,
                                                 prometheus_query_result_file):
    """Populates the scenario result with the Prometheus query results."""
    if os.access(prometheus_query_result_file, os.R_OK):
        with open(prometheus_query_result_file, 'r', encoding='utf8') as f:
            file_query_results = json.loads(f.read())

            scenario_result['testDurationSeconds'] = file_query_results[
                'testDurationSeconds']
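
            # Assumed query-result layout (illustrative): 'clients' and
            # 'servers' map pod names to per-container dicts, each carrying
            # 'cpuSeconds' and 'memoryMean' entries.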
            clientsPrometheusData = []
            if 'clients' in file_query_results:
                for client_name, client_data in file_query_results[
                        'clients'].items():
                    clientPrometheusData = {'name': client_name}
                    containersPrometheusData = []
                    for container_name, container_data in client_data.items():
                        containerPrometheusData = {
                            'name': container_name,
                            'cpuSeconds': container_data['cpuSeconds'],
                            'memoryMean': container_data['memoryMean'],
                        }
                        containersPrometheusData.append(
                            containerPrometheusData)
                    clientPrometheusData[
                        'containers'] = containersPrometheusData
                    clientsPrometheusData.append(clientPrometheusData)
                scenario_result[
                    'clientsPrometheusData'] = clientsPrometheusData

            serversPrometheusData = []
            if 'servers' in file_query_results:
                for server_name, server_data in file_query_results[
                        'servers'].items():
                    serverPrometheusData = {'name': server_name}
                    containersPrometheusData = []
                    for container_name, container_data in server_data.items():
                        containerPrometheusData = {
                            'name': container_name,
                            'cpuSeconds': container_data['cpuSeconds'],
                            'memoryMean': container_data['memoryMean'],
                        }
                        containersPrometheusData.append(
                            containerPrometheusData)
                    serverPrometheusData[
                        'containers'] = containersPrometheusData
                    serversPrometheusData.append(serverPrometheusData)
                scenario_result[
                    'serversPrometheusData'] = serversPrometheusData


argp = argparse.ArgumentParser(description='Upload result to big query.')
argp.add_argument('--bq_result_table',
                  required=True,
                  default=None,
                  type=str,
                  help='Bigquery "dataset.table" to upload results to.')
argp.add_argument('--file_to_upload',
                  default='scenario_result.json',
                  type=str,
                  help='Report file to upload.')
argp.add_argument('--metadata_file_to_upload',
                  default='metadata.json',
                  type=str,
                  help='Metadata file to upload.')
argp.add_argument('--node_info_file_to_upload',
                  default='node_info.json',
                  type=str,
                  help='Node information file to upload.')
argp.add_argument('--prometheus_query_results_to_upload',
                  default='prometheus_query_result.json',
                  type=str,
                  help='Prometheus query result file to upload.')
argp.add_argument('--file_format',
                  choices=['scenario_result', 'netperf_latency_csv'],
                  default='scenario_result',
                  help='Format of the file to upload.')

args = argp.parse_args()

dataset_id, table_id = args.bq_result_table.split('.', 2)

if args.file_format == 'netperf_latency_csv':
    _upload_netperf_latency_csv_to_bigquery(dataset_id, table_id,
                                            args.file_to_upload)
else:
    _upload_scenario_result_to_bigquery(dataset_id, table_id,
                                        args.file_to_upload,
                                        args.metadata_file_to_upload,
                                        args.node_info_file_to_upload,
                                        args.prometheus_query_results_to_upload)
print('Successfully uploaded %s, %s, %s and %s to BigQuery.\n' %
      (args.file_to_upload, args.metadata_file_to_upload,
       args.node_info_file_to_upload,
       args.prometheus_query_results_to_upload))
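
# Example invocation (script and table names are illustrative):
#   python bq_upload_result.py --bq_result_table=performance_results.scenarios \
#       --file_to_upload=scenario_result.json --file_format=scenario_result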