from __future__ import print_function
from __future__ import absolute_import

import argparse
import calendar
import datetime
import os
import time
import uuid

from util import big_query_utils
from util import result_parser
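
# Uploads protobuf benchmark results (parsed from per-language result files)
# to BigQuery, tagging each row with CI metadata taken from the environment.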

_PROJECT_ID = 'grpc-testing'
_DATASET = 'protobuf_benchmark_result'
_TABLE = 'opensource_result_v2'
_NOW = "%d%02d%02d" % (datetime.datetime.now().year,
                       datetime.datetime.now().month,
                       datetime.datetime.now().day)

_INITIAL_TIME = calendar.timegm(time.gmtime())
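
# _NOW is today's date as YYYYMMDD; upload_result() appends it to _TABLE as a
# BigQuery partition decorator. _INITIAL_TIME timestamps every row uploaded
# by this run.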


def get_metadata():
    build_number = os.getenv('BUILD_NUMBER')
    build_url = os.getenv('BUILD_URL')
    job_name = os.getenv('JOB_NAME')
    git_commit = os.getenv('GIT_COMMIT')
    # ghprbActualCommit is the actual head commit of the pull request under
    # test (set by the Jenkins GitHub pull request builder plugin).
    git_actual_commit = os.getenv('ghprbActualCommit')

    utc_timestamp = str(calendar.timegm(time.gmtime()))
    metadata = {'created': utc_timestamp}

    # Record only the fields that are actually set in the environment.
    if build_number:
        metadata['buildNumber'] = build_number
    if build_url:
        metadata['buildUrl'] = build_url
    if job_name:
        metadata['jobName'] = job_name
    if git_commit:
        metadata['gitCommit'] = git_commit
    if git_actual_commit:
        metadata['gitActualCommit'] = git_actual_commit

    return metadata
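
# Under a Jenkins-style CI run the returned dict looks roughly like this
# (all values below are illustrative, not real):
#   {'created': '1546300800', 'buildNumber': '123',
#    'buildUrl': 'https://ci.example/job/123', 'jobName': 'protobuf-benchmark',
#    'gitCommit': '<sha>', 'gitActualCommit': '<pr-head-sha>'}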


def upload_result(result_list, metadata):
    for result in result_list:
        new_result = {}
        new_result["metric"] = "throughput"
        new_result["value"] = result["throughput"]
        new_result["unit"] = "MB/s"
        new_result["test"] = "protobuf_benchmark"
        new_result["product_name"] = "protobuf"
        # Flatten every key of the raw result into a single "|key:value|"
        # label string; labels_string[1:] drops the leading comma.
        labels_string = ""
        for key in result:
            labels_string += ",|%s:%s|" % (key, result[key])
        new_result["labels"] = labels_string[1:]
        new_result["timestamp"] = _INITIAL_TIME

        bq = big_query_utils.create_big_query()
        row = big_query_utils.make_row(str(uuid.uuid4()), new_result)
        # "_TABLE$YYYYMMDD" targets the table partition for the current date.
        if not big_query_utils.insert_rows(bq, _PROJECT_ID, _DATASET,
                                           _TABLE + "$" + _NOW, [row]):
            print('Error when uploading result', new_result)


if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("-cpp", "--cpp_input_file",
                        help="The C++ benchmark result file's name",
                        default="")
    parser.add_argument("-java", "--java_input_file",
                        help="The Java benchmark result file's name",
                        default="")
    parser.add_argument("-python", "--python_input_file",
                        help="The Python benchmark result file's name",
                        default="")
    parser.add_argument("-go", "--go_input_file",
                        help="The Go benchmark result file's name",
                        default="")
    parser.add_argument("-node", "--node_input_file",
                        help="The Node.js benchmark result file's name",
                        default="")
    parser.add_argument("-php", "--php_input_file",
                        help="The pure PHP benchmark result file's name",
                        default="")
    parser.add_argument("-php_c", "--php_c_input_file",
                        help="The PHP (with C extension) benchmark result "
                             "file's name",
                        default="")
    args = parser.parse_args()

    metadata = get_metadata()
    print("uploading results...")
    upload_result(result_parser.get_result_from_file(
        cpp_file=args.cpp_input_file,
        java_file=args.java_input_file,
        python_file=args.python_input_file,
        go_file=args.go_input_file,
        node_file=args.node_input_file,
        php_file=args.php_input_file,
        php_c_file=args.php_c_input_file,
    ), metadata)
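
# Example invocation (script and file names here are hypothetical):
#   python result_uploader.py -cpp=cpp_result.txt -java=java_result.json
# Arguments left at their empty-string default presumably signal that no
# result file exists for that language.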