#ifndef BENCHMARK_OS_WINDOWS
#include <sys/resource.h>
#endif

#include <condition_variable>
50 "Print a list of benchmarks. This option overrides all other "
54 "A regular expression that specifies the set of benchmarks "
55 "to execute. If this flag is empty, no benchmarks are run. "
56 "If this flag is the string \"all\", all benchmarks linked "
57 "into the process are run.");
60 "Minimum number of seconds we should run benchmark before "
61 "results are considered significant. For cpu-time based "
62 "tests, this is the lower bound on the total cpu time "
63 "used by all threads that make up the test. For real-time "
64 "based tests, this is the lower bound on the elapsed time "
65 "of the benchmark execution, regardless of number of "
69 "The number of runs of each benchmark. If greater than 1, the "
70 "mean and standard deviation of the runs will be reported.");
DEFINE_bool(benchmark_report_aggregates_only, false,
            "Report the result of each individual benchmark repetition. When "
            "'true' is specified, only the mean, standard deviation, and "
            "other statistics are reported for repeated benchmarks.");
78 "The format to use for console output. Valid values are "
79 "'console', 'json', or 'csv'.");
82 "The format to use for file output. Valid values are "
83 "'console', 'json', or 'csv'.");
85 DEFINE_string(benchmark_out,
"",
"The file to write additonal output to");
88 "Whether to use colors in the output. Valid values: "
89 "'true'/'yes'/1, 'false'/'no'/0, and 'auto'. 'auto' means to use "
90 "colors if the output is being sent to a terminal and the TERM "
91 "environment variable is set to a terminal type that supports "
95 "Whether to use tabular format when printing user counters to "
96 "the console. Valid values: 'true'/'yes'/1, 'false'/'no'/0."
97 "Defaults to false.");
static const size_t kMaxIterations = 1000000000;
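
  // Condition-variable wait used by the thread manager: it blocks until
  // alive_threads_ drops to zero, i.e. every benchmark thread has finished.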
    end_condition_.wait(lock.native_handle(),
                        [this]() { return alive_threads_ == 0; });
    return real_time_used_;
    return cpu_time_used_;
    return manual_time_used_;
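
  // Per-thread timer state: running_ marks an in-progress timing slice, the
  // *_used_ fields accumulate completed slices, and manual_time_used_ collects
  // times reported through State::SetIterationTime().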
  bool running_ = false;
  double start_real_time_ = 0;
  double start_cpu_time_ = 0;

  double real_time_used_ = 0;
  double cpu_time_used_ = 0;
  double manual_time_used_ = 0;
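
// From CreateRunReport(): build a BenchmarkReporter::Run out of the results
// aggregated across threads. Iterations are totalled over all threads and
// throughput is derived from the elapsed seconds chosen for this run.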
  report.iterations = static_cast<int64_t>(iters) * b.threads;
  double bytes_per_second = 0;
  if (results.bytes_processed > 0 && seconds > 0.0) {
    bytes_per_second = (results.bytes_processed / seconds);
  }
  double items_per_second = 0;
  if (results.items_processed > 0 && seconds > 0.0) {
    items_per_second = (results.items_processed / seconds);
  }
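
  // If the benchmark timed itself via State::SetIterationTime(), report the
  // manually accumulated time as the run's real time; otherwise use the
  // wall-clock time measured by the harness.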
  if (b.use_manual_time) {
    report.real_accumulated_time = results.manual_time_used;
  } else {
    report.real_accumulated_time = results.real_time_used;
  }
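
// RunInThread(): body executed by every benchmark thread. It drives one State
// for `iters` iterations, then folds the thread's timings and counters into
// the shared ThreadManager results.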
void RunInThread(const benchmark::internal::Benchmark::Instance* b,
                 size_t iters, int thread_id,
                 internal::ThreadManager* manager) {
  internal::ThreadTimer timer;
  State st(iters, b->arg, thread_id, b->threads, &timer, manager);
  b->benchmark->Run(st);
  CHECK(st.iterations() == st.max_iterations)
      << "Benchmark returned before State::KeepRunning() returned false!";
  results.bytes_processed += st.bytes_processed();
  results.items_processed += st.items_processed();
  results.complexity_n += st.complexity_length_n();
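
// RunBenchmark(): run a single benchmark instance. Starting from a small
// iteration count, it repeatedly spawns b.threads threads, measures the run,
// and grows the iteration count until the measurement is long enough to
// report.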
std::vector<BenchmarkReporter::Run> RunBenchmark(
    const benchmark::internal::Benchmark::Instance& b,
    std::vector<BenchmarkReporter::Run>* complexity_reports) {
  std::vector<BenchmarkReporter::Run> reports;

  const bool has_explicit_iteration_count = b.iterations != 0;
  size_t iters = has_explicit_iteration_count ? b.iterations : 1;
  std::unique_ptr<internal::ThreadManager> manager;
  std::vector<std::thread> pool(b.threads - 1);
  const int repeats =
      b.repetitions != 0 ? b.repetitions : FLAGS_benchmark_repetitions;
  const bool report_aggregates_only =
      repeats != 1 &&
      (b.report_mode == internal::RM_Unspecified
           ? FLAGS_benchmark_report_aggregates_only
           : b.report_mode == internal::RM_ReportAggregatesOnly);
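
  // Each repetition retries the benchmark with a growing iteration count until
  // the run is long enough to report (or an iteration or error limit is hit).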
  for (int repetition_num = 0; repetition_num < repeats; repetition_num++) {
    VLOG(2) << "Running " << b.name << " for " << iters << "\n";
    manager.reset(new internal::ThreadManager(b.threads));
    for (std::size_t ti = 0; ti < pool.size(); ++ti) {
      pool[ti] = std::thread(&RunInThread, &b, iters,
                             static_cast<int>(ti + 1), manager.get());
    }
    RunInThread(&b, iters, 0, manager.get());
    manager->WaitForAllThreads();
    for (std::thread& thread : pool) thread.join();
    internal::ThreadManager::Result results;
    {
      MutexLock l(manager->GetBenchmarkMutex());
      results = manager->results;
    }
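
    // Real and manual time were accumulated once per thread; divide by the
    // thread count so they reflect a single wall-clock measurement.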
    results.real_time_used /= b.threads;
    results.manual_time_used /= b.threads;

    VLOG(2) << "Ran in " << results.cpu_time_used << "/"
            << results.real_time_used << "\n";
    double seconds = results.cpu_time_used;
    if (b.use_manual_time) {
      seconds = results.manual_time_used;
    } else if (b.use_real_time) {
      seconds = results.real_time_used;
    }
    const double min_time =
        !IsZero(b.min_time) ? b.min_time : FLAGS_benchmark_min_time;
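
    // Report this run if it was a repeat, used an explicit iteration count,
    // hit the iteration cap, ran at least min_time, or burned far more real
    // time than requested (benchmarks with manual timers are exempt from the
    // last check).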
    const bool should_report = repetition_num > 0
        || has_explicit_iteration_count
        || iters >= kMaxIterations
        || seconds >= min_time
        || ((results.real_time_used >= 5 * min_time) && !b.use_manual_time);
    BenchmarkReporter::Run report =
        CreateRunReport(b, results, iters, seconds);
    if (!report.error_occurred && b.complexity != oNone)
      complexity_reports->push_back(report);
    reports.push_back(report);
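
    // Otherwise grow the iteration count so the next attempt lands near
    // min_time (the 1.4 factor overshoots slightly); when the last run was too
    // short to extrapolate from, cap the growth at 10x.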
    double multiplier = min_time * 1.4 / std::max(seconds, 1e-9);
    bool is_significant = (seconds / min_time) > 0.1;
    multiplier = is_significant ? multiplier : std::min(10.0, multiplier);
    if (multiplier <= 1.0) multiplier = 2.0;
    double next_iters = std::max(multiplier * iters, iters + 1.0);
    if (next_iters > kMaxIterations) {
      next_iters = kMaxIterations;
    }
    VLOG(3) << "Next iters: " << next_iters << ", " << multiplier << "\n";
    iters = static_cast<int>(next_iters + 0.5);
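
  // After all repetitions, fold in aggregate statistics (mean/stddev) and, for
  // the last instance of a complexity family, the Big-O fit computed from the
  // collected complexity reports.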
  auto stat_reports = ComputeStats(reports);
  if ((b.complexity != oNone) && b.last_benchmark_instance) {
    auto additional_run_stats = ComputeBigO(*complexity_reports);
    stat_reports.insert(stat_reports.end(), additional_run_stats.begin(),
                        additional_run_stats.end());
    complexity_reports->clear();
  }

  if (report_aggregates_only) reports.clear();
  reports.insert(reports.end(), stat_reports.begin(), stat_reports.end());
State::State(size_t max_iters, const std::vector<int>& ranges, int thread_i,
             int n_threads, internal::ThreadTimer* timer,
             internal::ThreadManager* manager)
    : total_iterations_(0),
      range_(ranges),
      error_occurred_(false),
      thread_index(thread_i),
      max_iterations(max_iters),
      timer_(timer),
      manager_(manager) {
  CHECK(max_iterations != 0) << "At least one iteration must be run";
  CHECK_LT(thread_index, threads) << "thread_index must be less than threads";
}
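
// From State::SkipWithError(): only the first error reported by any thread is
// recorded; later calls leave the stored message untouched.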
  if (manager_->results.has_error_ == false) {
    manager_->results.error_message_ = msg;
    manager_->results.has_error_ = true;
  }
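
// RunBenchmarks(): run every matched benchmark instance, routing results
// through the console reporter and, when configured, a secondary file
// reporter.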
void RunBenchmarks(const std::vector<Benchmark::Instance>& benchmarks,
                   BenchmarkReporter* console_reporter,
                   BenchmarkReporter* file_reporter) {
  CHECK(console_reporter != nullptr);
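
  // Size the benchmark-name column: at least 10 characters, wide enough for
  // the longest name, plus room for an "_stddev" suffix when aggregate rows
  // will be printed.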
  bool has_repetitions = FLAGS_benchmark_repetitions > 1;
  size_t name_field_width = 10;
  for (const Benchmark::Instance& benchmark : benchmarks) {
    name_field_width =
        std::max<size_t>(name_field_width, benchmark.name.size());
    has_repetitions |= benchmark.repetitions > 1;
  }
  if (has_repetitions) name_field_width += std::strlen("_stddev");
  BenchmarkReporter::Context context;
  context.name_field_width = name_field_width;

  std::vector<BenchmarkReporter::Run> complexity_reports;
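
  // Flush the reporter streams after each reporting step so output shows up
  // promptly even when the streams are not line-buffered.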
  auto flushStreams = [](BenchmarkReporter* reporter) {
    if (!reporter) return;
    std::flush(reporter->GetOutputStream());
    std::flush(reporter->GetErrorStream());
  };
  flushStreams(console_reporter);
  flushStreams(file_reporter);
  for (const auto& benchmark : benchmarks) {
    std::vector<BenchmarkReporter::Run> reports =
        RunBenchmark(benchmark, &complexity_reports);
    console_reporter->ReportRuns(reports);
    if (file_reporter) file_reporter->ReportRuns(reports);
    flushStreams(console_reporter);
    flushStreams(file_reporter);
  }
  console_reporter->Finalize();
  if (file_reporter) file_reporter->Finalize();
  flushStreams(console_reporter);
  flushStreams(file_reporter);
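
// CreateReporter(): map a --benchmark_format / --benchmark_out_format value to
// the matching reporter implementation.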
std::unique_ptr<BenchmarkReporter> CreateReporter(
    std::string const& name, ConsoleReporter::OutputOptions output_opts) {
  typedef std::unique_ptr<BenchmarkReporter> PtrType;
  if (name == "console") {
    return PtrType(new ConsoleReporter(output_opts));
  } else if (name == "json") {
    return PtrType(new JSONReporter);
  } else if (name == "csv") {
    return PtrType(new CSVReporter);
  } else {
    std::cerr << "Unexpected format: '" << name << "'\n";
    std::exit(1);
  }
}
bool IsZero(double n) {
  return std::abs(n) < std::numeric_limits<double>::epsilon();
}
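
// From GetOutputOptions(): --benchmark_counters_tabular switches the console
// reporter into tabular output for user counters.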
  if (FLAGS_benchmark_counters_tabular) {
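
// From RunSpecifiedBenchmarks(): select the benchmarks matching
// --benchmark_filter, set up the console and optional file reporters, then
// list or run the selection.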
  std::string spec = FLAGS_benchmark_filter;
  if (spec.empty() || spec == "all")
    spec = ".";
  std::ofstream output_file;
  std::unique_ptr<BenchmarkReporter> default_console_reporter;
  std::unique_ptr<BenchmarkReporter> default_file_reporter;
  if (!console_reporter) {
    default_console_reporter = internal::CreateReporter(
        FLAGS_benchmark_format, internal::GetOutputOptions());
    console_reporter = default_console_reporter.get();
  }
  std::string const& fname = FLAGS_benchmark_out;
  if (fname.empty() && file_reporter) {
    Err << "A custom file reporter was provided but "
           "--benchmark_out=<file> was not specified."
        << std::endl;
    std::exit(1);
  }
  if (!fname.empty()) {
    output_file.open(fname);
    if (!output_file.is_open()) {
      Err << "invalid file name: '" << fname << "'" << std::endl;
      std::exit(1);
    }
    if (!file_reporter) {
      default_file_reporter = internal::CreateReporter(
          FLAGS_benchmark_out_format, ConsoleReporter::OO_None);
      file_reporter = default_file_reporter.get();
    }
  std::vector<internal::Benchmark::Instance> benchmarks;

  if (benchmarks.empty()) {
    Err << "Failed to match any benchmarks against regex: " << spec << "\n";
    return 0;
  }
  if (FLAGS_benchmark_list_tests) {
    for (auto const& benchmark : benchmarks) Out << benchmark.name << "\n";
  } else {
    internal::RunBenchmarks(benchmarks, console_reporter, file_reporter);
  }
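
// Usage text printed by --help and when a flag has an invalid value.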
641 " [--benchmark_list_tests={true|false}]\n"
642 " [--benchmark_filter=<regex>]\n"
643 " [--benchmark_min_time=<min_time>]\n"
644 " [--benchmark_repetitions=<num_repetitions>]\n"
645 " [--benchmark_report_aggregates_only={true|false}\n"
646 " [--benchmark_format=<console|json|csv>]\n"
647 " [--benchmark_out=<filename>]\n"
648 " [--benchmark_out_format=<json|console|csv>]\n"
649 " [--benchmark_color={auto|true|false}]\n"
650 " [--benchmark_counters_tabular={true|false}]\n"
651 " [--v=<verbosity>]\n");
  for (int i = 1; i < *argc; ++i) {
    if (ParseBoolFlag(argv[i], "benchmark_list_tests",
                      &FLAGS_benchmark_list_tests) ||
        ParseDoubleFlag(argv[i], "benchmark_min_time",
                        &FLAGS_benchmark_min_time) ||
        ParseInt32Flag(argv[i], "benchmark_repetitions",
                       &FLAGS_benchmark_repetitions) ||
        ParseBoolFlag(argv[i], "benchmark_report_aggregates_only",
                      &FLAGS_benchmark_report_aggregates_only) ||
        ParseStringFlag(argv[i], "benchmark_out_format",
                        &FLAGS_benchmark_out_format) ||
        ParseBoolFlag(argv[i], "benchmark_counters_tabular",
                      &FLAGS_benchmark_counters_tabular) ||
        ParseInt32Flag(argv[i], "v", &FLAGS_v)) {
      for (int j = i; j != *argc - 1; ++j) argv[j] = argv[j + 1];

      --(*argc);
      --i;
    } else if (IsFlag(argv[i], "help")) {
      PrintUsageAndExit();
    }
  }
  for (auto const* flag :
       {&FLAGS_benchmark_format, &FLAGS_benchmark_out_format})
    if (*flag != "console" && *flag != "json" && *flag != "csv") {
      PrintUsageAndExit();
    }
  if (FLAGS_benchmark_color.empty()) {
    PrintUsageAndExit();
  }
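
// From ReportUnrecognizedArguments(): anything left in argv after flag parsing
// is reported as an unrecognized command-line flag.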
  for (int i = 1; i < argc; ++i) {
    fprintf(stderr, "%s: error: unrecognized command-line flag: %s\n", argv[0],
            argv[i]);
  }