#include "benchmark/benchmark.h"

#ifndef BENCHMARK_OS_WINDOWS
#ifndef BENCHMARK_OS_FUCHSIA
#include <sys/resource.h>
#endif  // BENCHMARK_OS_FUCHSIA
#endif  // BENCHMARK_OS_WINDOWS

#include <condition_variable>
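// State::State: the per-thread run state. The initializer list below zeroes
// the iteration counters, records the owning thread's index, and wires up the
// optional perf-counter collector; the BM_CHECK validates the configuration.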
    : total_iterations_(0),
      max_iterations(max_iters),
      error_occurred_(false),
      thread_index_(thread_i),
      perf_counters_measurement_(perf_counters_measurement) {
  BM_CHECK_LT(thread_index_, threads_)
      << "thread_index must be less than threads";
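// The layout checks below use offsetof on State, which GCC and Clang flag
// with -Winvalid-offsetof because State is not a standard-layout type, so the
// warning is suppressed while the checks run.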
#if defined(__INTEL_COMPILER)
#pragma warning(disable : 1875)
#elif defined(__GNUC__)
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Winvalid-offsetof"
#endif
  const int cache_line_size = 64;
#if defined(__INTEL_COMPILER)
#pragma warning pop
#elif defined(__GNUC__)
#pragma GCC diagnostic pop
#endif
  for (const auto& name_and_measurement : measurements) {
    auto name = name_and_measurement.first;
    auto measurement = name_and_measurement.second;
    if (!manager_->results.has_error_) {
      manager_->results.error_message_ = msg;
      manager_->results.has_error_ = true;
    }
void FlushStreams(BenchmarkReporter* reporter) {
  if (!reporter) return;
  std::flush(reporter->GetOutputStream());
  std::flush(reporter->GetErrorStream());
}
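// Sends one benchmark's run results to the display reporter and, if present,
// to the file reporter; each destination independently honors its
// *_report_aggregates_only setting.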
void Report(BenchmarkReporter* display_reporter,
            BenchmarkReporter* file_reporter, const RunResults& run_results) {
  auto report_one = [](BenchmarkReporter* reporter, bool aggregates_only,
                       const RunResults& results) {
    // Suppress the non-aggregate runs only if aggregates were produced.
    aggregates_only &= !results.aggregates_only.empty();
    if (!aggregates_only) reporter->ReportRuns(results.non_aggregates);
    if (!results.aggregates_only.empty())
      reporter->ReportRuns(results.aggregates_only);
  };

  report_one(display_reporter, run_results.display_report_aggregates_only,
             run_results);
  if (file_reporter)
    report_one(file_reporter, run_results.file_report_aggregates_only,
               run_results);

  FlushStreams(display_reporter);
  FlushStreams(file_reporter);
}
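// Executes every matched benchmark instance, tracking per-family run counts
// so that complexity (Big-O) aggregates can be appended once a whole family
// has finished.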
void RunBenchmarks(const std::vector<BenchmarkInstance>& benchmarks,
                   BenchmarkReporter* display_reporter,
                   BenchmarkReporter* file_reporter) {
  // Note: the file_reporter may be null.
  BM_CHECK(display_reporter != nullptr);
  // Determine the width of the name field using a minimum width of 10.
  bool might_have_aggregates = FLAGS_benchmark_repetitions > 1;
  size_t name_field_width = 10;
  size_t stat_field_width = 0;
  for (const BenchmarkInstance& benchmark : benchmarks) {
    name_field_width =
        std::max<size_t>(name_field_width, benchmark.name().str().size());
    might_have_aggregates |= benchmark.repetitions() > 1;
    for (const auto& Stat : benchmark.statistics())
      stat_field_width = std::max<size_t>(stat_field_width, Stat.name_.size());
  }
  if (might_have_aggregates) name_field_width += 1 + stat_field_width;
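  // Print the header up front so it still appears if a benchmark fails while
  // being set up.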
  BenchmarkReporter::Context context;
  context.name_field_width = name_field_width;
  std::map<int, BenchmarkReporter::PerFamilyRunReports> per_family_reports;
  if (display_reporter->ReportContext(context) &&
      (!file_reporter || file_reporter->ReportContext(context))) {
    FlushStreams(display_reporter);
    FlushStreams(file_reporter);
    size_t num_repetitions_total = 0;

    std::vector<internal::BenchmarkRunner> runners;
    runners.reserve(benchmarks.size());
    for (const BenchmarkInstance& benchmark : benchmarks) {
      BenchmarkReporter::PerFamilyRunReports* reports_for_family = nullptr;
      if (benchmark.complexity() != oNone)
        reports_for_family = &per_family_reports[benchmark.family_index()];

      runners.emplace_back(benchmark, reports_for_family);
      int num_repeats_of_this_instance = runners.back().GetNumRepeats();
      num_repetitions_total += num_repeats_of_this_instance;
      if (reports_for_family)
        reports_for_family->num_runs_total += num_repeats_of_this_instance;
    }
    assert(runners.size() == benchmarks.size() && "Unexpected runner count.");
    std::vector<size_t> repetition_indices;
    repetition_indices.reserve(num_repetitions_total);
    for (size_t runner_index = 0, num_runners = runners.size();
         runner_index != num_runners; ++runner_index) {
      const internal::BenchmarkRunner& runner = runners[runner_index];
      std::fill_n(std::back_inserter(repetition_indices),
                  runner.GetNumRepeats(), runner_index);
    }
    assert(repetition_indices.size() == num_repetitions_total &&
           "Unexpected number of repetition indexes.");
    if (FLAGS_benchmark_enable_random_interleaving) {
      std::random_device rd;
      std::mt19937 g(rd());
      std::shuffle(repetition_indices.begin(), repetition_indices.end(), g);
    }
    for (size_t repetition_index : repetition_indices) {
      internal::BenchmarkRunner& runner = runners[repetition_index];
      runner.DoOneRepetition();
      if (runner.HasRepeatsRemaining()) continue;

      RunResults run_results = runner.GetResults();
      // Once every run in this family has finished, fold the Big-O
      // complexity aggregates into this repetition's results.
      if (const auto* reports_for_family = runner.GetReportsForFamily()) {
        if (reports_for_family->num_runs_done ==
            reports_for_family->num_runs_total) {
          auto additional_run_stats = ComputeBigO(reports_for_family->Runs);
          run_results.aggregates_only.insert(run_results.aggregates_only.end(),
                                             additional_run_stats.begin(),
                                             additional_run_stats.end());
          per_family_reports.erase(
              static_cast<int>(reports_for_family->Runs.front().family_index));
        }
      }
      Report(display_reporter, file_reporter, run_results);
    }
  }
  display_reporter->Finalize();
  if (file_reporter) file_reporter->Finalize();
  FlushStreams(display_reporter);
  FlushStreams(file_reporter);
}
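// CSVReporter is deprecated, but CreateReporter below must still instantiate
// it, so deprecation warnings are suppressed around the definition.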
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wdeprecated-declarations"
std::unique_ptr<BenchmarkReporter> CreateReporter(
    std::string const& name, ConsoleReporter::OutputOptions output_opts) {
  typedef std::unique_ptr<BenchmarkReporter> PtrType;
  if (name == "console") {
    return PtrType(new ConsoleReporter(output_opts));
  } else if (name == "json") {
    return PtrType(new JSONReporter);
  } else if (name == "csv") {
    return PtrType(new CSVReporter);
  }
  std::cerr << "Unexpected format: '" << name << "'\n";
  std::exit(1);
}
#pragma GCC diagnostic pop
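// A minimal usage sketch for CreateReporter (values mirror the flag defaults
// used elsewhere in this file):
//   std::unique_ptr<BenchmarkReporter> rep =
//       CreateReporter("json", ConsoleReporter::OO_None);
//   // rep owns a JSONReporter; an unknown name prints an error and exits.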
bool IsZero(double n) {
  return std::abs(n) < std::numeric_limits<double>::epsilon();
}
ConsoleReporter::OutputOptions GetOutputOptions(bool force_no_color) {
  int output_opts = ConsoleReporter::OO_Defaults;
  auto is_benchmark_color = [force_no_color]() -> bool {
    if (force_no_color) return false;
    if (FLAGS_benchmark_color == "auto") return IsColorTerminal();
    return IsTruthyFlagValue(FLAGS_benchmark_color);
  };
  if (is_benchmark_color()) output_opts |= ConsoleReporter::OO_Color;
  if (FLAGS_benchmark_counters_tabular)
    output_opts |= ConsoleReporter::OO_Tabular;
  std::ofstream output_file;
  std::unique_ptr<BenchmarkReporter> default_display_reporter;
  std::unique_ptr<BenchmarkReporter> default_file_reporter;
  if (!display_reporter) {
    default_display_reporter = internal::CreateReporter(
        FLAGS_benchmark_format, internal::GetOutputOptions());
    display_reporter = default_display_reporter.get();
  }
  if (fname.empty() && file_reporter) {
    Err << "A custom file reporter was provided but "
           "--benchmark_out=<file> was not specified."
        << std::endl;
    std::exit(1);
  }
  if (!fname.empty()) {
    output_file.open(fname);
    if (!output_file.is_open()) {
      Err << "invalid file name: '" << fname << "'" << std::endl;
      std::exit(1);
    }
    if (!file_reporter) {
      default_file_reporter = internal::CreateReporter(
          FLAGS_benchmark_out_format, ConsoleReporter::OO_None);
      file_reporter = default_file_reporter.get();
    }
  std::vector<internal::BenchmarkInstance> benchmarks;
  if (benchmarks.empty()) {
    Err << "Failed to match any benchmarks against regex: " << spec << "\n";
    return 0;
  }
  if (FLAGS_benchmark_list_tests) {
    for (auto const& benchmark : benchmarks)
      Out << benchmark.name().str() << "\n";
  } else {
    internal::RunBenchmarks(benchmarks, display_reporter, file_reporter);
  }
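// The flow above is normally driven from a user's main(); a minimal sketch,
// equivalent to what the BENCHMARK_MAIN() macro expands to:
//   int main(int argc, char** argv) {
//     benchmark::Initialize(&argc, argv);
//     if (benchmark::ReportUnrecognizedArguments(argc, argv)) return 1;
//     benchmark::RunSpecifiedBenchmarks();
//   }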
    std::cerr << "Failed to add custom context \"" << key << "\" as it already "
              << "exists with value \"" << value << "\"\n";
522 " [--benchmark_list_tests={true|false}]\n"
523 " [--benchmark_filter=<regex>]\n"
524 " [--benchmark_min_time=<min_time>]\n"
525 " [--benchmark_repetitions=<num_repetitions>]\n"
526 " [--benchmark_enable_random_interleaving={true|false}]\n"
527 " [--benchmark_report_aggregates_only={true|false}]\n"
528 " [--benchmark_display_aggregates_only={true|false}]\n"
529 " [--benchmark_format=<console|json|csv>]\n"
530 " [--benchmark_out=<filename>]\n"
531 " [--benchmark_out_format=<json|console|csv>]\n"
532 " [--benchmark_color={auto|true|false}]\n"
533 " [--benchmark_counters_tabular={true|false}]\n"
534 " [--benchmark_perf_counters=<counter>,...]\n"
535 " [--benchmark_context=<key>=<value>,...]\n"
536 " [--v=<verbosity>]\n");
void ParseCommandLineFlags(int* argc, char** argv) {
  BenchmarkReporter::Context::executable_name =
      (argc && *argc > 0) ? argv[0] : "unknown";
  for (int i = 1; argc && i < *argc; ++i) {
    if (ParseBoolFlag(argv[i], "benchmark_list_tests",
                      &FLAGS_benchmark_list_tests) ||
        ParseDoubleFlag(argv[i], "benchmark_min_time",
                        &FLAGS_benchmark_min_time) ||
        ParseInt32Flag(argv[i], "benchmark_repetitions",
                       &FLAGS_benchmark_repetitions) ||
        ParseBoolFlag(argv[i], "benchmark_enable_random_interleaving",
                      &FLAGS_benchmark_enable_random_interleaving) ||
        ParseBoolFlag(argv[i], "benchmark_report_aggregates_only",
                      &FLAGS_benchmark_report_aggregates_only) ||
        ParseBoolFlag(argv[i], "benchmark_display_aggregates_only",
                      &FLAGS_benchmark_display_aggregates_only) ||
        ParseStringFlag(argv[i], "benchmark_out_format",
                        &FLAGS_benchmark_out_format) ||
        ParseBoolFlag(argv[i], "benchmark_counters_tabular",
                      &FLAGS_benchmark_counters_tabular) ||
        ParseStringFlag(argv[i], "benchmark_perf_counters",
                        &FLAGS_benchmark_perf_counters) ||
        ParseKeyValueFlag(argv[i], "benchmark_context",
                          &FLAGS_benchmark_context) ||
        ParseInt32Flag(argv[i], "v", &FLAGS_v)) {
      // Shift the remaining arguments left over the consumed flag.
      for (int j = i; j != *argc - 1; ++j) argv[j] = argv[j + 1];

      --(*argc);  // One argument fewer.
      --i;        // Re-examine this index, which now holds the next argument.
    } else if (IsFlag(argv[i], "help")) {
      PrintUsageAndExit();
    }
  }
  for (auto const* flag :
       {&FLAGS_benchmark_format, &FLAGS_benchmark_out_format}) {
    if (*flag != "console" && *flag != "json" && *flag != "csv") {
      PrintUsageAndExit();
    }
  }
  if (FLAGS_benchmark_color.empty()) PrintUsageAndExit();

  for (const auto& kv : FLAGS_benchmark_context) {
    AddCustomContext(kv.first, kv.second);
  }
bool ReportUnrecognizedArguments(int argc, char** argv) {
  for (int i = 1; i < argc; ++i) {
    fprintf(stderr, "%s: error: unrecognized command-line flag: %s\n", argv[0],
            argv[i]);
  }
  return argc > 1;
}