benchmark/test/skip_with_error_test.cc
#undef NDEBUG
#include <cassert>
#include <vector>

#include "../src/check.h"  // NOTE: check.h is for internal use only!
#include "benchmark/benchmark.h"

namespace {

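// Reporter that records every benchmark Run it receives so that main() can
// compare the reported runs against ExpectedResults once all benchmarks
// have finished.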
class TestReporter : public benchmark::ConsoleReporter {
 public:
  virtual bool ReportContext(const Context& context) BENCHMARK_OVERRIDE {
    return ConsoleReporter::ReportContext(context);
  };

  virtual void ReportRuns(const std::vector<Run>& report) BENCHMARK_OVERRIDE {
    all_runs_.insert(all_runs_.end(), begin(report), end(report));
    ConsoleReporter::ReportRuns(report);
  }

  TestReporter() {}
  virtual ~TestReporter() {}

  mutable std::vector<Run> all_runs_;
};

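// Expected outcome of a single benchmark run: its name, whether it should
// report an error, and the expected error message.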
struct TestCase {
  std::string name;
  bool error_occurred;
  std::string error_message;

  typedef benchmark::BenchmarkReporter::Run Run;

  void CheckRun(Run const& run) const {
    BM_CHECK(name == run.benchmark_name())
        << "expected " << name << " got " << run.benchmark_name();
    BM_CHECK(error_occurred == run.error_occurred);
    BM_CHECK(error_message == run.error_message);
    if (error_occurred) {
      // BM_CHECK(run.iterations == 0);
    } else {
      BM_CHECK(run.iterations != 0);
    }
  }
};

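// Test cases are registered at static-initialization time via ADD_CASES and
// checked against the reporter's recorded runs in main().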
std::vector<TestCase> ExpectedResults;

int AddCases(const char* base_name, std::initializer_list<TestCase> const& v) {
  for (auto TC : v) {
    TC.name = base_name + TC.name;
    ExpectedResults.push_back(std::move(TC));
  }
  return 0;
}

#define CONCAT(x, y) CONCAT2(x, y)
#define CONCAT2(x, y) x##y
#define ADD_CASES(...) int CONCAT(dummy, __LINE__) = AddCases(__VA_ARGS__)

}  // end namespace

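// SkipWithError() with no iteration loop at all: the run is still reported,
// marked as an error.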
void BM_error_no_running(benchmark::State& state) {
  state.SkipWithError("error message");
}
BENCHMARK(BM_error_no_running);
ADD_CASES("BM_error_no_running", {{"", true, "error message"}});

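// SkipWithError() before the iteration loop starts: with KeepRunning(),
// KeepRunningBatch(), or the range-for loop, the loop body must never
// execute, which the assert(false) statements below verify.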
void BM_error_before_running(benchmark::State& state) {
  state.SkipWithError("error message");
  while (state.KeepRunning()) {
    assert(false);
  }
}
BENCHMARK(BM_error_before_running);
ADD_CASES("BM_error_before_running", {{"", true, "error message"}});

void BM_error_before_running_batch(benchmark::State& state) {
  state.SkipWithError("error message");
  while (state.KeepRunningBatch(17)) {
    assert(false);
  }
}
BENCHMARK(BM_error_before_running_batch);
ADD_CASES("BM_error_before_running_batch", {{"", true, "error message"}});

void BM_error_before_running_range_for(benchmark::State& state) {
  state.SkipWithError("error message");
  for (auto _ : state) {
    assert(false);
  }
}
BENCHMARK(BM_error_before_running_range_for);
ADD_CASES("BM_error_before_running_range_for", {{"", true, "error message"}});

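// SkipWithError() in the middle of the iteration loop. Only threads whose
// index is in the lower half call SkipWithError(), and only for Arg(1); the
// Arg(2) runs are expected to finish without error.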
void BM_error_during_running(benchmark::State& state) {
  bool first_iter = true;
  while (state.KeepRunning()) {
    if (state.range(0) == 1 && state.thread_index() <= (state.threads() / 2)) {
      assert(first_iter);
      first_iter = false;
      state.SkipWithError("error message");
    } else {
      state.PauseTiming();
      state.ResumeTiming();
    }
  }
}
BENCHMARK(BM_error_during_running)->Arg(1)->Arg(2)->ThreadRange(1, 8);
ADD_CASES("BM_error_during_running", {{"/1/threads:1", true, "error message"},
                                      {"/1/threads:2", true, "error message"},
                                      {"/1/threads:4", true, "error message"},
                                      {"/1/threads:8", true, "error message"},
                                      {"/2/threads:1", false, ""},
                                      {"/2/threads:2", false, ""},
                                      {"/2/threads:4", false, ""},
                                      {"/2/threads:8", false, ""}});

void BM_error_during_running_ranged_for(benchmark::State& state) {
  assert(state.max_iterations > 3 && "test requires at least a few iterations");
  bool first_iter = true;
  // NOTE: Users should not write the for loop explicitly.
  for (auto It = state.begin(), End = state.end(); It != End; ++It) {
    if (state.range(0) == 1) {
      assert(first_iter);
      first_iter = false;
      state.SkipWithError("error message");
      // Test the unfortunate but documented behavior that the ranged-for loop
      // doesn't automatically terminate when SkipWithError is set.
      assert(++It != End);
      break;  // Required behavior
    }
  }
}
BENCHMARK(BM_error_during_running_ranged_for)->Arg(1)->Arg(2)->Iterations(5);
ADD_CASES("BM_error_during_running_ranged_for",
          {{"/1/iterations:5", true, "error message"},
           {"/2/iterations:5", false, ""}});

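// SkipWithError() after the iteration loop has completed: the run must still
// be reported as an error when any thread calls it.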
void BM_error_after_running(benchmark::State& state) {
  for (auto _ : state) {
    benchmark::DoNotOptimize(state.iterations());
  }
  if (state.thread_index() <= (state.threads() / 2))
    state.SkipWithError("error message");
}
BENCHMARK(BM_error_after_running)->ThreadRange(1, 8);
ADD_CASES("BM_error_after_running", {{"/threads:1", true, "error message"},
                                     {"/threads:2", true, "error message"},
                                     {"/threads:4", true, "error message"},
                                     {"/threads:8", true, "error message"}});

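// SkipWithError() called while timing is paused (PauseTiming() without a
// matching ResumeTiming()): the error should still be reported for Arg(1).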
void BM_error_while_paused(benchmark::State& state) {
  bool first_iter = true;
  while (state.KeepRunning()) {
    if (state.range(0) == 1 && state.thread_index() <= (state.threads() / 2)) {
      assert(first_iter);
      first_iter = false;
      state.PauseTiming();
      state.SkipWithError("error message");
    } else {
      state.PauseTiming();
      state.ResumeTiming();
    }
  }
}
BENCHMARK(BM_error_while_paused)->Arg(1)->Arg(2)->ThreadRange(1, 8);
ADD_CASES("BM_error_while_paused", {{"/1/threads:1", true, "error message"},
                                    {"/1/threads:2", true, "error message"},
                                    {"/1/threads:4", true, "error message"},
                                    {"/1/threads:8", true, "error message"},
                                    {"/2/threads:1", false, ""},
                                    {"/2/threads:2", false, ""},
                                    {"/2/threads:4", false, ""},
                                    {"/2/threads:8", false, ""}});

int main(int argc, char* argv[]) {
  benchmark::Initialize(&argc, argv);

  TestReporter test_reporter;
  benchmark::RunSpecifiedBenchmarks(&test_reporter);

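  // Verify that every reported run matches an expected result, in the order
  // the cases were registered.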
  typedef benchmark::BenchmarkReporter::Run Run;
  auto EB = ExpectedResults.begin();

  for (Run const& run : test_reporter.all_runs_) {
    assert(EB != ExpectedResults.end());
    EB->CheckRun(run);
    ++EB;
  }
  assert(EB == ExpectedResults.end());

  return 0;
}