// Copyright 2015 Google Inc. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include "benchmark/benchmark.h"
#include "benchmark_api_internal.h"
#include "benchmark_runner.h"
#include "internal_macros.h"

#ifndef BENCHMARK_OS_WINDOWS
#ifndef BENCHMARK_OS_FUCHSIA
#include <sys/resource.h>
#endif
#include <sys/time.h>
#include <unistd.h>
#endif

#include <algorithm>
#include <atomic>
#include <condition_variable>
#include <cstdio>
#include <cstdlib>
#include <fstream>
#include <iostream>
#include <memory>
#include <string>
#include <thread>
#include <utility>

#include "check.h"
#include "colorprint.h"
#include "commandlineflags.h"
#include "complexity.h"
#include "counter.h"
#include "log.h"
#include "mutex.h"
#include "re.h"
#include "statistics.h"
#include "string_util.h"
#include "thread_manager.h"
#include "thread_timer.h"

DEFINE_bool(benchmark_list_tests, false,
            "Print a list of benchmarks. This option overrides all other "
            "options.");

DEFINE_string(benchmark_filter, ".",
              "A regular expression that specifies the set of benchmarks "
              "to execute. If this flag is empty, or if this flag is the "
              "string \"all\", all benchmarks linked into the binary are "
              "run.");

DEFINE_double(benchmark_min_time, 0.5,
              "Minimum number of seconds we should run a benchmark before "
              "results are considered significant. For cpu-time based "
              "tests, this is the lower bound on the total cpu time "
              "used by all threads that make up the test. For real-time "
              "based tests, this is the lower bound on the elapsed time "
              "of the benchmark execution, regardless of number of "
              "threads.");

DEFINE_int32(benchmark_repetitions, 1,
             "The number of runs of each benchmark. If greater than 1, the "
             "mean and standard deviation of the runs will be reported.");

DEFINE_bool(
    benchmark_report_aggregates_only, false,
    "Report the result of each benchmark repetition. When 'true' is "
    "specified, only the mean, standard deviation, and other statistics are "
    "reported for repeated benchmarks. Affects all reporters.");

DEFINE_bool(
    benchmark_display_aggregates_only, false,
    "Display the result of each benchmark repetition. When 'true' is "
    "specified, only the mean, standard deviation, and other statistics are "
    "displayed for repeated benchmarks. Unlike "
    "benchmark_report_aggregates_only, this only affects the display "
    "reporter, but *NOT* the file reporter, which will still contain all the "
    "output.");

DEFINE_string(benchmark_format, "console",
              "The format to use for console output. Valid values are "
              "'console', 'json', or 'csv'.");

DEFINE_string(benchmark_out_format, "json",
              "The format to use for file output. Valid values are "
              "'console', 'json', or 'csv'.");

DEFINE_string(benchmark_out, "", "The file to write additional output to");

DEFINE_string(benchmark_color, "auto",
              "Whether to use colors in the output. Valid values: "
              "'true'/'yes'/1, 'false'/'no'/0, and 'auto'. 'auto' means to "
              "use colors if the output is being sent to a terminal and the "
              "TERM environment variable is set to a terminal type that "
              "supports colors.");

DEFINE_bool(benchmark_counters_tabular, false,
            "Whether to use tabular format when printing user counters to "
            "the console. Valid values: 'true'/'yes'/1, 'false'/'no'/0. "
            "Defaults to false.");

DEFINE_int32(v, 0, "The level of verbose logging to output");
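
// For reference, a typical invocation combining several of the flags above
// might look like this (illustrative; the binary name and benchmark filter
// are placeholders):
//
//   ./mybench --benchmark_filter=BM_memcpy --benchmark_repetitions=5 \
//             --benchmark_out=results.json --benchmark_out_format=json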

namespace benchmark {

namespace internal {

// FIXME: wouldn't LTO mess this up?
void UseCharPointer(char const volatile*) {}
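// Note: this is the out-of-line sink that keeps a value "live" for the
// optimizer; benchmark::DoNotOptimize routes through helpers like this on
// some toolchains. Illustrative user-side usage (Compute() is a hypothetical
// function):
//
//   int x = Compute();
//   benchmark::DoNotOptimize(x);  // prevents `x` from being optimized away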

}  // namespace internal

State::State(IterationCount max_iters, const std::vector<int64_t>& ranges,
             int thread_i, int n_threads, internal::ThreadTimer* timer,
             internal::ThreadManager* manager)
    : total_iterations_(0),
      batch_leftover_(0),
      max_iterations(max_iters),
      started_(false),
      finished_(false),
      error_occurred_(false),
      range_(ranges),
      complexity_n_(0),
      counters(),
      thread_index(thread_i),
      threads(n_threads),
      timer_(timer),
      manager_(manager) {
  CHECK(max_iterations != 0) << "At least one iteration must be run";
  CHECK_LT(thread_index, threads) << "thread_index must be less than threads";

  // Note: The use of offsetof below is technically undefined until C++17
  // because State is not a standard layout type. However, all compilers
  // currently provide well-defined behavior as an extension (which is
  // demonstrated since constexpr evaluation must diagnose all undefined
  // behavior). GCC and Clang nonetheless warn about this use of offsetof,
  // so the warning must be suppressed.
#if defined(__INTEL_COMPILER)
#pragma warning push
#pragma warning(disable:1875)
#elif defined(__GNUC__)
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Winvalid-offsetof"
#endif
  // Offset tests to ensure commonly accessed data is on the first cache line.
  const int cache_line_size = 64;
  static_assert(offsetof(State, error_occurred_) <=
                    (cache_line_size - sizeof(error_occurred_)),
                "");
#if defined(__INTEL_COMPILER)
#pragma warning pop
#elif defined(__GNUC__)
#pragma GCC diagnostic pop
#endif
}

void State::PauseTiming() {
  // Add in time accumulated so far
  CHECK(started_ && !finished_ && !error_occurred_);
  timer_->StopTimer();
}

void State::ResumeTiming() {
  CHECK(started_ && !finished_ && !error_occurred_);
  timer_->StartTimer();
}
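
// Illustrative sketch of how a benchmark body uses Pause/ResumeTiming to
// exclude per-iteration setup from the measurement (MakeTestData and Process
// are hypothetical user functions):
//
//   for (auto _ : state) {
//     state.PauseTiming();
//     auto data = MakeTestData();  // excluded from the timed region
//     state.ResumeTiming();
//     Process(data);               // only this is measured
//   }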

void State::SkipWithError(const char* msg) {
  CHECK(msg);
  error_occurred_ = true;
  {
    MutexLock l(manager_->GetBenchmarkMutex());
    if (!manager_->results.has_error_) {
      manager_->results.error_message_ = msg;
      manager_->results.has_error_ = true;
    }
  }
  total_iterations_ = 0;
  if (timer_->running()) timer_->StopTimer();
}
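
// Illustrative sketch: a benchmark that cannot run to completion reports the
// failure and returns early (ResourceAvailable is a hypothetical check):
//
//   static void BM_Foo(benchmark::State& state) {
//     if (!ResourceAvailable()) {
//       state.SkipWithError("required resource is unavailable");
//       return;  // also skips the KeepRunning loop
//     }
//     for (auto _ : state) { /* ... */ }
//   }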

void State::SetIterationTime(double seconds) {
  timer_->SetIterationTime(seconds);
}
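
// SetIterationTime is intended for benchmarks registered with
// UseManualTime(). A minimal sketch (LaunchKernelAndWait is a hypothetical
// helper returning the externally measured seconds):
//
//   static void BM_Gpu(benchmark::State& state) {
//     for (auto _ : state) {
//       double elapsed = LaunchKernelAndWait();
//       state.SetIterationTime(elapsed);
//     }
//   }
//   BENCHMARK(BM_Gpu)->UseManualTime();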

void State::SetLabel(const char* label) {
  MutexLock l(manager_->GetBenchmarkMutex());
  manager_->results.report_label_ = label;
}
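
// E.g. state.SetLabel("cache-cold") attaches a free-form note that the
// reporters print alongside this run's results.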

void State::StartKeepRunning() {
  CHECK(!started_ && !finished_);
  started_ = true;
  total_iterations_ = error_occurred_ ? 0 : max_iterations;
  manager_->StartStopBarrier();
  if (!error_occurred_) ResumeTiming();
}

void State::FinishKeepRunning() {
  CHECK(started_ && (!finished_ || error_occurred_));
  if (!error_occurred_) {
    PauseTiming();
  }
  // Total iterations has now wrapped around past 0. Fix this.
  total_iterations_ = 0;
  finished_ = true;
  manager_->StartStopBarrier();
}
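
// Start/FinishKeepRunning are not called by user code directly: the
// ranged-for loop over State drives them through the iterator protocol,
// roughly as follows (a sketch of the mechanism, not the exact generated
// code):
//
//   for (auto _ : state) { /* body */ }
//   // ~> StartKeepRunning() as the loop is entered; each iteration counts
//   //    down from max_iterations; FinishKeepRunning() once it reaches 0.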

namespace internal {
namespace {

void RunBenchmarks(const std::vector<BenchmarkInstance>& benchmarks,
                   BenchmarkReporter* display_reporter,
                   BenchmarkReporter* file_reporter) {
  // Note the file_reporter can be null.
  CHECK(display_reporter != nullptr);

  // Determine the width of the name field using a minimum width of 10.
  bool might_have_aggregates = FLAGS_benchmark_repetitions > 1;
  size_t name_field_width = 10;
  size_t stat_field_width = 0;
  for (const BenchmarkInstance& benchmark : benchmarks) {
    name_field_width =
        std::max<size_t>(name_field_width, benchmark.name.str().size());
    might_have_aggregates |= benchmark.repetitions > 1;

    for (const auto& Stat : *benchmark.statistics)
      stat_field_width = std::max<size_t>(stat_field_width, Stat.name_.size());
  }
  if (might_have_aggregates) name_field_width += 1 + stat_field_width;

  // Print header here
  BenchmarkReporter::Context context;
  context.name_field_width = name_field_width;

  // Keep track of running times of all instances of the current benchmark.
  std::vector<BenchmarkReporter::Run> complexity_reports;

  // We flush streams after invoking reporter methods that write to them. This
  // ensures users get timely updates even when streams are not line-buffered.
  auto flushStreams = [](BenchmarkReporter* reporter) {
    if (!reporter) return;
    std::flush(reporter->GetOutputStream());
    std::flush(reporter->GetErrorStream());
  };

  if (display_reporter->ReportContext(context) &&
      (!file_reporter || file_reporter->ReportContext(context))) {
    flushStreams(display_reporter);
    flushStreams(file_reporter);

    for (const auto& benchmark : benchmarks) {
      RunResults run_results = RunBenchmark(benchmark, &complexity_reports);

      auto report = [&run_results](BenchmarkReporter* reporter,
                                   bool report_aggregates_only) {
        assert(reporter);
        // If there are no aggregates, do output non-aggregates.
        report_aggregates_only &= !run_results.aggregates_only.empty();
        if (!report_aggregates_only)
          reporter->ReportRuns(run_results.non_aggregates);
        if (!run_results.aggregates_only.empty())
          reporter->ReportRuns(run_results.aggregates_only);
      };

      report(display_reporter, run_results.display_report_aggregates_only);
      if (file_reporter)
        report(file_reporter, run_results.file_report_aggregates_only);

      flushStreams(display_reporter);
      flushStreams(file_reporter);
    }
  }
  display_reporter->Finalize();
  if (file_reporter) file_reporter->Finalize();
  flushStreams(display_reporter);
  flushStreams(file_reporter);
}

std::unique_ptr<BenchmarkReporter> CreateReporter(
    std::string const& name, ConsoleReporter::OutputOptions output_opts) {
  typedef std::unique_ptr<BenchmarkReporter> PtrType;
  if (name == "console") {
    return PtrType(new ConsoleReporter(output_opts));
  } else if (name == "json") {
    return PtrType(new JSONReporter);
  } else if (name == "csv") {
    return PtrType(new CSVReporter);
  } else {
    std::cerr << "Unexpected format: '" << name << "'\n";
    std::exit(1);
  }
}

}  // end namespace

bool IsZero(double n) {
  return std::abs(n) < std::numeric_limits<double>::epsilon();
}

ConsoleReporter::OutputOptions GetOutputOptions(bool force_no_color) {
  int output_opts = ConsoleReporter::OO_Defaults;
  auto is_benchmark_color = [force_no_color]() -> bool {
    if (force_no_color) {
      return false;
    }
    if (FLAGS_benchmark_color == "auto") {
      return IsColorTerminal();
    }
    return IsTruthyFlagValue(FLAGS_benchmark_color);
  };
  if (is_benchmark_color()) {
    output_opts |= ConsoleReporter::OO_Color;
  } else {
    output_opts &= ~ConsoleReporter::OO_Color;
  }
  if (FLAGS_benchmark_counters_tabular) {
    output_opts |= ConsoleReporter::OO_Tabular;
  } else {
    output_opts &= ~ConsoleReporter::OO_Tabular;
  }
  return static_cast<ConsoleReporter::OutputOptions>(output_opts);
}

}  // end namespace internal

size_t RunSpecifiedBenchmarks() {
  return RunSpecifiedBenchmarks(nullptr, nullptr);
}

size_t RunSpecifiedBenchmarks(BenchmarkReporter* display_reporter) {
  return RunSpecifiedBenchmarks(display_reporter, nullptr);
}

size_t RunSpecifiedBenchmarks(BenchmarkReporter* display_reporter,
                              BenchmarkReporter* file_reporter) {
  std::string spec = FLAGS_benchmark_filter;
  if (spec.empty() || spec == "all")
    spec = ".";  // Regexp that matches all benchmarks

  // Setup the reporters
  std::ofstream output_file;
  std::unique_ptr<BenchmarkReporter> default_display_reporter;
  std::unique_ptr<BenchmarkReporter> default_file_reporter;
  if (!display_reporter) {
    default_display_reporter = internal::CreateReporter(
        FLAGS_benchmark_format, internal::GetOutputOptions());
    display_reporter = default_display_reporter.get();
  }
  auto& Out = display_reporter->GetOutputStream();
  auto& Err = display_reporter->GetErrorStream();

  std::string const& fname = FLAGS_benchmark_out;
  if (fname.empty() && file_reporter) {
    Err << "A custom file reporter was provided but "
           "--benchmark_out=<file> was not specified."
        << std::endl;
    std::exit(1);
  }
  if (!fname.empty()) {
    output_file.open(fname);
    if (!output_file.is_open()) {
      Err << "invalid file name: '" << fname << "'" << std::endl;
      std::exit(1);
    }
    if (!file_reporter) {
      default_file_reporter = internal::CreateReporter(
          FLAGS_benchmark_out_format, ConsoleReporter::OO_None);
      file_reporter = default_file_reporter.get();
    }
    file_reporter->SetOutputStream(&output_file);
    file_reporter->SetErrorStream(&output_file);
  }

  std::vector<internal::BenchmarkInstance> benchmarks;
  if (!FindBenchmarksInternal(spec, &benchmarks, &Err)) return 0;

  if (benchmarks.empty()) {
    Err << "Failed to match any benchmarks against regex: " << spec << "\n";
    return 0;
  }

  if (FLAGS_benchmark_list_tests) {
    for (auto const& benchmark : benchmarks)
      Out << benchmark.name.str() << "\n";
  } else {
    internal::RunBenchmarks(benchmarks, display_reporter, file_reporter);
  }

  return benchmarks.size();
}
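
// Typical call sequence from a user's main(); this is essentially the
// pattern the BENCHMARK_MAIN() macro expands to, shown here for
// illustration:
//
//   int main(int argc, char** argv) {
//     benchmark::Initialize(&argc, argv);
//     if (benchmark::ReportUnrecognizedArguments(argc, argv)) return 1;
//     benchmark::RunSpecifiedBenchmarks();
//   }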

void RegisterMemoryManager(MemoryManager* manager) {
  internal::memory_manager = manager;
}

namespace internal {

void PrintUsageAndExit() {
  fprintf(stdout,
          "benchmark"
          " [--benchmark_list_tests={true|false}]\n"
          "          [--benchmark_filter=<regex>]\n"
          "          [--benchmark_min_time=<min_time>]\n"
          "          [--benchmark_repetitions=<num_repetitions>]\n"
          "          [--benchmark_report_aggregates_only={true|false}]\n"
          "          [--benchmark_display_aggregates_only={true|false}]\n"
          "          [--benchmark_format=<console|json|csv>]\n"
          "          [--benchmark_out=<filename>]\n"
          "          [--benchmark_out_format=<json|console|csv>]\n"
          "          [--benchmark_color={auto|true|false}]\n"
          "          [--benchmark_counters_tabular={true|false}]\n"
          "          [--v=<verbosity>]\n");
  exit(0);
}

void ParseCommandLineFlags(int* argc, char** argv) {
  using namespace benchmark;
  BenchmarkReporter::Context::executable_name =
      (argc && *argc > 0) ? argv[0] : "unknown";
  for (int i = 1; i < *argc; ++i) {
    if (ParseBoolFlag(argv[i], "benchmark_list_tests",
                      &FLAGS_benchmark_list_tests) ||
        ParseStringFlag(argv[i], "benchmark_filter",
                        &FLAGS_benchmark_filter) ||
        ParseDoubleFlag(argv[i], "benchmark_min_time",
                        &FLAGS_benchmark_min_time) ||
        ParseInt32Flag(argv[i], "benchmark_repetitions",
                       &FLAGS_benchmark_repetitions) ||
        ParseBoolFlag(argv[i], "benchmark_report_aggregates_only",
                      &FLAGS_benchmark_report_aggregates_only) ||
        ParseBoolFlag(argv[i], "benchmark_display_aggregates_only",
                      &FLAGS_benchmark_display_aggregates_only) ||
        ParseStringFlag(argv[i], "benchmark_format",
                        &FLAGS_benchmark_format) ||
        ParseStringFlag(argv[i], "benchmark_out", &FLAGS_benchmark_out) ||
        ParseStringFlag(argv[i], "benchmark_out_format",
                        &FLAGS_benchmark_out_format) ||
        ParseStringFlag(argv[i], "benchmark_color", &FLAGS_benchmark_color) ||
        // "color_print" is the deprecated name for "benchmark_color".
        // TODO: Remove this.
        ParseStringFlag(argv[i], "color_print", &FLAGS_benchmark_color) ||
        ParseBoolFlag(argv[i], "benchmark_counters_tabular",
                      &FLAGS_benchmark_counters_tabular) ||
        ParseInt32Flag(argv[i], "v", &FLAGS_v)) {
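      // The flag was recognized and consumed: compact argv by shifting the
      // remaining arguments left, then re-examine the current index.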
      for (int j = i; j != *argc - 1; ++j) argv[j] = argv[j + 1];

      --(*argc);
      --i;
    } else if (IsFlag(argv[i], "help")) {
      PrintUsageAndExit();
    }
  }
  for (auto const* flag :
       {&FLAGS_benchmark_format, &FLAGS_benchmark_out_format})
    if (*flag != "console" && *flag != "json" && *flag != "csv") {
      PrintUsageAndExit();
    }
  if (FLAGS_benchmark_color.empty()) {
    PrintUsageAndExit();
  }
}

int InitializeStreams() {
  static std::ios_base::Init init;
  return 0;
}

}  // end namespace internal

void Initialize(int* argc, char** argv) {
  internal::ParseCommandLineFlags(argc, argv);
  internal::LogLevel() = FLAGS_v;
}

bool ReportUnrecognizedArguments(int argc, char** argv) {
  for (int i = 1; i < argc; ++i) {
    fprintf(stderr, "%s: error: unrecognized command-line flag: %s\n", argv[0],
            argv[i]);
  }
  return argc > 1;
}

}  // end namespace benchmark