
Allow specifying number of iterations via --benchmark_min_time. #1525

Merged — 35 commits, Feb 7, 2023

Changes from all commits (35 commits):
04f53c4
Allow specifying number of iterations via --benchmark_min_time.
oontvoo Jan 11, 2023
7e2aeaa
forgot to change flag type to string
oontvoo Jan 11, 2023
0ba28c5
used tagged union instead of std::variant, which is not available pre…
oontvoo Jan 11, 2023
de4c454
update decl in benchmark_runner.h too
oontvoo Jan 11, 2023
fab9054
fixed errors
oontvoo Jan 11, 2023
6d98c38
refactor
oontvoo Jan 11, 2023
2d7206d
backward compat
oontvoo Jan 11, 2023
807322b
typo
oontvoo Jan 11, 2023
80c349f
use IterationCount type
oontvoo Jan 11, 2023
2432374
fixed test
oontvoo Jan 11, 2023
cacbc83
const_cast
oontvoo Jan 11, 2023
774dbae
ret type
oontvoo Jan 11, 2023
26455dc
remove extra _
oontvoo Jan 11, 2023
f64dc35
debug
oontvoo Jan 11, 2023
d145ff9
fixed bug from reporting that caused the new configs not to be includ…
oontvoo Jan 12, 2023
d7f8701
addressed review comments
oontvoo Jan 12, 2023
c37b2a1
restore unnecessary changes in test/BUILD
oontvoo Jan 12, 2023
48c7d82
fix float comparisons warnings from Release builds
oontvoo Jan 12, 2023
0d69ed0
clang format
oontvoo Jan 12, 2023
bce6413
fix visibility warning
oontvoo Jan 12, 2023
860b2b6
remove misc file
oontvoo Jan 12, 2023
3dc210d
removed backup files
oontvoo Jan 12, 2023
46b2e7b
addressed review comments
oontvoo Jan 13, 2023
7886a3c
fix shorten in warning
oontvoo Jan 13, 2023
9d17cb3
use suffix for existing min_time specs to silent warnings in tests
oontvoo Jan 13, 2023
35481c5
fix leaks
oontvoo Jan 13, 2023
2664a17
use default min-time value in flag decl for consistency
oontvoo Jan 17, 2023
e96b8c6
removed double kMinTimeDecl from benchmark.h
oontvoo Jan 17, 2023
f0c1285
dont need to preserve errno
oontvoo Jan 19, 2023
4a22d68
add death tests
oontvoo Jan 24, 2023
ee9f1c8
Add BENCHMARK_EXPORT to hopefully fix missing def errors
oontvoo Jan 24, 2023
245e160
only enable death tests in debug mode because bm_check is no-op in re…
oontvoo Jan 24, 2023
340c65b
guard death tests with additional support-check macros
oontvoo Jan 24, 2023
a902c7f
Add additional guard to prevent running in Release mode
oontvoo Jan 24, 2023
94a06c7
Merge branch 'main' into min_time
dmah42 Feb 7, 2023
12 changes: 11 additions & 1 deletion include/benchmark/benchmark.h
@@ -280,6 +280,9 @@ BENCHMARK(BM_test)->Unit(benchmark::kMillisecond);
namespace benchmark {
class BenchmarkReporter;

// Default minimum benchmark running time, in seconds.
const char kDefaultMinTimeStr[] = "0.5s";

BENCHMARK_EXPORT void PrintDefaultHelp();

BENCHMARK_EXPORT void Initialize(int* argc, char** argv,
@@ -1099,11 +1102,12 @@ class BENCHMARK_EXPORT Benchmark {
Benchmark* MinWarmUpTime(double t);

// Specify the number of iterations that should be run by this benchmark.
// This option overrides the `benchmark_min_time` flag.
// REQUIRES: 'n > 0' and `MinTime` has not been called on this benchmark.
//
// NOTE: This function should only be used when *exact* iteration control is
// needed and never to control or limit how long a benchmark runs, where
// `--benchmark_min_time=N` or `MinTime(...)` should be used instead.
// `--benchmark_min_time=<N>s` or `MinTime(...)` should be used instead.
Benchmark* Iterations(IterationCount n);

// Specify the number of times to repeat this benchmark. This option overrides
@@ -1739,6 +1743,12 @@ class BENCHMARK_EXPORT BenchmarkReporter {
// to skip runs based on the context information.
virtual bool ReportContext(const Context& context) = 0;

// Called once for each group of benchmark runs, gives information about
// the configurations of the runs.
virtual void ReportRunsConfig(double /*min_time*/,
bool /*has_explicit_iters*/,
IterationCount /*iters*/) {}

// Called once for each group of benchmark runs, gives information about
// cpu-time and heap memory usage during the benchmark run. If the group
// of runs contained more than two entries then 'report' contains additional
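The new ReportRunsConfig() hook above defaults to a no-op, so existing reporters keep working unchanged. Below is a minimal sketch of a reporter that consumes it; the subclass name and logging format are illustrative assumptions, not part of this PR.

#include <cstdio>

#include "benchmark/benchmark.h"

// Hypothetical reporter that logs the run configuration delivered through
// the new ReportRunsConfig() hook.
class ConfigLoggingReporter : public benchmark::ConsoleReporter {
 public:
  void ReportRunsConfig(double min_time, bool has_explicit_iters,
                        benchmark::IterationCount iters) override {
    if (has_explicit_iters) {
      // An exact iteration count was requested, either via Iterations()
      // or via --benchmark_min_time=<integer>x.
      std::printf("config: exactly %lld iterations\n",
                  static_cast<long long>(iters));
    } else {
      // Time-bound run: the runner may keep iterating until the results
      // are statistically significant.
      std::printf("config: at least %.3f seconds\n", min_time);
    }
  }
};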
25 changes: 20 additions & 5 deletions src/benchmark.cc
@@ -65,12 +65,21 @@ BM_DEFINE_bool(benchmark_list_tests, false);
// linked into the binary are run.
BM_DEFINE_string(benchmark_filter, "");

// Minimum number of seconds we should run benchmark before results are
// considered significant. For cpu-time based tests, this is the lower bound
// Specification of how long to run the benchmark.
//
// It can be either an exact number of iterations (specified as `<integer>x`),
// or a minimum number of seconds (specified as `<float>s`). If the latter
// format (i.e., min seconds) is used, the system may run the benchmark longer
// until the results are considered significant.
//
// For backward compatibility, the `s` suffix may be omitted, in which case,
// the specified number is interpreted as the number of seconds.
//
// For cpu-time based tests, this is the lower bound
// on the total cpu time used by all threads that make up the test. For
// real-time based tests, this is the lower bound on the elapsed time of the
// benchmark execution, regardless of number of threads.
BM_DEFINE_double(benchmark_min_time, 0.5);
BM_DEFINE_string(benchmark_min_time, kDefaultMinTimeStr);
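To make the accepted spellings concrete, here is a hedged sketch of what each form parses to, using the ParseBenchMinTime() helper this PR adds in src/benchmark_runner.cc below. The helper is declared in an internal header, so this is illustration rather than public API.

#include <cassert>
#include <string>

#include "benchmark_runner.h"  // internal header; for illustration only

void DemoMinTimeFlagForms() {
  using benchmark::BenchTimeType;

  // Minimum running time in seconds.
  BenchTimeType t = benchmark::ParseBenchMinTime("2.5s");
  assert(t.tag == BenchTimeType::TIME && t.time == 2.5);

  // Exact iteration count.
  t = benchmark::ParseBenchMinTime("100x");
  assert(t.tag == BenchTimeType::ITERS && t.iters == 100);

  // Backward compatibility: a bare number is read as seconds (and logs
  // a warning suggesting the `s` suffix).
  t = benchmark::ParseBenchMinTime("0.5");
  assert(t.tag == BenchTimeType::TIME && t.time == 0.5);
}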

// Minimum number of seconds a benchmark should be run before results should be
// taken into account. This can, e.g., be necessary for benchmarks of code which
@@ -377,6 +386,12 @@ void RunBenchmarks(const std::vector<BenchmarkInstance>& benchmarks,
if (runner.HasRepeatsRemaining()) continue;
// FIXME: report each repetition separately, not all of them in bulk.

display_reporter->ReportRunsConfig(
runner.GetMinTime(), runner.HasExplicitIters(), runner.GetIters());
if (file_reporter)
file_reporter->ReportRunsConfig(
runner.GetMinTime(), runner.HasExplicitIters(), runner.GetIters());

RunResults run_results = runner.GetResults();

// Maybe calculate complexity report
@@ -610,7 +625,7 @@ void ParseCommandLineFlags(int* argc, char** argv) {
if (ParseBoolFlag(argv[i], "benchmark_list_tests",
&FLAGS_benchmark_list_tests) ||
ParseStringFlag(argv[i], "benchmark_filter", &FLAGS_benchmark_filter) ||
ParseDoubleFlag(argv[i], "benchmark_min_time",
ParseStringFlag(argv[i], "benchmark_min_time",
&FLAGS_benchmark_min_time) ||
ParseDoubleFlag(argv[i], "benchmark_min_warmup_time",
&FLAGS_benchmark_min_warmup_time) ||
@@ -671,7 +686,7 @@ void PrintDefaultHelp() {
"benchmark"
" [--benchmark_list_tests={true|false}]\n"
" [--benchmark_filter=<regex>]\n"
" [--benchmark_min_time=<min_time>]\n"
" [--benchmark_min_time=`<integer>x` OR `<float>s` ]\n"
" [--benchmark_min_warmup_time=<min_warmup_time>]\n"
" [--benchmark_repetitions=<num_repetitions>]\n"
" [--benchmark_enable_random_interleaving={true|false}]\n"
88 changes: 85 additions & 3 deletions src/benchmark_runner.cc
@@ -28,11 +28,14 @@

#include <algorithm>
#include <atomic>
#include <climits>
#include <cmath>
#include <condition_variable>
#include <cstdio>
#include <cstdlib>
#include <fstream>
#include <iostream>
#include <limits>
#include <memory>
#include <string>
#include <thread>
@@ -62,6 +65,8 @@ MemoryManager* memory_manager = nullptr;
namespace {

static constexpr IterationCount kMaxIterations = 1000000000;
const double kDefaultMinTime =
std::strtod(::benchmark::kDefaultMinTimeStr, /*p_end*/ nullptr);

BenchmarkReporter::Run CreateRunReport(
const benchmark::internal::BenchmarkInstance& b,
@@ -140,23 +145,100 @@ void RunInThread(const BenchmarkInstance* b, IterationCount iters,
manager->NotifyThreadComplete();
}

double ComputeMinTime(const benchmark::internal::BenchmarkInstance& b,
const BenchTimeType& iters_or_time) {
if (!IsZero(b.min_time())) return b.min_time();
// If the flag was used to specify number of iters, then return the default
// min_time.
if (iters_or_time.tag == BenchTimeType::ITERS) return kDefaultMinTime;

return iters_or_time.time;
}

IterationCount ComputeIters(const benchmark::internal::BenchmarkInstance& b,
const BenchTimeType& iters_or_time) {
if (b.iterations() != 0) return b.iterations();

// We've already concluded that this flag is currently used to pass
// iters but do a check here again anyway.
BM_CHECK(iters_or_time.tag == BenchTimeType::ITERS);
return iters_or_time.iters;
}

} // end namespace

BenchTimeType ParseBenchMinTime(const std::string& value) {
BenchTimeType ret;

if (value.empty()) {
ret.tag = BenchTimeType::TIME;
ret.time = 0.0;
return ret;
}

if (value.back() == 'x') {
const char* iters_str = value.c_str();
char* p_end;
// Reset errno before it's changed by strtol.
errno = 0;
IterationCount num_iters = std::strtol(iters_str, &p_end, 10);

// After a valid parse, p_end should have been set to
// point to the 'x' suffix.
BM_CHECK(errno == 0 && p_end != nullptr && *p_end == 'x')
<< "Malformed iters value passed to --benchmark_min_time: `" << value
<< "`. Expected --benchmark_min_time=<integer>x.";

ret.tag = BenchTimeType::ITERS;
ret.iters = num_iters;
return ret;
}

const char* time_str = value.c_str();
bool has_suffix = value.back() == 's';
if (!has_suffix) {
BM_VLOG(0) << "Value passed to --benchmark_min_time should have a suffix. "
"E.g., `30s` for 30 seconds.";
Collaborator: This makes it look like other suffixes (ms?) are supported

oontvoo (Member, author): Yes, we could extend the suffixes support later - but for the first iteration, let's keep it simple. (And I'd like to get the interface in place first)
}

char* p_end;
// Reset errno before it's changed by strtod.
errno = 0;
double min_time = std::strtod(time_str, &p_end);

// After a successful parse, p_end should point to the suffix 's',
// or the end of the string if the suffix was omitted.
BM_CHECK(errno == 0 && p_end != nullptr &&
((has_suffix && *p_end == 's') || *p_end == '\0'))
<< "Malformed seconds value passed to --benchmark_min_time: `" << value
<< "`. Expected --benchmark_min_time=<float>s.";

Collaborator: This check is present in release builds, right?

Member: no, it acts like assert and is disabled in NDEBUG builds

ret.tag = BenchTimeType::TIME;
ret.time = min_time;

return ret;
}
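As the review exchange above notes, BM_CHECK behaves like assert and is compiled out of NDEBUG (release) builds. A simplified sketch of that pattern follows; the real macro lives in src/check.h and additionally supports streaming context, as in the BM_CHECK uses above.

#include <cstdio>
#include <cstdlib>

// Assert-style check: enforced in debug builds, expands to nothing when
// NDEBUG is defined, so in release builds the condition is not evaluated.
#ifndef NDEBUG
#define SKETCH_CHECK(cond)                               \
  do {                                                   \
    if (!(cond)) {                                       \
      std::fprintf(stderr, "check failed: %s\n", #cond); \
      std::abort();                                      \
    }                                                    \
  } while (0)
#else
#define SKETCH_CHECK(cond) \
  do {                     \
  } while (0)
#endif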

BenchmarkRunner::BenchmarkRunner(
const benchmark::internal::BenchmarkInstance& b_,
BenchmarkReporter::PerFamilyRunReports* reports_for_family_)
: b(b_),
reports_for_family(reports_for_family_),
min_time(!IsZero(b.min_time()) ? b.min_time() : FLAGS_benchmark_min_time),
parsed_benchtime_flag(ParseBenchMinTime(FLAGS_benchmark_min_time)),
min_time(ComputeMinTime(b_, parsed_benchtime_flag)),
min_warmup_time((!IsZero(b.min_time()) && b.min_warmup_time() > 0.0)
? b.min_warmup_time()
: FLAGS_benchmark_min_warmup_time),
warmup_done(!(min_warmup_time > 0.0)),
repeats(b.repetitions() != 0 ? b.repetitions()
: FLAGS_benchmark_repetitions),
has_explicit_iteration_count(b.iterations() != 0),
has_explicit_iteration_count(b.iterations() != 0 ||
parsed_benchtime_flag.tag ==
BenchTimeType::ITERS),
pool(b.threads() - 1),
iters(has_explicit_iteration_count ? b.iterations() : 1),
iters(has_explicit_iteration_count
? ComputeIters(b_, parsed_benchtime_flag)
: 1),
perf_counters_measurement(StrSplit(FLAGS_benchmark_perf_counters, ',')),
perf_counters_measurement_ptr(perf_counters_measurement.IsValid()
? &perf_counters_measurement
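In user terms, ComputeMinTime() and ComputeIters() give per-benchmark settings priority over the flag. A short sketch (BM_Copy is a made-up benchmark; the registrations use the library's public API):

#include <string>

#include "benchmark/benchmark.h"

static void BM_Copy(benchmark::State& state) {
  std::string src(1000, 'x');
  for (auto _ : state) {
    std::string dst = src;  // the copy being measured
    benchmark::DoNotOptimize(dst);
  }
}

// Per-benchmark settings win over --benchmark_min_time:
BENCHMARK(BM_Copy)->Iterations(100);  // exactly 100 iterations, flag ignored
BENCHMARK(BM_Copy)->MinTime(2.0);     // at least 2 seconds, flag ignored

BENCHMARK_MAIN();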
20 changes: 19 additions & 1 deletion src/benchmark_runner.h
@@ -25,7 +25,7 @@

namespace benchmark {

BM_DECLARE_double(benchmark_min_time);
BM_DECLARE_string(benchmark_min_time);
BM_DECLARE_double(benchmark_min_warmup_time);
BM_DECLARE_int32(benchmark_repetitions);
BM_DECLARE_bool(benchmark_report_aggregates_only);
@@ -44,6 +44,17 @@ struct RunResults {
bool file_report_aggregates_only = false;
};

struct BENCHMARK_EXPORT BenchTimeType {
enum { ITERS, TIME } tag;
union {
IterationCount iters;
double time;
};
};

BENCHMARK_EXPORT
BenchTimeType ParseBenchMinTime(const std::string& value);
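BenchTimeType is a hand-rolled tagged union; the commit history notes std::variant was avoided because the library still supports pre-C++17 toolchains. Callers must check the tag before reading a member, as in this sketch mirroring ComputeMinTime() in benchmark_runner.cc (the function and parameter names here are made up):

#include "benchmark_runner.h"  // internal header; for illustration only

// Return the flag's time if that is the active member, otherwise fall back.
// Reading the union member not selected by `tag` would be undefined behavior.
double MinTimeOrFallback(const benchmark::BenchTimeType& t, double fallback) {
  return t.tag == benchmark::BenchTimeType::TIME ? t.time : fallback;
}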

class BenchmarkRunner {
public:
BenchmarkRunner(const benchmark::internal::BenchmarkInstance& b_,
@@ -63,12 +74,19 @@
return reports_for_family;
}

double GetMinTime() const { return min_time; }

bool HasExplicitIters() const { return has_explicit_iteration_count; }

IterationCount GetIters() const { return iters; }

private:
RunResults run_results;

const benchmark::internal::BenchmarkInstance& b;
BenchmarkReporter::PerFamilyRunReports* reports_for_family;

BenchTimeType parsed_benchtime_flag;
const double min_time;
const double min_warmup_time;
bool warmup_done;
22 changes: 11 additions & 11 deletions test/BUILD
@@ -25,14 +25,14 @@ PER_SRC_COPTS = {
"donotoptimize_test.cc": ["-O3"],
}

TEST_ARGS = ["--benchmark_min_time=0.01"]
TEST_ARGS = ["--benchmark_min_time=0.01s"]

PER_SRC_TEST_ARGS = ({
PER_SRC_TEST_ARGS = {
"user_counters_tabular_test.cc": ["--benchmark_counters_tabular=true"],
"repetitions_test.cc": [" --benchmark_repetitions=3"],
"spec_arg_test.cc" : ["--benchmark_filter=BM_NotChosen"],
"spec_arg_verbosity_test.cc" : ["--v=42"],
})
"spec_arg_test.cc": ["--benchmark_filter=BM_NotChosen"],
"spec_arg_verbosity_test.cc": ["--v=42"],
}

cc_library(
name = "output_test_helper",
@@ -58,14 +58,14 @@ cc_library(
copts = select({
"//:windows": [],
"//conditions:default": TEST_COPTS,
}) + PER_SRC_COPTS.get(test_src, []) ,
}) + PER_SRC_COPTS.get(test_src, []),
deps = [
":output_test_helper",
"//:benchmark",
"//:benchmark_internal_headers",
"@com_google_googletest//:gtest",
"@com_google_googletest//:gtest_main",
]
],
# FIXME: Add support for assembly tests to bazel.
# See Issue #556
# https://github.com/google/benchmark/issues/556
@@ -85,17 +85,17 @@ cc_test(
size = "small",
srcs = ["cxx03_test.cc"],
copts = TEST_COPTS + ["-std=c++03"],
target_compatible_with = select({
"//:windows": ["@platforms//:incompatible"],
"//conditions:default": [],
}),
deps = [
":output_test_helper",
"//:benchmark",
"//:benchmark_internal_headers",
"@com_google_googletest//:gtest",
"@com_google_googletest//:gtest_main",
],
target_compatible_with = select({
"//:windows": ["@platforms//:incompatible"],
"//conditions:default": [],
})
)

cc_test(