Diffstat (limited to 'src/bench/bench.cpp')
 src/bench/bench.cpp | 140 +++++++++-------------------------
 1 file changed, 36 insertions(+), 104 deletions(-)
diff --git a/src/bench/bench.cpp b/src/bench/bench.cpp
index 7b93ef688d..01466d0b6f 100644
--- a/src/bench/bench.cpp
+++ b/src/bench/bench.cpp
@@ -8,141 +8,73 @@
#include <test/util/setup_common.h>
#include <validation.h>
-#include <algorithm>
-#include <assert.h>
-#include <iomanip>
-#include <numeric>
+#include <fstream>
#include <iostream>
#include <regex>
const std::function<void(const std::string&)> G_TEST_LOG_FUN{};
-void benchmark::ConsolePrinter::header()
-{
- std::cout << "# Benchmark, evals, iterations, total, min, max, median" << std::endl;
-}
+namespace {
-void benchmark::ConsolePrinter::result(const State& state)
+void GenerateTemplateResults(const std::vector<ankerl::nanobench::Result>& benchmarkResults, const std::string& filename, const char* tpl)
{
- auto results = state.m_elapsed_results;
- std::sort(results.begin(), results.end());
-
- double total = state.m_num_iters * std::accumulate(results.begin(), results.end(), 0.0);
-
- double front = 0;
- double back = 0;
- double median = 0;
-
- if (!results.empty()) {
- front = results.front();
- back = results.back();
-
- size_t mid = results.size() / 2;
- median = results[mid];
- if (0 == results.size() % 2) {
- median = (results[mid] + results[mid + 1]) / 2;
- }
+ if (benchmarkResults.empty() || filename.empty()) {
+ // nothing to write, bail out
+ return;
}
-
- std::cout << std::setprecision(6);
- std::cout << state.m_name << ", " << state.m_num_evals << ", " << state.m_num_iters << ", " << total << ", " << front << ", " << back << ", " << median << std::endl;
-}
-
-void benchmark::ConsolePrinter::footer() {}
-benchmark::PlotlyPrinter::PlotlyPrinter(std::string plotly_url, int64_t width, int64_t height)
- : m_plotly_url(plotly_url), m_width(width), m_height(height)
-{
-}
-
-void benchmark::PlotlyPrinter::header()
-{
- std::cout << "<html><head>"
- << "<script src=\"" << m_plotly_url << "\"></script>"
- << "</head><body><div id=\"myDiv\" style=\"width:" << m_width << "px; height:" << m_height << "px\"></div>"
- << "<script> var data = ["
- << std::endl;
-}
-
-void benchmark::PlotlyPrinter::result(const State& state)
-{
- std::cout << "{ " << std::endl
- << " name: '" << state.m_name << "', " << std::endl
- << " y: [";
-
- const char* prefix = "";
- for (const auto& e : state.m_elapsed_results) {
- std::cout << prefix << std::setprecision(6) << e;
- prefix = ", ";
+ std::ofstream fout(filename);
+ if (fout.is_open()) {
+ ankerl::nanobench::render(tpl, benchmarkResults, fout);
+ std::cout << "Created '" << filename << "'" << std::endl;
+ } else {
+ std::cout << "Could not write to file '" << filename << "'" << std::endl;
}
- std::cout << "]," << std::endl
- << " boxpoints: 'all', jitter: 0.3, pointpos: 0, type: 'box',"
- << std::endl
- << "}," << std::endl;
-}
-void benchmark::PlotlyPrinter::footer()
-{
- std::cout << "]; var layout = { showlegend: false, yaxis: { rangemode: 'tozero', autorange: true } };"
- << "Plotly.newPlot('myDiv', data, layout);"
- << "</script></body></html>";
+ std::cout << "Created '" << filename << "'" << std::endl;
}
+} // namespace
benchmark::BenchRunner::BenchmarkMap& benchmark::BenchRunner::benchmarks()
{
- static std::map<std::string, Bench> benchmarks_map;
+ static std::map<std::string, BenchFunction> benchmarks_map;
return benchmarks_map;
}
-benchmark::BenchRunner::BenchRunner(std::string name, benchmark::BenchFunction func, uint64_t num_iters_for_one_second)
+benchmark::BenchRunner::BenchRunner(std::string name, benchmark::BenchFunction func)
{
- benchmarks().insert(std::make_pair(name, Bench{func, num_iters_for_one_second}));
+ benchmarks().insert(std::make_pair(name, func));
}
-void benchmark::BenchRunner::RunAll(Printer& printer, uint64_t num_evals, double scaling, const std::string& filter, bool is_list_only)
+void benchmark::BenchRunner::RunAll(const Args& args)
{
- if (!std::ratio_less_equal<benchmark::clock::period, std::micro>::value) {
- std::cerr << "WARNING: Clock precision is worse than microsecond - benchmarks may be less accurate!\n";
- }
-#ifdef DEBUG
- std::cerr << "WARNING: This is a debug build - may result in slower benchmarks.\n";
-#endif
-
- std::regex reFilter(filter);
+ std::regex reFilter(args.regex_filter);
std::smatch baseMatch;
- printer.header();
-
+ std::vector<ankerl::nanobench::Result> benchmarkResults;
for (const auto& p : benchmarks()) {
if (!std::regex_match(p.first, baseMatch, reFilter)) {
continue;
}
- uint64_t num_iters = static_cast<uint64_t>(p.second.num_iters_for_one_second * scaling);
- if (0 == num_iters) {
- num_iters = 1;
- }
- State state(p.first, num_evals, num_iters, printer);
- if (!is_list_only) {
- p.second.func(state);
+ if (args.is_list_only) {
+ std::cout << p.first << std::endl;
+ continue;
}
- printer.result(state);
- }
-
- printer.footer();
-}
-
-bool benchmark::State::UpdateTimer(const benchmark::time_point current_time)
-{
- if (m_start_time != time_point()) {
- std::chrono::duration<double> diff = current_time - m_start_time;
- m_elapsed_results.push_back(diff.count() / m_num_iters);
- if (m_elapsed_results.size() == m_num_evals) {
- return false;
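+ // fresh nanobench instance per benchmark, named after its registry key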
+ Bench bench;
+ bench.name(p.first);
+ if (args.asymptote.empty()) {
+ p.second(bench);
+ } else {
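+ // run once per requested problem size so nanobench can fit an asymptotic complexity curve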
+ for (auto n : args.asymptote) {
+ bench.complexityN(n);
+ p.second(bench);
+ }
+ std::cout << bench.complexityBigO() << std::endl;
}
+ benchmarkResults.push_back(bench.results().back());
}
- m_num_iters_left = m_num_iters - 1;
- return true;
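+ // emit CSV and JSON reports from the collected results via nanobench's mustache-style templates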
+ GenerateTemplateResults(benchmarkResults, args.output_csv, "# Benchmark, evals, iterations, total, min, max, median\n"
+ "{{#result}}{{name}}, {{epochs}}, {{average(iterations)}}, {{sumProduct(iterations, elapsed)}}, {{minimum(elapsed)}}, {{maximum(elapsed)}}, {{median(elapsed)}}\n"
+ "{{/result}}");
+ GenerateTemplateResults(benchmarkResults, args.output_json, ankerl::nanobench::templates::json());
}