path: root/src/bench/bench.cpp
// Copyright (c) 2015-2020 The Bitcoin Core developers
// Distributed under the MIT software license, see the accompanying
// file COPYING or http://www.opensource.org/licenses/mit-license.php.

#include <bench/bench.h>

#include <chainparams.h>
#include <test/util/setup_common.h>
#include <validation.h>

#include <fstream>
#include <iostream>
#include <regex>

const std::function<void(const std::string&)> G_TEST_LOG_FUN{};

namespace {

void GenerateTemplateResults(const std::vector<ankerl::nanobench::Result>& benchmarkResults, const std::string& filename, const char* tpl)
{
    if (benchmarkResults.empty() || filename.empty()) {
        // nothing to write, bail out
        return;
    }
    std::ofstream fout(filename);
    if (fout.is_open()) {
        ankerl::nanobench::render(tpl, benchmarkResults, fout);
        std::cout << "Created '" << filename << "'" << std::endl;
    } else {
        std::cout << "Could not write to file '" << filename << "'" << std::endl;
    }
}

} // namespace

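// Lazily-constructed registry mapping benchmark names to their functions.
// Construct-on-first-use avoids static initialization order problems.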
benchmark::BenchRunner::BenchmarkMap& benchmark::BenchRunner::benchmarks()
{
    static std::map<std::string, BenchFunction> benchmarks_map;
    return benchmarks_map;
}

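// Constructing a BenchRunner registers the named benchmark in the registry.
// Benchmarks are typically registered at static initialization time via the
// BENCHMARK macro in bench/bench.h.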
benchmark::BenchRunner::BenchRunner(std::string name, benchmark::BenchFunction func)
{
    benchmarks().insert(std::make_pair(name, func));
}

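// Run every registered benchmark whose name matches the regex filter, then
// optionally write the collected results to CSV and/or JSON output files.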
void benchmark::BenchRunner::RunAll(const Args& args)
{
    std::regex reFilter(args.regex_filter);
    std::smatch baseMatch;

    std::vector<ankerl::nanobench::Result> benchmarkResults;
    for (const auto& p : benchmarks()) {
        if (!std::regex_match(p.first, baseMatch, reFilter)) {
            continue;
        }

        if (args.is_list_only) {
            std::cout << p.first << std::endl;
            continue;
        }

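        // Run the benchmark once, or once per requested asymptote value so
        // nanobench can estimate its asymptotic complexity (big-O).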
        Bench bench;
        bench.name(p.first);
        if (args.asymptote.empty()) {
            p.second(bench);
        } else {
            for (auto n : args.asymptote) {
                bench.complexityN(n);
                p.second(bench);
            }
            std::cout << bench.complexityBigO() << std::endl;
        }

        if (!bench.results().empty()) {
            benchmarkResults.push_back(bench.results().back());
        }
    }

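    // Render the results with the built-in CSV template and nanobench's JSON template.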
    GenerateTemplateResults(benchmarkResults, args.output_csv, "# Benchmark, evals, iterations, total, min, max, median\n"
                                                               "{{#result}}{{name}}, {{epochs}}, {{average(iterations)}}, {{sumProduct(iterations, elapsed)}}, {{minimum(elapsed)}}, {{maximum(elapsed)}}, {{median(elapsed)}}\n"
                                                               "{{/result}}");
    GenerateTemplateResults(benchmarkResults, args.output_json, ankerl::nanobench::templates::json());
}
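
// Usage sketch (for illustration only; it assumes the BENCHMARK registration
// macro and the benchmark::Bench alias declared in bench/bench.h):
//
//   static void ExampleBench(benchmark::Bench& bench)
//   {
//       bench.run([&] {
//           /* code under measurement */
//       });
//   }
//   BENCHMARK(ExampleBench);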