// Copyright (c) 2015-2022 The Bitcoin Core developers
// Distributed under the MIT software license, see the accompanying
// file COPYING or http://www.opensource.org/licenses/mit-license.php.

#include <bench/bench.h>

#include <test/util/setup_common.h> // IWYU pragma: keep
#include <tinyformat.h>
#include <util/fs.h>
#include <util/string.h>

#include <chrono>
#include <compare>
#include <fstream>
#include <functional>
#include <iostream>
#include <map>
#include <ratio>
#include <regex>
#include <set>
#include <stdexcept>
#include <string>
#include <vector>

using namespace std::chrono_literals;
using util::Join;
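
// Hooks for the linked test/util code (declared extern in
// test/util/setup_common.h); the bench binary leaves them empty.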
const std::function<void(const std::string&)> G_TEST_LOG_FUN{};
const std::function<std::vector<const char*>()> G_TEST_COMMAND_LINE_ARGUMENTS{};
const std::function<std::string()> G_TEST_GET_FULL_NAME{};

namespace {
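
// Render the collected benchmark results through the given nanobench template
// into `file`; does nothing when there are no results or no output path is set.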
void GenerateTemplateResults(const std::vector<ankerl::nanobench::Result>& benchmarkResults, const fs::path& file, const char* tpl)
{
    if (benchmarkResults.empty() || file.empty()) {
        // nothing to write, bail out
        return;
    }
    std::ofstream fout{file};
    if (fout.is_open()) {
        ankerl::nanobench::render(tpl, benchmarkResults, fout);
        std::cout << "Created " << file << std::endl;
    } else {
        std::cout << "Could not write to file " << file << std::endl;
    }
}

} // namespace

namespace benchmark {

// map a label to one or multiple priority levels
std::map<std::string, uint8_t> map_label_priority = {
    {"high", PriorityLevel::HIGH},
    {"low", PriorityLevel::LOW},
    {"all", 0xff}
};
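
// List the known priority labels, lowest level first, as a comma-separated
// string (used in the bench runner's help output).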
std::string ListPriorities()
{
    using item_t = std::pair<std::string, uint8_t>;
    auto sort_by_priority = [](item_t a, item_t b){ return a.second < b.second; };
    std::set<item_t, decltype(sort_by_priority)> sorted_priorities(map_label_priority.begin(), map_label_priority.end(), sort_by_priority);
    return Join(sorted_priorities, ',', [](const auto& entry){ return entry.first; });
}
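
// Map a label such as "high" to its priority bitmask; throws for unknown labels.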
uint8_t StringToPriority(const std::string& str)
{
    auto it = map_label_priority.find(str);
    if (it == map_label_priority.end()) throw std::runtime_error(strprintf("Unknown priority level %s", str));
    return it->second;
}
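
// Registry of all benchmarks. A function-local static avoids the static
// initialization order fiasco, since BenchRunner instances in other
// translation units register themselves here during static initialization.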
BenchRunner::BenchmarkMap& BenchRunner::benchmarks()
{
    static BenchmarkMap benchmarks_map;
    return benchmarks_map;
}
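
// Benchmarks self-register by constructing a static BenchRunner, normally via
// the BENCHMARK macro from bench/bench.h. A minimal sketch of the usual
// pattern (MyBench is illustrative, not part of this file):
//
//   static void MyBench(benchmark::Bench& bench)
//   {
//       bench.run([] { /* code under measurement */ });
//   }
//   BENCHMARK(MyBench, benchmark::PriorityLevel::HIGH);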
BenchRunner::BenchRunner(std::string name, BenchFunction func, PriorityLevel level)
{
    benchmarks().insert(std::make_pair(name, std::make_pair(func, level)));
}
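
// Run every registered benchmark whose priority matches args.priority and
// whose name matches args.regex_filter, then render the optional CSV and
// JSON reports.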
void BenchRunner::RunAll(const Args& args)
{
    std::regex reFilter(args.regex_filter);
    std::smatch baseMatch;

    if (args.sanity_check) {
        std::cout << "Running with -sanity-check option, output is being suppressed as benchmark results will be useless." << std::endl;
    }

    std::vector<ankerl::nanobench::Result> benchmarkResults;
    for (const auto& [name, bench_func] : benchmarks()) {
        const auto& [func, priority_level] = bench_func;

        if (!(priority_level & args.priority)) {
            continue;
        }

        if (!std::regex_match(name, baseMatch, reFilter)) {
            continue;
        }

        if (args.is_list_only) {
            std::cout << name << std::endl;
            continue;
        }
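
        // In sanity-check mode, run a single iteration and silence nanobench's
        // own output; the timings are not meaningful there.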
        Bench bench;
        if (args.sanity_check) {
            bench.epochs(1).epochIterations(1);
            bench.output(nullptr);
        }
        bench.name(name);
        if (args.min_time > 0ms) {
            // convert to nanos before dividing to reduce rounding errors
            std::chrono::nanoseconds min_time_ns = args.min_time;
            bench.minEpochTime(min_time_ns / bench.epochs());
        }
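
        // When asymptote values were supplied, run the benchmark once per N
        // and let nanobench fit a big-O complexity curve over the results.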
        if (args.asymptote.empty()) {
            func(bench);
        } else {
            for (auto n : args.asymptote) {
                bench.complexityN(n);
                func(bench);
            }
            std::cout << bench.complexityBigO() << std::endl;
        }

        if (!bench.results().empty()) {
            benchmarkResults.push_back(bench.results().back());
        }
    }

    GenerateTemplateResults(benchmarkResults, args.output_csv, "# Benchmark, evals, iterations, total, min, max, median\n"
                                                               "{{#result}}{{name}}, {{epochs}}, {{average(iterations)}}, {{sumProduct(iterations, elapsed)}}, {{minimum(elapsed)}}, {{maximum(elapsed)}}, {{median(elapsed)}}\n"
                                                               "{{/result}}");
    GenerateTemplateResults(benchmarkResults, args.output_json, ankerl::nanobench::templates::json());
}

} // namespace benchmark