Sequoia
Loading...
Searching...
No Matches
PerformanceTestCore.hpp
Go to the documentation of this file.
1
2// Copyright Oliver J. Rosten 2020. //
3// Distributed under the GNU GENERAL PUBLIC LICENSE, Version 3.0. //
4// (See accompanying file LICENSE.md or copy at //
5// https://www.gnu.org/licenses/gpl-3.0.en.html) //
7
8#pragma once
9
17
18#include <chrono>
19#include <random>
20#include <future>
21#include <thread>
22
23namespace sequoia::testing
24{
25 template<std::invocable Task>
26 [[nodiscard]]
27 std::chrono::duration<double> profile(Task task)
28 {
29 const timer t{};
30 task();
31
32 return t.time_elapsed();
33 }
34
77 template<test_mode Mode, std::invocable F, std::invocable S>
78 bool check_relative_performance(std::string_view description, test_logger<Mode>& logger, F fast, S slow, const double minSpeedUp, const double maxSpeedUp, const std::size_t trials, const double num_sds, const std::size_t maxAttempts)
79 {
80 if((minSpeedUp <= 1) || (maxSpeedUp <= 1))
81 throw std::logic_error{"Relative performance test requires speed-up factors > 1"};
82
83 if(minSpeedUp > maxSpeedUp)
84 throw std::logic_error{"maxSpeedUp must be >= minSpeedUp"};
85
86 if(num_sds <=1)
87 throw std::logic_error{"Number of standard deviations is required to be > 1"};
88
89 if(!maxAttempts)
90 throw std::logic_error{"Number of attempts is required to be > 0"};
91
92 if(trials < 5)
93 throw std::logic_error{"Number of trials is required to be > 4"};
94
95 using namespace std::chrono;
96 using namespace maths;
97
98 std::string summary{};
99 std::size_t remainingAttempts{maxAttempts};
100 bool passed{};
101
102 auto timer{
103 [](auto task, std::vector<double>& timings){
104 timings.push_back(profile(task).count());
105 }
106 };
107
108 while(remainingAttempts > 0)
109 {
110 const auto adjustedTrials{trials*(maxAttempts - remainingAttempts + 1)};
111
112 std::vector<double> fastData, slowData;
113 fastData.reserve(adjustedTrials);
114 slowData.reserve(adjustedTrials);
115
116 std::random_device generator;
117 for(std::size_t i{}; i < adjustedTrials; ++i)
118 {
119 std::uniform_real_distribution<double> distribution{0.0, 1.0};
120 const bool fastFirst{(distribution(generator) < 0.5)};
121
122 if(fastFirst)
123 {
124 timer(fast, fastData);
125 timer(slow, slowData);
126 }
127 else
128 {
129 timer(slow, slowData);
130 timer(fast, fastData);
131 }
132 }
133
134 auto compute_stats{
135 [](auto first, auto last) {
136 const auto data{sample_standard_deviation(first, last)};
137 return std::make_pair(data.first.value(), data.second.value());
138 }
139 };
140
141 std::ranges::sort(fastData);
142 std::ranges::sort(slowData);
143
144 const auto [sig_f, m_f]{compute_stats(fastData.cbegin()+1, fastData.cend()-1)};
145 const auto [sig_s, m_s]{compute_stats(slowData.cbegin()+1, slowData.cend()-1)};
146
147 if(m_f + sig_f < m_s - sig_s)
148 {
149 if(sig_f >= sig_s)
150 {
151 passed = (minSpeedUp * m_f <= (m_s + num_sds * sig_s))
152 && (maxSpeedUp * m_f >= (m_s - num_sds * sig_s));
153 }
154 else
155 {
156 passed = (m_s / maxSpeedUp <= (m_f + num_sds * sig_f))
157 && (m_s / minSpeedUp >= (m_f - num_sds * sig_f));
158 }
159 }
160 else
161 {
162 passed = false;
163 }
164
165 auto stats{
166 [num_sds](std::string_view prefix, const auto mean, const auto sig){
167
168 std::ostringstream message{};
169 message << mean << "s" << " +- " << num_sds << " * " << sig << "s";
170
171 return std::string{prefix}.append(" Task duration: ").append(message.str());
172 }
173 };
174
175 auto summarizer{
176 [m_f{m_f},m_s{m_s},minSpeedUp,maxSpeedUp](){
177 std::ostringstream message{};
178 message << " [" << m_s / m_f << "; (" << minSpeedUp << ", " << maxSpeedUp << ")]";
179
180 return message.str();
181 }
182 };
183
184 summary = append_lines(stats("Fast", m_f, sig_f), stats("Slow", m_s, sig_s)).append(summarizer());
185
186 if((test_logger<Mode>::mode == test_mode::false_negative) ? !passed : passed)
187 {
188 break;
189 }
190
191 --remainingAttempts;
192 }
193
194 sentinel<Mode> sentry{logger, append_lines(description, summary)};
195 sentry.log_performance_check();
196
197 if(!passed)
198 {
199 sentry.log_performance_failure("");
200 }
201
202 return passed;
203 }
204
205 template<class T, class Period>
206 [[nodiscard]]
207 std::chrono::duration<T, Period> calibrate(std::chrono::duration<T, Period> target)
208 {
209 using namespace std::chrono;
210
211 std::array<double, 7> timings{};
212 for (auto& t : timings)
213 {
214 t = profile([target]() { std::this_thread::sleep_for(target); }).count();
215 }
216
217 std::ranges::sort(timings);
218 const auto [sig_f, m_f] {maths::sample_standard_deviation(timings.cbegin() + 1, timings.cend() - 1)};
219 if (sig_f && m_f)
220 {
221 if ((m_f.value() - sig_f.value()) > duration_cast<duration<double>>(target).count())
222 {
223 constexpr auto inverse{Period::den / Period::num};
224 return std::chrono::duration<T, Period>{static_cast<T>(std::ceil(inverse* (m_f.value() + 5* sig_f.value())))};
225 }
226 }
227
228 return target;
229 }
230
234 template<test_mode Mode>
236 {
237 public:
238 constexpr static test_mode mode{Mode};
239
240 performance_extender() = default;
241
242 template<class Self, std::invocable F, std::invocable S>
243 bool check_relative_performance(this Self& self, const reporter& description, F fast, S slow, const double minSpeedUp, const double maxSpeedUp, const std::size_t trials=5, const double num_sds=4)
244 {
245 return testing::check_relative_performance(self.report(description), self.m_Logger, fast, slow, minSpeedUp, maxSpeedUp, trials, num_sds, 3);
246 }
247 protected:
248 ~performance_extender() = default;
249
250 performance_extender(performance_extender&&) noexcept = default;
251 performance_extender& operator=(performance_extender&&) noexcept = default;
252 };
253
254 [[nodiscard]]
255 std::string_view postprocess(std::string_view testOutput, std::string_view referenceOutput);
256
259 template<test_mode Mode>
260 class basic_performance_test : public basic_test<Mode, performance_extender<Mode>>
261 {
262 public:
264 using duration = typename base_type::duration;
265
266 using base_type::base_type;
267
268 [[nodiscard]]
269 log_summary summarize(duration delta) const;
270 protected:
271 ~basic_performance_test() = default;
272
274 basic_performance_test& operator=(basic_performance_test&&) noexcept = default;
275 };
276
281
  // Performance tests depend on wall-clock timings, so running them
  // concurrently with other tests would distort the measurements: opt
  // every test derived from basic_performance_test out of parallel execution.
  template<concrete_test T>
    requires std::is_base_of_v<basic_performance_test<T::mode>, T>
  struct is_parallelizable<T> : std::false_type {};
285}
Contains utilities for automatically editing certain files as part of the test creation process.
bool check_relative_performance(std::string_view description, test_logger< Mode > &logger, F fast, S slow, const double minSpeedUp, const double maxSpeedUp, const std::size_t trials, const double num_sds, const std::size_t maxAttempts)
Function for comparing the performance of a fast task to a slow task.
Definition: PerformanceTestCore.hpp:78
Utilities for checking regular semantics.
Tools for statistical analysis.
class template from which all concrete tests should derive
Definition: PerformanceTestCore.hpp:261
class template from which all concrete tests should derive.
Definition: FreeTestCore.hpp:144
Summarises data generated by the logger, for the purposes of reporting.
Definition: TestLogger.hpp:299
class template for plugging into the checker class template
Definition: PerformanceTestCore.hpp:236
Definition: Output.hpp:186
Definition: TestLogger.hpp:277
Definition: TestLogger.hpp:183
Definition: FreeTestCore.hpp:31
Definition: FreeTestCore.hpp:241