// tiny_bench/benching.rs

1use crate::output::analysis::criterion::calculate_iterations;
2use crate::output::{
3    fallback_to_anonymous_on_invalid_label, fmt_num, fmt_time, wrap_bold_green,
4    wrap_high_intensity_white, Output,
5};
6use crate::{black_box, BenchmarkConfig};
7use std::time::{Duration, Instant};
8
/// Will run the closure and print statistics from the benchmarking to stdout.
/// Will persist results under the anonymous label which is shared, making comparisons impossible
/// if running more than one (different) benchmark on the same project, ie. benching two different
/// functions
/// ```no_run
/// use tiny_bench::bench;
/// bench(|| {
///     // Some code that should be benched
/// })
/// ```
pub fn bench<T, F: FnMut() -> T>(closure: F) {
    // Delegate to the configurable entry point with default settings.
    bench_with_configuration(&BenchmarkConfig::default(), closure);
}
22
/// Will run the closure with a label, running with a label enables comparisons for subsequent runs.
/// ```no_run
/// use tiny_bench::bench_labeled;
/// bench_labeled("my_benchmark", || {
///     // Some code that should be benched
/// })
/// ```
pub fn bench_labeled<T, F: FnMut() -> T>(label: &'static str, closure: F) {
    // Delegate to the configurable entry point with default settings, keeping the label.
    bench_with_configuration_labeled(label, &BenchmarkConfig::default(), closure);
}
33
/// Will run the benchmark with the supplied configuration
/// ```no_run
/// use std::time::Duration;
/// use tiny_bench::{bench_with_configuration, BenchmarkConfig};
/// bench_with_configuration(&BenchmarkConfig {
///     measurement_time: Duration::from_secs(10),
///     ..BenchmarkConfig::default()
/// }, || {
///     // Some code that should be benched
/// })
/// ```
pub fn bench_with_configuration<T, F: FnMut() -> T>(cfg: &BenchmarkConfig, closure: F) {
    // "anonymous" is the shared fallback label, so results from different anonymous
    // benchmarks overwrite each other and cannot be meaningfully compared.
    bench_with_configuration_labeled("anonymous", cfg, closure);
}
48
49/// Will run the benchmark with the supplied configuration and a label
50/// ```no_run
51/// use tiny_bench::{bench_with_configuration_labeled, BenchmarkConfig};
52/// bench_with_configuration_labeled("my_benchmark", &BenchmarkConfig::default(), || {
53///     // Some code that should be benched
54/// })
55/// ```
56pub fn bench_with_configuration_labeled<T, F: FnMut() -> T>(
57    label: &'static str,
58    cfg: &BenchmarkConfig,
59    mut closure: F,
60) {
61    let label = fallback_to_anonymous_on_invalid_label(label);
62    println!(
63        "{} warming up for {}",
64        wrap_bold_green(label),
65        wrap_high_intensity_white(&fmt_time(cfg.warm_up_time.as_nanos() as f64))
66    );
67    let wu = run_warm_up(&mut closure, cfg.warm_up_time);
68    let mean_execution_time = wu.elapsed.as_nanos() as f64 / wu.iterations as f64;
69    let sample_size = cfg.num_samples as u64;
70    let (iters, total_iters) =
71        calculate_iters_and_total_iters(cfg, mean_execution_time, sample_size);
72
73    println!(
74        "{} mean warm up execution time {} running {} iterations",
75        wrap_bold_green(label),
76        wrap_high_intensity_white(&fmt_time(mean_execution_time)),
77        wrap_high_intensity_white(&fmt_num(total_iters as f64))
78    );
79    let sampling_data = run(iters, closure);
80    if cfg.dump_results_to_disk {
81        crate::output::ComparedStdout.dump_sampling_data(label, &sampling_data, cfg, total_iters);
82    } else {
83        crate::output::SimpleStdout.dump_sampling_data(label, &sampling_data, cfg, total_iters);
84    }
85}
86
87fn calculate_iters_and_total_iters(
88    cfg: &BenchmarkConfig,
89    mut mean_execution_time_ns: f64,
90    sample_size: u64,
91) -> (Vec<u64>, u128) {
92    if let Some(max_it) = cfg.max_iterations {
93        (vec![max_it], u128::from(max_it))
94    } else {
95        // This can be arbitrarily small, causing an absurd amount of iterations.
96        // Raise it to 1 nano -> max 5B iterations
97        mean_execution_time_ns = mean_execution_time_ns.max(1.0);
98        let iters = calculate_iterations(mean_execution_time_ns, sample_size, cfg.measurement_time);
99        let mut total_iters = 0u128;
100        for count in iters.iter().copied() {
101            total_iters = total_iters.saturating_add(u128::from(count));
102        }
103        (iters, total_iters)
104    }
105}
106
107fn run<T, F: FnMut() -> T>(sample_sizes: Vec<u64>, mut closure: F) -> SamplingData {
108    let times = sample_sizes
109        .iter()
110        .copied()
111        .map(|it_count| {
112            let start = Instant::now();
113            for _ in 0..it_count {
114                black_box(closure());
115            }
116            start.elapsed().as_nanos()
117        })
118        .collect();
119    SamplingData {
120        samples: sample_sizes,
121        times,
122    }
123}
124
/// Fitting if some setup for the benchmark is required, and that setup should not be timed.
/// The setup will be run prior to each benchmarking run.
/// ```no_run
/// use std::time::Duration;
/// use tiny_bench::bench_with_setup;
/// bench_with_setup(|| std::thread::sleep(Duration::from_micros(5)), |_| {
///     // Some code that should be benched
/// })
/// ```
pub fn bench_with_setup<T, R, F: FnMut(R) -> T, S: FnMut() -> R>(setup: S, closure: F) {
    // Anonymous label: results share one key, so cross-benchmark comparisons are
    // not meaningful (see `bench`).
    bench_with_setup_configuration_labeled(
        "anonymous",
        &BenchmarkConfig::default(),
        setup,
        closure,
    );
}
141
/// Run bench with setup and a label
/// ```no_run
/// use std::time::Duration;
/// use tiny_bench::{bench_with_setup_labeled, BenchmarkConfig};
/// bench_with_setup_labeled("my_benchmark", || std::thread::sleep(Duration::from_micros(5)), |_| {
///     // Some code that should be benched
/// })
/// ```
pub fn bench_with_setup_labeled<T, R, F: FnMut(R) -> T, S: FnMut() -> R>(
    label: &'static str,
    setup: S,
    closure: F,
) {
    // Delegate with default configuration, keeping the caller's label.
    bench_with_setup_configuration_labeled(label, &BenchmarkConfig::default(), setup, closure);
}
157
/// Run bench with setup and configuration
/// ```no_run
/// use std::time::Duration;
/// use tiny_bench::{bench_with_setup_configuration, BenchmarkConfig};
/// bench_with_setup_configuration(&BenchmarkConfig::default(), || std::thread::sleep(Duration::from_micros(5)), |_| {
///     // Some code that should be benched
/// })
/// ```
pub fn bench_with_setup_configuration<T, R, F: FnMut(R) -> T, S: FnMut() -> R>(
    cfg: &BenchmarkConfig,
    setup: S,
    closure: F,
) {
    // Anonymous label: results share one key, so cross-benchmark comparisons are
    // not meaningful (see `bench`).
    bench_with_setup_configuration_labeled("anonymous", cfg, setup, closure);
}
173
174/// Run bench with setup, configuration, and a label
175/// ```no_run
176/// use std::time::Duration;
177/// use tiny_bench::{bench_with_setup_configuration_labeled, BenchmarkConfig};
178/// bench_with_setup_configuration_labeled("my_benchmark", &BenchmarkConfig::default(), || std::thread::sleep(Duration::from_micros(5)), |_| {
179///     // Some code that should be benched
180/// })
181/// ```
182pub fn bench_with_setup_configuration_labeled<T, R, F: FnMut(R) -> T, S: FnMut() -> R>(
183    label: &'static str,
184    cfg: &BenchmarkConfig,
185    mut setup: S,
186    mut closure: F,
187) {
188    let label = fallback_to_anonymous_on_invalid_label(label);
189    let mut wu_routine = || {
190        let input = (setup)();
191        (closure)(input);
192    };
193    println!(
194        "{} warming up for {}",
195        wrap_bold_green(label),
196        wrap_high_intensity_white(&fmt_time(cfg.warm_up_time.as_nanos() as f64))
197    );
198    let wu = run_warm_up(&mut wu_routine, cfg.warm_up_time);
199    let mean_execution_time = wu.elapsed.as_nanos() as f64 / wu.iterations as f64;
200
201    let sample_size = cfg.num_samples as u64;
202
203    let (iters, total_iters) =
204        calculate_iters_and_total_iters(cfg, mean_execution_time, sample_size);
205    println!(
206        "{} mean warm up execution time {} running {} iterations",
207        wrap_bold_green(label),
208        wrap_high_intensity_white(&fmt_time(mean_execution_time)),
209        wrap_high_intensity_white(&fmt_num(total_iters as f64))
210    );
211    let sampling_data = run_with_setup(iters, setup, closure);
212    if cfg.dump_results_to_disk {
213        crate::output::ComparedStdout.dump_sampling_data(label, &sampling_data, cfg, total_iters);
214    } else {
215        crate::output::SimpleStdout.dump_sampling_data(label, &sampling_data, cfg, total_iters);
216    }
217}
218
/// Times `closure` over each iteration count in `sample_sizes`, running `setup`
/// to produce every input outside of the timed regions.
/// Inputs are pre-generated in batches so only `closure` execution is measured,
/// while bounding the memory held by pending inputs to `BATCH_SIZE` at a time.
fn run_with_setup<T, R, F: FnMut(R) -> T, S: FnMut() -> R>(
    sample_sizes: Vec<u64>,
    mut setup: S,
    mut closure: F,
) -> SamplingData {
    // Upper bound on how many setup outputs are buffered before being consumed.
    const BATCH_SIZE: usize = 10_000;
    let times = sample_sizes
        .iter()
        .copied()
        .map(|it_count| {
            if it_count < BATCH_SIZE as u64 {
                // Small sample: generate all inputs up front, then time one tight loop.
                let inputs = (0..it_count).map(|_| setup()).collect::<Vec<_>>();
                let start = Instant::now();
                for i in inputs {
                    black_box(closure(i));
                }
                start.elapsed().as_nanos()
            } else {
                // Large sample: alternate between filling a batch of inputs (untimed)
                // and draining it through the closure (timed), summing the timed spans.
                let mut elapsed = Duration::ZERO;
                let mut batch = Vec::with_capacity(BATCH_SIZE);
                for _ in 0..it_count {
                    batch.push(setup());
                    if batch.len() >= BATCH_SIZE {
                        let start = Instant::now();
                        for i in batch.drain(..) {
                            black_box(closure(i));
                        }
                        elapsed += start.elapsed();
                    }
                }
                // Flush the final partial batch, if any.
                if !batch.is_empty() {
                    let start = Instant::now();
                    for i in batch {
                        black_box(closure(i));
                    }
                    elapsed += start.elapsed();
                }
                elapsed.as_nanos()
            }
        })
        .collect();
    SamplingData {
        samples: sample_sizes,
        times,
    }
}
265
/// Runs `closure` repeatedly, doubling the batch size each pass, until at least
/// `warmup_time` has elapsed. The returned totals let callers estimate the mean
/// execution time of a single call as `elapsed / iterations`.
fn run_warm_up<T, F: FnMut() -> T>(closure: &mut F, warmup_time: Duration) -> WarmupResults {
    let mut elapsed = Duration::ZERO;
    let mut iterations = 0u128;
    let mut run_iterations = 1u64;
    loop {
        let start = Instant::now();
        for _ in 0..run_iterations {
            closure();
        }
        elapsed += start.elapsed();
        iterations += u128::from(run_iterations);
        // `saturating_mul` instead of `wrapping_mul`: after 64 doublings a wrapping
        // multiply reaches 0, after which every pass would perform zero work and the
        // loop could never accumulate enough elapsed time to terminate.
        run_iterations = run_iterations.saturating_mul(2);
        if elapsed >= warmup_time {
            return WarmupResults {
                iterations,
                elapsed,
            };
        }
    }
}

/// Aggregate outcome of the warm-up phase.
#[derive(Debug)]
struct WarmupResults {
    // Total number of closure invocations across all doubling passes.
    iterations: u128,
    // Total wall-clock time spent inside the timed loops.
    elapsed: Duration,
}
292
/// Raw measurements from a benchmark run, one entry per sample:
/// `samples[i]` is the number of closure iterations executed for sample `i`, and
/// `times[i]` is the total wall-clock time for those iterations, in nanoseconds.
// NOTE(review): gated on the "bench" feature while the functions producing it above
// appear ungated here — presumably the whole module is feature-gated at its
// declaration site; verify against lib.rs.
#[derive(Debug)]
#[cfg(feature = "bench")]
#[cfg_attr(test, derive(Eq, PartialEq))]
pub(crate) struct SamplingData {
    // Iteration counts per sample.
    pub(crate) samples: Vec<u64>,
    // Elapsed nanoseconds per sample (same index as `samples`).
    pub(crate) times: Vec<u128>,
}
300
#[cfg(test)]
mod tests {
    use super::*;
    use std::time::Duration;

    /// Smoke test: a full bench run with tiny warm-up/measurement windows
    /// completes without panicking.
    #[test]
    fn benches() {
        let cfg = BenchmarkConfig {
            measurement_time: Duration::from_millis(10),
            warm_up_time: Duration::from_millis(5),
            ..BenchmarkConfig::default()
        };
        bench_with_configuration(&cfg, || {
            // black_box prevents the sum from being folded away at compile time.
            let sum: i32 = (0..100).map(|_| black_box(1)).sum();
            assert_eq!(black_box(100), sum);
        });
    }
}