// Copyright © 2022 Intel Corporation
//
// SPDX-License-Identifier: Apache-2.0
//

// Custom harness to run performance tests
extern crate test_infra;

mod performance_tests;

use std::process::Command;
use std::sync::mpsc::channel;
use std::sync::Arc;
use std::time::Duration;
use std::{env, fmt, thread};

use clap::{Arg, ArgAction, Command as ClapCommand};
use performance_tests::*;
use serde::{Deserialize, Serialize};
use test_infra::FioOps;
use thiserror::Error;

#[derive(Error, Debug)]
enum Error {
    #[error("Error: test timed-out")]
    TestTimeout,
    #[error("Error: test failed")]
    TestFailed,
}

#[derive(Deserialize, Serialize)]
pub struct PerformanceTestResult {
    name: String,
    mean: f64,
    std_dev: f64,
    max: f64,
    min: f64,
}

#[derive(Deserialize, Serialize)]
pub struct MetricsReport {
    pub git_human_readable: String,
    pub git_revision: String,
    pub git_commit_date: String,
    pub date: String,
    pub results: Vec<PerformanceTestResult>,
}

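// The default report collects git metadata (describe, revision, commit date)
// and the current date so the results can be tied to the build they came from.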
impl Default for MetricsReport {
    fn default() -> Self {
        let mut git_human_readable = "".to_string();
        if let Ok(git_out) = Command::new("git").args(["describe", "--dirty"]).output() {
            if git_out.status.success() {
                git_human_readable = String::from_utf8(git_out.stdout)
                    .unwrap()
                    .trim()
                    .to_string();
            } else {
                eprintln!(
                    "Error generating human readable git reference: {}",
                    String::from_utf8(git_out.stderr).unwrap()
                );
            }
        }

        let mut git_revision = "".to_string();
        if let Ok(git_out) = Command::new("git").args(["rev-parse", "HEAD"]).output() {
            if git_out.status.success() {
                git_revision = String::from_utf8(git_out.stdout)
                    .unwrap()
                    .trim()
                    .to_string();
            } else {
                eprintln!(
                    "Error generating git reference: {}",
                    String::from_utf8(git_out.stderr).unwrap()
                );
            }
        }

        let mut git_commit_date = "".to_string();
        if let Ok(git_out) = Command::new("git")
            .args(["show", "-s", "--format=%cd"])
            .output()
        {
            if git_out.status.success() {
                git_commit_date = String::from_utf8(git_out.stdout)
                    .unwrap()
                    .trim()
                    .to_string();
            } else {
                eprintln!(
                    "Error generating git commit date: {}",
                    String::from_utf8(git_out.stderr).unwrap()
                );
            }
        }

        MetricsReport {
            git_human_readable,
            git_revision,
            git_commit_date,
            date: date(),
            results: Vec::new(),
        }
    }
}

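// Optional command-line overrides ('--iterations' / '--timeout') applied to every test.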
#[derive(Default)]
pub struct PerformanceTestOverrides {
    test_iterations: Option<u32>,
    test_timeout: Option<u32>,
}

impl fmt::Display for PerformanceTestOverrides {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        if let Some(test_iterations) = self.test_iterations {
            write!(f, "test_iterations = {test_iterations}, ")?;
        }
        if let Some(test_timeout) = self.test_timeout {
            write!(f, "test_timeout = {test_timeout}")?;
        }

        Ok(())
    }
}

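// Per-test knobs: timeout, iteration count and optional device settings
// (queues, net/fio modes, boot vCPUs) consumed by the individual test functions.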
#[derive(Clone)]
pub struct PerformanceTestControl {
    test_timeout: u32,
    test_iterations: u32,
    num_queues: Option<u32>,
    queue_size: Option<u32>,
    net_control: Option<(bool, bool)>, // First bool selects RX (true) or TX (false); second selects bandwidth (true) or PPS (false)
    fio_control: Option<(FioOps, bool)>, // Second parameter selects bandwidth (true) or IOPS (false)
    num_boot_vcpus: Option<u8>,
}

impl fmt::Display for PerformanceTestControl {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let mut output = format!(
            "test_timeout = {}s, test_iterations = {}",
            self.test_timeout, self.test_iterations
        );
        if let Some(o) = self.num_queues {
            output = format!("{output}, num_queues = {o}");
        }
        if let Some(o) = self.queue_size {
            output = format!("{output}, queue_size = {o}");
        }
        if let Some(o) = self.net_control {
            let (rx, bw) = o;
            output = format!("{output}, rx = {rx}, bandwidth = {bw}");
        }
        if let Some(o) = &self.fio_control {
            let (ops, bw) = o;
            output = format!("{output}, fio_ops = {ops}, bandwidth = {bw}");
        }

        write!(f, "{output}")
    }
}

impl PerformanceTestControl {
    const fn default() -> Self {
        Self {
            test_timeout: 10,
            test_iterations: 5,
            num_queues: None,
            queue_size: None,
            net_control: None,
            fio_control: None,
            num_boot_vcpus: Some(1),
        }
    }
}

/// A performance test should finish within a certain timeout and
/// return performance metrics (including the mean and standard
/// deviation of the collected samples).
struct PerformanceTest {
    pub name: &'static str,
    pub func_ptr: fn(&PerformanceTestControl) -> f64,
    pub control: PerformanceTestControl,
    unit_adjuster: fn(f64) -> f64,
}

impl PerformanceTest {
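    // Run the test function 'test_iterations' times (unless overridden) and
    // reduce the samples to mean/std_dev/max/min in the adjusted unit.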
    pub fn run(&self, overrides: &PerformanceTestOverrides) -> PerformanceTestResult {
        let mut metrics = Vec::new();
        for _ in 0..overrides
            .test_iterations
            .unwrap_or(self.control.test_iterations)
        {
            // Update the timeout in the control if it was overridden explicitly
            // and run the test case with it
            if let Some(test_timeout) = overrides.test_timeout {
                let mut control: PerformanceTestControl = self.control.clone();
                control.test_timeout = test_timeout;
                metrics.push((self.func_ptr)(&control));
            } else {
                metrics.push((self.func_ptr)(&self.control));
            }
        }

        let mean = (self.unit_adjuster)(mean(&metrics).unwrap());
        let std_dev = (self.unit_adjuster)(std_deviation(&metrics).unwrap());
        let max = (self.unit_adjuster)(metrics.clone().into_iter().reduce(f64::max).unwrap());
        let min = (self.unit_adjuster)(metrics.clone().into_iter().reduce(f64::min).unwrap());

        PerformanceTestResult {
            name: self.name.to_string(),
            mean,
            std_dev,
            max,
            min,
        }
    }

    // Calculate the timeout for each test.
    // Note: to cover the setup/cleanup time, 20s is added for each iteration of the test.
    pub fn calc_timeout(&self, test_iterations: &Option<u32>, test_timeout: &Option<u32>) -> u64 {
        ((test_timeout.unwrap_or(self.control.test_timeout) + 20)
            * test_iterations.unwrap_or(self.control.test_iterations)) as u64
    }
}

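// Arithmetic mean of the samples; `None` for an empty slice.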
fn mean(data: &[f64]) -> Option<f64> {
    let count = data.len();

    if count > 0 {
        Some(data.iter().sum::<f64>() / count as f64)
    } else {
        None
    }
}

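// Population standard deviation (variance divided by `count`, not `count - 1`);
// `None` for an empty slice.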
fn std_deviation(data: &[f64]) -> Option<f64> {
    let count = data.len();

    if count > 0 {
        let mean = mean(data).unwrap();
        let variance = data
            .iter()
            .map(|value| {
                let diff = mean - *value;
                diff * diff
            })
            .sum::<f64>()
            / count as f64;

        Some(variance.sqrt())
    } else {
        None
    }
}

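// Unit-conversion helpers applied to the raw metrics before reporting
// (e.g. seconds to milliseconds, bytes/s to MiB/s).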
mod adjuster {
    pub fn identity(v: f64) -> f64 {
        v
    }

    pub fn s_to_ms(v: f64) -> f64 {
        v * 1000.0
    }

    pub fn bps_to_gbps(v: f64) -> f64 {
        v / (1_000_000_000_f64)
    }

    #[allow(non_snake_case)]
    pub fn Bps_to_MiBps(v: f64) -> f64 {
        v / (1 << 20) as f64
    }
}

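// Static list of all performance tests: boot time, restore latency,
// virtio-net latency/throughput and virtio-block fio throughput/IOPS variants.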
const TEST_LIST: [PerformanceTest; 30] = [
    PerformanceTest {
        name: "boot_time_ms",
        func_ptr: performance_boot_time,
        control: PerformanceTestControl {
            test_timeout: 2,
            test_iterations: 10,
            ..PerformanceTestControl::default()
        },
        unit_adjuster: adjuster::s_to_ms,
    },
    PerformanceTest {
        name: "boot_time_pmem_ms",
        func_ptr: performance_boot_time_pmem,
        control: PerformanceTestControl {
            test_timeout: 2,
            test_iterations: 10,
            ..PerformanceTestControl::default()
        },
        unit_adjuster: adjuster::s_to_ms,
    },
    PerformanceTest {
        name: "boot_time_16_vcpus_ms",
        func_ptr: performance_boot_time,
        control: PerformanceTestControl {
            test_timeout: 2,
            test_iterations: 10,
            num_boot_vcpus: Some(16),
            ..PerformanceTestControl::default()
        },
        unit_adjuster: adjuster::s_to_ms,
    },
    PerformanceTest {
        name: "restore_latency_time_ms",
        func_ptr: performance_restore_latency,
        control: PerformanceTestControl {
            test_timeout: 2,
            test_iterations: 10,
            ..PerformanceTestControl::default()
        },
        unit_adjuster: adjuster::identity,
    },
    PerformanceTest {
        name: "boot_time_16_vcpus_pmem_ms",
        func_ptr: performance_boot_time_pmem,
        control: PerformanceTestControl {
            test_timeout: 2,
            test_iterations: 10,
            num_boot_vcpus: Some(16),
            ..PerformanceTestControl::default()
        },
        unit_adjuster: adjuster::s_to_ms,
    },
    PerformanceTest {
        name: "virtio_net_latency_us",
        func_ptr: performance_net_latency,
        control: PerformanceTestControl {
            num_queues: Some(2),
            queue_size: Some(256),
            ..PerformanceTestControl::default()
        },
        unit_adjuster: adjuster::identity,
    },
    PerformanceTest {
        name: "virtio_net_throughput_single_queue_rx_gbps",
        func_ptr: performance_net_throughput,
        control: PerformanceTestControl {
            num_queues: Some(2),
            queue_size: Some(256),
            net_control: Some((true, true)),
            ..PerformanceTestControl::default()
        },
        unit_adjuster: adjuster::bps_to_gbps,
    },
    PerformanceTest {
        name: "virtio_net_throughput_single_queue_tx_gbps",
        func_ptr: performance_net_throughput,
        control: PerformanceTestControl {
            num_queues: Some(2),
            queue_size: Some(256),
            net_control: Some((false, true)),
            ..PerformanceTestControl::default()
        },
        unit_adjuster: adjuster::bps_to_gbps,
    },
    PerformanceTest {
        name: "virtio_net_throughput_multi_queue_rx_gbps",
        func_ptr: performance_net_throughput,
        control: PerformanceTestControl {
            num_queues: Some(4),
            queue_size: Some(256),
            net_control: Some((true, true)),
            ..PerformanceTestControl::default()
        },
        unit_adjuster: adjuster::bps_to_gbps,
    },
    PerformanceTest {
        name: "virtio_net_throughput_multi_queue_tx_gbps",
        func_ptr: performance_net_throughput,
        control: PerformanceTestControl {
            num_queues: Some(4),
            queue_size: Some(256),
            net_control: Some((false, true)),
            ..PerformanceTestControl::default()
        },
        unit_adjuster: adjuster::bps_to_gbps,
    },
    PerformanceTest {
        name: "virtio_net_throughput_single_queue_rx_pps",
        func_ptr: performance_net_throughput,
        control: PerformanceTestControl {
            num_queues: Some(2),
            queue_size: Some(256),
            net_control: Some((true, false)),
            ..PerformanceTestControl::default()
        },
        unit_adjuster: adjuster::identity,
    },
    PerformanceTest {
        name: "virtio_net_throughput_single_queue_tx_pps",
        func_ptr: performance_net_throughput,
        control: PerformanceTestControl {
            num_queues: Some(2),
            queue_size: Some(256),
            net_control: Some((false, false)),
            ..PerformanceTestControl::default()
        },
        unit_adjuster: adjuster::identity,
    },
    PerformanceTest {
        name: "virtio_net_throughput_multi_queue_rx_pps",
        func_ptr: performance_net_throughput,
        control: PerformanceTestControl {
            num_queues: Some(4),
            queue_size: Some(256),
            net_control: Some((true, false)),
            ..PerformanceTestControl::default()
        },
        unit_adjuster: adjuster::identity,
    },
    PerformanceTest {
        name: "virtio_net_throughput_multi_queue_tx_pps",
        func_ptr: performance_net_throughput,
        control: PerformanceTestControl {
            num_queues: Some(4),
            queue_size: Some(256),
            net_control: Some((false, false)),
            ..PerformanceTestControl::default()
        },
        unit_adjuster: adjuster::identity,
    },
    PerformanceTest {
        name: "block_read_MiBps",
        func_ptr: performance_block_io,
        control: PerformanceTestControl {
            num_queues: Some(1),
            queue_size: Some(128),
            fio_control: Some((FioOps::Read, true)),
            ..PerformanceTestControl::default()
        },
        unit_adjuster: adjuster::Bps_to_MiBps,
    },
    PerformanceTest {
        name: "block_write_MiBps",
        func_ptr: performance_block_io,
        control: PerformanceTestControl {
            num_queues: Some(1),
            queue_size: Some(128),
            fio_control: Some((FioOps::Write, true)),
            ..PerformanceTestControl::default()
        },
        unit_adjuster: adjuster::Bps_to_MiBps,
    },
    PerformanceTest {
        name: "block_random_read_MiBps",
        func_ptr: performance_block_io,
        control: PerformanceTestControl {
            num_queues: Some(1),
            queue_size: Some(128),
            fio_control: Some((FioOps::RandomRead, true)),
            ..PerformanceTestControl::default()
        },
        unit_adjuster: adjuster::Bps_to_MiBps,
    },
    PerformanceTest {
        name: "block_random_write_MiBps",
        func_ptr: performance_block_io,
        control: PerformanceTestControl {
            num_queues: Some(1),
            queue_size: Some(128),
            fio_control: Some((FioOps::RandomWrite, true)),
            ..PerformanceTestControl::default()
        },
        unit_adjuster: adjuster::Bps_to_MiBps,
    },
    PerformanceTest {
        name: "block_multi_queue_read_MiBps",
        func_ptr: performance_block_io,
        control: PerformanceTestControl {
            num_queues: Some(2),
            queue_size: Some(128),
            fio_control: Some((FioOps::Read, true)),
            ..PerformanceTestControl::default()
        },
        unit_adjuster: adjuster::Bps_to_MiBps,
    },
    PerformanceTest {
        name: "block_multi_queue_write_MiBps",
        func_ptr: performance_block_io,
        control: PerformanceTestControl {
            num_queues: Some(2),
            queue_size: Some(128),
            fio_control: Some((FioOps::Write, true)),
            ..PerformanceTestControl::default()
        },
        unit_adjuster: adjuster::Bps_to_MiBps,
    },
    PerformanceTest {
        name: "block_multi_queue_random_read_MiBps",
        func_ptr: performance_block_io,
        control: PerformanceTestControl {
            num_queues: Some(2),
            queue_size: Some(128),
            fio_control: Some((FioOps::RandomRead, true)),
            ..PerformanceTestControl::default()
        },
        unit_adjuster: adjuster::Bps_to_MiBps,
    },
    PerformanceTest {
        name: "block_multi_queue_random_write_MiBps",
        func_ptr: performance_block_io,
        control: PerformanceTestControl {
            num_queues: Some(2),
            queue_size: Some(128),
            fio_control: Some((FioOps::RandomWrite, true)),
            ..PerformanceTestControl::default()
        },
        unit_adjuster: adjuster::Bps_to_MiBps,
    },
    PerformanceTest {
        name: "block_read_IOPS",
        func_ptr: performance_block_io,
        control: PerformanceTestControl {
            num_queues: Some(1),
            queue_size: Some(128),
            fio_control: Some((FioOps::Read, false)),
            ..PerformanceTestControl::default()
        },
        unit_adjuster: adjuster::identity,
    },
    PerformanceTest {
        name: "block_write_IOPS",
        func_ptr: performance_block_io,
        control: PerformanceTestControl {
            num_queues: Some(1),
            queue_size: Some(128),
            fio_control: Some((FioOps::Write, false)),
            ..PerformanceTestControl::default()
        },
        unit_adjuster: adjuster::identity,
    },
    PerformanceTest {
        name: "block_random_read_IOPS",
        func_ptr: performance_block_io,
        control: PerformanceTestControl {
            num_queues: Some(1),
            queue_size: Some(128),
            fio_control: Some((FioOps::RandomRead, false)),
            ..PerformanceTestControl::default()
        },
        unit_adjuster: adjuster::identity,
    },
    PerformanceTest {
        name: "block_random_write_IOPS",
        func_ptr: performance_block_io,
        control: PerformanceTestControl {
            num_queues: Some(1),
            queue_size: Some(128),
            fio_control: Some((FioOps::RandomWrite, false)),
            ..PerformanceTestControl::default()
        },
        unit_adjuster: adjuster::identity,
    },
    PerformanceTest {
        name: "block_multi_queue_read_IOPS",
        func_ptr: performance_block_io,
        control: PerformanceTestControl {
            num_queues: Some(2),
            queue_size: Some(128),
            fio_control: Some((FioOps::Read, false)),
            ..PerformanceTestControl::default()
        },
        unit_adjuster: adjuster::identity,
    },
    PerformanceTest {
        name: "block_multi_queue_write_IOPS",
        func_ptr: performance_block_io,
        control: PerformanceTestControl {
            num_queues: Some(2),
            queue_size: Some(128),
            fio_control: Some((FioOps::Write, false)),
            ..PerformanceTestControl::default()
        },
        unit_adjuster: adjuster::identity,
    },
    PerformanceTest {
        name: "block_multi_queue_random_read_IOPS",
        func_ptr: performance_block_io,
        control: PerformanceTestControl {
            num_queues: Some(2),
            queue_size: Some(128),
            fio_control: Some((FioOps::RandomRead, false)),
            ..PerformanceTestControl::default()
        },
        unit_adjuster: adjuster::identity,
    },
    PerformanceTest {
        name: "block_multi_queue_random_write_IOPS",
        func_ptr: performance_block_io,
        control: PerformanceTestControl {
            num_queues: Some(2),
            queue_size: Some(128),
            fio_control: Some((FioOps::RandomWrite, false)),
            ..PerformanceTestControl::default()
        },
        unit_adjuster: adjuster::identity,
    },
];

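// Run a single test on a worker thread and wait for its result on a channel,
// returning Error::TestTimeout if it does not complete within the calculated
// per-test timeout, or Error::TestFailed if the test panicked.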
fn run_test_with_timeout(
    test: &'static PerformanceTest,
    overrides: &Arc<PerformanceTestOverrides>,
) -> Result<PerformanceTestResult, Error> {
    let (sender, receiver) = channel::<Result<PerformanceTestResult, Error>>();
    let test_iterations = overrides.test_iterations;
    let test_timeout = overrides.test_timeout;
    let overrides = overrides.clone();
    thread::spawn(move || {
        println!(
            "Test '{}' running .. (control: {}, overrides: {})",
            test.name, test.control, overrides
        );

        let output = match std::panic::catch_unwind(|| test.run(&overrides)) {
            Ok(test_result) => {
                println!(
                    "Test '{}' .. ok: mean = {}, std_dev = {}",
                    test_result.name, test_result.mean, test_result.std_dev
                );
                Ok(test_result)
            }
            Err(_) => Err(Error::TestFailed),
        };

        let _ = sender.send(output);
    });

    // TODO: Need to clean up/kill all hanging child processes
    let test_timeout = test.calc_timeout(&test_iterations, &test_timeout);
    receiver
        .recv_timeout(Duration::from_secs(test_timeout))
        .map_err(|_| {
            eprintln!(
                "[Error] Test '{}' timed out after {} seconds",
                test.name, test_timeout
            );
            Error::TestTimeout
        })?
}

fn date() -> String {
    let output = test_infra::exec_host_command_output("date");
    String::from_utf8_lossy(&output.stdout).trim().to_string()
}

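// Parse the command line, run the (optionally filtered) tests sequentially and
// write the JSON metrics report to stdout or to the requested report file.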
fn main() {
    let cmd_arguments = ClapCommand::new("performance-metrics")
        .version(env!("CARGO_PKG_VERSION"))
        .author(env!("CARGO_PKG_AUTHORS"))
        .about("Generate the performance metrics data for Cloud Hypervisor")
        .arg(
            Arg::new("test-filter")
                .long("test-filter")
                .help("Filter metrics tests to run based on provided keywords")
                .num_args(1)
                .required(false),
        )
        .arg(
            Arg::new("list-tests")
                .long("list-tests")
                .help("Print the list of available metrics tests")
                .num_args(0)
                .action(ArgAction::SetTrue)
                .required(false),
        )
        .arg(
            Arg::new("report-file")
                .long("report-file")
                .help("Report file. Standard output is used if not specified")
                .num_args(1),
        )
        .arg(
            Arg::new("iterations")
                .long("iterations")
                .help("Override number of test iterations")
                .num_args(1),
        )
        .arg(
            Arg::new("timeout")
                .long("timeout")
                .help("Override test timeout in seconds, e.g. --timeout 5")
                .num_args(1),
        )
        .get_matches();
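
    // Example invocation (values below are illustrative only):
    //   performance-metrics --test-filter boot_time --iterations 3 --report-file /tmp/metrics.json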

    // It seems that the tool (ethr) used for testing the virtio-net latency
    // is not stable on AArch64, and therefore the latency test is currently
    // skipped on AArch64.
    let test_list: Vec<&PerformanceTest> = TEST_LIST
        .iter()
        .filter(|t| !(cfg!(target_arch = "aarch64") && t.name == "virtio_net_latency_us"))
        .collect();

    if cmd_arguments.get_flag("list-tests") {
        for test in test_list.iter() {
            println!("\"{}\" ({})", test.name, test.control);
        }

        return;
    }

    let test_filter = match cmd_arguments.get_many::<String>("test-filter") {
        Some(s) => s.collect(),
        None => Vec::new(),
    };

    // Run performance tests sequentially and report results (in both readable and JSON formats)
    let mut metrics_report: MetricsReport = Default::default();

    init_tests();

    let overrides = Arc::new(PerformanceTestOverrides {
        test_iterations: cmd_arguments
            .get_one::<String>("iterations")
            .map(|s| s.parse())
            .transpose()
            .unwrap_or_default(),
        test_timeout: cmd_arguments
            .get_one::<String>("timeout")
            .map(|s| s.parse())
            .transpose()
            .unwrap_or_default(),
    });

    for test in test_list.iter() {
        if test_filter.is_empty() || test_filter.iter().any(|&s| test.name.contains(s)) {
            match run_test_with_timeout(test, &overrides) {
                Ok(r) => {
                    metrics_report.results.push(r);
                }
                Err(e) => {
                    eprintln!("Aborting test due to error: '{e:?}'");
                    std::process::exit(1);
                }
            };
        }
    }

    cleanup_tests();

    let mut report_file: Box<dyn std::io::Write + Send> =
        if let Some(file) = cmd_arguments.get_one::<String>("report-file") {
            Box::new(
                std::fs::File::create(std::path::Path::new(file))
                    .map_err(|e| {
                        eprintln!("Error opening report file: {file}: {e}");
                        std::process::exit(1);
                    })
                    .unwrap(),
            )
        } else {
            Box::new(std::io::stdout())
        };

    report_file
        .write_all(
            serde_json::to_string_pretty(&metrics_report)
                .unwrap()
                .as_bytes(),
        )
        .map_err(|e| {
            eprintln!("Error writing report file: {e}");
            std::process::exit(1);
        })
        .unwrap();
}