1 // Copyright © 2022 Intel Corporation 2 // 3 // SPDX-License-Identifier: Apache-2.0 4 // 5 6 // Custom harness to run performance tests 7 extern crate test_infra; 8 9 mod performance_tests; 10 11 use clap::{Arg, ArgAction, Command as ClapCommand}; 12 use performance_tests::*; 13 use serde::{Deserialize, Serialize}; 14 use std::{ 15 env, fmt, 16 process::Command, 17 sync::{mpsc::channel, Arc}, 18 thread, 19 time::Duration, 20 }; 21 use test_infra::FioOps; 22 use thiserror::Error; 23 24 #[derive(Error, Debug)] 25 enum Error { 26 #[error("Error: test timed-out")] 27 TestTimeout, 28 #[error("Error: test failed")] 29 TestFailed, 30 } 31 32 #[derive(Deserialize, Serialize)] 33 pub struct PerformanceTestResult { 34 name: String, 35 mean: f64, 36 std_dev: f64, 37 max: f64, 38 min: f64, 39 } 40 41 #[derive(Deserialize, Serialize)] 42 pub struct MetricsReport { 43 pub git_human_readable: String, 44 pub git_revision: String, 45 pub git_commit_date: String, 46 pub date: String, 47 pub results: Vec<PerformanceTestResult>, 48 } 49 50 impl Default for MetricsReport { 51 fn default() -> Self { 52 let mut git_human_readable = "".to_string(); 53 if let Ok(git_out) = Command::new("git").args(["describe", "--dirty"]).output() { 54 if git_out.status.success() { 55 git_human_readable = String::from_utf8(git_out.stdout) 56 .unwrap() 57 .trim() 58 .to_string(); 59 } else { 60 eprintln!( 61 "Error generating human readable git reference: {}", 62 String::from_utf8(git_out.stderr).unwrap() 63 ); 64 } 65 } 66 67 let mut git_revision = "".to_string(); 68 if let Ok(git_out) = Command::new("git").args(["rev-parse", "HEAD"]).output() { 69 if git_out.status.success() { 70 git_revision = String::from_utf8(git_out.stdout) 71 .unwrap() 72 .trim() 73 .to_string(); 74 } else { 75 eprintln!( 76 "Error generating git reference: {}", 77 String::from_utf8(git_out.stderr).unwrap() 78 ); 79 } 80 } 81 82 let mut git_commit_date = "".to_string(); 83 if let Ok(git_out) = Command::new("git") 84 .args(["show", "-s", "--format=%cd"]) 85 .output() 86 { 87 if git_out.status.success() { 88 git_commit_date = String::from_utf8(git_out.stdout) 89 .unwrap() 90 .trim() 91 .to_string(); 92 } else { 93 eprintln!( 94 "Error generating git commit date: {}", 95 String::from_utf8(git_out.stderr).unwrap() 96 ); 97 } 98 } 99 100 MetricsReport { 101 git_human_readable, 102 git_revision, 103 git_commit_date, 104 date: date(), 105 results: Vec::new(), 106 } 107 } 108 } 109 110 #[derive(Default)] 111 pub struct PerformanceTestOverrides { 112 test_iterations: Option<u32>, 113 test_timeout: Option<u32>, 114 } 115 116 impl fmt::Display for PerformanceTestOverrides { 117 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { 118 if let Some(test_iterations) = self.test_iterations { 119 write!(f, "test_iterations = {test_iterations}, ")?; 120 } 121 if let Some(test_timeout) = self.test_timeout { 122 write!(f, "test_timeout = {test_timeout}")?; 123 } 124 125 Ok(()) 126 } 127 } 128 129 #[derive(Clone)] 130 pub struct PerformanceTestControl { 131 test_timeout: u32, 132 test_iterations: u32, 133 num_queues: Option<u32>, 134 queue_size: Option<u32>, 135 net_control: Option<(bool, bool)>, // First bool is for RX(true)/TX(false), second bool is for bandwidth or PPS 136 fio_control: Option<(FioOps, bool)>, // Second parameter controls whether we want bandwidth or IOPS 137 num_boot_vcpus: Option<u8>, 138 } 139 140 impl fmt::Display for PerformanceTestControl { 141 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { 142 let mut output = format!( 143 "test_timeout = {}s, test_iterations = {}", 144 self.test_timeout, self.test_iterations 145 ); 146 if let Some(o) = self.num_queues { 147 output = format!("{output}, num_queues = {o}"); 148 } 149 if let Some(o) = self.queue_size { 150 output = format!("{output}, queue_size = {o}"); 151 } 152 if let Some(o) = self.net_control { 153 let (rx, bw) = o; 154 output = format!("{output}, rx = {rx}, bandwidth = {bw}"); 155 } 156 if let Some(o) = &self.fio_control { 157 let (ops, bw) = o; 158 output = format!("{output}, fio_ops = {ops}, bandwidth = {bw}"); 159 } 160 161 write!(f, "{output}") 162 } 163 } 164 165 impl PerformanceTestControl { 166 const fn default() -> Self { 167 Self { 168 test_timeout: 10, 169 test_iterations: 5, 170 num_queues: None, 171 queue_size: None, 172 net_control: None, 173 fio_control: None, 174 num_boot_vcpus: Some(1), 175 } 176 } 177 } 178 179 /// A performance test should finish within the a certain time-out and 180 /// return a performance metrics number (including the average number and 181 /// standard deviation) 182 struct PerformanceTest { 183 pub name: &'static str, 184 pub func_ptr: fn(&PerformanceTestControl) -> f64, 185 pub control: PerformanceTestControl, 186 unit_adjuster: fn(f64) -> f64, 187 } 188 189 impl PerformanceTest { 190 pub fn run(&self, overrides: &PerformanceTestOverrides) -> PerformanceTestResult { 191 let mut metrics = Vec::new(); 192 for _ in 0..overrides 193 .test_iterations 194 .unwrap_or(self.control.test_iterations) 195 { 196 // update the timeout in control if passed explicitly and run testcase with it 197 if let Some(test_timeout) = overrides.test_timeout { 198 let mut control: PerformanceTestControl = self.control.clone(); 199 control.test_timeout = test_timeout; 200 metrics.push((self.func_ptr)(&control)); 201 } else { 202 metrics.push((self.func_ptr)(&self.control)); 203 } 204 } 205 206 let mean = (self.unit_adjuster)(mean(&metrics).unwrap()); 207 let std_dev = (self.unit_adjuster)(std_deviation(&metrics).unwrap()); 208 let max = (self.unit_adjuster)(metrics.clone().into_iter().reduce(f64::max).unwrap()); 209 let min = (self.unit_adjuster)(metrics.clone().into_iter().reduce(f64::min).unwrap()); 210 211 PerformanceTestResult { 212 name: self.name.to_string(), 213 mean, 214 std_dev, 215 max, 216 min, 217 } 218 } 219 220 // Calculate the timeout for each test 221 // Note: To cover the setup/cleanup time, 20s is added for each iteration of the test 222 pub fn calc_timeout(&self, test_iterations: &Option<u32>, test_timeout: &Option<u32>) -> u64 { 223 ((test_timeout.unwrap_or(self.control.test_timeout) + 20) 224 * test_iterations.unwrap_or(self.control.test_iterations)) as u64 225 } 226 } 227 228 fn mean(data: &[f64]) -> Option<f64> { 229 let count = data.len(); 230 231 if count > 0 { 232 Some(data.iter().sum::<f64>() / count as f64) 233 } else { 234 None 235 } 236 } 237 238 fn std_deviation(data: &[f64]) -> Option<f64> { 239 let count = data.len(); 240 241 if count > 0 { 242 let mean = mean(data).unwrap(); 243 let variance = data 244 .iter() 245 .map(|value| { 246 let diff = mean - *value; 247 diff * diff 248 }) 249 .sum::<f64>() 250 / count as f64; 251 252 Some(variance.sqrt()) 253 } else { 254 None 255 } 256 } 257 258 mod adjuster { 259 pub fn identity(v: f64) -> f64 { 260 v 261 } 262 263 pub fn s_to_ms(v: f64) -> f64 { 264 v * 1000.0 265 } 266 267 pub fn bps_to_gbps(v: f64) -> f64 { 268 v / (1_000_000_000_f64) 269 } 270 271 #[allow(non_snake_case)] 272 pub fn Bps_to_MiBps(v: f64) -> f64 { 273 v / (1 << 20) as f64 274 } 275 } 276 277 const TEST_LIST: [PerformanceTest; 29] = [ 278 PerformanceTest { 279 name: "boot_time_ms", 280 func_ptr: performance_boot_time, 281 control: PerformanceTestControl { 282 test_timeout: 2, 283 test_iterations: 10, 284 ..PerformanceTestControl::default() 285 }, 286 unit_adjuster: adjuster::s_to_ms, 287 }, 288 PerformanceTest { 289 name: "boot_time_pmem_ms", 290 func_ptr: performance_boot_time_pmem, 291 control: PerformanceTestControl { 292 test_timeout: 2, 293 test_iterations: 10, 294 ..PerformanceTestControl::default() 295 }, 296 unit_adjuster: adjuster::s_to_ms, 297 }, 298 PerformanceTest { 299 name: "boot_time_16_vcpus_ms", 300 func_ptr: performance_boot_time, 301 control: PerformanceTestControl { 302 test_timeout: 2, 303 test_iterations: 10, 304 num_boot_vcpus: Some(16), 305 ..PerformanceTestControl::default() 306 }, 307 unit_adjuster: adjuster::s_to_ms, 308 }, 309 PerformanceTest { 310 name: "boot_time_16_vcpus_pmem_ms", 311 func_ptr: performance_boot_time_pmem, 312 control: PerformanceTestControl { 313 test_timeout: 2, 314 test_iterations: 10, 315 num_boot_vcpus: Some(16), 316 ..PerformanceTestControl::default() 317 }, 318 unit_adjuster: adjuster::s_to_ms, 319 }, 320 PerformanceTest { 321 name: "virtio_net_latency_us", 322 func_ptr: performance_net_latency, 323 control: PerformanceTestControl { 324 num_queues: Some(2), 325 queue_size: Some(256), 326 ..PerformanceTestControl::default() 327 }, 328 unit_adjuster: adjuster::identity, 329 }, 330 PerformanceTest { 331 name: "virtio_net_throughput_single_queue_rx_gbps", 332 func_ptr: performance_net_throughput, 333 control: PerformanceTestControl { 334 num_queues: Some(2), 335 queue_size: Some(256), 336 net_control: Some((true, true)), 337 ..PerformanceTestControl::default() 338 }, 339 unit_adjuster: adjuster::bps_to_gbps, 340 }, 341 PerformanceTest { 342 name: "virtio_net_throughput_single_queue_tx_gbps", 343 func_ptr: performance_net_throughput, 344 control: PerformanceTestControl { 345 num_queues: Some(2), 346 queue_size: Some(256), 347 net_control: Some((false, true)), 348 ..PerformanceTestControl::default() 349 }, 350 unit_adjuster: adjuster::bps_to_gbps, 351 }, 352 PerformanceTest { 353 name: "virtio_net_throughput_multi_queue_rx_gbps", 354 func_ptr: performance_net_throughput, 355 control: PerformanceTestControl { 356 num_queues: Some(4), 357 queue_size: Some(256), 358 net_control: Some((true, true)), 359 ..PerformanceTestControl::default() 360 }, 361 unit_adjuster: adjuster::bps_to_gbps, 362 }, 363 PerformanceTest { 364 name: "virtio_net_throughput_multi_queue_tx_gbps", 365 func_ptr: performance_net_throughput, 366 control: PerformanceTestControl { 367 num_queues: Some(4), 368 queue_size: Some(256), 369 net_control: Some((false, true)), 370 ..PerformanceTestControl::default() 371 }, 372 unit_adjuster: adjuster::bps_to_gbps, 373 }, 374 PerformanceTest { 375 name: "virtio_net_throughput_single_queue_rx_pps", 376 func_ptr: performance_net_throughput, 377 control: PerformanceTestControl { 378 num_queues: Some(2), 379 queue_size: Some(256), 380 net_control: Some((true, false)), 381 ..PerformanceTestControl::default() 382 }, 383 unit_adjuster: adjuster::identity, 384 }, 385 PerformanceTest { 386 name: "virtio_net_throughput_single_queue_tx_pps", 387 func_ptr: performance_net_throughput, 388 control: PerformanceTestControl { 389 num_queues: Some(2), 390 queue_size: Some(256), 391 net_control: Some((false, false)), 392 ..PerformanceTestControl::default() 393 }, 394 unit_adjuster: adjuster::identity, 395 }, 396 PerformanceTest { 397 name: "virtio_net_throughput_multi_queue_rx_pps", 398 func_ptr: performance_net_throughput, 399 control: PerformanceTestControl { 400 num_queues: Some(4), 401 queue_size: Some(256), 402 net_control: Some((true, false)), 403 ..PerformanceTestControl::default() 404 }, 405 unit_adjuster: adjuster::identity, 406 }, 407 PerformanceTest { 408 name: "virtio_net_throughput_multi_queue_tx_pps", 409 func_ptr: performance_net_throughput, 410 control: PerformanceTestControl { 411 num_queues: Some(4), 412 queue_size: Some(256), 413 net_control: Some((false, false)), 414 ..PerformanceTestControl::default() 415 }, 416 unit_adjuster: adjuster::identity, 417 }, 418 PerformanceTest { 419 name: "block_read_MiBps", 420 func_ptr: performance_block_io, 421 control: PerformanceTestControl { 422 num_queues: Some(1), 423 queue_size: Some(128), 424 fio_control: Some((FioOps::Read, true)), 425 ..PerformanceTestControl::default() 426 }, 427 unit_adjuster: adjuster::Bps_to_MiBps, 428 }, 429 PerformanceTest { 430 name: "block_write_MiBps", 431 func_ptr: performance_block_io, 432 control: PerformanceTestControl { 433 num_queues: Some(1), 434 queue_size: Some(128), 435 fio_control: Some((FioOps::Write, true)), 436 ..PerformanceTestControl::default() 437 }, 438 unit_adjuster: adjuster::Bps_to_MiBps, 439 }, 440 PerformanceTest { 441 name: "block_random_read_MiBps", 442 func_ptr: performance_block_io, 443 control: PerformanceTestControl { 444 num_queues: Some(1), 445 queue_size: Some(128), 446 fio_control: Some((FioOps::RandomRead, true)), 447 ..PerformanceTestControl::default() 448 }, 449 unit_adjuster: adjuster::Bps_to_MiBps, 450 }, 451 PerformanceTest { 452 name: "block_random_write_MiBps", 453 func_ptr: performance_block_io, 454 control: PerformanceTestControl { 455 num_queues: Some(1), 456 queue_size: Some(128), 457 fio_control: Some((FioOps::RandomWrite, true)), 458 ..PerformanceTestControl::default() 459 }, 460 unit_adjuster: adjuster::Bps_to_MiBps, 461 }, 462 PerformanceTest { 463 name: "block_multi_queue_read_MiBps", 464 func_ptr: performance_block_io, 465 control: PerformanceTestControl { 466 num_queues: Some(2), 467 queue_size: Some(128), 468 fio_control: Some((FioOps::Read, true)), 469 ..PerformanceTestControl::default() 470 }, 471 unit_adjuster: adjuster::Bps_to_MiBps, 472 }, 473 PerformanceTest { 474 name: "block_multi_queue_write_MiBps", 475 func_ptr: performance_block_io, 476 control: PerformanceTestControl { 477 num_queues: Some(2), 478 queue_size: Some(128), 479 fio_control: Some((FioOps::Write, true)), 480 ..PerformanceTestControl::default() 481 }, 482 unit_adjuster: adjuster::Bps_to_MiBps, 483 }, 484 PerformanceTest { 485 name: "block_multi_queue_random_read_MiBps", 486 func_ptr: performance_block_io, 487 control: PerformanceTestControl { 488 num_queues: Some(2), 489 queue_size: Some(128), 490 fio_control: Some((FioOps::RandomRead, true)), 491 ..PerformanceTestControl::default() 492 }, 493 unit_adjuster: adjuster::Bps_to_MiBps, 494 }, 495 PerformanceTest { 496 name: "block_multi_queue_random_write_MiBps", 497 func_ptr: performance_block_io, 498 control: PerformanceTestControl { 499 num_queues: Some(2), 500 queue_size: Some(128), 501 fio_control: Some((FioOps::RandomWrite, true)), 502 ..PerformanceTestControl::default() 503 }, 504 unit_adjuster: adjuster::Bps_to_MiBps, 505 }, 506 PerformanceTest { 507 name: "block_read_IOPS", 508 func_ptr: performance_block_io, 509 control: PerformanceTestControl { 510 num_queues: Some(1), 511 queue_size: Some(128), 512 fio_control: Some((FioOps::Read, false)), 513 ..PerformanceTestControl::default() 514 }, 515 unit_adjuster: adjuster::identity, 516 }, 517 PerformanceTest { 518 name: "block_write_IOPS", 519 func_ptr: performance_block_io, 520 control: PerformanceTestControl { 521 num_queues: Some(1), 522 queue_size: Some(128), 523 fio_control: Some((FioOps::Write, false)), 524 ..PerformanceTestControl::default() 525 }, 526 unit_adjuster: adjuster::identity, 527 }, 528 PerformanceTest { 529 name: "block_random_read_IOPS", 530 func_ptr: performance_block_io, 531 control: PerformanceTestControl { 532 num_queues: Some(1), 533 queue_size: Some(128), 534 fio_control: Some((FioOps::RandomRead, false)), 535 ..PerformanceTestControl::default() 536 }, 537 unit_adjuster: adjuster::identity, 538 }, 539 PerformanceTest { 540 name: "block_random_write_IOPS", 541 func_ptr: performance_block_io, 542 control: PerformanceTestControl { 543 num_queues: Some(1), 544 queue_size: Some(128), 545 fio_control: Some((FioOps::RandomWrite, false)), 546 ..PerformanceTestControl::default() 547 }, 548 unit_adjuster: adjuster::identity, 549 }, 550 PerformanceTest { 551 name: "block_multi_queue_read_IOPS", 552 func_ptr: performance_block_io, 553 control: PerformanceTestControl { 554 num_queues: Some(2), 555 queue_size: Some(128), 556 fio_control: Some((FioOps::Read, false)), 557 ..PerformanceTestControl::default() 558 }, 559 unit_adjuster: adjuster::identity, 560 }, 561 PerformanceTest { 562 name: "block_multi_queue_write_IOPS", 563 func_ptr: performance_block_io, 564 control: PerformanceTestControl { 565 num_queues: Some(2), 566 queue_size: Some(128), 567 fio_control: Some((FioOps::Write, false)), 568 ..PerformanceTestControl::default() 569 }, 570 unit_adjuster: adjuster::identity, 571 }, 572 PerformanceTest { 573 name: "block_multi_queue_random_read_IOPS", 574 func_ptr: performance_block_io, 575 control: PerformanceTestControl { 576 num_queues: Some(2), 577 queue_size: Some(128), 578 fio_control: Some((FioOps::RandomRead, false)), 579 ..PerformanceTestControl::default() 580 }, 581 unit_adjuster: adjuster::identity, 582 }, 583 PerformanceTest { 584 name: "block_multi_queue_random_write_IOPS", 585 func_ptr: performance_block_io, 586 control: PerformanceTestControl { 587 num_queues: Some(2), 588 queue_size: Some(128), 589 fio_control: Some((FioOps::RandomWrite, false)), 590 ..PerformanceTestControl::default() 591 }, 592 unit_adjuster: adjuster::identity, 593 }, 594 ]; 595 596 fn run_test_with_timeout( 597 test: &'static PerformanceTest, 598 overrides: &Arc<PerformanceTestOverrides>, 599 ) -> Result<PerformanceTestResult, Error> { 600 let (sender, receiver) = channel::<Result<PerformanceTestResult, Error>>(); 601 let test_iterations = overrides.test_iterations; 602 let test_timeout = overrides.test_timeout; 603 let overrides = overrides.clone(); 604 thread::spawn(move || { 605 println!( 606 "Test '{}' running .. (control: {}, overrides: {})", 607 test.name, test.control, overrides 608 ); 609 610 let output = match std::panic::catch_unwind(|| test.run(&overrides)) { 611 Ok(test_result) => { 612 println!( 613 "Test '{}' .. ok: mean = {}, std_dev = {}", 614 test_result.name, test_result.mean, test_result.std_dev 615 ); 616 Ok(test_result) 617 } 618 Err(_) => Err(Error::TestFailed), 619 }; 620 621 let _ = sender.send(output); 622 }); 623 624 // Todo: Need to cleanup/kill all hanging child processes 625 let test_timeout = test.calc_timeout(&test_iterations, &test_timeout); 626 receiver 627 .recv_timeout(Duration::from_secs(test_timeout)) 628 .map_err(|_| { 629 eprintln!( 630 "[Error] Test '{}' time-out after {} seconds", 631 test.name, test_timeout 632 ); 633 Error::TestTimeout 634 })? 635 } 636 637 fn date() -> String { 638 let output = test_infra::exec_host_command_output("date"); 639 String::from_utf8_lossy(&output.stdout).trim().to_string() 640 } 641 642 fn main() { 643 let cmd_arguments = ClapCommand::new("performance-metrics") 644 .version(env!("CARGO_PKG_VERSION")) 645 .author(env!("CARGO_PKG_AUTHORS")) 646 .about("Generate the performance metrics data for Cloud Hypervisor") 647 .arg( 648 Arg::new("test-filter") 649 .long("test-filter") 650 .help("Filter metrics tests to run based on provided keywords") 651 .num_args(1) 652 .required(false), 653 ) 654 .arg( 655 Arg::new("list-tests") 656 .long("list-tests") 657 .help("Print the list of available metrics tests") 658 .num_args(0) 659 .action(ArgAction::SetTrue) 660 .required(false), 661 ) 662 .arg( 663 Arg::new("report-file") 664 .long("report-file") 665 .help("Report file. Standard error is used if not specified") 666 .num_args(1), 667 ) 668 .arg( 669 Arg::new("iterations") 670 .long("iterations") 671 .help("Override number of test iterations") 672 .num_args(1), 673 ) 674 .arg( 675 Arg::new("timeout") 676 .long("timeout") 677 .help("Override test timeout, Ex. --timeout 5") 678 .num_args(1), 679 ) 680 .get_matches(); 681 682 // It seems that the tool (ethr) used for testing the virtio-net latency 683 // is not stable on AArch64, and therefore the latency test is currently 684 // skipped on AArch64. 685 let test_list: Vec<&PerformanceTest> = TEST_LIST 686 .iter() 687 .filter(|t| !(cfg!(target_arch = "aarch64") && t.name == "virtio_net_latency_us")) 688 .collect(); 689 690 if cmd_arguments.get_flag("list-tests") { 691 for test in test_list.iter() { 692 println!("\"{}\" ({})", test.name, test.control); 693 } 694 695 return; 696 } 697 698 let test_filter = match cmd_arguments.get_many::<String>("test-filter") { 699 Some(s) => s.collect(), 700 None => Vec::new(), 701 }; 702 703 // Run performance tests sequentially and report results (in both readable/json format) 704 let mut metrics_report: MetricsReport = Default::default(); 705 706 init_tests(); 707 708 let overrides = Arc::new(PerformanceTestOverrides { 709 test_iterations: cmd_arguments 710 .get_one::<String>("iterations") 711 .map(|s| s.parse()) 712 .transpose() 713 .unwrap_or_default(), 714 test_timeout: cmd_arguments 715 .get_one::<String>("timeout") 716 .map(|s| s.parse()) 717 .transpose() 718 .unwrap_or_default(), 719 }); 720 721 for test in test_list.iter() { 722 if test_filter.is_empty() || test_filter.iter().any(|&s| test.name.contains(s)) { 723 match run_test_with_timeout(test, &overrides) { 724 Ok(r) => { 725 metrics_report.results.push(r); 726 } 727 Err(e) => { 728 eprintln!("Aborting test due to error: '{e:?}'"); 729 std::process::exit(1); 730 } 731 }; 732 } 733 } 734 735 cleanup_tests(); 736 737 let mut report_file: Box<dyn std::io::Write + Send> = 738 if let Some(file) = cmd_arguments.get_one::<String>("report-file") { 739 Box::new( 740 std::fs::File::create(std::path::Path::new(file)) 741 .map_err(|e| { 742 eprintln!("Error opening report file: {file}: {e}"); 743 std::process::exit(1); 744 }) 745 .unwrap(), 746 ) 747 } else { 748 Box::new(std::io::stdout()) 749 }; 750 751 report_file 752 .write_all( 753 serde_json::to_string_pretty(&metrics_report) 754 .unwrap() 755 .as_bytes(), 756 ) 757 .map_err(|e| { 758 eprintln!("Error writing report file: {e}"); 759 std::process::exit(1); 760 }) 761 .unwrap(); 762 } 763