HEBench
test_harness/src/main.cpp
// Copyright (C) 2021 Intel Corporation
// SPDX-License-Identifier: Apache-2.0

#include <cassert>
#include <chrono>
#include <filesystem>
#include <iomanip>
#include <iostream>
#include <limits>
#include <ostream>
#include <sstream>
#include <string>
#include <thread>
#include <vector>

#include "hebench/modules/args_parser/include/args_parser.h"
#include "hebench/modules/general/include/error.h"
#include "hebench/modules/general/include/hebench_math_utils.h"
#include "hebench/modules/general/include/hebench_utilities.h"
#include "hebench/modules/logging/include/logging.h"

#include "hebench/dynamic_lib_load.h"
#include "hebench/hebench_report_compiler.h" // (header name assumed; this linked include was elided in the listing)

#include "include/hebench_config.h"
#include "include/hebench_engine.h"
#include "include/hebench_types_harness.h"     // (assumed; elided in the listing)
#include "include/hebench_utilities_harness.h" // (assumed; elided in the listing)
#include "include/hebench_version.h"

// enforce floating point standard compatibility
static_assert(std::numeric_limits<float>::is_iec559, "Compiler type `float` does not comply with IEEE 754.");
static_assert(std::numeric_limits<double>::is_iec559, "Compiler type `double` does not comply with IEEE 754.");
static_assert(sizeof(float) == 4, "Compiler type `float` is not 32 bits.");
static_assert(sizeof(double) == 8, "Compiler type `double` is not 64 bits.");

struct ProgramConfig
{
    std::filesystem::path backend_lib_path;
    std::filesystem::path config_file;
    bool b_dump_config;      // (these bool member declarations were elided in the
    bool b_force_config;     //  listing; names recovered from their uses below)
    bool b_validate_results;
    bool b_show_run_overview;
    std::uint64_t random_seed;
    std::size_t report_delay_ms;
    std::filesystem::path report_root_path;
    bool b_compile_reports;
    bool b_single_path_report;

    static constexpr const char *DefaultConfigFile    = "";
    static constexpr std::uint64_t DefaultMinTestTime = 0;
    static constexpr std::uint64_t DefaultSampleSize  = 0;
    static constexpr std::size_t DefaultReportDelay   = 1000;
    static constexpr const char *DefaultRootPath      = ".";

    void initializeConfig(const hebench::ArgsParser &parser);
    void showBenchmarkDefaults(std::ostream &os);
    void showConfig(std::ostream &os) const;
    static std::ostream &showVersion(std::ostream &os);
};

void ProgramConfig::initializeConfig(const hebench::ArgsParser &parser)
{
    std::string s_tmp;

    if (parser.hasArgument("--version"))
    {
        showVersion(std::cout);
        throw hebench::ArgsParser::HelpShown("Version shown.");
    } // end if

    parser.getValue<decltype(s_tmp)>(s_tmp, "--benchmark_config_file", DefaultConfigFile);
    config_file = s_tmp;

    b_dump_config = parser.hasArgument("--dump_config");
    if (b_dump_config && config_file.empty())
        throw std::runtime_error("Dump default benchmark configuration file requested, but no filename given with \"--benchmark_config_file\" parameter.");

    parser.getValue<decltype(b_force_config)>(b_force_config, "--force_config", true);

    parser.getValue<decltype(s_tmp)>(s_tmp, "--backend_lib_path");
    backend_lib_path = s_tmp;

    parser.getValue<decltype(b_validate_results)>(b_validate_results, "--enable_validation", true);

    parser.getValue<decltype(random_seed)>(random_seed, "--random_seed", std::chrono::system_clock::now().time_since_epoch().count());

    parser.getValue<decltype(report_delay_ms)>(report_delay_ms, "--report_delay", DefaultReportDelay);

    parser.getValue<decltype(s_tmp)>(s_tmp, "--report_root_path", DefaultRootPath);
    report_root_path = s_tmp;
    if (!std::filesystem::is_directory(report_root_path) || !std::filesystem::exists(report_root_path))
        throw std::runtime_error("Specified directory for report output does not exist or is not accessible: " + report_root_path.string());
    if (!std::filesystem::is_regular_file(backend_lib_path) || !std::filesystem::exists(backend_lib_path))
        throw std::runtime_error("Specified backend lib does not exist or is not accessible: " + backend_lib_path.string());
    if (std::filesystem::is_symlink(backend_lib_path))
        throw std::runtime_error("Backend library error: symbolic links are not allowed as input arguments: " + backend_lib_path.string());
    if ((std::filesystem::canonical(backend_lib_path).string()).substr(0, 5) == std::string("/tmp/"))
        throw std::runtime_error("Backend library error: Cannot use files in /tmp/ as arguments: " + backend_lib_path.string());

    if (!b_dump_config && !config_file.empty())
    {
        // reading configuration file
        if (!std::filesystem::is_regular_file(config_file) || !std::filesystem::exists(config_file))
            throw std::runtime_error("Specified benchmark configuration file does not exist or is not accessible: " + config_file.string());
        if (std::filesystem::is_symlink(config_file))
            throw std::runtime_error("Config file path error: symbolic links are not allowed as input arguments: " + config_file.string());
        if (std::filesystem::canonical(config_file).string().substr(0, 5) == std::string("/tmp/"))
            throw std::runtime_error("Config file error: Cannot use files in /tmp/ as arguments: " + config_file.string());
    }

    parser.getValue<decltype(b_show_run_overview)>(b_show_run_overview, "--run_overview", true);
    parser.getValue<decltype(b_compile_reports)>(b_compile_reports, "--compile_reports", true);

    b_single_path_report = parser.hasArgument("--single_path_report");
}
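
// Example: the path checks above reject symbolic links and anything that
// canonicalizes under /tmp/. With a hypothetical backend library, the following
// illustrates which invocations initializeConfig() would accept or reject:
//
//   test_harness -b ./libmy_backend.so     -> accepted: regular, readable file
//   test_harness -b ./backend_link.so      -> rejected if it is a symbolic link
//   test_harness -b /tmp/libmy_backend.so  -> rejected: resolves under /tmp/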

void ProgramConfig::showBenchmarkDefaults(std::ostream &os)
{
    os << "Benchmark defaults:" << std::endl
       << "    Random seed: " << random_seed << std::endl;
}

void ProgramConfig::showConfig(std::ostream &os) const
{
    os << "Global Configuration:" << std::endl
       << "    Backend library: " << backend_lib_path << std::endl
       << "    ==================" << std::endl
       << "    Run type: ";
    if (b_dump_config)
    {
        os << "Dumping configuration file!" << std::endl;
    } // end if
    else
    {
        os << "Benchmark Run." << std::endl
           << "    Validate results: " << (b_validate_results ? "Yes" : "No") << std::endl
           << "    Report delay (ms): " << report_delay_ms << std::endl
           << "    Report Root Path: " << report_root_path << std::endl
           << "    Compile reports: " << (b_compile_reports ? "Yes" : "No") << std::endl
           << "    Show run overview: " << (b_show_run_overview ? "Yes" : "No") << std::endl;
    } // end else
    os << "    Run configuration file: ";
    if (config_file.empty())
        os << "(none)" << std::endl;
    else
    {
        os << config_file << std::endl;
        os << "    Force configuration values: " << (b_force_config ? "Yes" : "No") << std::endl;
    } // end else
    os << "    ==================" << std::endl;
}

std::ostream &ProgramConfig::showVersion(std::ostream &os)
{
    os << HEBENCH_TEST_HARNESS_APP_NAME << " v"
       << HEBENCH_TEST_HARNESS_VERSION_MAJOR << "."
       << HEBENCH_TEST_HARNESS_VERSION_MINOR << "."
       << HEBENCH_TEST_HARNESS_VERSION_REVISION << "-"
       << HEBENCH_TEST_HARNESS_VERSION_BUILD << std::endl
       << std::endl
       << "API Bridge version:" << std::endl
       << "  Required: " << HEBENCH_TEST_HARNESS_API_REQUIRED_VERSION_MAJOR << "."
       << HEBENCH_TEST_HARNESS_API_REQUIRED_VERSION_MINOR << "."
       << HEBENCH_TEST_HARNESS_API_MIN_REQUIRED_VERSION_REVISION << std::endl
       << "  Current:  "
       << HEBENCH_API_VERSION_MAJOR << "."
       << HEBENCH_API_VERSION_MINOR << "."
       << HEBENCH_API_VERSION_REVISION << "-"
       << HEBENCH_API_VERSION_BUILD << std::endl;
    return os;
}
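
// The stream written above has the following shape (each value comes from the
// corresponding version macro; no concrete release numbers are implied):
//
//   <app name> v<major>.<minor>.<revision>-<build>
//
//   API Bridge version:
//     Required: <required major>.<required minor>.<min required revision>
//     Current:  <major>.<minor>.<revision>-<build>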

void initArgsParser(hebench::ArgsParser &parser, int argc, char **argv)
{
    parser.addArgument("--backend_lib_path", "--backend", "-b", 1, "<path_to_shared_lib>",
                       "   [REQUIRED] Path to backend shared library.\n"
                       "   The library file must exist and be accessible for reading.");
    parser.addArgument("--benchmark_config_file", "--config_file", "-c", 1, "<path_to_config_file>",
                       "   [OPTIONAL] Path to benchmark run configuration file.\n"
                       "   YAML file specifying the selection of benchmarks and their workload\n"
                       "   parameters to run. If not present, all backend benchmarks will be run\n"
                       "   with default parameters.");
    parser.addArgument("--compile_reports", "--compile", "-C", 1, "<bool: 0|false|1|true>",
                       "   [OPTIONAL] Enables (TRUE) or disables (FALSE) inline compilation of\n"
                       "   benchmark reports into summaries and statistics. Defaults to \"TRUE\".");
    parser.addArgument("--dump_config", "--dump", 0, "",
                       "   [OPTIONAL] If specified, Test Harness will dump a general configuration\n"
                       "   file with the possible benchmarks that the backend can run. This file can\n"
                       "   be used as a starting point template for a benchmark run configuration\n"
                       "   file. The destination file is specified by the \"--benchmark_config_file\"\n"
                       "   argument.");
    parser.addArgument("--enable_validation", "--validation", "-v", 1, "<bool: 0|false|1|true>",
                       "   [OPTIONAL] Specifies whether results from executed benchmarks will be\n"
                       "   validated against ground truth. Defaults to \"TRUE\".");
    parser.addArgument("--force_config", 1, "<bool: 0|false|1|true>",
                       "   [OPTIONAL] Specifies whether an attempt will be made to force\n"
                       "   configuration file values on the backend (TRUE) or non-flexible backend\n"
                       "   values will take priority (FALSE). Defaults to \"TRUE\".");
    parser.addArgument("--run_overview", 1, "<bool: 0|false|1|true>",
                       "   [OPTIONAL] Specifies whether a final summary overview of the executed\n"
                       "   benchmarks will be printed to standard output (TRUE) or not (FALSE).\n"
                       "   Results of the run will always be saved to storage regardless.\n"
                       "   Defaults to \"TRUE\".");
    parser.addArgument("--random_seed", "--seed", 1, "<uint64>",
                       "   [OPTIONAL] Specifies the random seed to use for pseudo-random number\n"
                       "   generation when none is specified by a benchmark configuration file. If\n"
                       "   no seed is specified, the current system clock time will be used as seed.");
    parser.addArgument("--report_delay", 1, "<delay_in_ms>",
                       "   [OPTIONAL] Delay between progress reports. Before each benchmark starts,\n"
                       "   Test Harness will pause for this specified number of milliseconds.\n"
                       "   Pass 0 to avoid delays. Defaults to 1000 ms.");
    parser.addArgument("--report_root_path", "--output_dir", 1, "<path_to_directory>",
                       "   [OPTIONAL] Directory where to store the report output files.\n"
                       "   Must exist and be accessible for writing. Any files with the same name\n"
                       "   will be overwritten. Defaults to current working directory \".\".");
    parser.addArgument("--single_path_report", "--single_path", 0, "",
                       "   [OPTIONAL] Allows the user to choose whether the benchmark report(s)\n"
                       "   will be created in a single-level directory or not.");
    parser.addArgument("--version", 0, "",
                       "   [OPTIONAL] Outputs Test Harness version, required API Bridge version,\n"
                       "   and currently linked API Bridge version. Application exits after this.");
    parser.parse(argc, argv);
}
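
// Example invocations built from the arguments registered above (binary name
// and paths are hypothetical). A typical benchmark run with a custom
// configuration:
//
//   ./test_harness --backend_lib_path ./libmy_backend.so \
//                  --benchmark_config_file my_run.yaml \
//                  --report_root_path ./reports
//
// Dumping a configuration template to edit, instead of running benchmarks:
//
//   ./test_harness -b ./libmy_backend.so -c template.yaml --dump_config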

void generateOverview(std::ostream &os,
                      const std::vector<std::string> &report_paths,
                      const std::string &input_root_path,
                      bool b_single_path_reports)
{
    // Generates a condensed, pretty-printed version summarizing each benchmark result.

    constexpr int ScreenColSize    = 80;
    constexpr int AveWallColSize   = ScreenColSize / 8;
    constexpr int AveCPUColSize    = ScreenColSize / 8;
    constexpr int BenchNameColSize = ScreenColSize - AveWallColSize - AveCPUColSize - 15;

    std::stringstream ss;

    os << " " << std::setfill(' ') << std::setw(BenchNameColSize) << std::left << std::string("Benchmark").substr(0, BenchNameColSize) << " | "
       << std::setw(AveWallColSize + 3) << std::right << std::string("Ave Wall time").substr(0, AveWallColSize + 3) << " | "
       << std::setw(AveCPUColSize + 3) << std::right << std::string("Ave CPU time").substr(0, AveCPUColSize + 3) << std::endl;
    os << std::setfill('=') << std::setw(ScreenColSize) << std::left << "=" << std::endl;

    for (std::size_t report_i = 0; report_i < report_paths.size(); ++report_i)
    {
        // retrieve the correct input and output paths
        std::filesystem::path report_location = report_paths[report_i]; // path where report was saved
        std::filesystem::path report_path;

        if (report_location.is_absolute())
            report_path = report_location;
        else
            report_path = std::filesystem::canonical(input_root_path) / report_location;

        if (b_single_path_reports)
            report_path += hebench::TestHarness::FileNameNoExtReport; // single-level: report name appended to the flattened path (reconstructed; elided in the listing)
        else
            report_path /= hebench::TestHarness::FileNameNoExtReport; // nested: report name as a file inside the benchmark directory (reconstructed)
        // Adding the file extension
        report_path += ".csv";

        ss = std::stringstream();
        ss << (report_i + 1) << ". " << report_location.generic_string();
        os << " " << std::setfill(' ') << std::setw(BenchNameColSize) << std::left << ss.str().substr(0, BenchNameColSize) << " | ";

        try
        {
            // load input report
            hebench::ReportGen::cpp::TimingReport report =
                hebench::ReportGen::cpp::TimingReport::loadReportFromCSVFile(report_path.string()); // (reconstructed from usage; elided in the listing)
            // generate summary
            if (report.getEventCount() > 0)
            {
                // compute simple stats on the main event for this report

                hebench::Utilities::Math::EventStats stats_wall;
                hebench::Utilities::Math::EventStats stats_cpu;

                for (std::uint64_t i = 0; i < report.getEventCount(); ++i)
                {
                    hebench::ReportGen::TimingReportEventC event; // (declaration reconstructed; elided in the listing)
                    report.getEvent(event, i);
                    if (event.event_type_id == report.getMainEventType())
                    {
                        double wall_time = hebench::ReportGen::cpp::TimingReport::computeElapsedWallTime(event) / event.input_sample_count;
                        double cpu_time  = hebench::ReportGen::cpp::TimingReport::computeElapsedCPUTime(event) / event.input_sample_count;
                        for (std::uint64_t sample_i = 0; sample_i < event.input_sample_count; ++sample_i) // renamed from `i` to avoid shadowing the outer loop variable
                        {
                            stats_wall.newEvent(wall_time);
                            stats_cpu.newEvent(cpu_time);
                        } // end for
                    } // end if
                } // end for

                // output overview of summary to stdout

                hebench::ReportGen::TimingPrefixedSeconds timing_prefix; // (declaration reconstructed; elided in the listing)
                double elapsed_time_secs;
                std::string s_elapsed_time;

                // wall time average

                elapsed_time_secs = stats_wall.getMean();
                // convert to timing prefix that fits the value between 1 and 1000
                hebench::ReportGen::cpp::TimingReport::computeTimingPrefix(timing_prefix, elapsed_time_secs); // (reconstructed; elided in the listing)
                ss = std::stringstream();
                ss << timing_prefix.symbol << "s";
                // convert to string with, at most, 2 decimal places
                s_elapsed_time = hebench::Utilities::convertDoubleToStr(timing_prefix.value, 2);
                // if string doesn't fit in the column, attempt to use scientific notation
                if (timing_prefix.value < 0.1 || s_elapsed_time.size() > AveWallColSize)
                    s_elapsed_time = hebench::Utilities::convertDoubleToStrScientific(timing_prefix.value, AveWallColSize);
                // output value
                os << std::setw(AveWallColSize) << std::right
                   << s_elapsed_time
                   << std::setfill(' ') << std::setw(3) << std::right << ss.str() << " | ";

                // cpu time average

                elapsed_time_secs = stats_cpu.getMean();
                // convert to timing prefix that fits the value between 1 and 1000
                hebench::ReportGen::cpp::TimingReport::computeTimingPrefix(timing_prefix, elapsed_time_secs); // (reconstructed; elided in the listing)
                ss = std::stringstream();
                ss << timing_prefix.symbol << "s";
                // convert to string with, at most, 2 decimal places
                s_elapsed_time = hebench::Utilities::convertDoubleToStr(timing_prefix.value, 2);
                // if string doesn't fit in the column, attempt to use scientific notation
                if (timing_prefix.value < 0.1 || s_elapsed_time.size() > AveCPUColSize)
                    s_elapsed_time = hebench::Utilities::convertDoubleToStrScientific(timing_prefix.value, AveCPUColSize);
                // output value
                os << std::setw(AveCPUColSize) << std::right
                   << s_elapsed_time
                   << std::setfill(' ') << std::setw(3) << std::right << ss.str() << std::endl;
            } // end if
            else
                os << "Validation Failed" << std::endl;
        }
        catch (...)
        {
            os << "Load Failed" << std::endl;
        }
        os << std::setfill('-') << std::setw(ScreenColSize) << std::left << "-" << std::endl;
    } // end for
}
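
// generateOverview() renders an 80-column table with one row per saved report;
// names and timings below are purely illustrative:
//
//  Benchmark                                     |   Ave Wall time |    Ave CPU time
//  ================================================================================
//  1. my_backend/eltwise_add/latency             |        12.34 ms |        12.30 ms
//  --------------------------------------------------------------------------------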

int main(int argc, char **argv)
{
    int retval = 0;
    ProgramConfig config;
    std::stringstream ss;

    std::cout << std::endl
              << hebench::Logging::GlobalLogger::log(true, "HEBench") << std::endl;

    hebench::Utilities::BenchmarkSession benchmarks_to_run;
    std::size_t total_runs = 0;
    std::vector<std::string> report_paths;
    std::vector<std::size_t> failed_benchmarks;

    try
    {
        hebench::ArgsParser args_parser;
        initArgsParser(args_parser, argc, argv);
        config.initializeConfig(args_parser);

        ss = std::stringstream();
        config.showVersion(ss);
        std::cout << hebench::Logging::GlobalLogger::log(ss.str()) << std::endl;

        if (HEBENCH_TEST_HARNESS_API_REQUIRED_VERSION_MAJOR != HEBENCH_API_VERSION_MAJOR
            || HEBENCH_TEST_HARNESS_API_REQUIRED_VERSION_MINOR != HEBENCH_API_VERSION_MINOR
            || HEBENCH_TEST_HARNESS_API_MIN_REQUIRED_VERSION_REVISION > HEBENCH_API_VERSION_REVISION)
        {
            throw std::runtime_error("Invalid API Bridge version.");
        } // end if

        ss = std::stringstream();
        config.showConfig(ss);
        std::cout << hebench::Logging::GlobalLogger::log(ss.str()) << std::endl;

        // propagating application configuration
        // (the original listing elides a line here; it links a setForceConfigValues()
        //  call that forwards config.b_force_config to the frontend)

        ss = std::stringstream();
        ss << "Initializing Backend from shared library:" << std::endl
           << config.backend_lib_path;
        std::cout << IOS_MSG_INFO << hebench::Logging::GlobalLogger::log(ss.str()) << std::endl;
        hebench::APIBridge::DynamicLibLoad::loadLibrary(config.backend_lib_path);
        std::cout << IOS_MSG_OK << hebench::Logging::GlobalLogger::log("Backend loaded successfully.") << std::endl;

        std::shared_ptr<hebench::Utilities::BenchmarkConfigLoader> p_bench_config_loader;
        if (!config.config_file.empty() && !config.b_dump_config)
        {
            std::cout << IOS_MSG_INFO << hebench::Logging::GlobalLogger::log("Loading benchmark configuration from file...") << std::endl;
            p_bench_config_loader = hebench::Utilities::BenchmarkConfigLoader::create(config.config_file, config.random_seed);
            // update random seed
            config.random_seed = p_bench_config_loader->getRandomSeed();
        } // end if
        std::cout << IOS_MSG_DONE << std::endl;

        // create engine and register all benchmarks
        std::cout << IOS_MSG_INFO << hebench::Logging::GlobalLogger::log("Initializing Backend engine...") << std::endl;
        hebench::TestHarness::Engine::Ptr p_engine; // (declaration reconstructed from usage; elided in the listing)
        if (p_bench_config_loader)
            p_engine = hebench::TestHarness::Engine::create(p_bench_config_loader->getInitData());
        else
            p_engine = hebench::TestHarness::Engine::create(std::vector<std::int8_t>());
        std::cout << IOS_MSG_OK << std::endl;

        // broker configuration for benchmarks
        std::cout << IOS_MSG_INFO << hebench::Logging::GlobalLogger::log("Retrieving default benchmark configuration from Backend...") << std::endl;
        std::shared_ptr<hebench::Utilities::BenchmarkConfigBroker> p_bench_broker =
            std::make_shared<hebench::Utilities::BenchmarkConfigBroker>(p_engine, config.random_seed, config.backend_lib_path);

        if (config.b_dump_config)
        {
            ss = std::stringstream();
            ss << "Saving default benchmark configuration to storage:" << std::endl
               << config.config_file;
            std::cout << IOS_MSG_INFO << hebench::Logging::GlobalLogger::log(ss.str()) << std::endl;
            p_bench_broker->exportConfiguration(config.config_file,
                                                p_bench_broker->getDefaultConfiguration());
            std::cout << IOS_MSG_OK << std::endl;

            // default config dumped; program completed
        } // end if
        else
        {
            // initialize benchmarks requested to run

            if (p_bench_config_loader)
            {
                ss = std::stringstream();
                ss << "Loading benchmark configuration file:" << std::endl
                   << config.config_file;
                std::cout << IOS_MSG_INFO << hebench::Logging::GlobalLogger::log(ss.str()) << std::endl;
                benchmarks_to_run = p_bench_broker->importConfiguration(*p_bench_config_loader);
            } // end if
            else
            {
                std::cout << IOS_MSG_INFO << hebench::Logging::GlobalLogger::log("Loading default benchmark configuration...") << std::endl;
                benchmarks_to_run = p_bench_broker->getDefaultConfiguration();
            } // end else
        } // end else

        // clean up benchmark configurators
        p_bench_broker.reset();
        p_bench_config_loader.reset();

        if (!benchmarks_to_run.benchmark_requests.empty())
        {
            // start benchmarking if there are benchmarks to run

            ss = std::stringstream();
            config.showBenchmarkDefaults(ss);
            std::cout << std::endl
                      << hebench::Logging::GlobalLogger::log(ss.str()) << std::endl;

            total_runs = benchmarks_to_run.benchmark_requests.size();
            ss         = std::stringstream();
            ss << "Benchmarks to run: " << total_runs;
            std::cout << IOS_MSG_OK << hebench::Logging::GlobalLogger::log(ss.str()) << std::endl;

            // iterate through the registered benchmarks and execute them
            std::size_t run_i = 0;
            for (std::size_t bench_i = 0; bench_i < benchmarks_to_run.benchmark_requests.size(); ++bench_i)
            {
                benchmarks_to_run.benchmark_requests[bench_i].configuration.b_single_path_report = config.b_single_path_report;
                hebench::Utilities::BenchmarkRequest &benchmark_request = benchmarks_to_run.benchmark_requests[bench_i];
                bool b_critical_error = false;
                std::string bench_path;
                hebench::ReportGen::cpp::TimingReport report; // (declaration reconstructed from usage; elided in the listing)
                try
                {
                    ss = std::stringstream();
                    ss << " Progress: " << (run_i * 100 / total_runs) << "%" << std::endl
                       << " " << run_i << "/" << total_runs;
                    std::cout << std::endl
                              << "==================" << std::endl
                              << hebench::Logging::GlobalLogger::log(ss.str()) << std::endl
                              << "==================" << std::endl;

                    if (config.report_delay_ms > 0)
                        std::this_thread::sleep_for(std::chrono::milliseconds(config.report_delay_ms));

                    // obtain the text description of the benchmark to print out
                    auto bench_token = // (left-hand side reconstructed; elided in the listing)
                        p_engine->describeBenchmark(benchmark_request.index, benchmark_request.configuration);

                    bench_path = bench_token->getDescription().path;

                    // print header

                    // prints
                    // ===========================
                    // Workload: <workload name>
                    // ===========================
                    std::string s_workload_name = "Workload: " + bench_token->getDescription().workload_name;
                    std::size_t fill_size       = s_workload_name.length() + 2;
                    if (fill_size > 79)
                        fill_size = 79;
                    std::cout << std::endl
                              << std::setfill('=') << std::setw(fill_size) << "=" << std::endl
                              << " " << hebench::Logging::GlobalLogger::log(s_workload_name) << std::endl
                              << std::setw(fill_size) << "=" << std::setfill(' ') << std::endl;

                    report.setHeader(bench_token->getDescription().header);
                    if (!bench_token->getBenchmarkConfiguration().dataset_filename.empty())
                    {
                        std::stringstream ss_dataset; // renamed from `ss` to avoid shadowing the outer stream
                        ss_dataset << "Dataset, \"" << bench_token->getBenchmarkConfiguration().dataset_filename << "\"" << std::endl;
                        report.appendFooter(ss_dataset.str());
                    } // end if

                    std::cout << std::endl
                              << report.getHeader() << std::endl;

                    // create the benchmark
                    hebench::TestHarness::IBenchmark::Ptr p_bench = p_engine->createBenchmark(bench_token, report);

                    hebench::TestHarness::IBenchmark::RunConfig run_config; // (type reconstructed from usage; elided in the listing)
                    run_config.b_validate_results = config.b_validate_results;

                    // run the workload
                    bool b_succeeded = p_bench->run(report, run_config);

                    if (!b_succeeded)
                    {
                        std::cout << IOS_MSG_FAILED << hebench::Logging::GlobalLogger::log(bench_token->getDescription().workload_name) << std::endl;
                        failed_benchmarks.push_back(report_paths.size());
                        report.clear(); // report event data is no longer valid for a failed run
                    } // end if
                }
                catch (hebench::Common::ErrorException &err_num)
                {
                    if (err_num.getErrorCode() == HEBENCH_ECODE_CRITICAL_ERROR)
                    {
                        b_critical_error = true;
                        throw; // critical failure
                    } // end if
                    else
                    {
                        // no critical error: report and move on to the next benchmark

                        b_critical_error = false;

                        failed_benchmarks.push_back(report_paths.size());
                        report.clear(); // report event data is no longer valid for a failed run

                        ss = std::stringstream();
                        ss << "Workload backend failed with message: " << std::endl
                           << err_num.what();
                        std::cout << std::endl
                                  << IOS_MSG_ERROR << hebench::Logging::GlobalLogger::log(ss.str()) << std::endl;
                    } // end else
                }
                catch (...)
                {
                    b_critical_error = true;
                    throw; // critical failure
                }

                report_paths.emplace_back(bench_path);

                // create the path to output report
                std::filesystem::path report_filename = bench_path;
                std::filesystem::path report_path     = report_filename.is_absolute() ?
                                                            report_filename :
                                                            config.report_root_path / report_filename;

                // output CSV report
                report_filename = report_path;
                if (!config.b_single_path_report)
                    report_filename /= hebench::TestHarness::FileNameNoExtReport; // (reconstructed; elided in the listing)
                else
                    report_filename += hebench::TestHarness::FileNameNoExtReport; // (reconstructed)
                report_filename += ".csv";

                ss = std::stringstream();
                ss << "Saving report to: " << std::endl
                   << report_filename;
                std::cout << IOS_MSG_INFO << hebench::Logging::GlobalLogger::log(ss.str()) << std::endl;

                if (b_critical_error)
                {
                    // delete any previous report in this location to signal failure
                    if (std::filesystem::exists(report_filename) && std::filesystem::is_regular_file(report_filename))
                    {
                        std::filesystem::remove(report_filename);
                    }
                } // end if
                else
                {
                    // no need to create dirs if single path is enabled
                    if (!config.b_single_path_report)
                    {
                        std::filesystem::create_directories(report_path);
                    }
                    report.save2CSV(report_filename);
                } // end else

                std::cout << IOS_MSG_OK << hebench::Logging::GlobalLogger::log("Report saved.") << std::endl;

                ++run_i;

                // benchmark cleaned up here automatically
            } // end for

            // clean-up engine before final report (engine can clean up
            // automatically, but better to release when no longer needed)
            p_engine.reset();

            // At this point all benchmarks have been run and reports generated.
            // All that's left to do is to perform output that summarizes the run.

            assert(report_paths.size() == total_runs);

            ss = std::stringstream();
            ss << " Progress: 100%" << std::endl
               << " " << total_runs << "/" << total_runs;
            std::cout << std::endl
                      << "==================" << std::endl
                      << hebench::Logging::GlobalLogger::log(ss.str()) << std::endl
                      << "==================" << std::endl;

            // benchmark summary list

            std::cout << std::endl
                      << IOS_MSG_INFO << hebench::Logging::GlobalLogger::log("Generating benchmark list...") << std::endl;

            std::filesystem::path benchmark_list_filename = std::filesystem::canonical(config.report_root_path);
            benchmark_list_filename /= "benchmark_list.txt";
            hebench::Utilities::writeToFile(
                benchmark_list_filename,
                [&config, &report_paths](std::ostream &os) {
                    for (std::size_t report_i = 0; report_i < report_paths.size(); ++report_i)
                    {
                        std::filesystem::path report_filename = report_paths[report_i];
                        if (!config.b_single_path_report)
                            report_filename /= hebench::TestHarness::FileNameNoExtReport; // (reconstructed; elided in the listing)
                        else
                            report_filename += hebench::TestHarness::FileNameNoExtReport; // (reconstructed)
                        report_filename += ".csv";

                        os << report_filename.string() << std::endl;
                    } // end for
                },
                false, false);

            ss = std::stringstream();
            ss << "Benchmark list saved to: " << std::endl
               << benchmark_list_filename;
            std::cout << IOS_MSG_INFO << hebench::Logging::GlobalLogger::log(ss.str()) << std::endl;

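            // benchmark_list.txt ends up holding one report CSV path per executed
            // benchmark, one per line; the entries below are purely illustrative
            // (the actual report file name comes from
            // hebench::TestHarness::FileNameNoExtReport):
            //
            //   my_backend/eltwise_add/latency/<report name>.csv
            //   my_backend/eltwise_mult/offline/<report name>.csv
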
            // compile reports into stats and summary files

            if (config.b_compile_reports)
            {
                std::cout << std::endl
                          << IOS_MSG_INFO << hebench::Logging::GlobalLogger::log("Initializing report compiler...") << std::endl;

                hebench::ReportGen::Compiler::ReportCompilerConfigC compiler_config; // (declaration reconstructed from usage; elided in the listing)
                std::vector<char> c_error_msg(1024, 0);
                std::string compile_filename       = benchmark_list_filename.string();
                compiler_config.input_file         = compile_filename.c_str();
                compiler_config.b_show_overview    = 0; // do not show the overview result file here
                compiler_config.b_silent           = 1;
                compiler_config.time_unit          = 0;
                compiler_config.time_unit_stats    = 0;
                compiler_config.time_unit_overview = 0;
                compiler_config.time_unit_summary  = 0;

                std::cout << IOS_MSG_INFO << hebench::Logging::GlobalLogger::log("Compiling reports using default compiler options...") << std::endl
                          << std::endl;
                if (!hebench::ReportGen::Compiler::compile(&compiler_config, c_error_msg.data(), c_error_msg.size()))
                    throw std::runtime_error(c_error_msg.data());
                std::cout << IOS_MSG_DONE << hebench::Logging::GlobalLogger::log("Reports Compiled.") << std::endl
                          << std::endl;
            } // end if

            // display overview of the run results

            if (config.b_show_run_overview)
            {
                std::cout << std::endl
                          << IOS_MSG_INFO << hebench::Logging::GlobalLogger::log("Generating overview...") << std::endl
                          << std::endl;
                generateOverview(std::cout, report_paths, config.report_root_path, config.b_single_path_report);

                // display any failed benchmarks

                ss = std::stringstream();
                ss << "Failed benchmarks: " << failed_benchmarks.size() << std::endl
                   << std::endl;
                for (std::size_t i = 0; i < failed_benchmarks.size(); ++i)
                {
                    if (i > 0)
                        ss << std::endl;
                    assert(failed_benchmarks[i] < report_paths.size());
                    ss << i + 1 << ". " << report_paths.at(failed_benchmarks[i]) << std::endl;
                } // end for
                std::cout << std::endl
                          << hebench::Logging::GlobalLogger::log(ss.str());
            } // end if

            // benchmark overall summary

            std::cout << std::endl
                      << "=================================" << std::endl
                      << IOS_MSG_INFO << hebench::Logging::GlobalLogger::log("Run Summary") << std::endl;
            ss = std::stringstream();
            ss << "Total benchmarks run: " << total_runs;
            std::cout << IOS_MSG_INFO << hebench::Logging::GlobalLogger::log(ss.str()) << std::endl;
            ss = std::stringstream();
            ss << "Success: " << total_runs - failed_benchmarks.size();
            if (!config.b_validate_results)
                ss << "* (validation skipped)";
            std::cout << IOS_MSG_INFO << hebench::Logging::GlobalLogger::log(ss.str()) << std::endl;
            ss = std::stringstream();
            ss << "Failed: " << failed_benchmarks.size();
            std::cout << IOS_MSG_INFO << hebench::Logging::GlobalLogger::log(ss.str()) << std::endl;
        } // end if
    }
    catch (hebench::ArgsParser::HelpShown &)
    {
        // do nothing
    }
    catch (std::exception &ex)
    {
        ss = std::stringstream();
        ss << "An error occurred with message: " << std::endl
           << ex.what();
        std::cout << std::endl
                  << IOS_MSG_ERROR << hebench::Logging::GlobalLogger::log(ss.str()) << std::endl;
        retval = -1;
    }
    catch (...)
    {
        retval = -1;
    }

    hebench::APIBridge::DynamicLibLoad::unloadLibrary();

    if (retval == 0)
    {
        std::cout << std::endl
                  << IOS_MSG_DONE << hebench::Logging::GlobalLogger::log(true, "Complete!") << std::endl;
    } // end if
    else
    {
        ss = std::stringstream();
        ss << "Terminated with errors. Exit code: " << retval;
        std::cout << std::endl
                  << IOS_MSG_FAILED << hebench::Logging::GlobalLogger::log(true, ss.str()) << std::endl;
    } // end else

    return retval;
}