#include "hebench/modules/args_parser/include/args_parser.h"
#include "hebench/modules/general/include/error.h"
#include "hebench/modules/general/include/hebench_math_utils.h"
#include "hebench/modules/general/include/hebench_utilities.h"
#include "hebench/modules/logging/include/logging.h"

#include "hebench/dynamic_lib_load.h"

#include "include/hebench_version.h"
static_assert(std::numeric_limits<float>::is_iec559,
              "Compiler type `float` does not comply with IEEE 754.");
static_assert(std::numeric_limits<double>::is_iec559,
              "Compiler type `double` does not comply with IEEE 754.");
static_assert(sizeof(float) == 4, "Compiler type `float` is not 32 bits.");
static_assert(sizeof(double) == 8, "Compiler type `double` is not 64 bits.");
 
    if (parser.hasArgument("--version"))

        throw hebench::ArgsParser::HelpShown("Version shown.");

    parser.getValue<decltype(s_tmp)>(s_tmp, "--benchmark_config_file", DefaultConfigFile);

        throw std::runtime_error("Dump default benchmark configuration file requested, but no filename given with \"--benchmark_config_file\" parameter.");

    parser.getValue<decltype(s_tmp)>(s_tmp, "--backend_lib_path");

    parser.getValue<decltype(random_seed)>(random_seed, "--random_seed", std::chrono::system_clock::now().time_since_epoch().count());

    parser.getValue<decltype(s_tmp)>(s_tmp, "--report_root_path", DefaultRootPath);

        throw std::runtime_error("Specified directory for report output does not exist or is not accessible: " + report_root_path.string());

        throw std::runtime_error("Specified backend library does not exist or is not accessible: " + backend_lib_path.string());

        throw std::runtime_error("Backend library error: symbolic links are not allowed as input arguments: " + backend_lib_path.string());
    if ((std::filesystem::canonical(backend_lib_path).string()).substr(0, 5) == std::string("/tmp/"))
        throw std::runtime_error("Backend library error: Cannot use files in /tmp/ as arguments: " + backend_lib_path.string());

            throw std::runtime_error("Specified benchmark configuration file does not exist or is not accessible: " + config_file.string());

            throw std::runtime_error("Config file path error: symbolic links are not allowed as input arguments: " + config_file.string());
        if (std::filesystem::canonical(config_file).string().substr(0, 5) == std::string("/tmp/"))
            throw std::runtime_error("Config file error: Cannot use files in /tmp/ as arguments: " + config_file.string());
 
    os << "Benchmark defaults:" << std::endl

    os << "Global Configuration:" << std::endl

       << "    ==================" << std::endl

        os << "Dumping configuration file!" << std::endl;

        os << "Benchmark Run." << std::endl

    os << "    Run configuration file: ";

        os << "(none)" << std::endl;

        os << "    Force configuration values: " << (b_force_config ? "Yes" : "No") << std::endl;

    os << "    ==================" << std::endl;

    os << HEBENCH_TEST_HARNESS_APP_NAME << " v"
       << HEBENCH_TEST_HARNESS_VERSION_MAJOR << "."
       << HEBENCH_TEST_HARNESS_VERSION_MINOR << "."
       << HEBENCH_TEST_HARNESS_VERSION_REVISION << "-"
       << HEBENCH_TEST_HARNESS_VERSION_BUILD << std::endl

       << "API Bridge version:" << std::endl
       << "  Required: " << HEBENCH_TEST_HARNESS_API_REQUIRED_VERSION_MAJOR << "."
       << HEBENCH_TEST_HARNESS_API_REQUIRED_VERSION_MINOR << "."
       << HEBENCH_TEST_HARNESS_API_MIN_REQUIRED_VERSION_REVISION << std::endl

       << HEBENCH_API_VERSION_MAJOR << "."
       << HEBENCH_API_VERSION_MINOR << "."
       << HEBENCH_API_VERSION_REVISION << "-"
       << HEBENCH_API_VERSION_BUILD << std::endl;
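
    // Editorial note: the version output above has the shape
    // "<app name> v<major>.<minor>.<revision>-<build>", followed by the required and
    // the currently linked API Bridge versions.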
 
    parser.addArgument("--backend_lib_path", "--backend", "-b", 1, "<path_to_shared_lib>",
                       "   [REQUIRED] Path to backend shared library.\n"
                       "   The library file must exist and be accessible for reading.");
    parser.addArgument("--benchmark_config_file", "--config_file", "-c", 1, "<path_to_config_file>",
                       "   [OPTIONAL] Path to benchmark run configuration file.\n"
                       "   YAML file specifying the selection of benchmarks and their workload\n"
                       "   parameters to run. If not present, all backend benchmarks will be run\n"
                       "   with default parameters.");
    parser.addArgument("--compile_reports", "--compile", "-C", 1, "<bool: 0|false|1|true>",
                       "   [OPTIONAL] Enables (TRUE) or disables (FALSE) inline compilation of\n"
                       "   benchmark reports into summaries and statistics. Defaults to \"TRUE\".");
    parser.addArgument("--dump_config", "--dump", 0, "",
                       "   [OPTIONAL] If specified, Test Harness will dump a general configuration\n"
                       "   file with the possible benchmarks that the backend can run. This file can\n"
                       "   be used as a starting-point template for a benchmark run configuration file.\n"
                       "   The destination file is specified by the \"--benchmark_config_file\" argument.");
    parser.addArgument("--enable_validation", "--validation", "-v", 1, "<bool: 0|false|1|true>",
                       "   [OPTIONAL] Specifies whether results from the benchmarks run will be\n"
                       "   validated against ground truth. Defaults to \"TRUE\".");
    parser.addArgument("--force_config", 1, "<bool: 0|false|1|true>",
                       "   [OPTIONAL] Specifies whether an attempt will be made to force configuration\n"
                       "   file values on the backend (TRUE) or non-flexible backend values will take\n"
                       "   priority (FALSE). Defaults to \"TRUE\".");
    parser.addArgument("--run_overview", 1, "<bool: 0|false|1|true>",
                       "   [OPTIONAL] Specifies whether a final summary overview of the benchmarks run\n"
                       "   will be printed to standard output (TRUE) or not (FALSE). Results of the\n"
                       "   run will always be saved to storage regardless. Defaults to \"TRUE\".");
    parser.addArgument("--random_seed", "--seed", 1, "<uint64>",
                       "   [OPTIONAL] Specifies the random seed to use for pseudo-random number\n"
                       "   generation when none is specified by a benchmark configuration file. If\n"
                       "   no seed is specified, the current system clock time will be used as the seed.");
    parser.addArgument("--report_delay", 1, "<delay_in_ms>",
                       "   [OPTIONAL] Delay between progress reports. Before each benchmark starts,\n"
                       "   Test Harness will pause for this specified number of milliseconds.\n"
                       "   Pass 0 to avoid delays. Defaults to 1000 ms.");
    parser.addArgument("--report_root_path", "--output_dir", 1, "<path_to_directory>",
                       "   [OPTIONAL] Directory where the report output files will be stored.\n"
                       "   Must exist and be accessible for writing. Any files with the same name will\n"
                       "   be overwritten. Defaults to the current working directory \".\".");
    parser.addArgument("--single_path_report", "--single_path", 0, "",
                       "   [OPTIONAL] Allows the user to choose whether the benchmark reports will be\n"
                       "   created in a single-level directory or not.");
    parser.addArgument("--version", 0, "",
                       "   [OPTIONAL] Outputs the Test Harness version, the required API Bridge version,\n"
                       "   and the currently linked API Bridge version. The application exits after this.");
    parser.parse(argc, argv);
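
    // Illustrative invocations (editorial note; the binary name and paths are
    // placeholders, only flags defined above are used):
    //
    //   ./test_harness --backend_lib_path /path/to/libbackend.so
    //   ./test_harness -b /path/to/libbackend.so -c run_config.yaml --report_root_path ./reports
    //   ./test_harness -b /path/to/libbackend.so --dump_config -c default_config.yaml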
 
                      const std::vector<std::string> &report_paths,
                      const std::string &input_root_path,
                      bool b_single_path_reports)

    constexpr int ScreenColSize    = 80;
    constexpr int AveWallColSize   = ScreenColSize / 8;
    constexpr int AveCPUColSize    = ScreenColSize / 8;
    constexpr int BenchNameColSize = ScreenColSize - AveWallColSize - AveCPUColSize - 15;
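
    // Editorial note: with ScreenColSize = 80, AveWallColSize = AveCPUColSize = 80 / 8 = 10,
    // and BenchNameColSize = 80 - 10 - 10 - 15 = 45 characters.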
 
    std::stringstream ss;

    os << " " << std::setfill(' ') << std::setw(BenchNameColSize) << std::left << std::string("Benchmark").substr(0, BenchNameColSize) << " | "
       << std::setw(AveWallColSize + 3) << std::right << std::string("Ave Wall time").substr(0, AveWallColSize + 3) << " | "
       << std::setw(AveCPUColSize + 3) << std::right << std::string("Ave CPU time").substr(0, AveCPUColSize + 3) << std::endl;
    os << std::setfill('=') << std::setw(ScreenColSize) << std::left << "=" << std::endl;
 
    for (std::size_t report_i = 0; report_i < report_paths.size(); ++report_i)

        std::filesystem::path report_location = report_paths[report_i];
        std::filesystem::path report_path;

        if (report_location.is_absolute())
            report_path = report_location;

            report_path = std::filesystem::canonical(input_root_path) / report_location;

        if (b_single_path_reports)

        report_path += ".csv";

        ss = std::stringstream();
        ss << (report_i + 1) << ". " << report_location.generic_string();
        os << " " << std::setfill(' ') << std::setw(BenchNameColSize) << std::left << ss.str().substr(0, BenchNameColSize) << " | ";

                hebench::Utilities::Math::EventStats stats_wall;
                hebench::Utilities::Math::EventStats stats_cpu;

                        for (std::uint64_t i = 0; i < event.input_sample_count; ++i)

                            stats_wall.newEvent(wall_time);
                            stats_cpu.newEvent(cpu_time);

                double elapsed_time_secs;
                std::string s_elapsed_time;

                elapsed_time_secs = stats_wall.getMean();

                ss = std::stringstream();
                ss << timing_prefix.symbol << "s";

                s_elapsed_time = hebench::Utilities::convertDoubleToStr(timing_prefix.value, 2);

                if (timing_prefix.value < 0.1 || s_elapsed_time.size() > AveWallColSize)
                    s_elapsed_time = hebench::Utilities::convertDoubleToStrScientific(timing_prefix.value, AveWallColSize);

                os << std::setw(AveWallColSize) << std::right

                   << std::setfill(' ') << std::setw(3) << std::right << ss.str() << " | ";

                elapsed_time_secs = stats_cpu.getMean();

                ss = std::stringstream();
                ss << timing_prefix.symbol << "s";

                s_elapsed_time = hebench::Utilities::convertDoubleToStr(timing_prefix.value, 2);

                if (timing_prefix.value < 0.1 || s_elapsed_time.size() > AveCPUColSize)
                    s_elapsed_time = hebench::Utilities::convertDoubleToStrScientific(timing_prefix.value, AveCPUColSize);

                os << std::setw(AveCPUColSize) << std::right

                   << std::setfill(' ') << std::setw(3) << std::right << ss.str() << std::endl;

                os << "Validation Failed" << std::endl;

            os << "Load Failed" << std::endl;

        os << std::setfill('-') << std::setw(ScreenColSize) << std::left << "-" << std::endl;
 
int main(int argc, char **argv)

    std::stringstream ss;

    std::cout << std::endl
              << hebench::Logging::GlobalLogger::log(true, "HEBench") << std::endl;

    std::size_t total_runs = 0;
    std::vector<std::string> report_paths;
    std::vector<std::size_t> failed_benchmarks;

        hebench::ArgsParser args_parser;

        ss = std::stringstream();

        std::cout << hebench::Logging::GlobalLogger::log(ss.str()) << std::endl;

        if (HEBENCH_TEST_HARNESS_API_REQUIRED_VERSION_MAJOR != HEBENCH_API_VERSION_MAJOR
            || HEBENCH_TEST_HARNESS_API_REQUIRED_VERSION_MINOR != HEBENCH_API_VERSION_MINOR
            || HEBENCH_TEST_HARNESS_API_MIN_REQUIRED_VERSION_REVISION > HEBENCH_API_VERSION_REVISION)

            throw std::runtime_error("Invalid API Bridge version.");
 
        ss = std::stringstream();

        std::cout << hebench::Logging::GlobalLogger::log(ss.str()) << std::endl;

        ss = std::stringstream();
        ss << "Initializing Backend from shared library:" << std::endl

        std::cout << IOS_MSG_INFO << hebench::Logging::GlobalLogger::log(ss.str()) << std::endl;

        std::cout << IOS_MSG_OK << hebench::Logging::GlobalLogger::log("Backend loaded successfully.") << std::endl;

        std::shared_ptr<hebench::Utilities::BenchmarkConfigLoader> p_bench_config_loader;

            std::cout << IOS_MSG_INFO << hebench::Logging::GlobalLogger::log("Loading benchmark configuration from file...") << std::endl;

            config.random_seed = p_bench_config_loader->getRandomSeed();

        std::cout << IOS_MSG_INFO << hebench::Logging::GlobalLogger::log("Initializing Backend engine...") << std::endl;

        if (p_bench_config_loader)

        std::cout << IOS_MSG_INFO << hebench::Logging::GlobalLogger::log("Retrieving default benchmark configuration from Backend...") << std::endl;
        std::shared_ptr<hebench::Utilities::BenchmarkConfigBroker> p_bench_broker =

            ss = std::stringstream();
            ss << "Saving default benchmark configuration to storage:" << std::endl

            std::cout << IOS_MSG_INFO << hebench::Logging::GlobalLogger::log(ss.str()) << std::endl;
            p_bench_broker->exportConfiguration(config.config_file,
                                                p_bench_broker->getDefaultConfiguration());

            if (p_bench_config_loader)

                ss = std::stringstream();
                ss << "Loading benchmark configuration file:" << std::endl

                std::cout << IOS_MSG_INFO << hebench::Logging::GlobalLogger::log(ss.str()) << std::endl;
                benchmarks_to_run = p_bench_broker->importConfiguration(*p_bench_config_loader);

                std::cout << IOS_MSG_INFO << hebench::Logging::GlobalLogger::log("Loading default benchmark configuration...") << std::endl;
                benchmarks_to_run = p_bench_broker->getDefaultConfiguration();

        p_bench_broker.reset();
        p_bench_config_loader.reset();

            ss = std::stringstream();

            std::cout << std::endl
                      << hebench::Logging::GlobalLogger::log(ss.str()) << std::endl;

            ss = std::stringstream();
            ss << "Benchmarks to run: " << total_runs;
            std::cout << IOS_MSG_OK << hebench::Logging::GlobalLogger::log(ss.str()) << std::endl;

            std::size_t run_i = 0;
            for (std::size_t bench_i = 0; bench_i < benchmarks_to_run.benchmark_requests.size(); ++bench_i)

                bool b_critical_error = false;
                std::string bench_path;

                    ss = std::stringstream();
                    ss << " Progress: " << (run_i * 100 / total_runs) << "%" << std::endl
                       << "           " << run_i << "/" << total_runs;
                    std::cout << std::endl
                              << "==================" << std::endl
                              << hebench::Logging::GlobalLogger::log(ss.str()) << std::endl
                              << "==================" << std::endl;

                        std::this_thread::sleep_for(std::chrono::milliseconds(config.report_delay_ms));

                        p_engine->describeBenchmark(benchmark_request.index, benchmark_request.configuration);

                    bench_path = bench_token->getDescription().path;

                    std::string s_workload_name = "Workload: " + bench_token->getDescription().workload_name;
                    std::size_t fill_size       = s_workload_name.length() + 2;

                    std::cout << std::endl
                              << std::setfill('=') << std::setw(fill_size) << "=" << std::endl
                              << " " << hebench::Logging::GlobalLogger::log(s_workload_name) << std::endl
                              << std::setw(fill_size) << "=" << std::setfill(' ') << std::endl;

                    report.setHeader(bench_token->getDescription().header);
                    if (!bench_token->getBenchmarkConfiguration().dataset_filename.empty())

                        std::stringstream ss;
                        ss << "Dataset, \"" << bench_token->getBenchmarkConfiguration().dataset_filename << "\"" << std::endl;

                    std::cout << std::endl

                    bool b_succeeded = p_bench->run(report, run_config);

                        std::cout << IOS_MSG_FAILED << hebench::Logging::GlobalLogger::log(bench_token->getDescription().workload_name) << std::endl;
                        failed_benchmarks.push_back(report_paths.size());

                catch (hebench::Common::ErrorException &err_num)

                        b_critical_error = true;

                        b_critical_error = false;

                        failed_benchmarks.push_back(report_paths.size());

                        ss = std::stringstream();
                        ss << "Workload backend failed with message: " << std::endl

                        std::cout << std::endl
                                  << IOS_MSG_ERROR << hebench::Logging::GlobalLogger::log(ss.str()) << std::endl;

                    b_critical_error = true;

                report_paths.emplace_back(bench_path);

                std::filesystem::path report_filename = bench_path;
                std::filesystem::path report_path     = report_filename.is_absolute() ?

                report_filename = report_path;

                report_filename += ".csv";

                ss = std::stringstream();
                ss << "Saving report to: " << std::endl

                std::cout << IOS_MSG_INFO << hebench::Logging::GlobalLogger::log(ss.str()) << std::endl;

                if (b_critical_error)

                    if (std::filesystem::exists(report_filename) && std::filesystem::is_regular_file(report_filename))

                        std::filesystem::remove(report_filename);

                        std::filesystem::create_directories(report_path);

                std::cout << IOS_MSG_OK << hebench::Logging::GlobalLogger::log("Report saved.") << std::endl;

            assert(report_paths.size() == total_runs);

            ss = std::stringstream();
            ss << " Progress: 100%" << std::endl
               << "           " << total_runs << "/" << total_runs;
            std::cout << std::endl
                      << "==================" << std::endl
                      << hebench::Logging::GlobalLogger::log(ss.str()) << std::endl
                      << "==================" << std::endl;

            std::cout << std::endl
                      << IOS_MSG_INFO << hebench::Logging::GlobalLogger::log("Generating benchmark list...") << std::endl;

            std::filesystem::path benchmark_list_filename = std::filesystem::canonical(config.report_root_path);
            benchmark_list_filename /= "benchmark_list.txt";
            hebench::Utilities::writeToFile(
                benchmark_list_filename,
                [&config, &report_paths](std::ostream &os) {
                    for (std::size_t report_i = 0; report_i < report_paths.size(); ++report_i)

                        std::filesystem::path report_filename = report_paths[report_i];

                        report_filename += ".csv";

                        os << report_filename.string() << std::endl;
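
                        // Editorial note: the lambda above writes benchmark_list.txt with one
                        // line per benchmark run, each line being that run's report path with
                        // the ".csv" extension appended.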
 
            ss = std::stringstream();
            ss << "Benchmark list saved to: " << std::endl
               << benchmark_list_filename;
            std::cout << IOS_MSG_INFO << hebench::Logging::GlobalLogger::log(ss.str()) << std::endl;

                std::cout << std::endl
                          << IOS_MSG_INFO << hebench::Logging::GlobalLogger::log("Initializing report compiler...") << std::endl;

                std::vector<char> c_error_msg(1024, 0);
                std::string compile_filename       = benchmark_list_filename.string();
                compiler_config.input_file         = compile_filename.c_str();

                std::cout << IOS_MSG_INFO << hebench::Logging::GlobalLogger::log("Compiling reports using default compiler options...") << std::endl

                    throw std::runtime_error(c_error_msg.data());
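
                // Editorial sketch (assumption, for illustration only): the elided call guarded
                // by the check above drives the report compiler through its C entry point, which
                // takes the configuration, an error buffer, and the buffer size, roughly:
                //
                //   compile(&compiler_config, c_error_msg.data(), c_error_msg.size())
                //
                // with a false/zero return indicating failure, reported via c_error_msg.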
 
                std::cout << IOS_MSG_DONE << hebench::Logging::GlobalLogger::log("Reports Compiled.") << std::endl

                std::cout << std::endl
                          << IOS_MSG_INFO << hebench::Logging::GlobalLogger::log("Generating overview...") << std::endl

                ss = std::stringstream();
                ss << "Failed benchmarks: " << failed_benchmarks.size() << std::endl

                for (std::size_t i = 0; i < failed_benchmarks.size(); ++i)

                    assert(failed_benchmarks[i] < report_paths.size());
                    ss << i + 1 << ". " << report_paths.at(failed_benchmarks[i]) << std::endl;

            std::cout << std::endl
                      << hebench::Logging::GlobalLogger::log(ss.str());

            std::cout << std::endl
                      << "=================================" << std::endl
                      << IOS_MSG_INFO << hebench::Logging::GlobalLogger::log("Run Summary") << std::endl;
            ss = std::stringstream();
            ss << "Total benchmarks run: " << total_runs;
            std::cout << IOS_MSG_INFO << hebench::Logging::GlobalLogger::log(ss.str()) << std::endl;
            ss = std::stringstream();
            ss << "Success: " << total_runs - failed_benchmarks.size();

                ss << "* (validation skipped)";
            std::cout << IOS_MSG_INFO << hebench::Logging::GlobalLogger::log(ss.str()) << std::endl;
            ss = std::stringstream();
            ss << "Failed: " << failed_benchmarks.size();
            std::cout << IOS_MSG_INFO << hebench::Logging::GlobalLogger::log(ss.str()) << std::endl;

    catch (hebench::ArgsParser::HelpShown &)

    catch (std::exception &ex)

        ss = std::stringstream();
        ss << "An error occurred with message: " << std::endl

        std::cout << std::endl
                  << IOS_MSG_ERROR << hebench::Logging::GlobalLogger::log(ss.str()) << std::endl;

    hebench::APIBridge::DynamicLibLoad::unloadLibrary();

        std::cout << std::endl
                  << IOS_MSG_DONE << hebench::Logging::GlobalLogger::log(true, "Complete!") << std::endl;

        ss = std::stringstream();
        ss << "Terminated with errors. Exit code: " << retval;
        std::cout << std::endl
                  << IOS_MSG_FAILED << hebench::Logging::GlobalLogger::log(true, ss.str()) << std::endl;