#include <chrono>
#include <filesystem>
#include <fstream>
#include <iostream>
#include <set>
#include <string>
#include <thread>
#include <vector>

#include <boost/program_options.hpp>

// Test framework headers providing TestData, the Metric classes and the
// metric factory.
#include "Metric.h"
#include "TestData.h"

#cmakedefine NEKTAR_TEST_FORCEMPIEXEC 1

using namespace std;
namespace po = boost::program_options;
namespace fs = std::filesystem;
#ifdef _WIN32
// Windows does not provide setenv(); emulate it with the secure CRT functions.
int setenv(const char *name, const char *value, int overwrite)
{
    if (!overwrite)
    {
        size_t envsize  = 0;
        errno_t errcode = getenv_s(&envsize, NULL, 0, name);
        if (errcode || envsize)
        {
            return errcode;
        }
    }
    return _putenv_s(name, value);
}
#endif
int main(int argc, char *argv[])
{
    int status = 0;

    // Set up command-line options.
    po::options_description desc("Available options");
    desc.add_options()("help,h", "Produce this help message.")(
        "verbose,v", "Turn on verbosity.")(
        "generate-metric,g", po::value<vector<int>>(),
        "Generate a single metric.")(
        "generate-all-metrics,a", "Generate all metrics.")(
        "executable,e", po::value<string>(), "Use specified executable.");

    po::options_description hidden("Hidden options");
    hidden.add_options()("input-file", po::value<string>(), "Input filename");

    po::options_description cmdline_options("Command-line options");
    cmdline_options.add(hidden).add(desc);

    po::options_description visible("Allowed options");
    visible.add(desc);

    po::positional_options_description p;
    p.add("input-file", -1);

    po::variables_map vm;
    // Parse the command line.
    try
    {
        po::store(po::command_line_parser(argc, argv)
                      .options(cmdline_options)
                      .positional(p)
                      .run(),
                  vm);
        po::notify(vm);
    }
    catch (const exception &e)
    {
        cerr << e.what() << endl;
        return 1;
    }
    if (vm.count("help") || vm.count("input-file") != 1)
    {
        cerr << "Usage: Tester [options] input-file.tst" << endl;
        cout << desc;
        return 1;
    }

    bool verbose = vm.count("verbose");
    vector<int> metricGenVec;
    if (vm.count("generate-metric"))
    {
        metricGenVec = vm["generate-metric"].as<vector<int>>();
    }
    set<int> metricGen(metricGenVec.begin(), metricGenVec.end());

    const fs::path specFile(vm["input-file"].as<string>());
    // Directory containing the test definition and its dependent files.
    fs::path specPath = specFile.parent_path();
    if (specPath.empty())
    {
        specPath = fs::current_path();
    }

    string specFileStem = specFile.stem().string();

    // Unique master directory which holds the master output/error files and
    // one working directory per run.
    const fs::path masterDir = fs::current_path() / UniquePath(specFileStem);

    // Directory from which the Tester was invoked.
    const fs::path startDir = fs::current_path();
    try
    {
        if (verbose)
        {
            cerr << "Reading test file definition: " << specFile << endl;
        }

        // Parse the test definition file.
        TestData file(specFile, vm);

        if (verbose && file.GetNumMetrics() > 0)
        {
            cerr << "Creating metrics:" << endl;
        }
        // Create the metric objects for this test.
        vector<MetricSharedPtr> metrics;
        for (unsigned int i = 0; i < file.GetNumMetrics(); ++i)
        {
            set<int>::iterator it = metricGen.find(file.GetMetricId(i));
            bool genMetric =
                it != metricGen.end() || (vm.count("generate-all-metrics") > 0);

            metrics.push_back(GetMetricFactory().CreateInstance(
                file.GetMetricType(i), file.GetMetric(i), genMetric));

            if (verbose)
            {
                cerr << " - ID " << metrics.back()->GetID() << ": "
                     << metrics.back()->GetType() << endl;
            }

            if (it != metricGen.end())
            {
                metricGen.erase(it);
            }
        }
        // Any requested metric IDs left over at this point do not exist in the
        // test definition.
        if (metricGen.size() != 0)
        {
            string s = metricGen.size() == 1 ? "" : "s";
            set<int>::iterator it;
            cerr << "Unable to find metric" + s + " with ID" + s + " ";
            for (it = metricGen.begin(); it != metricGen.end(); ++it)
            {
                cerr << *it << " ";
            }
            cerr << endl;
            return 1;
        }
        // Remove any stale master directory, create a fresh one, and make it
        // the current working directory.
        if (fs::exists(masterDir))
        {
            fs::remove_all(masterDir);
        }

        if (verbose)
        {
            cerr << "Creating master directory: " << masterDir << endl;
        }

        fs::create_directory(masterDir);
        fs::current_path(masterDir);
        // Master output and error files accumulate the output of every run.
        fstream masterOut("master.out", ios::out | ios::in | ios::trunc);
        fstream masterErr("master.err", ios::out | ios::in | ios::trunc);

        if (masterOut.bad() || masterErr.bad())
        {
            cerr << "One or more master output files are unreadable." << endl;
            return 1;
        }
        // One temporary working directory is created per run.
        vector<fs::path> tmpWorkingDirs;

        for (unsigned int i = 0; i < file.GetNumRuns(); ++i)
        {
            if (verbose)
            {
                cerr << "Starting run " << i << "." << endl;
            }

            const fs::path tmpDir =
                masterDir / fs::path("run" + std::to_string(i));
            tmpWorkingDirs.push_back(tmpDir);

            if (verbose)
            {
                cerr << "Creating working directory: " << tmpDir << endl;
            }

            fs::create_directory(tmpDir);
            fs::current_path(tmpDir);
            if (verbose && file.GetNumDependentFiles())
            {
                cerr << "Copying required files: " << endl;
            }
            // Copy the files this test depends on from the test definition
            // directory into the working directory.
            for (unsigned int j = 0; j < file.GetNumDependentFiles(); ++j)
            {
                fs::path source_file(file.GetDependentFile(j).m_filename);

                fs::path source = specPath / source_file;
                fs::path dest   = tmpDir / source_file.filename();
                if (verbose)
                {
                    cerr << " - " << source << " -> " << dest << endl;
                }

                if (fs::is_directory(source))
                {
                    // If the source is a directory, create it and copy its
                    // contents file by file.
                    fs::create_directory(dest);
                    for (const auto &dirEnt :
                         fs::recursive_directory_iterator{source})
                    {
                        fs::path newdest = dest / dirEnt.path().filename();
                        fs::copy_file(dirEnt.path(), newdest);
                    }
                }
                else
                {
                    fs::copy_file(source, dest);
                }
            }
            // Copy the optimisation file test.opt from the test definition
            // directory, if one is provided.
            fs::path source_file("test.opt");
            fs::path source = specPath / source_file;
            bool HaveOptFile = false;
            if (fs::exists(source))
            {
                fs::path dest = tmpDir / source_file.filename();
                if (verbose)
                {
                    cerr << " - " << source << " -> " << dest << endl;
                }

                if (fs::is_directory(source))
                {
                    fs::create_directory(dest);
                    for (const auto &dirEnt :
                         fs::recursive_directory_iterator{source})
                    {
                        fs::path newdest = dest / dirEnt.path().filename();
                        fs::copy_file(dirEnt.path(), newdest);
                    }
                }
                else
                {
                    fs::copy_file(source, dest);
                }

                HaveOptFile = true;
            }
            // Make the NekPy bindings from this build tree visible to any
            // Python-based tests.
            setenv("PYTHONPATH", "@NEKPY_BASE_DIR@", true);

            // Construct the command to run for this test; the run's stdout and
            // stderr are redirected to output.out and output.err below.
            string command;
            bool mpiAdded = false;
#ifdef NEKTAR_TEST_FORCEMPIEXEC
            // MPI execution is forced at configure time: prepend mpiexec to
            // the command, optionally pointing it at a generated hostfile.
            command += "\"@MPIEXEC@\" ";
            if (std::string("@NEKTAR_TEST_USE_HOSTFILE@") == "ON")
            {
                command += "-hostfile hostfile ";
                if (system("echo 'localhost slots=12' > hostfile"))
                {
                    cerr << "Unable to write 'hostfile' in path '"
                         << fs::current_path() << endl;
                }
            }
            command += "--tag-output ";
            mpiAdded = true;
#endif
            // Commands that request a parallel run are likewise launched
            // through mpiexec, with the number of processes passed via the
            // implementation's numproc flag. The guard only prevents mpiexec
            // from being added twice.
            if (!mpiAdded)
            {
                command += "\"@MPIEXEC@\" ";
                if (std::string("@NEKTAR_TEST_USE_HOSTFILE@") == "ON")
                {
                    command += "-hostfile hostfile ";
                }
                command += "@MPIEXEC_NUMPROC_FLAG@ ";
            }
            // The resolved executable (or Python script) must exist in the
            // working tree.
            if (!fs::exists(execPath))
            {
                cerr << "Cannot find executable: " << execPath << endl;
                status = 1;
            }

            // Python-based tests are run through the configured interpreter.
            command += "@PYTHON_EXECUTABLE@ ";
            // Append the executable/script path in portable form (see
            // PortablePath below).
            std::string pathString = PortablePath(execPath);
            command += pathString;

            // Pass the optimisation file when one was copied in above.
            if (HaveOptFile)
            {
                command += " --use-opt-file test.opt ";
            }

            // Redirect the run's stdout and stderr so the metrics can examine
            // them afterwards.
            command += " 1>output.out 2>output.err";
            if (verbose)
            {
                cerr << "Running command: " << command << endl;
            }

            // Run the test; a non-zero return from system() marks a failure.
            if (system(command.c_str()))
            {
                cerr << "Error occurred running test:" << endl;
                cerr << "Command: " << command << endl;
                status = 1;
                break;
            }

            // Both output files must exist for the metrics to be checked.
            if (!(fs::exists("output.out") && fs::exists("output.err")))
            {
                cerr << "One or more test output files are missing." << endl;
                status = 1;
                break;
            }
            // Open the output files for this run and append them to the
            // master output and error streams.
            ifstream vStdout("output.out");
            ifstream vStderr("output.err");
            if (vStdout.bad() || vStderr.bad())
            {
                cerr << "One or more test output files are unreadable." << endl;
                status = 1;
                break;
            }

            if (verbose)
            {
                cerr << "Appending run " << i << " output and error to master."
                     << endl;
            }

            string line;
            while (getline(vStdout, line))
            {
                masterOut << line << endl;
            }
            while (getline(vStderr, line))
            {
                masterErr << line << endl;
            }
        }
        // Warn about metrics that cannot average results over several runs.
        for (int i = 0; i < metrics.size(); ++i)
        {
            if (!metrics[i]->SupportsAverage() && file.GetNumRuns() > 1)
            {
                cerr << "WARNING: Metric " << metrics[i]->GetType()
                     << " does not support multiple runs. Test may yield "
                        "unexpected results."
                     << endl;
            }
        }
        if (verbose && metrics.size())
        {
            cerr << "Checking metrics:" << endl;
        }

        // Check (or regenerate) each metric against the accumulated output.
        for (int i = 0; i < metrics.size(); ++i)
        {
            bool gen =
                metricGen.find(metrics[i]->GetID()) != metricGen.end() ||
                (vm.count("generate-all-metrics") > 0);

            // Rewind the master streams before each metric reads them.
            masterOut.seekg(0, ios::beg);
            masterErr.seekg(0, ios::beg);

            if (verbose)
            {
                cerr << " - " << (gen ? "generating" : "checking")
                     << " metric " << metrics[i]->GetID() << " ("
                     << metrics[i]->GetType() << ")... ";
            }

            if (!metrics[i]->Test(masterOut, masterErr))
            {
                status = 1;
                if (verbose)
                {
                    cerr << "failed!" << endl;
                }
            }
            else if (verbose)
            {
                cerr << "passed" << endl;
            }
        }
        if (verbose)
        {
            cerr << endl << endl;
        }

        // On failure (or in verbose mode) dump the accumulated output and
        // error streams.
        if (status == 1 || verbose)
        {
            string line;

            masterOut.seekg(0, ios::beg);
            masterErr.seekg(0, ios::beg);

            cout << "=== Output ===" << endl;
            while (masterOut.good())
            {
                getline(masterOut, line);
                cout << line << endl;
            }

            cout << "=== Errors ===" << endl;
            while (masterErr.good())
            {
                getline(masterErr, line);
                cout << line << endl;
            }
        }
        // Return to the starting directory and remove the working directory.
        // Some filesystems hold locks on the output files briefly, so removal
        // is retried after a short sleep when it fails.
        fs::current_path(startDir);

        if (verbose)
        {
            cerr << "Removing working directory" << endl;
        }

        while (true)
        {
            try
            {
                fs::remove_all(masterDir);
                break;
            }
            catch (const fs::filesystem_error &e)
            {
                using namespace std::chrono_literals;
                std::this_thread::sleep_for(1ms);
                cout << "Locked files encountered. "
                     << "Retrying after 1ms..." << endl;
            }
        }
        // If metrics were regenerated, write the updated values back to the
        // test definition file.
        if (vm.count("generate-metric") > 0 ||
            vm.count("generate-all-metrics") > 0)
        {
            file.SaveFile();
        }

        return status;
    }
    catch (const fs::filesystem_error &e)
    {
        cerr << "Filesystem operation error occurred:" << endl;
        cerr << " " << e.what() << endl;
        cerr << " Files left in " << masterDir.string() << endl;
    }
    catch (const TesterException &e)
    {
        cerr << "Error occurred during test:" << endl;
        cerr << " " << e.what() << endl;
        cerr << " Files left in " << masterDir.string() << endl;
    }
    catch (const std::exception &e)
    {
        cerr << "Unhandled exception during test:" << endl;
        cerr << " " << e.what() << endl;
        cerr << " Files left in " << masterDir.string() << endl;
    }
    catch (...)
    {
        cerr << "Unknown error during test" << endl;
        cerr << " Files left in " << masterDir.string() << endl;
    }

    return 1;
}
Declarations referenced by the listing:

#define ASSERTL0(condition, msg)

int main(int argc, char *argv[])

MetricSharedPtr CreateInstance(std::string key, TiXmlElement *elmt, bool generate)

The TestData class is responsible for parsing a test XML file and storing the data.
    DependentFile GetDependentFile(unsigned int pId) const
    unsigned int GetMetricId(unsigned int pId)
        Returns the ID of the metric at the given index.
    unsigned int GetNumDependentFiles() const
        Returns the number of dependent files required for the test.
    unsigned int GetNumMetrics() const
        Returns the number of metrics to be collected for the test.
    unsigned int GetNumRuns() const
        Returns the number of runs to be performed for the test.
    TiXmlElement *GetMetric(unsigned int pId)
        Returns a pointer to the TiXmlElement object representing the metric for a given metric ID.
    unsigned int GetNumCommands() const
    std::string GetMetricType(unsigned int pId) const
        Returns the type of metric to be collected for a given metric ID.
    const Command &GetCommand(unsigned int pId) const

static std::string PortablePath(const fs::path &path)
    Create a portable path string for a std::filesystem path on different platforms.

static fs::path UniquePath(std::string specFileStem)
    Create a unique (random) path, based on an input stem string. The returned string is a filename or directory...

MetricFactory &GetMetricFactory()

CommandType m_commandType

TesterException
    Subclass of std::runtime_error to handle exceptions raised by Tester.
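As an illustration of how these pieces fit together, the following is a minimal sketch, not code from the Tester itself: it builds one metric object per metric defined in an already-parsed TestData instance and checks each against accumulated output and error streams, using only the accessors listed above. The helper name BuildAndCheckMetrics is invented for illustration, and the sketch assumes the same headers as the listing.

// Minimal sketch only; the function name is hypothetical.
std::vector<MetricSharedPtr> BuildAndCheckMetrics(TestData &file,
                                                  std::fstream &masterOut,
                                                  std::fstream &masterErr)
{
    std::vector<MetricSharedPtr> metrics;

    // One metric object per metric element defined in the .tst file. Passing
    // generate = false checks runs against the stored values rather than
    // regenerating them.
    for (unsigned int i = 0; i < file.GetNumMetrics(); ++i)
    {
        metrics.push_back(GetMetricFactory().CreateInstance(
            file.GetMetricType(i), file.GetMetric(i), false));
    }

    // Each metric re-reads the accumulated output from the beginning.
    for (auto &metric : metrics)
    {
        masterOut.seekg(0, std::ios::beg);
        masterErr.seekg(0, std::ios::beg);
        if (!metric->Test(masterOut, masterErr))
        {
            std::cerr << "Metric " << metric->GetID() << " ("
                      << metric->GetType() << ") failed" << std::endl;
        }
    }

    return metrics;
}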