Refactor arguments management for Experimentation

2025-01-18 18:26:34 +01:00
parent 7aaf6d1bf8
commit 3397d0962f
10 changed files with 325 additions and 420 deletions


@@ -0,0 +1,224 @@
#include "common/Datasets.h"
#include "common/DotEnv.h"
#include "common/Paths.h"
#include "main/Models.h"
#include "main/modelRegister.h"
#include "ArgumentsExperiment.h"
namespace platform {
ArgumentsExperiment::ArgumentsExperiment(argparse::ArgumentParser& program, experiment_t type) : type{ type }, arguments{ program }
{
auto env = platform::DotEnv();
auto datasets = platform::Datasets(false, platform::Paths::datasets());
auto& group = arguments.add_mutually_exclusive_group(true);
group.add_argument("-d", "--dataset")
.help("Dataset file name: " + datasets.toString())
.default_value("all")
.action([](const std::string& value) {
auto datasets = platform::Datasets(false, platform::Paths::datasets());
auto choices_datasets = datasets.getNames();
choices_datasets.push_back("all");
if (find(choices_datasets.begin(), choices_datasets.end(), value) != choices_datasets.end()) {
return value;
}
throw std::runtime_error("Dataset must be one of: " + datasets.toString());
}
);
group.add_argument("--datasets").nargs(1, 50).help("Datasets file names 1..50 separated by spaces").default_value(std::vector<std::string>());
group.add_argument("--datasets-file").default_value("").help("Datasets file name. Mutually exclusive with dataset. This file should contain a list of datasets to test.");
arguments.add_argument("--hyperparameters").default_value("{}").help("Hyperparameters passed to the model in Experiment");
arguments.add_argument("--hyper-file").default_value("").help("Hyperparameters file name." \
"Mutually exclusive with hyperparameters. This file should contain hyperparameters for each dataset in json format.");
arguments.add_argument("--hyper-best").default_value(false).help("Use best results of the model as source of hyperparameters").implicit_value(true);
arguments.add_argument("-m", "--model")
.help("Model to use: " + platform::Models::instance()->toString())
.action([](const std::string& value) {
static const std::vector<std::string> choices = platform::Models::instance()->getNames();
if (find(choices.begin(), choices.end(), value) != choices.end()) {
return value;
}
throw std::runtime_error("Model must be one of " + platform::Models::instance()->toString());
}
);
arguments.add_argument("--title").default_value("").help("Experiment title");
arguments.add_argument("--discretize").help("Discretize input dataset").default_value((bool)stoi(env.get("discretize"))).implicit_value(true);
auto valid_choices = env.valid_tokens("discretize_algo");
auto& disc_arg = arguments.add_argument("--discretize-algo").help("Algorithm to use in discretization. Valid values: " + env.valid_values("discretize_algo")).default_value(env.get("discretize_algo"));
for (auto choice : valid_choices) {
disc_arg.choices(choice);
}
valid_choices = env.valid_tokens("smooth_strat");
auto& smooth_arg = arguments.add_argument("--smooth-strat").help("Smooth strategy used in Bayes Network node initialization. Valid values: " + env.valid_values("smooth_strat")).default_value(env.get("smooth_strat"));
for (auto choice : valid_choices) {
smooth_arg.choices(choice);
}
auto& score_arg = arguments.add_argument("-s", "--score").help("Score to use. Valid values: " + env.valid_values("score")).default_value(env.get("score"));
valid_choices = env.valid_tokens("score");
for (auto choice : valid_choices) {
score_arg.choices(choice);
}
arguments.add_argument("--no-train-score").help("Don't compute train score").default_value(false).implicit_value(true);
arguments.add_argument("--quiet").help("Don't display detailed progress").default_value(false).implicit_value(true);
arguments.add_argument("--save").help("Save result (always save even if a dataset is supplied)").default_value(false).implicit_value(true);
arguments.add_argument("--stratified").help("If Stratified KFold is to be done").default_value((bool)stoi(env.get("stratified"))).implicit_value(true);
arguments.add_argument("-f", "--folds").help("Number of folds").default_value(stoi(env.get("n_folds"))).scan<'i', int>().action([](const std::string& value) {
try {
auto k = stoi(value);
if (k < 2) {
throw std::runtime_error("Number of folds must be greater than 1");
}
return k;
}
catch (const std::runtime_error& err) {
throw std::runtime_error(err.what());
}
catch (...) {
throw std::runtime_error("Number of folds must be an integer");
}});
auto seed_values = env.getSeeds();
arguments.add_argument("--seeds").nargs(1, 10).help("Random seeds. Set to -1 to have pseudo random").scan<'i', int>().default_value(seed_values);
if (type == experiment_t::NORMAL) {
arguments.add_argument("--generate-fold-files").help("generate fold information in datasets_experiment folder").default_value(false).implicit_value(true);
arguments.add_argument("--graph").help("generate graphviz dot files with the model").default_value(false).implicit_value(true);
}
}
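// Illustrative invocation of the options declared above (not part of this commit:
// the executable name b_main is assumed, and the model, dataset and score values
// are placeholders that the action lambdas validate at runtime against Models,
// Datasets and the .env tokens):
//   b_main -m TAN -d iris --discretize --stratified -f 5 -s accuracy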
void ArgumentsExperiment::parse_args(int argc, char** argv)
{
try {
arguments.parse_args(argc, argv);
}
catch (const std::exception& err) {
std::cerr << err.what() << std::endl;
std::cerr << arguments;
exit(1);
}
parse();
}
void ArgumentsExperiment::parse()
{
try {
file_name = arguments.get<std::string>("dataset");
file_names = arguments.get<std::vector<std::string>>("datasets");
datasets_file = arguments.get<std::string>("datasets-file");
model_name = arguments.get<std::string>("model");
discretize_dataset = arguments.get<bool>("discretize");
discretize_algo = arguments.get<std::string>("discretize-algo");
smooth_strat = arguments.get<std::string>("smooth-strat");
stratified = arguments.get<bool>("stratified");
quiet = arguments.get<bool>("quiet");
n_folds = arguments.get<int>("folds");
score = arguments.get<std::string>("score");
seeds = arguments.get<std::vector<int>>("seeds");
auto hyperparameters = arguments.get<std::string>("hyperparameters");
hyperparameters_json = json::parse(hyperparameters);
hyperparameters_file = arguments.get<std::string>("hyper-file");
no_train_score = arguments.get<bool>("no-train-score");
hyper_best = arguments.get<bool>("hyper-best");
if (hyper_best) {
// Build the best results file_name
hyperparameters_file = platform::Paths::results() + platform::Paths::bestResultsFile(score, model_name);
// ignore this parameter
hyperparameters = "{}";
} else {
if (hyperparameters_file != "" && hyperparameters != "{}") {
throw runtime_error("hyperparameters and hyper_file are mutually exclusive");
}
}
title = arguments.get<std::string>("title");
if (title == "" && file_name == "all") {
throw runtime_error("title is mandatory if all datasets are to be tested");
}
saveResults = arguments.get<bool>("save");
if (type == experiment_t::NORMAL) {
graph = arguments.get<bool>("graph");
generate_fold_files = arguments.get<bool>("generate-fold-files");
} else {
graph = false;
generate_fold_files = false;
}
}
catch (const std::exception& err) {
std::cerr << err.what() << std::endl;
std::cerr << arguments;
exit(1);
}
auto datasets = platform::Datasets(false, platform::Paths::datasets());
if (datasets_file != "") {
std::ifstream catalog(datasets_file);
if (catalog.is_open()) {
std::string line;
while (std::getline(catalog, line)) {
if (line.empty() || line[0] == '#') {
continue;
}
if (!datasets.isDataset(line)) {
cerr << "Dataset " << line << " not found" << std::endl;
exit(1);
}
filesToTest.push_back(line);
}
catalog.close();
saveResults = true;
if (title == "") {
title = "Test " + to_string(filesToTest.size()) + " datasets (" + datasets_file + ") "\
+ model_name + " " + to_string(n_folds) + " folds";
}
} else {
throw std::invalid_argument("Unable to open catalog file. [" + datasets_file + "]");
}
} else {
if (file_names.size() > 0) {
for (auto file : file_names) {
if (!datasets.isDataset(file)) {
cerr << "Dataset " << file << " not found" << std::endl;
exit(1);
}
}
filesToTest = file_names;
saveResults = true;
if (title == "") {
title = "Test " + to_string(file_names.size()) + " datasets " + model_name + " " + to_string(n_folds) + " folds";
}
} else {
if (file_name != "all") {
if (!datasets.isDataset(file_name)) {
cerr << "Dataset " << file_name << " not found" << std::endl;
exit(1);
}
if (title == "") {
title = "Test " + file_name + " " + model_name + " " + to_string(n_folds) + " folds";
}
filesToTest.push_back(file_name);
} else {
filesToTest = datasets.getNames();
saveResults = true;
}
}
}
if (hyperparameters_file != "") {
test_hyperparams = platform::HyperParameters(datasets.getNames(), hyperparameters_file, hyper_best);
} else {
test_hyperparams = platform::HyperParameters(datasets.getNames(), hyperparameters_json);
}
}
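// A note on the --datasets-file catalog parsed above: it is a plain-text list in which
// blank lines and lines starting with '#' are skipped, and every remaining line must be
// a dataset known to platform::Datasets. A hypothetical catalog (names are illustrative):
//   # datasets to benchmark, one per line
//   iris
//   glass
//   ecoli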
Experiment& ArgumentsExperiment::initializedExperiment()
{
auto env = platform::DotEnv();
experiment.setTitle(title).setLanguage("c++").setLanguageVersion("gcc 14.1.1");
experiment.setDiscretizationAlgorithm(discretize_algo).setSmoothSrategy(smooth_strat);
experiment.setDiscretized(discretize_dataset).setModel(model_name).setPlatform(env.get("platform"));
experiment.setStratified(stratified).setNFolds(n_folds).setScoreName(score);
experiment.setHyperparameters(test_hyperparams);
for (auto seed : seeds) {
experiment.addRandomSeed(seed);
}
experiment.setFilesToTest(filesToTest);
experiment.setQuiet(quiet);
experiment.setNoTrainScore(no_train_score);
experiment.setGenerateFoldFiles(generate_fold_files);
experiment.setGraph(graph);
return experiment;
}
}
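Most defaults above come from platform::DotEnv: the keys read here are discretize, discretize_algo, smooth_strat, score, stratified and n_folds, plus the seed list from getSeeds() and the platform name used in initializedExperiment(). A minimal sketch of such a file, assuming a conventional key=value layout (the exact syntax DotEnv expects, including how the seed list and the per-key valid tokens are encoded, is not shown in this diff, and every value below is illustrative):

discretize=0
discretize_algo=mdlp
smooth_strat=ORIGINAL
score=accuracy
stratified=0
n_folds=5
platform=workstation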


@@ -0,0 +1,38 @@
#ifndef ARGUMENTSEXPERIMENT_H
#define ARGUMENTSEXPERIMENT_H
#include <string>
#include <iostream>
#include <vector>
#include <argparse/argparse.hpp>
#include <nlohmann/json.hpp>
#include "Experiment.h"
namespace platform {
using json = nlohmann::ordered_json;
enum class experiment_t { NORMAL, GRID };
class ArgumentsExperiment {
public:
ArgumentsExperiment(argparse::ArgumentParser& program, experiment_t type);
~ArgumentsExperiment() = default;
std::vector<std::string> getFilesToTest() const { return filesToTest; }
void parse_args(int argc, char** argv);
void parse();
Experiment& initializedExperiment();
bool isQuiet() const { return quiet; }
bool haveToSaveResults() const { return saveResults; }
bool doGraph() const { return graph; }
private:
Experiment experiment;
experiment_t type;
argparse::ArgumentParser& arguments;
std::string file_name, model_name, title, hyperparameters_file, datasets_file, discretize_algo, smooth_strat, score;
json hyperparameters_json;
bool discretize_dataset, stratified, saveResults, quiet, no_train_score, generate_fold_files, graph, hyper_best;
std::vector<int> seeds;
std::vector<std::string> file_names;
std::vector<std::string> filesToTest;
platform::HyperParameters test_hyperparams;
int n_folds;
};
}
#endif
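Taken together, the two new files suggest a driver wired roughly as in the sketch below. This is hypothetical glue based only on the interface declared above; main(), the program name and the choice of experiment_t::NORMAL are assumptions, not code from this commit.

#include <argparse/argparse.hpp>
#include "ArgumentsExperiment.h"

int main(int argc, char** argv)
{
    argparse::ArgumentParser program("b_main");          // program name assumed for illustration
    platform::ArgumentsExperiment args(program, platform::experiment_t::NORMAL);
    args.parse_args(argc, argv);                         // parses, validates and exits on error
    auto& experiment = args.initializedExperiment();     // Experiment configured from the parsed options
    experiment.go();                                     // cross-validates every selected dataset
    if (args.doGraph()) {
        experiment.saveGraph();
    }
    if (args.haveToSaveResults()) {
        experiment.saveResult();
    }
    if (!args.isQuiet()) {
        experiment.report();
    }
    return 0;
}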


@@ -14,11 +14,11 @@ namespace platform {
result.save();
std::cout << "Result saved in " << Paths::results() << result.getFilename() << std::endl;
}
- void Experiment::report(bool classification_report)
+ void Experiment::report()
{
ReportConsole report(result.getJson());
report.show();
- if (classification_report) {
+ if (filesToTest.size() == 1) {
std::cout << report.showClassificationReport(Colors::BLUE());
}
}
@@ -43,9 +43,9 @@ namespace platform {
}
}
}
- void Experiment::go(std::vector<std::string> filesToProcess, bool quiet, bool no_train_score, bool generate_fold_files, bool graph)
+ void Experiment::go()
{
- for (auto fileName : filesToProcess) {
+ for (auto fileName : filesToTest) {
if (fileName.size() > max_name)
max_name = fileName.size();
}
@@ -64,10 +64,10 @@ namespace platform {
std::cout << " --- " << string(max_name, '-') << " ----- ----- ---- " << string(4 + 3 * nfolds, '-') << " ----------" << Colors::RESET() << std::endl;
}
int num = 0;
- for (auto fileName : filesToProcess) {
+ for (auto fileName : filesToTest) {
if (!quiet)
std::cout << " " << setw(3) << right << num++ << " " << setw(max_name) << left << fileName << right << flush;
- cross_validation(fileName, quiet, no_train_score, generate_fold_files, graph);
+ cross_validation(fileName);
if (!quiet)
std::cout << std::endl;
}
@@ -139,7 +139,7 @@ namespace platform {
file << output.dump(4);
file.close();
}
void Experiment::cross_validation(const std::string& fileName, bool quiet, bool no_train_score, bool generate_fold_files, bool graph)
void Experiment::cross_validation(const std::string& fileName)
{
//
// Load dataset and prepare data


@@ -20,7 +20,6 @@ namespace platform {
Experiment& setTitle(const std::string& title) { this->result.setTitle(title); return *this; }
Experiment& setModelVersion(const std::string& model_version) { this->result.setModelVersion(model_version); return *this; }
Experiment& setModel(const std::string& model) { this->result.setModel(model); return *this; }
- std::string getModel() const { return result.getModel(); }
Experiment& setLanguage(const std::string& language) { this->result.setLanguage(language); return *this; }
Experiment& setDiscretizationAlgorithm(const std::string& discretization_algo)
{
@@ -28,7 +27,8 @@ namespace platform {
}
Experiment& setSmoothSrategy(const std::string& smooth_strategy)
{
- this->smooth_strategy = smooth_strategy; this->result.setSmoothStrategy(smooth_strategy);
+ this->smooth_strategy = smooth_strategy;
+ this->result.setSmoothStrategy(smooth_strategy);
if (smooth_strategy == "ORIGINAL")
smooth_type = bayesnet::Smoothing_t::ORIGINAL;
else if (smooth_strategy == "LAPLACE")
@@ -50,18 +50,32 @@ namespace platform {
Experiment& setDuration(float duration) { this->result.setDuration(duration); return *this; }
Experiment& setHyperparameters(const HyperParameters& hyperparameters_) { this->hyperparameters = hyperparameters_; return *this; }
HyperParameters& getHyperParameters() { return hyperparameters; }
- void cross_validation(const std::string& fileName, bool quiet, bool no_train_score, bool generate_fold_files, bool graph);
- void go(std::vector<std::string> filesToProcess, bool quiet, bool no_train_score, bool generate_fold_files, bool graph);
+ std::string getModel() const { return result.getModel(); }
+ std::string getScore() const { return result.getScoreName(); }
+ bool isDiscretized() const { return discretized; }
+ bool isStratified() const { return stratified; }
+ bool isQuiet() const { return quiet; }
+ std::string getSmoothStrategy() const { return smooth_strategy; }
+ int getNFolds() const { return nfolds; }
+ std::vector<int> getRandomSeeds() const { return randomSeeds; }
+ void cross_validation(const std::string& fileName);
+ void go();
void saveResult();
void show();
void saveGraph();
- void report(bool classification_report = false);
+ void report();
+ void setFilesToTest(const std::vector<std::string>& filesToTest) { this->filesToTest = filesToTest; }
+ void setQuiet(bool quiet) { this->quiet = quiet; }
+ void setNoTrainScore(bool no_train_score) { this->no_train_score = no_train_score; }
+ void setGenerateFoldFiles(bool generate_fold_files) { this->generate_fold_files = generate_fold_files; }
+ void setGraph(bool graph) { this->graph = graph; }
private:
score_t parse_score() const;
Result result;
- bool discretized{ false }, stratified{ false };
+ bool discretized{ false }, stratified{ false }, generate_fold_files{ false }, graph{ false }, quiet{ false }, no_train_score{ false };
std::vector<PartialResult> results;
std::vector<int> randomSeeds;
+ std::vector<std::string> filesToTest;
std::string discretization_algo;
std::string smooth_strategy;
bayesnet::Smoothing_t smooth_type{ bayesnet::Smoothing_t::NONE };
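The practical effect of these header changes is that the per-call flags become Experiment state. A hypothetical call site, with the old shape reconstructed from the removed declarations, might change as follows:

#include <string>
#include <vector>
#include "Experiment.h"

// Hypothetical call site, not code from this commit.
void run(platform::Experiment& experiment, const std::vector<std::string>& filesToTest,
         bool quiet, bool no_train_score, bool generate_fold_files, bool graph)
{
    // Before: every flag was threaded through each call (reconstructed from the removed signatures):
    //   experiment.go(filesToTest, quiet, no_train_score, generate_fold_files, graph);
    //   experiment.report(filesToTest.size() == 1);
    // After: configure once, then run.
    experiment.setFilesToTest(filesToTest);
    experiment.setQuiet(quiet);
    experiment.setNoTrainScore(no_train_score);
    experiment.setGenerateFoldFiles(generate_fold_files);
    experiment.setGraph(graph);
    experiment.go();
    experiment.report(); // now prints the classification report by itself when a single dataset was tested
}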