Add hyperparameters management in experiments

Ricardo Montañana Gómez 2023-08-20 17:57:38 +02:00
parent 7a6ec73d63
commit 4964aab722
Signed by: rmontanana
GPG Key ID: 46064262FD9A7ADE
17 changed files with 141 additions and 117 deletions
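
The change threads a JSON object from a new --hyperparameters command-line option through platform::Experiment and Result down to the classifiers, which receive it via a new setHyperparameters() virtual on the abstract base class in Base.h; BoostAODE is the first model that actually reads a value from it ("repeatSparent"), while the other models override the hook with an empty body. A minimal usage sketch of the classifier-side hook (hypothetical caller code, not part of this commit; the registered model name and the fit() arguments are assumed from sample.cc below):

    #include <nlohmann/json.hpp>
    // Hypothetical driver snippet: parse a JSON string and hand it to a classifier
    // created through the Models factory before training.
    auto hyperparameters = nlohmann::json::parse(R"({"repeatSparent": true})");
    auto clf = platform::Models::instance()->create("BoostAODE"); // registered name assumed
    clf->setHyperparameters(hyperparameters);                     // new virtual introduced here
    clf->fit(Xd, y, features, className, states);                 // same fit call as in sample.cc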

View File

@@ -3,5 +3,6 @@ include_directories(${BayesNet_SOURCE_DIR}/src/BayesNet)
 include_directories(${BayesNet_SOURCE_DIR}/lib/Files)
 include_directories(${BayesNet_SOURCE_DIR}/lib/mdlp)
 include_directories(${BayesNet_SOURCE_DIR}/lib/argparse/include)
+include_directories(${BayesNet_SOURCE_DIR}/lib/json/include)
 add_executable(BayesNetSample sample.cc ${BayesNet_SOURCE_DIR}/src/Platform/Folding.cc ${BayesNet_SOURCE_DIR}/src/Platform/Models.cc)
 target_link_libraries(BayesNetSample BayesNet ArffFiles mdlp "${TORCH_LIBRARIES}")

View File

@@ -3,6 +3,7 @@
 #include <string>
 #include <map>
 #include <argparse/argparse.hpp>
+#include <nlohmann/json.hpp>
 #include "ArffFiles.h"
 #include "BayesMetrics.h"
 #include "CPPFImdlp.h"
@@ -141,111 +142,97 @@ int main(int argc, char** argv)
     /*
     * Begin Processing
     */
-    auto ypred = torch::tensor({ 1,2,3,2,2,3,4,5,2,1 });
-    auto y = torch::tensor({ 0,0,0,0,2,3,4,0,0,0 });
-    auto weights = torch::ones({ 10 }, kDouble);
-    auto mask = ypred == y;
-    cout << "ypred:" << ypred << endl;
-    cout << "y:" << y << endl;
-    cout << "weights:" << weights << endl;
-    cout << "mask:" << mask << endl;
-    double value_to_add = 0.5;
-    weights += mask.to(torch::kDouble) * value_to_add;
-    cout << "New weights:" << weights << endl;
-    auto masked_weights = weights * mask.to(weights.dtype());
-    double sum_of_weights = masked_weights.sum().item<double>();
-    cout << "Sum of weights: " << sum_of_weights << endl;
-    //weights.index_put_({ mask }, weights + 10);
-    // auto handler = ArffFiles();
-    // handler.load(complete_file_name, class_last);
-    // // Get Dataset X, y
-    // vector<mdlp::samples_t>& X = handler.getX();
-    // mdlp::labels_t& y = handler.getY();
-    // // Get className & Features
-    // auto className = handler.getClassName();
-    // vector<string> features;
-    // auto attributes = handler.getAttributes();
-    // transform(attributes.begin(), attributes.end(), back_inserter(features),
-    //     [](const pair<string, string>& item) { return item.first; });
-    // // Discretize Dataset
-    // auto [Xd, maxes] = discretize(X, y, features);
-    // maxes[className] = *max_element(y.begin(), y.end()) + 1;
-    // map<string, vector<int>> states;
-    // for (auto feature : features) {
-    //     states[feature] = vector<int>(maxes[feature]);
-    // }
-    // states[className] = vector<int>(maxes[className]);
-    // auto clf = platform::Models::instance()->create(model_name);
-    // clf->fit(Xd, y, features, className, states);
-    // if (dump_cpt) {
-    //     cout << "--- CPT Tables ---" << endl;
-    //     clf->dump_cpt();
-    // }
-    // auto lines = clf->show();
-    // for (auto line : lines) {
-    //     cout << line << endl;
-    // }
-    // cout << "--- Topological Order ---" << endl;
-    // auto order = clf->topological_order();
-    // for (auto name : order) {
-    //     cout << name << ", ";
-    // }
-    // cout << "end." << endl;
-    // auto score = clf->score(Xd, y);
-    // cout << "Score: " << score << endl;
-    // auto graph = clf->graph();
-    // auto dot_file = model_name + "_" + file_name;
-    // ofstream file(dot_file + ".dot");
-    // file << graph;
-    // file.close();
-    // cout << "Graph saved in " << model_name << "_" << file_name << ".dot" << endl;
-    // cout << "dot -Tpng -o " + dot_file + ".png " + dot_file + ".dot " << endl;
-    // string stratified_string = stratified ? " Stratified" : "";
-    // cout << nFolds << " Folds" << stratified_string << " Cross validation" << endl;
-    // cout << "==========================================" << endl;
-    // torch::Tensor Xt = torch::zeros({ static_cast<int>(Xd.size()), static_cast<int>(Xd[0].size()) }, torch::kInt32);
-    // torch::Tensor yt = torch::tensor(y, torch::kInt32);
-    // for (int i = 0; i < features.size(); ++i) {
-    //     Xt.index_put_({ i, "..." }, torch::tensor(Xd[i], torch::kInt32));
-    // }
-    // float total_score = 0, total_score_train = 0, score_train, score_test;
-    // Fold* fold;
-    // if (stratified)
-    //     fold = new StratifiedKFold(nFolds, y, seed);
-    // else
-    //     fold = new KFold(nFolds, y.size(), seed);
-    // for (auto i = 0; i < nFolds; ++i) {
-    //     auto [train, test] = fold->getFold(i);
-    //     cout << "Fold: " << i + 1 << endl;
-    //     if (tensors) {
-    //         auto ttrain = torch::tensor(train, torch::kInt64);
-    //         auto ttest = torch::tensor(test, torch::kInt64);
-    //         torch::Tensor Xtraint = torch::index_select(Xt, 1, ttrain);
-    //         torch::Tensor ytraint = yt.index({ ttrain });
-    //         torch::Tensor Xtestt = torch::index_select(Xt, 1, ttest);
-    //         torch::Tensor ytestt = yt.index({ ttest });
-    //         clf->fit(Xtraint, ytraint, features, className, states);
-    //         auto temp = clf->predict(Xtraint);
-    //         score_train = clf->score(Xtraint, ytraint);
-    //         score_test = clf->score(Xtestt, ytestt);
-    //     } else {
-    //         auto [Xtrain, ytrain] = extract_indices(train, Xd, y);
-    //         auto [Xtest, ytest] = extract_indices(test, Xd, y);
-    //         clf->fit(Xtrain, ytrain, features, className, states);
-    //         score_train = clf->score(Xtrain, ytrain);
-    //         score_test = clf->score(Xtest, ytest);
-    //     }
-    //     if (dump_cpt) {
-    //         cout << "--- CPT Tables ---" << endl;
-    //         clf->dump_cpt();
-    //     }
-    //     total_score_train += score_train;
-    //     total_score += score_test;
-    //     cout << "Score Train: " << score_train << endl;
-    //     cout << "Score Test : " << score_test << endl;
-    //     cout << "-------------------------------------------------------------------------------" << endl;
-    // }
-    // cout << "**********************************************************************************" << endl;
-    // cout << "Average Score Train: " << total_score_train / nFolds << endl;
-    // cout << "Average Score Test : " << total_score / nFolds << endl;return 0;
+    weights.index_put_({ mask }, weights + 10);
+    auto handler = ArffFiles();
+    handler.load(complete_file_name, class_last);
+    // Get Dataset X, y
+    vector<mdlp::samples_t>& X = handler.getX();
+    mdlp::labels_t& y = handler.getY();
+    // Get className & Features
+    auto className = handler.getClassName();
+    vector<string> features;
+    auto attributes = handler.getAttributes();
+    transform(attributes.begin(), attributes.end(), back_inserter(features),
+        [](const pair<string, string>& item) { return item.first; });
+    // Discretize Dataset
+    auto [Xd, maxes] = discretize(X, y, features);
+    maxes[className] = *max_element(y.begin(), y.end()) + 1;
+    map<string, vector<int>> states;
+    for (auto feature : features) {
+        states[feature] = vector<int>(maxes[feature]);
+    }
+    states[className] = vector<int>(maxes[className]);
+    auto clf = platform::Models::instance()->create(model_name);
+    clf->fit(Xd, y, features, className, states);
+    if (dump_cpt) {
+        cout << "--- CPT Tables ---" << endl;
+        clf->dump_cpt();
+    }
+    auto lines = clf->show();
+    for (auto line : lines) {
+        cout << line << endl;
+    }
+    cout << "--- Topological Order ---" << endl;
+    auto order = clf->topological_order();
+    for (auto name : order) {
+        cout << name << ", ";
+    }
+    cout << "end." << endl;
+    auto score = clf->score(Xd, y);
+    cout << "Score: " << score << endl;
+    auto graph = clf->graph();
+    auto dot_file = model_name + "_" + file_name;
+    ofstream file(dot_file + ".dot");
+    file << graph;
+    file.close();
+    cout << "Graph saved in " << model_name << "_" << file_name << ".dot" << endl;
+    cout << "dot -Tpng -o " + dot_file + ".png " + dot_file + ".dot " << endl;
+    string stratified_string = stratified ? " Stratified" : "";
+    cout << nFolds << " Folds" << stratified_string << " Cross validation" << endl;
+    cout << "==========================================" << endl;
+    torch::Tensor Xt = torch::zeros({ static_cast<int>(Xd.size()), static_cast<int>(Xd[0].size()) }, torch::kInt32);
+    torch::Tensor yt = torch::tensor(y, torch::kInt32);
+    for (int i = 0; i < features.size(); ++i) {
+        Xt.index_put_({ i, "..." }, torch::tensor(Xd[i], torch::kInt32));
+    }
+    float total_score = 0, total_score_train = 0, score_train, score_test;
+    Fold* fold;
+    if (stratified)
+        fold = new StratifiedKFold(nFolds, y, seed);
+    else
+        fold = new KFold(nFolds, y.size(), seed);
+    for (auto i = 0; i < nFolds; ++i) {
+        auto [train, test] = fold->getFold(i);
+        cout << "Fold: " << i + 1 << endl;
+        if (tensors) {
+            auto ttrain = torch::tensor(train, torch::kInt64);
+            auto ttest = torch::tensor(test, torch::kInt64);
+            torch::Tensor Xtraint = torch::index_select(Xt, 1, ttrain);
+            torch::Tensor ytraint = yt.index({ ttrain });
+            torch::Tensor Xtestt = torch::index_select(Xt, 1, ttest);
+            torch::Tensor ytestt = yt.index({ ttest });
+            clf->fit(Xtraint, ytraint, features, className, states);
+            auto temp = clf->predict(Xtraint);
+            score_train = clf->score(Xtraint, ytraint);
+            score_test = clf->score(Xtestt, ytestt);
+        } else {
+            auto [Xtrain, ytrain] = extract_indices(train, Xd, y);
+            auto [Xtest, ytest] = extract_indices(test, Xd, y);
+            clf->fit(Xtrain, ytrain, features, className, states);
+            score_train = clf->score(Xtrain, ytrain);
+            score_test = clf->score(Xtest, ytest);
+        }
+        if (dump_cpt) {
+            cout << "--- CPT Tables ---" << endl;
+            clf->dump_cpt();
+        }
+        total_score_train += score_train;
+        total_score += score_test;
+        cout << "Score Train: " << score_train << endl;
+        cout << "Score Test : " << score_test << endl;
+        cout << "-------------------------------------------------------------------------------" << endl;
+    }
+    cout << "**********************************************************************************" << endl;
+    cout << "Average Score Train: " << total_score_train / nFolds << endl;
+    cout << "Average Score Test : " << total_score / nFolds << endl;return 0;
 }

View File

@@ -10,6 +10,7 @@ namespace bayesnet {
         AODE();
         virtual ~AODE() {};
         vector<string> graph(const string& title = "AODE") const override;
+        void setHyperparameters(nlohmann::json& hyperparameters) override {};
     };
 }
 #endif

View File

@@ -16,6 +16,7 @@ namespace bayesnet {
         virtual ~AODELd() = default;
         vector<string> graph(const string& name = "AODE") const override;
         static inline string version() { return "0.0.1"; };
+        void setHyperparameters(nlohmann::json& hyperparameters) override {};
     };
 }
 #endif // !AODELD_H

View File

@ -1,6 +1,7 @@
#ifndef BASE_H #ifndef BASE_H
#define BASE_H #define BASE_H
#include <torch/torch.h> #include <torch/torch.h>
#include <nlohmann/json.hpp>
#include <vector> #include <vector>
namespace bayesnet { namespace bayesnet {
using namespace std; using namespace std;
@ -27,6 +28,7 @@ namespace bayesnet {
const string inline getVersion() const { return "0.1.0"; }; const string inline getVersion() const { return "0.1.0"; };
vector<string> virtual topological_order() = 0; vector<string> virtual topological_order() = 0;
void virtual dump_cpt()const = 0; void virtual dump_cpt()const = 0;
virtual void setHyperparameters(nlohmann::json& hyperparameters) = 0;
}; };
} }
#endif #endif

View File

@@ -2,11 +2,17 @@
 #include "BayesMetrics.h"
 namespace bayesnet {
-    BoostAODE::BoostAODE() : Ensemble() {}
+    BoostAODE::BoostAODE() : Ensemble(), repeatSparent(false) {}
     void BoostAODE::buildModel(const torch::Tensor& weights)
     {
         // Models shall be built in trainModel
     }
+    void BoostAODE::setHyperparameters(nlohmann::json& hyperparameters)
+    {
+        if (hyperparameters.contains("repeatSparent")) {
+            repeatSparent = hyperparameters["repeatSparent"];
+        }
+    }
     void BoostAODE::trainModel(const torch::Tensor& weights)
     {
         models.clear();
@@ -16,7 +22,6 @@ namespace bayesnet {
         auto X_ = dataset.index({ torch::indexing::Slice(0, dataset.size(0) - 1), "..." });
         auto y_ = dataset.index({ -1, "..." });
         bool exitCondition = false;
-        bool repeatSparent = false;
         vector<int> featuresUsed;
         // Step 0: Set the finish condition
         // if not repeatSparent a finish condition is run out of features

View File

@@ -4,13 +4,16 @@
 #include "SPODE.h"
 namespace bayesnet {
     class BoostAODE : public Ensemble {
-    protected:
-        void buildModel(const torch::Tensor& weights) override;
-        void trainModel(const torch::Tensor& weights) override;
     public:
         BoostAODE();
         virtual ~BoostAODE() {};
         vector<string> graph(const string& title = "BoostAODE") const override;
+        void setHyperparameters(nlohmann::json& hyperparameters) override;
+    protected:
+        void buildModel(const torch::Tensor& weights) override;
+        void trainModel(const torch::Tensor& weights) override;
+    private:
+        bool repeatSparent;
     };
 }
 #endif

View File

@@ -1,5 +1,6 @@
 include_directories(${BayesNet_SOURCE_DIR}/lib/mdlp)
 include_directories(${BayesNet_SOURCE_DIR}/lib/Files)
+include_directories(${BayesNet_SOURCE_DIR}/lib/json/include)
 include_directories(${BayesNet_SOURCE_DIR}/src/BayesNet)
 include_directories(${BayesNet_SOURCE_DIR}/src/Platform)
 add_library(BayesNet bayesnetUtils.cc Network.cc Node.cc BayesMetrics.cc Classifier.cc

View File

@@ -16,6 +16,7 @@ namespace bayesnet {
     public:
         explicit KDB(int k, float theta = 0.03);
         virtual ~KDB() {};
+        void setHyperparameters(nlohmann::json& hyperparameters) override {};
         vector<string> graph(const string& name = "KDB") const override;
     };
 }

View File

@@ -13,6 +13,7 @@ namespace bayesnet {
         KDBLd& fit(torch::Tensor& X, torch::Tensor& y, vector<string>& features, string className, map<string, vector<int>>& states) override;
         vector<string> graph(const string& name = "KDB") const override;
         Tensor predict(Tensor& X) override;
+        void setHyperparameters(nlohmann::json& hyperparameters) override {};
         static inline string version() { return "0.0.1"; };
     };
 }

View File

@@ -12,6 +12,7 @@ namespace bayesnet {
         explicit SPODE(int root);
         virtual ~SPODE() {};
         vector<string> graph(const string& name = "SPODE") const override;
+        void setHyperparameters(nlohmann::json& hyperparameters) override {};
     };
 }
 #endif

View File

@@ -13,6 +13,7 @@ namespace bayesnet {
         SPODELd& fit(torch::Tensor& dataset, vector<string>& features, string className, map<string, vector<int>>& states) override;
         vector<string> graph(const string& name = "SPODE") const override;
         Tensor predict(Tensor& X) override;
+        void setHyperparameters(nlohmann::json& hyperparameters) override {};
         static inline string version() { return "0.0.1"; };
     };
 }

View File

@@ -3,7 +3,6 @@
 #include "Classifier.h"
 namespace bayesnet {
     using namespace std;
-    using namespace torch;
     class TAN : public Classifier {
     private:
     protected:
@@ -12,6 +11,7 @@ namespace bayesnet {
         TAN();
         virtual ~TAN() {};
         vector<string> graph(const string& name = "TAN") const override;
+        void setHyperparameters(nlohmann::json& hyperparameters) override {};
     };
 }
 #endif

View File

@@ -14,6 +14,7 @@ namespace bayesnet {
         vector<string> graph(const string& name = "TAN") const override;
         Tensor predict(Tensor& X) override;
         static inline string version() { return "0.0.1"; };
+        void setHyperparameters(nlohmann::json& hyperparameters) override {};
     };
 }
 #endif // !TANLD_H

View File

@@ -25,6 +25,7 @@ namespace platform {
         oss << std::put_time(timeinfo, "%H:%M:%S");
         return oss.str();
     }
+    Experiment::Experiment() : hyperparameters(json::parse("{}")) {}
     string Experiment::get_file_name()
     {
         string result = "results_" + score_name + "_" + model + "_" + platform + "_" + get_date() + "_" + get_time() + "_" + (stratified ? "1" : "0") + ".json";
@@ -124,6 +125,8 @@ namespace platform {
         auto result = Result();
         auto [values, counts] = at::_unique(y);
         result.setSamples(X.size(1)).setFeatures(X.size(0)).setClasses(values.size(0));
+        result.setHyperparameters(hyperparameters);
+        // Initialize results vectors
         int nResults = nfolds * static_cast<int>(randomSeeds.size());
         auto accuracy_test = torch::zeros({ nResults }, torch::kFloat64);
         auto accuracy_train = torch::zeros({ nResults }, torch::kFloat64);
@@ -144,6 +147,10 @@ namespace platform {
         for (int nfold = 0; nfold < nfolds; nfold++) {
             auto clf = Models::instance()->create(model);
             setModelVersion(clf->getVersion());
+            if (hyperparameters.size() != 0) {
+                clf->setHyperparameters(hyperparameters);
+            }
+            // Split train - test dataset
             train_timer.start();
             auto [train, test] = fold->getFold(nfold);
             auto train_t = torch::tensor(train);
@@ -153,12 +160,14 @@ namespace platform {
             auto X_test = X.index({ "...", test_t });
             auto y_test = y.index({ test_t });
             cout << nfold + 1 << ", " << flush;
+            // Train model
             clf->fit(X_train, y_train, features, className, states);
             nodes[item] = clf->getNumberOfNodes();
             edges[item] = clf->getNumberOfEdges();
             num_states[item] = clf->getNumberOfStates();
             train_time[item] = train_timer.getDuration();
             auto accuracy_train_value = clf->score(X_train, y_train);
+            // Test model
             test_timer.start();
             auto accuracy_test_value = clf->score(X_test, y_test);
             test_time[item] = test_timer.getDuration();

View File

@@ -29,7 +29,8 @@ namespace platform {
     };
     class Result {
     private:
-        string dataset, hyperparameters, model_version;
+        string dataset, model_version;
+        json hyperparameters;
         int samples{ 0 }, features{ 0 }, classes{ 0 };
         double score_train{ 0 }, score_test{ 0 }, score_train_std{ 0 }, score_test_std{ 0 }, train_time{ 0 }, train_time_std{ 0 }, test_time{ 0 }, test_time_std{ 0 };
         float nodes{ 0 }, leaves{ 0 }, depth{ 0 };
@@ -37,7 +38,7 @@ namespace platform {
     public:
         Result() = default;
         Result& setDataset(const string& dataset) { this->dataset = dataset; return *this; }
-        Result& setHyperparameters(const string& hyperparameters) { this->hyperparameters = hyperparameters; return *this; }
+        Result& setHyperparameters(const json& hyperparameters) { this->hyperparameters = hyperparameters; return *this; }
         Result& setSamples(int samples) { this->samples = samples; return *this; }
         Result& setFeatures(int features) { this->features = features; return *this; }
         Result& setClasses(int classes) { this->classes = classes; return *this; }
@@ -59,7 +60,7 @@ namespace platform {
         const float get_score_train() const { return score_train; }
         float get_score_test() { return score_test; }
         const string& getDataset() const { return dataset; }
-        const string& getHyperparameters() const { return hyperparameters; }
+        const json& getHyperparameters() const { return hyperparameters; }
         const int getSamples() const { return samples; }
         const int getFeatures() const { return features; }
         const int getClasses() const { return classes; }
@@ -85,11 +86,12 @@ namespace platform {
         bool discretized{ false }, stratified{ false };
         vector<Result> results;
         vector<int> randomSeeds;
+        json hyperparameters = "{}";
         int nfolds{ 0 };
         float duration{ 0 };
         json build_json();
     public:
-        Experiment() = default;
+        Experiment();
         Experiment& setTitle(const string& title) { this->title = title; return *this; }
         Experiment& setModel(const string& model) { this->model = model; return *this; }
         Experiment& setPlatform(const string& platform) { this->platform = platform; return *this; }
@@ -103,6 +105,7 @@ namespace platform {
         Experiment& addResult(Result result) { results.push_back(result); return *this; }
         Experiment& addRandomSeed(int randomSeed) { randomSeeds.push_back(randomSeed); return *this; }
         Experiment& setDuration(float duration) { this->duration = duration; return *this; }
+        Experiment& setHyperparameters(const json& hyperparameters) { this->hyperparameters = hyperparameters; return *this; }
         string get_file_name();
         void save(const string& path);
         void cross_validation(const string& path, const string& fileName);

View File

@@ -1,5 +1,6 @@
 #include <iostream>
 #include <argparse/argparse.hpp>
+#include <nlohmann/json.hpp>
 #include "platformUtils.h"
 #include "Experiment.h"
 #include "Datasets.h"
@@ -10,12 +11,14 @@
 using namespace std;
+using json = nlohmann::json;
 argparse::ArgumentParser manageArguments(int argc, char** argv)
 {
     auto env = platform::DotEnv();
     argparse::ArgumentParser program("main");
     program.add_argument("-d", "--dataset").default_value("").help("Dataset file name");
+    program.add_argument("--hyperparameters").default_value("{}").help("Hyperparamters passed to the model in Experiment");
     program.add_argument("-p", "--path")
         .help("folder where the data files are located, default")
         .default_value(string{ platform::Paths::datasets() });
@@ -59,6 +62,7 @@ argparse::ArgumentParser manageArguments(int argc, char** argv)
         auto seeds = program.get<vector<int>>("seeds");
         auto complete_file_name = path + file_name + ".arff";
         auto title = program.get<string>("title");
+        auto hyperparameters = program.get<string>("hyperparameters");
         if (title == "" && file_name == "") {
             throw runtime_error("title is mandatory if dataset is not provided");
         }
@@ -82,6 +86,7 @@ int main(int argc, char** argv)
     auto stratified = program.get<bool>("stratified");
     auto n_folds = program.get<int>("folds");
     auto seeds = program.get<vector<int>>("seeds");
+    auto hyperparameters = program.get<string>("hyperparameters");
     vector<string> filesToTest;
     auto datasets = platform::Datasets(path, true, platform::ARFF);
     auto title = program.get<string>("title");
@@ -106,6 +111,7 @@ int main(int argc, char** argv)
     experiment.setTitle(title).setLanguage("cpp").setLanguageVersion("14.0.3");
     experiment.setDiscretized(discretize_dataset).setModel(model_name).setPlatform(env.get("platform"));
     experiment.setStratified(stratified).setNFolds(n_folds).setScoreName("accuracy");
+    experiment.setHyperparameters(json::parse(hyperparameters));
     for (auto seed : seeds) {
         experiment.addRandomSeed(seed);
     }