Add dotenv and possible multiple seeds
@@ -4,5 +4,5 @@ include_directories(${BayesNet_SOURCE_DIR}/lib/Files)
 include_directories(${BayesNet_SOURCE_DIR}/lib/mdlp)
 include_directories(${BayesNet_SOURCE_DIR}/lib/argparse/include)
 include_directories(${BayesNet_SOURCE_DIR}/lib/json/include)
-add_executable(main main.cc Folding.cc platformUtils.cc Experiment.cc Datasets.cc)
+add_executable(main main.cc Folding.cc platformUtils.cc Experiment.cc Datasets.cc CrossValidation.cc)
 target_link_libraries(main BayesNet ArffFiles mdlp "${TORCH_LIBRARIES}")
src/Platform/CrossValidation.cc  (new file, 81 lines)
@@ -0,0 +1,81 @@
+#include "CrossValidation.h"
+#include "AODE.h"
+#include "TAN.h"
+#include "KDB.h"
+#include "SPODE.h"
+
+namespace platform {
+    using json = nlohmann::json;
+    using namespace std::chrono;
+
+    CrossValidation::CrossValidation(string modelName, bool stratified, int nfolds, vector<int> randomSeeds, platform::Datasets& datasets) : modelName(modelName), stratified(stratified), nfolds(nfolds), randomSeeds(randomSeeds), datasets(datasets)
+    {
+        classifiers = map<string, bayesnet::BaseClassifier*>({
+            { "AODE", new bayesnet::AODE() }, { "KDB", new bayesnet::KDB(2) },
+            { "SPODE", new bayesnet::SPODE(2) }, { "TAN", new bayesnet::TAN() }
+            });
+    }
+
+    Result CrossValidation::crossValidate(string fileName)
+    {
+        auto [Xt, y] = datasets.getTensors(fileName);
+        auto states = datasets.getStates(fileName);
+        auto className = datasets.getClassName(fileName);
+        auto features = datasets.getFeatures(fileName);
+        auto samples = datasets.getNSamples(fileName);
+        auto result = Result();
+        auto [values, counts] = at::_unique(y);
+        result.setSamples(Xt.size(1)).setFeatures(Xt.size(0)).setClasses(values.size(0));
+        int nSeeds = static_cast<int>(randomSeeds.size());
+        auto accuracy_test = torch::zeros({ nfolds * nSeeds }, torch::kFloat64);
+        auto accuracy_train = torch::zeros({ nfolds * nSeeds }, torch::kFloat64);
+        auto train_time = torch::zeros({ nfolds * nSeeds }, torch::kFloat64);
+        auto test_time = torch::zeros({ nfolds * nSeeds }, torch::kFloat64);
+        auto nodes = torch::zeros({ nfolds * nSeeds }, torch::kFloat64);
+        auto edges = torch::zeros({ nfolds * nSeeds }, torch::kFloat64);
+        auto num_states = torch::zeros({ nfolds * nSeeds }, torch::kFloat64);
+        Timer train_timer, test_timer;
+        int item = 0;
+        for (auto seed : randomSeeds) {
+            cout << "(" << seed << ") " << flush;
+            Fold* fold;
+            if (stratified)
+                fold = new StratifiedKFold(nfolds, y, seed);
+            else
+                fold = new KFold(nfolds, samples, seed);
+            cout << "Fold: " << flush;
+            for (int nfold = 0; nfold < nfolds; nfold++) {
+                bayesnet::BaseClassifier* model = classifiers[modelName];
+                result.setModelVersion(model->getVersion());
+                train_timer.start();
+                auto [train, test] = fold->getFold(nfold);
+                auto train_t = torch::tensor(train);
+                auto test_t = torch::tensor(test);
+                auto X_train = Xt.index({ "...", train_t });
+                auto y_train = y.index({ train_t });
+                auto X_test = Xt.index({ "...", test_t });
+                auto y_test = y.index({ test_t });
+                cout << nfold + 1 << ", " << flush;
+                model->fit(X_train, y_train, features, className, states);
+                nodes[item] = model->getNumberOfNodes();
+                edges[item] = model->getNumberOfEdges();
+                num_states[item] = model->getNumberOfStates();
+                train_time[item] = train_timer.getDuration();
+                auto accuracy_train_value = model->score(X_train, y_train);
+                test_timer.start();
+                auto accuracy_test_value = model->score(X_test, y_test);
+                test_time[item] = test_timer.getDuration();
+                accuracy_train[item] = accuracy_train_value;
+                accuracy_test[item] = accuracy_test_value;
+                item++;
+            }
+            delete fold;
+        }
+        cout << "end." << endl;
+        result.setScoreTest(torch::mean(accuracy_test).item<double>()).setScoreTrain(torch::mean(accuracy_train).item<double>());
+        result.setScoreTestStd(torch::std(accuracy_test).item<double>()).setScoreTrainStd(torch::std(accuracy_train).item<double>());
+        result.setTrainTime(torch::mean(train_time).item<double>()).setTestTime(torch::mean(test_time).item<double>());
+        result.setNodes(torch::mean(nodes).item<double>()).setLeaves(torch::mean(edges).item<double>()).setDepth(torch::mean(num_states).item<double>());
+        return result;
+    }
+} // namespace platform
src/Platform/CrossValidation.h  (new file, 27 lines)
@@ -0,0 +1,27 @@
+#ifndef CROSSVALIDATION_H
+#define CROSSVALIDATION_H
+#include <torch/torch.h>
+#include <nlohmann/json.hpp>
+#include <string>
+#include <chrono>
+#include "Folding.h"
+#include "BaseClassifier.h"
+#include "Datasets.h"
+#include "Experiment.h"
+
+namespace platform {
+    class CrossValidation {
+    private:
+        bool stratified;
+        int nfolds;
+        string modelName;
+        vector<int> randomSeeds;
+        platform::Datasets& datasets;
+        map<string, bayesnet::BaseClassifier*> classifiers;
+    public:
+        CrossValidation(string modelName, bool stratified, int nfolds, vector<int> randomSeeds, platform::Datasets& datasets);
+        ~CrossValidation() = default;
+        Result crossValidate(string fileName);
+    };
+}
+#endif // !CROSSVALIDATION_H
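For orientation, the new class is constructed once per run and then asked to cross-validate each dataset by name, which is how main.cc drives it later in this commit. Below is a minimal sketch of that call sequence; the model name, fold count, seeds, dataset folder and dataset name are placeholder values, not taken from the commit:

#include <vector>
#include "Datasets.h"
#include "CrossValidation.h"

void runOneDataset()    // hypothetical driver, not part of the commit
{
    // Load the dataset catalogue the same way main.cc does.
    auto datasets = platform::Datasets("datasets", true, platform::ARFF);
    // Placeholder seeds; with 3 seeds and 5 folds, 15 fold results are averaged.
    auto seeds = std::vector<int>{ 271, 314, 171 };
    auto validation = platform::CrossValidation("TAN", true, 5, seeds, datasets);
    // result holds the mean/std accuracy and timings over all seeds and folds.
    auto result = validation.crossValidate("iris");    // placeholder dataset name
}

Seeds and fold construction are handled inside crossValidate, so the caller no longer builds KFold or StratifiedKFold objects itself, as the main.cc hunk at the end of this commit shows.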
src/Platform/DotEnv.h  (new file, 69 lines)
@@ -0,0 +1,69 @@
+#ifndef DOTENV_H
+#define DOTENV_H
+#include <string>
+#include <map>
+#include <fstream>
+#include <sstream>
+std::vector<std::string> split(std::string text, char delimiter)
+{
+    std::vector<std::string> result;
+    std::stringstream ss(text);
+    std::string token;
+    while (getline(ss, token, delimiter)) {
+        result.push_back(token);
+    }
+    return result;
+}
+class DotEnv {
+private:
+    std::map<std::string, std::string> env;
+    std::string trim(const std::string& str)
+    {
+        std::string result = str;
+        result.erase(result.begin(), std::find_if(result.begin(), result.end(), [](int ch) {
+            return !std::isspace(ch);
+            }));
+        result.erase(std::find_if(result.rbegin(), result.rend(), [](int ch) {
+            return !std::isspace(ch);
+            }).base(), result.end());
+        return result;
+    }
+public:
+    DotEnv()
+    {
+        std::ifstream file(".env");
+        if (!file.is_open()) {
+            std::cerr << "File .env not found" << std::endl;
+            exit(1);
+        }
+        std::string line;
+        while (std::getline(file, line)) {
+            line = trim(line);
+            if (line.empty() || line[0] == '#') {
+                continue;
+            }
+            std::istringstream iss(line);
+            std::string key, value;
+            if (std::getline(iss, key, '=') && std::getline(iss, value)) {
+                env[key] = value;
+            }
+        }
+    }
+    std::string get(const std::string& key)
+    {
+        return env[key];
+    }
+    std::vector<int> getSeeds()
+    {
+        auto seeds = std::vector<int>();
+        auto seeds_str = env["seeds"];
+        seeds_str = trim(seeds_str);
+        seeds_str = seeds_str.substr(1, seeds_str.size() - 2);
+        auto seeds_str_split = split(seeds_str, ',');
+        for (auto seed_str : seeds_str_split) {
+            seeds.push_back(stoi(seed_str));
+        }
+        return seeds;
+    }
+};
+#endif
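For reference, DotEnv reads a file literally named .env in the working directory: blank lines and lines beginning with '#' are skipped, the text before the first '=' becomes the key, and getSeeds() trims the seeds value, drops its first and last characters, and splits the remainder on commas. The sketch below uses the keys main.cc queries later in this commit (discretize, stratified, n_folds, seeds); the values, and the bracket wrapping of the seeds list, are only one choice consistent with that parsing logic, not part of the commit:

# example .env (placeholder values)
discretize=1
stratified=0
n_folds=5
seeds=[271,314,171]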
@@ -79,55 +79,10 @@ namespace platform {
         file << data;
         file.close();
     }
-    Result cross_validation(Fold* fold, string model_name, torch::Tensor& Xt, torch::Tensor& y, vector<string> features, string className, map<string, vector<int>> states)
+    void Experiment::show()
     {
-        auto classifiers = map<string, bayesnet::BaseClassifier*>({
-            { "AODE", new bayesnet::AODE() }, { "KDB", new bayesnet::KDB(2) },
-            { "SPODE", new bayesnet::SPODE(2) }, { "TAN", new bayesnet::TAN() }
-            }
-        );
-        auto result = Result();
-        auto [values, counts] = at::_unique(y);
-        result.setSamples(Xt.size(1)).setFeatures(Xt.size(0)).setClasses(values.size(0));
-        auto k = fold->getNumberOfFolds();
-        auto accuracy_test = torch::zeros({ k }, torch::kFloat64);
-        auto accuracy_train = torch::zeros({ k }, torch::kFloat64);
-        auto train_time = torch::zeros({ k }, torch::kFloat64);
-        auto test_time = torch::zeros({ k }, torch::kFloat64);
-        auto nodes = torch::zeros({ k }, torch::kFloat64);
-        auto edges = torch::zeros({ k }, torch::kFloat64);
-        auto num_states = torch::zeros({ k }, torch::kFloat64);
-        Timer train_timer, test_timer;
-        cout << "doing Fold: " << flush;
-        for (int i = 0; i < k; i++) {
-            bayesnet::BaseClassifier* model = classifiers[model_name];
-            result.setModelVersion(model->getVersion());
-            train_timer.start();
-            auto [train, test] = fold->getFold(i);
-            auto train_t = torch::tensor(train);
-            auto test_t = torch::tensor(test);
-            auto X_train = Xt.index({ "...", train_t });
-            auto y_train = y.index({ train_t });
-            auto X_test = Xt.index({ "...", test_t });
-            auto y_test = y.index({ test_t });
-            cout << i + 1 << ", " << flush;
-            model->fit(X_train, y_train, features, className, states);
-            nodes[i] = model->getNumberOfNodes();
-            edges[i] = model->getNumberOfEdges();
-            num_states[i] = model->getNumberOfStates();
-            train_time[i] = train_timer.getDuration();
-            auto accuracy_train_value = model->score(X_train, y_train);
-            test_timer.start();
-            auto accuracy_test_value = model->score(X_test, y_test);
-            test_time[i] = test_timer.getDuration();
-            accuracy_train[i] = accuracy_train_value;
-            accuracy_test[i] = accuracy_test_value;
-        }
-        cout << "end." << endl;
-        result.setScoreTest(torch::mean(accuracy_test).item<double>()).setScoreTrain(torch::mean(accuracy_train).item<double>());
-        result.setScoreTestStd(torch::std(accuracy_test).item<double>()).setScoreTrainStd(torch::std(accuracy_train).item<double>());
-        result.setTrainTime(torch::mean(train_time).item<double>()).setTestTime(torch::mean(test_time).item<double>());
-        result.setNodes(torch::mean(nodes).item<double>()).setLeaves(torch::mean(edges).item<double>()).setDepth(torch::mean(num_states).item<double>());
-        return result;
+        json data = build_json();
+        cout << data.dump(4) << endl;
     }

 }
@@ -4,11 +4,6 @@
 #include <nlohmann/json.hpp>
 #include <string>
 #include <chrono>
-#include "Folding.h"
-#include "BaseClassifier.h"
-#include "TAN.h"
-#include "KDB.h"
-#include "AODE.h"

 using namespace std;
 namespace platform {
@@ -98,8 +93,7 @@ namespace platform {
         Experiment& setDuration(float duration) { this->duration = duration; return *this; }
         string get_file_name();
         void save(string path);
-        void show() { cout << "Showing experiment..." << "Score Test: " << results[0].get_score_test() << " Score Train: " << results[0].get_score_train() << endl; }
+        void show();
     };
-    Result cross_validation(Fold* fold, string model_name, torch::Tensor& X, torch::Tensor& y, vector<string> features, string className, map<string, vector<int>> states);
 }
 #endif
@@ -3,18 +3,22 @@
 #include "platformUtils.h"
 #include "Experiment.h"
 #include "Datasets.h"
+#include "DotEnv.h"
+#include "CrossValidation.h"


 using namespace std;
 const string PATH_RESULTS = "results";
+const string PATH_DATASETS = "datasets";

 argparse::ArgumentParser manageArguments(int argc, char** argv)
 {
+    auto env = DotEnv();
     argparse::ArgumentParser program("BayesNetSample");
     program.add_argument("-d", "--dataset").default_value("").help("Dataset file name");
     program.add_argument("-p", "--path")
         .help("folder where the data files are located, default")
-        .default_value(string{ PATH }
+        .default_value(string{ PATH_DATASETS }
     );
     program.add_argument("-m", "--model")
         .help("Model to use {AODE, KDB, SPODE, TAN}")
@@ -27,9 +31,9 @@ argparse::ArgumentParser manageArguments(int argc, char** argv)
         }
     );
     program.add_argument("--title").required().help("Experiment title");
-    program.add_argument("--discretize").help("Discretize input dataset").default_value(false).implicit_value(true);
-    program.add_argument("--stratified").help("If Stratified KFold is to be done").default_value(false).implicit_value(true);
-    program.add_argument("-f", "--folds").help("Number of folds").default_value(5).scan<'i', int>().action([](const string& value) {
+    program.add_argument("--discretize").help("Discretize input dataset").default_value((bool)stoi(env.get("discretize"))).implicit_value(true);
+    program.add_argument("--stratified").help("If Stratified KFold is to be done").default_value((bool)stoi(env.get("stratified"))).implicit_value(true);
+    program.add_argument("-f", "--folds").help("Number of folds").default_value(stoi(env.get("n_folds"))).scan<'i', int>().action([](const string& value) {
         try {
             auto k = stoi(value);
             if (k < 2) {
@@ -43,9 +47,12 @@ argparse::ArgumentParser manageArguments(int argc, char** argv)
     catch (...) {
         throw runtime_error("Number of folds must be an integer");
     }});
-    program.add_argument("-s", "--seed").help("Random seed").default_value(-1).scan<'i', int>();
+    auto seed_values = env.getSeeds();
+    program.add_argument("-s", "--seeds").help("Random seeds comma separated. Set to -1 to have pseudo random").default_value(seed_values);
     bool class_last, discretize_dataset, stratified;
-    int n_folds, seed;
+    int n_folds;
+    vector<int> seeds;
     string model_name, file_name, path, complete_file_name, title;
     try {
         program.parse_args(argc, argv);
@@ -55,7 +62,7 @@ argparse::ArgumentParser manageArguments(int argc, char** argv)
         discretize_dataset = program.get<bool>("discretize");
         stratified = program.get<bool>("stratified");
         n_folds = program.get<int>("folds");
-        seed = program.get<int>("seed");
+        seeds = program.get<vector<int>>("seeds");
         complete_file_name = path + file_name + ".arff";
         class_last = false;//datasets[file_name];
         title = program.get<string>("title");
@@ -71,13 +78,15 @@ argparse::ArgumentParser manageArguments(int argc, char** argv)
 int main(int argc, char** argv)
 {
     auto program = manageArguments(argc, argv);
+    auto env = DotEnv();
+    bool saveResults = false;
     auto file_name = program.get<string>("dataset");
     auto path = program.get<string>("path");
     auto model_name = program.get<string>("model");
     auto discretize_dataset = program.get<bool>("discretize");
     auto stratified = program.get<bool>("stratified");
     auto n_folds = program.get<int>("folds");
-    auto seed = program.get<int>("seed");
+    auto seeds = program.get<vector<int>>("seeds");
     vector<string> filesToProcess;
     auto datasets = platform::Datasets(path, true, platform::ARFF);
     if (file_name != "") {
@@ -88,6 +97,7 @@ int main(int argc, char** argv)
         filesToProcess.push_back(file_name);
     } else {
         filesToProcess = platform::Datasets(path, true, platform::ARFF).getNames();
+        saveResults = true; // Only save results if all datasets are processed
     }
     auto title = program.get<string>("title");

@@ -97,10 +107,14 @@ int main(int argc, char** argv)
     auto experiment = platform::Experiment();
     experiment.setTitle(title).setLanguage("cpp").setLanguageVersion("1.0.0");
     experiment.setDiscretized(discretize_dataset).setModel(model_name).setPlatform("BayesNet");
-    experiment.setStratified(stratified).setNFolds(n_folds).addRandomSeed(seed).setScoreName("accuracy");
+    experiment.setStratified(stratified).setNFolds(n_folds).setScoreName("accuracy");
+    for (auto seed : seeds) {
+        experiment.addRandomSeed(seed);
+    }
     platform::Timer timer;
     cout << "*** Starting experiment: " << title << " ***" << endl;
     timer.start();
+    auto validation = platform::CrossValidation(model_name, stratified, n_folds, seeds, datasets);
     for (auto fileName : filesToProcess) {
         cout << "- " << setw(20) << left << fileName << " " << right << flush;
         auto [X, y] = datasets.getTensors(fileName);
@@ -109,19 +123,16 @@ int main(int argc, char** argv)
         auto samples = datasets.getNSamples(fileName);
         auto className = datasets.getClassName(fileName);
         cout << " (" << setw(5) << samples << "," << setw(3) << features.size() << ") " << flush;
-        Fold* fold;
-        if (stratified)
-            fold = new StratifiedKFold(n_folds, y, seed);
-        else
-            fold = new KFold(n_folds, samples, seed);
-        auto result = platform::cross_validation(fold, model_name, X, y, features, className, states);
+        auto result = validation.crossValidate(fileName);
         result.setDataset(fileName);
         experiment.setModelVersion(result.getModelVersion());
         experiment.addResult(result);
-        delete fold;
     }
     experiment.setDuration(timer.getDuration());
-    experiment.save(PATH_RESULTS);
+    if (saveResults)
+        experiment.save(PATH_RESULTS);
+    else
+        experiment.show();
     cout << "Done!" << endl;
     return 0;
 }