Compare commits: d8764db716...aftermath

12 Commits:

4ebc9c2013
b882569169
8b2ed26ab7
5efa3beaee
9a0449c12d
7222119dfb
cb54f61a69
07d572a98c
c4f3e6f19a
adc0ca238f
b9e76becce
85cb447283

.vscode/launch.json (vendored, 2 changes)
@@ -31,8 +31,6 @@
                 "--stratified",
                 "--title",
                 "Debug test",
-                "--seeds",
-                "1",
                 "-d",
                 "ionosphere"
             ],
@@ -9,6 +9,7 @@
 #include "CPPFImdlp.h"
 #include "Folding.h"
 #include "Models.h"
+#include "modelRegister.h"
 
 
 using namespace std;
@@ -68,9 +69,8 @@ int main(int argc, char** argv)
         {"mfeat-factors", true},
     };
     auto valid_datasets = vector<string>();
-    for (auto dataset : datasets) {
-        valid_datasets.push_back(dataset.first);
-    }
+    transform(datasets.begin(), datasets.end(), back_inserter(valid_datasets),
+        [](const pair<string, bool>& pair) { return pair.first; });
     argparse::ArgumentParser program("BayesNetSample");
     program.add_argument("-d", "--dataset")
         .help("Dataset file name")
@@ -86,13 +86,13 @@ int main(int argc, char** argv)
         .default_value(string{ PATH }
     );
     program.add_argument("-m", "--model")
-        .help("Model to use " + platform::Models::toString())
+        .help("Model to use " + platform::Models::instance()->toString())
         .action([](const std::string& value) {
-        static const vector<string> choices = platform::Models::getNames();
+        static const vector<string> choices = platform::Models::instance()->getNames();
         if (find(choices.begin(), choices.end(), value) != choices.end()) {
             return value;
         }
-        throw runtime_error("Model must be one of " + platform::Models::toString());
+        throw runtime_error("Model must be one of " + platform::Models::instance()->toString());
         }
     );
     program.add_argument("--discretize").help("Discretize input dataset").default_value(false).implicit_value(true);
@@ -148,9 +148,9 @@ int main(int argc, char** argv)
     // Get className & Features
     auto className = handler.getClassName();
     vector<string> features;
-    for (auto feature : handler.getAttributes()) {
-        features.push_back(feature.first);
-    }
+    auto attributes = handler.getAttributes();
+    transform(attributes.begin(), attributes.end(), back_inserter(features),
+        [](const pair<string, string>& item) { return item.first; });
     // Discretize Dataset
     auto [Xd, maxes] = discretize(X, y, features);
     maxes[className] = *max_element(y.begin(), y.end()) + 1;
@@ -159,8 +159,7 @@ int main(int argc, char** argv)
         states[feature] = vector<int>(maxes[feature]);
     }
     states[className] = vector<int>(maxes[className]);
-
-    bayesnet::BaseClassifier* clf = platform::Models::get(model_name);
+    auto clf = platform::Models::instance()->create(model_name);
     clf->fit(Xd, y, features, className, states);
     auto score = clf->score(Xd, y);
     auto lines = clf->show();
@@ -8,6 +8,7 @@ namespace bayesnet {
         void train() override;
     public:
         AODE();
+        virtual ~AODE() {};
         vector<string> graph(string title = "AODE") override;
     };
 }
@@ -67,7 +67,6 @@ namespace bayesnet {
         }
         return matrix;
     }
-    // To Interface with Python
     vector<float> Metrics::conditionalEdgeWeights()
     {
         auto matrix = conditionalEdge();
@@ -11,7 +11,7 @@ namespace bayesnet {
         Tensor samples;
         vector<string> features;
         string className;
-        int classNumStates;
+        int classNumStates = 0;
     public:
         Metrics() = default;
         Metrics(Tensor&, vector<string>&, string&, int);
@@ -4,7 +4,7 @@
 namespace bayesnet {
     using namespace torch;
 
-    Classifier::Classifier(const Network& model) : model(model), m(0), n(0), metrics(Metrics()), fitted(false) {}
+    Classifier::Classifier(Network model) : model(model), m(0), n(0), metrics(Metrics()), fitted(false) {}
     Classifier& Classifier::build(vector<string>& features, string className, map<string, vector<int>>& states)
     {
         dataset = torch::cat({ X, y.view({y.size(0), 1}) }, 1);
@@ -27,7 +27,7 @@ namespace bayesnet {
         void checkFitParameters();
         virtual void train() = 0;
     public:
-        Classifier(const Network& model);
+        Classifier(Network model);
         virtual ~Classifier() = default;
         Classifier& fit(vector<vector<int>>& X, vector<int>& y, vector<string>& features, string className, map<string, vector<int>>& states) override;
         Classifier& fit(torch::Tensor& X, torch::Tensor& y, vector<string>& features, string className, map<string, vector<int>>& states) override;
@@ -14,6 +14,7 @@ namespace bayesnet {
         void train() override;
     public:
         explicit KDB(int k, float theta = 0.03);
+        virtual ~KDB() {};
         vector<string> graph(string name = "KDB") override;
     };
 }
@@ -7,7 +7,7 @@
 
 namespace bayesnet {
     using namespace std;
-    Graph::Graph(int V) : V(V), parent{ vector<int>(V) }
+    Graph::Graph(int V) : V(V), parent(vector<int>(V))
     {
         for (int i = 0; i < V; i++)
             parent[i] = i;
@@ -33,10 +33,9 @@ namespace bayesnet {
     }
     void Graph::kruskal_algorithm()
     {
-        int i;
         // sort the edges ordered on decreasing weight
         sort(G.begin(), G.end(), [](const auto& left, const auto& right) {return left.first > right.first;});
-        for (i = 0; i < G.size(); i++) {
+        for (int i = 0; i < G.size(); i++) {
            int uSt, vEd;
            uSt = find_set(G[i].second.first);
            vEd = find_set(G[i].second.second);
@@ -10,8 +10,9 @@ namespace bayesnet {
     private:
         Tensor weights;
         vector<string> features;
-        int root;
+        int root = 0;
     public:
+        MST() = default;
         MST(vector<string>& features, Tensor& weights, int root);
         vector<pair<int, int>> maximumSpanningTree();
     };
@@ -4,9 +4,9 @@
 #include "bayesnetUtils.h"
 namespace bayesnet {
     Network::Network() : laplaceSmoothing(1), features(vector<string>()), className(""), classNumStates(0), maxThreads(0.8), fitted(false) {}
-    Network::Network(const float maxT) : laplaceSmoothing(1), features(vector<string>()), className(""), classNumStates(0), maxThreads(maxT), fitted(false) {}
-    Network::Network(const float maxT, const int smoothing) : laplaceSmoothing(smoothing), features(vector<string>()), className(""), classNumStates(0), maxThreads(maxT), fitted(false) {}
-    Network::Network(const Network& other) : laplaceSmoothing(other.laplaceSmoothing), features(other.features), className(other.className), classNumStates(other.classNumStates), maxThreads(other.maxThreads), fitted(other.fitted)
+    Network::Network(float maxT) : laplaceSmoothing(1), features(vector<string>()), className(""), classNumStates(0), maxThreads(maxT), fitted(false) {}
+    Network::Network(float maxT, int smoothing) : laplaceSmoothing(smoothing), features(vector<string>()), className(""), classNumStates(0), maxThreads(maxT), fitted(false) {}
+    Network::Network(Network& other) : laplaceSmoothing(other.laplaceSmoothing), features(other.features), className(other.className), classNumStates(other.getClassNumStates()), maxThreads(other.getmaxThreads()), fitted(other.fitted)
     {
         for (const auto& pair : other.nodes) {
             nodes[pair.first] = std::make_unique<Node>(*pair.second);
@@ -20,7 +20,7 @@ namespace bayesnet {
     {
         return samples;
     }
-    void Network::addNode(const string& name, const int numStates)
+    void Network::addNode(const string& name, int numStates)
     {
         if (find(features.begin(), features.end(), name) == features.end()) {
             features.push_back(name);
@@ -37,11 +37,11 @@ namespace bayesnet {
     {
         return features;
     }
-    const int Network::getClassNumStates()
+    int Network::getClassNumStates()
     {
         return classNumStates;
     }
-    const int Network::getStates()
+    int Network::getStates()
     {
         int result = 0;
         for (auto& node : nodes) {
@@ -49,7 +49,7 @@ namespace bayesnet {
         }
         return result;
     }
-    const string Network::getClassName()
+    string Network::getClassName()
     {
         return className;
     }
@@ -145,9 +145,6 @@ namespace bayesnet {
         while (nextNodeIndex < nodes.size()) {
             unique_lock<mutex> lock(mtx);
             cv.wait(lock, [&activeThreads, &maxThreadsRunning]() { return activeThreads < maxThreadsRunning; });
-            if (nextNodeIndex >= nodes.size()) {
-                break; // No more work remaining
-            }
             threads.emplace_back([this, &nextNodeIndex, &mtx, &cv, &activeThreads]() {
                 while (true) {
                     unique_lock<mutex> lock(mtx);
@@ -262,7 +259,7 @@ namespace bayesnet {
 
         // Normalize result
         double sum = accumulate(result.begin(), result.end(), 0.0);
-        transform(result.begin(), result.end(), result.begin(), [sum](double x) { return x / sum; });
+        transform(result.begin(), result.end(), result.begin(), [sum](double& value) { return value / sum; });
         return result;
     }
     vector<string> Network::show()
@@ -27,19 +27,19 @@ namespace bayesnet {
         void completeFit();
     public:
         Network();
-        explicit Network(const float, const int);
-        explicit Network(const float);
-        explicit Network(const Network&);
+        explicit Network(float, int);
+        explicit Network(float);
+        explicit Network(Network&);
         torch::Tensor& getSamples();
         float getmaxThreads();
-        void addNode(const string&, const int);
+        void addNode(const string&, int);
         void addEdge(const string&, const string&);
         map<string, std::unique_ptr<Node>>& getNodes();
         vector<string> getFeatures();
-        const int getStates();
+        int getStates();
         vector<pair<string, string>> getEdges();
-        const int getClassNumStates();
-        const string getClassName();
+        int getClassNumStates();
+        string getClassName();
         void fit(const vector<vector<int>>&, const vector<int>&, const vector<string>&, const string&);
         void fit(torch::Tensor&, torch::Tensor&, const vector<string>&, const string&);
         vector<int> predict(const vector<vector<int>>&);
@@ -88,14 +88,15 @@ namespace bayesnet {
     {
         // Get dimensions of the CPT
         dimensions.push_back(numStates);
-        transform(parents.begin(), parents.end(), back_inserter(dimensions), [](Node* parent) { return parent->getNumStates(); });
+        transform(parents.begin(), parents.end(), back_inserter(dimensions), [](const auto& parent) { return parent->getNumStates(); });
+
         // Create a tensor of zeros with the dimensions of the CPT
         cpTable = torch::zeros(dimensions, torch::kFloat) + laplaceSmoothing;
         // Fill table with counts
         for (int n_sample = 0; n_sample < dataset[name].size(); ++n_sample) {
             torch::List<c10::optional<torch::Tensor>> coordinates;
             coordinates.push_back(torch::tensor(dataset[name][n_sample]));
-            transform(parents.begin(), parents.end(), back_inserter(coordinates), [&dataset, &n_sample](Node* parent) { return torch::tensor(dataset[parent->getName()][n_sample]); });
+            transform(parents.begin(), parents.end(), back_inserter(coordinates), [&dataset, &n_sample](const auto& parent) { return torch::tensor(dataset[parent->getName()][n_sample]); });
             // Increment the count of the corresponding coordinate
             cpTable.index_put_({ coordinates }, cpTable.index({ coordinates }) + 1);
         }
@@ -107,7 +108,7 @@ namespace bayesnet {
         torch::List<c10::optional<torch::Tensor>> coordinates;
         // following predetermined order of indices in the cpTable (see Node.h)
         coordinates.push_back(torch::tensor(evidence[name]));
-        transform(parents.begin(), parents.end(), back_inserter(coordinates), [&evidence](Node* parent) { return torch::tensor(evidence[parent->getName()]); });
+        transform(parents.begin(), parents.end(), back_inserter(coordinates), [&evidence](const auto& parent) { return torch::tensor(evidence[parent->getName()]); });
         return cpTable.index({ coordinates }).item<float>();
     }
     vector<string> Node::graph(const string& className)
@@ -115,7 +116,7 @@ namespace bayesnet {
         auto output = vector<string>();
         auto suffix = name == className ? ", fontcolor=red, fillcolor=lightblue, style=filled " : "";
         output.push_back(name + " [shape=circle" + suffix + "] \n");
-        transform(children.begin(), children.end(), back_inserter(output), [this](Node* child) { return name + " -> " + child->getName() + "\n"; });
+        transform(children.begin(), children.end(), back_inserter(output), [this](const auto& child) { return name + " -> " + child->getName(); });
         return output;
     }
 }
@@ -16,7 +16,7 @@ namespace bayesnet {
         vector<int64_t> dimensions; // dimensions of the cpTable
     public:
         vector<pair<string, string>> combinations(const vector<string>&);
-        Node(const std::string&, int);
+        Node(const string&, int);
         void clear();
         void addParent(Node*);
         void addChild(Node*);
@@ -1,6 +1,7 @@
 #ifndef SPODE_H
 #define SPODE_H
 #include "Classifier.h"
+
 namespace bayesnet {
     class SPODE : public Classifier {
     private:
@@ -9,6 +10,7 @@ namespace bayesnet {
         void train() override;
     public:
         explicit SPODE(int root);
+        virtual ~SPODE() {};
         vector<string> graph(string name = "SPODE") override;
     };
 }
@@ -10,6 +10,7 @@ namespace bayesnet {
         void train() override;
     public:
         TAN();
+        virtual ~TAN() {};
         vector<string> graph(string name = "TAN") override;
     };
 }
@@ -4,5 +4,5 @@ include_directories(${BayesNet_SOURCE_DIR}/lib/Files)
 include_directories(${BayesNet_SOURCE_DIR}/lib/mdlp)
 include_directories(${BayesNet_SOURCE_DIR}/lib/argparse/include)
 include_directories(${BayesNet_SOURCE_DIR}/lib/json/include)
-add_executable(main main.cc Folding.cc platformUtils.cc Experiment.cc Datasets.cc CrossValidation.cc Models.cc)
+add_executable(main main.cc Folding.cc platformUtils.cc Experiment.cc Datasets.cc Models.cc)
 target_link_libraries(main BayesNet ArffFiles mdlp "${TORCH_LIBRARIES}")
@@ -1,79 +0,0 @@
-#include "CrossValidation.h"
-#include "Models.h"
-
-namespace platform {
-    using json = nlohmann::json;
-    using namespace std::chrono;
-
-    CrossValidation::CrossValidation(const string& modelName, bool stratified, int nfolds, const vector<int>& randomSeeds, platform::Datasets& datasets) : modelName(modelName), stratified(stratified), nfolds(nfolds), randomSeeds(randomSeeds), datasets(datasets)
-    {
-    }
-
-    Result CrossValidation::crossValidate(const string& fileName)
-    {
-        auto [Xt, y] = datasets.getTensors(fileName);
-        auto states = datasets.getStates(fileName);
-        auto className = datasets.getClassName(fileName);
-        auto features = datasets.getFeatures(fileName);
-        auto samples = datasets.getNSamples(fileName);
-        auto result = Result();
-        auto [values, counts] = at::_unique(y);
-        result.setSamples(Xt.size(1)).setFeatures(Xt.size(0)).setClasses(values.size(0));
-        int nSeeds = static_cast<int>(randomSeeds.size());
-        auto accuracy_test = torch::zeros({ nfolds * nSeeds }, torch::kFloat64);
-        auto accuracy_train = torch::zeros({ nfolds * nSeeds }, torch::kFloat64);
-        auto train_time = torch::zeros({ nfolds * nSeeds }, torch::kFloat64);
-        auto test_time = torch::zeros({ nfolds * nSeeds }, torch::kFloat64);
-        auto nodes = torch::zeros({ nfolds * nSeeds }, torch::kFloat64);
-        auto edges = torch::zeros({ nfolds * nSeeds }, torch::kFloat64);
-        auto num_states = torch::zeros({ nfolds * nSeeds }, torch::kFloat64);
-        Timer train_timer, test_timer;
-        int item = 0;
-        for (auto seed : randomSeeds) {
-            cout << "(" << seed << ") " << flush;
-            Fold* fold;
-            if (stratified)
-                fold = new StratifiedKFold(nfolds, y, seed);
-            else
-                fold = new KFold(nfolds, samples, seed);
-            cout << "Fold: " << flush;
-            for (int nfold = 0; nfold < nfolds; nfold++) {
-                bayesnet::BaseClassifier* model = Models::get(modelName);
-                result.setModelVersion(model->getVersion());
-                train_timer.start();
-                auto [train, test] = fold->getFold(nfold);
-                auto train_t = torch::tensor(train);
-                auto test_t = torch::tensor(test);
-                auto X_train = Xt.index({ "...", train_t });
-                auto y_train = y.index({ train_t });
-                auto X_test = Xt.index({ "...", test_t });
-                auto y_test = y.index({ test_t });
-                cout << nfold + 1 << ", " << flush;
-                model->fit(X_train, y_train, features, className, states);
-                nodes[item] = model->getNumberOfNodes();
-                edges[item] = model->getNumberOfEdges();
-                num_states[item] = model->getNumberOfStates();
-                train_time[item] = train_timer.getDuration();
-                auto accuracy_train_value = model->score(X_train, y_train);
-                test_timer.start();
-                auto accuracy_test_value = model->score(X_test, y_test);
-                test_time[item] = test_timer.getDuration();
-                accuracy_train[item] = accuracy_train_value;
-                accuracy_test[item] = accuracy_test_value;
-                // Store results and times in vector
-                result.addScoreTrain(accuracy_train_value);
-                result.addScoreTest(accuracy_test_value);
-                result.addTimeTrain(train_time[item].item<double>());
-                result.addTimeTest(test_time[item].item<double>());
-                item++;
-            }
-            delete fold;
-        }
-        cout << "end." << endl;
-        result.setScoreTest(torch::mean(accuracy_test).item<double>()).setScoreTrain(torch::mean(accuracy_train).item<double>());
-        result.setScoreTestStd(torch::std(accuracy_test).item<double>()).setScoreTrainStd(torch::std(accuracy_train).item<double>());
-        result.setTrainTime(torch::mean(train_time).item<double>()).setTestTime(torch::mean(test_time).item<double>());
-        result.setNodes(torch::mean(nodes).item<double>()).setLeaves(torch::mean(edges).item<double>()).setDepth(torch::mean(num_states).item<double>());
-        return result;
-    }
-} // namespace platform
@@ -1,25 +0,0 @@
-#ifndef CROSSVALIDATION_H
-#define CROSSVALIDATION_H
-#include <torch/torch.h>
-#include <nlohmann/json.hpp>
-#include <string>
-#include <chrono>
-#include "Folding.h"
-#include "Datasets.h"
-#include "Experiment.h"
-
-namespace platform {
-    class CrossValidation {
-    private:
-        bool stratified;
-        int nfolds;
-        string modelName;
-        vector<int> randomSeeds;
-        platform::Datasets& datasets;
-    public:
-        CrossValidation(const string& modelName, bool stratified, int nfolds, const vector<int>& randomSeeds, platform::Datasets& datasets);
-        ~CrossValidation() = default;
-        Result crossValidate(const string& fileName);
-    };
-}
-#endif // !CROSSVALIDATION_H
@@ -2,16 +2,6 @@
 #include "platformUtils.h"
 #include "ArffFiles.h"
 namespace platform {
-    vector<string> split(const string& text, char delimiter)
-    {
-        vector<string> result;
-        stringstream ss(text);
-        string token;
-        while (getline(ss, token, delimiter)) {
-            result.push_back(token);
-        }
-        return result;
-    }
     void Datasets::load()
     {
         ifstream catalog(path + "/all.txt");
@@ -34,7 +24,7 @@ namespace platform {
         transform(datasets.begin(), datasets.end(), back_inserter(result), [](const auto& d) { return d.first; });
         return result;
     }
-    vector<string> Datasets::getFeatures(const string& name)
+    vector<string> Datasets::getFeatures(string name)
     {
         if (datasets[name]->isLoaded()) {
             return datasets[name]->getFeatures();
@@ -42,7 +32,7 @@ namespace platform {
             throw invalid_argument("Dataset not loaded.");
         }
     }
-    map<string, vector<int>> Datasets::getStates(const string& name)
+    map<string, vector<int>> Datasets::getStates(string name)
     {
         if (datasets[name]->isLoaded()) {
             return datasets[name]->getStates();
@@ -50,7 +40,7 @@ namespace platform {
             throw invalid_argument("Dataset not loaded.");
         }
     }
-    string Datasets::getClassName(const string& name)
+    string Datasets::getClassName(string name)
     {
         if (datasets[name]->isLoaded()) {
             return datasets[name]->getClassName();
@@ -58,7 +48,7 @@ namespace platform {
             throw invalid_argument("Dataset not loaded.");
         }
     }
-    int Datasets::getNSamples(const string& name)
+    int Datasets::getNSamples(string name)
     {
         if (datasets[name]->isLoaded()) {
             return datasets[name]->getNSamples();
@@ -66,21 +56,21 @@ namespace platform {
             throw invalid_argument("Dataset not loaded.");
         }
     }
-    pair<vector<vector<float>>&, vector<int>&> Datasets::getVectors(const string& name)
+    pair<vector<vector<float>>&, vector<int>&> Datasets::getVectors(string name)
     {
         if (!datasets[name]->isLoaded()) {
             datasets[name]->load();
         }
         return datasets[name]->getVectors();
     }
-    pair<vector<vector<int>>&, vector<int>&> Datasets::getVectorsDiscretized(const string& name)
+    pair<vector<vector<int>>&, vector<int>&> Datasets::getVectorsDiscretized(string name)
     {
         if (!datasets[name]->isLoaded()) {
             datasets[name]->load();
         }
         return datasets[name]->getVectorsDiscretized();
     }
-    pair<torch::Tensor&, torch::Tensor&> Datasets::getTensors(const string& name)
+    pair<torch::Tensor&, torch::Tensor&> Datasets::getTensors(string name)
     {
         if (!datasets[name]->isLoaded()) {
             datasets[name]->load();
@@ -91,7 +81,9 @@ namespace platform {
     {
         return datasets.find(name) != datasets.end();
     }
-    Dataset::Dataset(Dataset& dataset) : path(dataset.path), name(dataset.name), className(dataset.className), n_samples(dataset.n_samples), n_features(dataset.n_features), features(dataset.features), states(dataset.states), loaded(dataset.loaded), discretize(dataset.discretize), X(dataset.X), y(dataset.y), Xv(dataset.Xv), Xd(dataset.Xd), yv(dataset.yv), fileType(dataset.fileType) {}
+    Dataset::Dataset(const Dataset& dataset) : path(dataset.path), name(dataset.name), className(dataset.className), n_samples(dataset.n_samples), n_features(dataset.n_features), features(dataset.features), states(dataset.states), loaded(dataset.loaded), discretize(dataset.discretize), X(dataset.X), y(dataset.y), Xv(dataset.Xv), Xd(dataset.Xd), yv(dataset.yv), fileType(dataset.fileType)
+    {
+    }
     string Dataset::getName()
     {
         return name;
@@ -200,7 +192,7 @@ namespace platform {
     // Get className & Features
     className = arff.getClassName();
     auto attributes = arff.getAttributes();
-    transform(attributes.begin(), attributes.end(), back_inserter(features), [](const auto& f) { return f.first; });
+    transform(attributes.begin(), attributes.end(), back_inserter(features), [](const auto& attribute) { return attribute.first; });
     }
     void Dataset::load()
     {
@@ -13,7 +13,7 @@ namespace platform {
         string name;
         fileType_t fileType;
         string className;
-        int n_samples, n_features;
+        int n_samples{ 0 }, n_features{ 0 };
         vector<string> features;
         map<string, vector<int>> states;
         bool loaded;
@@ -27,8 +27,8 @@ namespace platform {
         void load_arff();
         void computeStates();
     public:
-        Dataset(const string& path, const string& name, const string& className, bool discretize, fileType_t fileType) : path(path), name(name), className(className), discretize(discretize), loaded(false), fileType(fileType), n_samples(0), n_features(0) {};
-        explicit Dataset(Dataset&);
+        Dataset(const string& path, const string& name, const string& className, bool discretize, fileType_t fileType) : path(path), name(name), className(className), discretize(discretize), loaded(false), fileType(fileType) {};
+        explicit Dataset(const Dataset&);
         string getName();
         string getClassName();
         vector<string> getFeatures();
@@ -51,16 +51,15 @@ namespace platform {
     public:
         explicit Datasets(const string& path, bool discretize = false, fileType_t fileType = ARFF) : path(path), discretize(discretize), fileType(fileType) { load(); };
         vector<string> getNames();
-        vector<string> getFeatures(const string& name);
-        int getNSamples(const string& name);
-        string getClassName(const string& name);
-        map<string, vector<int>> getStates(const string& name);
-        pair<vector<vector<float>>&, vector<int>&> getVectors(const string& name);
-        pair<vector<vector<int>>&, vector<int>&> getVectorsDiscretized(const string& name);
-        pair<torch::Tensor&, torch::Tensor&> getTensors(const string& name);
+        vector<string> getFeatures(string name);
+        int getNSamples(string name);
+        string getClassName(string name);
+        map<string, vector<int>> getStates(string name);
+        pair<vector<vector<float>>&, vector<int>&> getVectors(string name);
+        pair<vector<vector<int>>&, vector<int>&> getVectorsDiscretized(string name);
+        pair<torch::Tensor&, torch::Tensor&> getTensors(string name);
         bool isDataset(const string& name);
     };
-    vector<string> split(const string&, char);
 };
 
 #endif
@@ -4,64 +4,59 @@
 #include <map>
 #include <fstream>
 #include <sstream>
-std::vector<std::string> split(std::string text, char delimiter)
-{
-    std::vector<std::string> result;
-    std::stringstream ss(text);
-    std::string token;
-    while (getline(ss, token, delimiter)) {
-        result.push_back(token);
-    }
-    return result;
+#include "platformUtils.h"
+namespace platform {
+    class DotEnv {
+    private:
+        std::map<std::string, std::string> env;
+        std::string trim(const std::string& str)
+        {
+            std::string result = str;
+            result.erase(result.begin(), std::find_if(result.begin(), result.end(), [](int ch) {
+                return !std::isspace(ch);
+                }));
+            result.erase(std::find_if(result.rbegin(), result.rend(), [](int ch) {
+                return !std::isspace(ch);
+                }).base(), result.end());
+            return result;
+        }
+    public:
+        DotEnv()
+        {
+            std::ifstream file(".env");
+            if (!file.is_open()) {
+                std::cerr << "File .env not found" << std::endl;
+                exit(1);
+            }
+            std::string line;
+            while (std::getline(file, line)) {
+                line = trim(line);
+                if (line.empty() || line[0] == '#') {
+                    continue;
+                }
+                std::istringstream iss(line);
+                std::string key, value;
+                if (std::getline(iss, key, '=') && std::getline(iss, value)) {
+                    env[key] = value;
+                }
+            }
+        }
+        std::string get(const std::string& key)
+        {
+            return env[key];
+        }
+        std::vector<int> getSeeds()
+        {
+            auto seeds = std::vector<int>();
+            auto seeds_str = env["seeds"];
+            seeds_str = trim(seeds_str);
+            seeds_str = seeds_str.substr(1, seeds_str.size() - 2);
+            auto seeds_str_split = split(seeds_str, ',');
+            transform(seeds_str_split.begin(), seeds_str_split.end(), back_inserter(seeds), [](const std::string& str) {
+                return stoi(str);
+                });
+            return seeds;
+        }
+    };
 }
-class DotEnv {
-private:
-    std::map<std::string, std::string> env;
-    std::string trim(const std::string& str)
-    {
-        std::string result = str;
-        result.erase(result.begin(), std::find_if(result.begin(), result.end(), [](int ch) {
-            return !std::isspace(ch);
-            }));
-        result.erase(std::find_if(result.rbegin(), result.rend(), [](int ch) {
-            return !std::isspace(ch);
-            }).base(), result.end());
-        return result;
-    }
-public:
-    DotEnv()
-    {
-        std::ifstream file(".env");
-        if (!file.is_open()) {
-            std::cerr << "File .env not found" << std::endl;
-            exit(1);
-        }
-        std::string line;
-        while (std::getline(file, line)) {
-            line = trim(line);
-            if (line.empty() || line[0] == '#') {
-                continue;
-            }
-            std::istringstream iss(line);
-            std::string key, value;
-            if (std::getline(iss, key, '=') && std::getline(iss, value)) {
-                env[key] = value;
-            }
-        }
-    }
-    std::string get(const std::string& key)
-    {
-        return env[key];
-    }
-    std::vector<int> getSeeds()
-    {
-        auto seeds = std::vector<int>();
-        auto seeds_str = env["seeds"];
-        seeds_str = trim(seeds_str);
-        seeds_str = seeds_str.substr(1, seeds_str.size() - 2);
-        auto seeds_str_split = split(seeds_str, ',');
-        transform(seeds_str_split.begin(), seeds_str_split.end(), back_inserter(seeds), [](const auto& s) { return stoi(s); });
-        return seeds;
-    }
-};
 #endif
@@ -1,4 +1,6 @@
 #include "Experiment.h"
+#include "Datasets.h"
+#include "Models.h"
 
 namespace platform {
     using json = nlohmann::json;
@@ -43,7 +45,7 @@ namespace platform {
     result["discretized"] = discretized;
     result["stratified"] = stratified;
     result["folds"] = nfolds;
-    result["seeds"] = random_seeds;
+    result["seeds"] = randomSeeds;
     result["duration"] = duration;
     result["results"] = json::array();
     for (const auto& r : results) {
@@ -83,10 +85,92 @@ namespace platform {
         file << data;
         file.close();
     }
 
     void Experiment::show()
     {
         json data = build_json();
         cout << data.dump(4) << endl;
     }
+
+    void Experiment::go(vector<string> filesToProcess, const string& path)
+    {
+        cout << "*** Starting experiment: " << title << " ***" << endl;
+        for (auto fileName : filesToProcess) {
+            cout << "- " << setw(20) << left << fileName << " " << right << flush;
+            cross_validation(path, fileName);
+            cout << endl;
+        }
+    }
+
+    void Experiment::cross_validation(const string& path, const string& fileName)
+    {
+        auto datasets = platform::Datasets(path, true, platform::ARFF);
+        // Get dataset
+        auto [X, y] = datasets.getTensors(fileName);
+        auto states = datasets.getStates(fileName);
+        auto features = datasets.getFeatures(fileName);
+        auto samples = datasets.getNSamples(fileName);
+        auto className = datasets.getClassName(fileName);
+        cout << " (" << setw(5) << samples << "," << setw(3) << features.size() << ") " << flush;
+        // Prepare Result
+        auto result = Result();
+        auto [values, counts] = at::_unique(y);;
+        result.setSamples(X.size(1)).setFeatures(X.size(0)).setClasses(values.size(0));
+        int nResults = nfolds * static_cast<int>(randomSeeds.size());
+        auto accuracy_test = torch::zeros({ nResults }, torch::kFloat64);
+        auto accuracy_train = torch::zeros({ nResults }, torch::kFloat64);
+        auto train_time = torch::zeros({ nResults }, torch::kFloat64);
+        auto test_time = torch::zeros({ nResults }, torch::kFloat64);
+        auto nodes = torch::zeros({ nResults }, torch::kFloat64);
+        auto edges = torch::zeros({ nResults }, torch::kFloat64);
+        auto num_states = torch::zeros({ nResults }, torch::kFloat64);
+        Timer train_timer, test_timer;
+        int item = 0;
+        for (auto seed : randomSeeds) {
+            cout << "(" << seed << ") doing Fold: " << flush;
+            Fold* fold;
+            if (stratified)
+                fold = new StratifiedKFold(nfolds, y, seed);
+            else
+                fold = new KFold(nfolds, y.size(0), seed);
+            for (int nfold = 0; nfold < nfolds; nfold++) {
+                auto clf = Models::instance()->create(model);
+                setModelVersion(clf->getVersion());
+                train_timer.start();
+                auto [train, test] = fold->getFold(nfold);
+                auto train_t = torch::tensor(train);
+                auto test_t = torch::tensor(test);
+                auto X_train = X.index({ "...", train_t });
+                auto y_train = y.index({ train_t });
+                auto X_test = X.index({ "...", test_t });
+                auto y_test = y.index({ test_t });
+                cout << nfold + 1 << ", " << flush;
+                clf->fit(X_train, y_train, features, className, states);
+                nodes[item] = clf->getNumberOfNodes();
+                edges[item] = clf->getNumberOfEdges();
+                num_states[item] = clf->getNumberOfStates();
+                train_time[item] = train_timer.getDuration();
+                auto accuracy_train_value = clf->score(X_train, y_train);
+                test_timer.start();
+                auto accuracy_test_value = clf->score(X_test, y_test);
+                test_time[item] = test_timer.getDuration();
+                accuracy_train[item] = accuracy_train_value;
+                accuracy_test[item] = accuracy_test_value;
+                // Store results and times in vector
+                result.addScoreTrain(accuracy_train_value);
+                result.addScoreTest(accuracy_test_value);
+                result.addTimeTrain(train_time[item].item<double>());
+                result.addTimeTest(test_time[item].item<double>());
+                item++;
+            }
+            cout << "end. " << flush;
+            delete fold;
+        }
+        result.setScoreTest(torch::mean(accuracy_test).item<double>()).setScoreTrain(torch::mean(accuracy_train).item<double>());
+        result.setScoreTestStd(torch::std(accuracy_test).item<double>()).setScoreTrainStd(torch::std(accuracy_train).item<double>());
+        result.setTrainTime(torch::mean(train_time).item<double>()).setTestTime(torch::mean(test_time).item<double>());
+        result.setNodes(torch::mean(nodes).item<double>()).setLeaves(torch::mean(edges).item<double>()).setDepth(torch::mean(num_states).item<double>());
+        result.setDataset(fileName);
+        addResult(result);
+    }
 }
@@ -4,6 +4,11 @@
 #include <nlohmann/json.hpp>
 #include <string>
 #include <chrono>
+#include "Folding.h"
+#include "BaseClassifier.h"
+#include "TAN.h"
+#include "KDB.h"
+#include "AODE.h"
 
 using namespace std;
 namespace platform {
@@ -24,34 +29,33 @@ namespace platform {
     };
     class Result {
     private:
-        string dataset = "", hyperparameters = "", model_version = "";
+        string dataset, hyperparameters, model_version;
         int samples{ 0 }, features{ 0 }, classes{ 0 };
-        double score_train{ 0 }, score_test = 0, score_train_std = 0, score_test_std = 0, train_time = 0, train_time_std = 0, test_time = 0, test_time_std = 0;
-        vector<double> scores_train{}, scores_test{}, times_train{}, times_test{};
+        double score_train{ 0 }, score_test{ 0 }, score_train_std{ 0 }, score_test_std{ 0 }, train_time{ 0 }, train_time_std{ 0 }, test_time{ 0 }, test_time_std{ 0 };
         float nodes{ 0 }, leaves{ 0 }, depth{ 0 };
+        vector<double> scores_train, scores_test, times_train, times_test;
     public:
         Result() = default;
         Result& setDataset(const string& dataset) { this->dataset = dataset; return *this; }
         Result& setHyperparameters(const string& hyperparameters) { this->hyperparameters = hyperparameters; return *this; }
-        Result& setSamples(const int samples) { this->samples = samples; return *this; }
-        Result& setFeatures(const int features) { this->features = features; return *this; }
-        Result& setClasses(const int classes) { this->classes = classes; return *this; }
-        Result& setScoreTrain(const double score) { this->score_train = score; return *this; }
-        Result& setScoreTest(const double score) { this->score_test = score; return *this; }
-        Result& setScoreTrainStd(const double score_std) { this->score_train_std = score_std; return *this; }
-        Result& setScoreTestStd(const double score_std) { this->score_test_std = score_std; return *this; }
-        Result& setTrainTime(const double train_time) { this->train_time = train_time; return *this; }
-        Result& setTrainTimeStd(const double train_time_std) { this->train_time_std = train_time_std; return *this; }
-        Result& setTestTime(const double test_time) { this->test_time = test_time; return *this; }
-        Result& setTestTimeStd(const double test_time_std) { this->test_time_std = test_time_std; return *this; }
-        Result& setNodes(const float nodes) { this->nodes = nodes; return *this; }
-        Result& setLeaves(const float leaves) { this->leaves = leaves; return *this; }
-        Result& setDepth(const float depth) { this->depth = depth; return *this; }
-        Result& setModelVersion(const string& model_version) { this->model_version = model_version; return *this; }
-        Result& addScoreTrain(const double score) { scores_train.push_back(score); return *this; }
-        Result& addScoreTest(const double score) { scores_test.push_back(score); return *this; }
-        Result& addTimeTrain(const double time) { times_train.push_back(time); return *this; }
-        Result& addTimeTest(const double time) { times_test.push_back(time); return *this; }
+        Result& setSamples(int samples) { this->samples = samples; return *this; }
+        Result& setFeatures(int features) { this->features = features; return *this; }
+        Result& setClasses(int classes) { this->classes = classes; return *this; }
+        Result& setScoreTrain(double score) { this->score_train = score; return *this; }
+        Result& setScoreTest(double score) { this->score_test = score; return *this; }
+        Result& setScoreTrainStd(double score_std) { this->score_train_std = score_std; return *this; }
+        Result& setScoreTestStd(double score_std) { this->score_test_std = score_std; return *this; }
+        Result& setTrainTime(double train_time) { this->train_time = train_time; return *this; }
+        Result& setTrainTimeStd(double train_time_std) { this->train_time_std = train_time_std; return *this; }
+        Result& setTestTime(double test_time) { this->test_time = test_time; return *this; }
+        Result& setTestTimeStd(double test_time_std) { this->test_time_std = test_time_std; return *this; }
+        Result& setNodes(float nodes) { this->nodes = nodes; return *this; }
+        Result& setLeaves(float leaves) { this->leaves = leaves; return *this; }
+        Result& setDepth(float depth) { this->depth = depth; return *this; }
+        Result& addScoreTrain(double score) { scores_train.push_back(score); return *this; }
+        Result& addScoreTest(double score) { scores_test.push_back(score); return *this; }
+        Result& addTimeTrain(double time) { times_train.push_back(time); return *this; }
+        Result& addTimeTest(double time) { times_test.push_back(time); return *this; }
         const float get_score_train() const { return score_train; }
         float get_score_test() { return score_test; }
         const string& getDataset() const { return dataset; }
@@ -74,16 +78,15 @@ namespace platform {
         const vector<double>& getScoresTest() const { return scores_test; }
         const vector<double>& getTimesTrain() const { return times_train; }
         const vector<double>& getTimesTest() const { return times_test; }
-        const string& getModelVersion() const { return model_version; }
     };
     class Experiment {
     private:
-        string title{""}, model{""}, platform{""}, score_name{""}, model_version{""}, language_version{""}, language{""};
-        bool discretized{false}, stratified{false};
+        string title, model, platform, score_name, model_version, language_version, language;
+        bool discretized{ false }, stratified{ false };
         vector<Result> results;
-        vector<int> random_seeds;
-        int nfolds{0};
-        float duration{0};
+        vector<int> randomSeeds;
+        int nfolds{ 0 };
+        float duration{ 0 };
         json build_json();
     public:
         Experiment() = default;
@@ -94,14 +97,16 @@ namespace platform {
         Experiment& setModelVersion(const string& model_version) { this->model_version = model_version; return *this; }
         Experiment& setLanguage(const string& language) { this->language = language; return *this; }
         Experiment& setLanguageVersion(const string& language_version) { this->language_version = language_version; return *this; }
-        Experiment& setDiscretized(const bool discretized) { this->discretized = discretized; return *this; }
-        Experiment& setStratified(const bool stratified) { this->stratified = stratified; return *this; }
-        Experiment& setNFolds(const int nfolds) { this->nfolds = nfolds; return *this; }
+        Experiment& setDiscretized(bool discretized) { this->discretized = discretized; return *this; }
+        Experiment& setStratified(bool stratified) { this->stratified = stratified; return *this; }
+        Experiment& setNFolds(int nfolds) { this->nfolds = nfolds; return *this; }
         Experiment& addResult(Result result) { results.push_back(result); return *this; }
-        Experiment& addRandomSeed(const int random_seed) { random_seeds.push_back(random_seed); return *this; }
-        Experiment& setDuration(const float duration) { this->duration = duration; return *this; }
+        Experiment& addRandomSeed(int randomSeed) { randomSeeds.push_back(randomSeed); return *this; }
+        Experiment& setDuration(float duration) { this->duration = duration; return *this; }
         string get_file_name();
         void save(const string& path);
+        void cross_validation(const string& path, const string& fileName);
+        void go(vector<string> filesToProcess, const string& path);
         void show();
     };
 }
@@ -7,7 +7,7 @@ Fold::Fold(int k, int n, int seed) : k(k), n(n), seed(seed)
     random_seed = default_random_engine(seed == -1 ? rd() : seed);
     srand(seed == -1 ? time(0) : seed);
 }
-KFold::KFold(int k, int n, int seed) : Fold(k, n, seed), indices(vector<int>())
+KFold::KFold(int k, int n, int seed) : Fold(k, n, seed), indices(vector<int>(n))
 {
     iota(begin(indices), end(indices), 0); // fill with 0, 1, ..., n - 1
     shuffle(indices.begin(), indices.end(), random_seed);
@@ -1,8 +1,54 @@
 #include "Models.h"
 namespace platform {
     using namespace std;
-    map<string, bayesnet::BaseClassifier*> Models::classifiers = map<string, bayesnet::BaseClassifier*>({
-        { "AODE", new bayesnet::AODE() }, { "KDB", new bayesnet::KDB(2) },
-        { "SPODE", new bayesnet::SPODE(2) }, { "TAN", new bayesnet::TAN() }
-        });
-}
+    // Idea from: https://www.codeproject.com/Articles/567242/AplusC-2b-2bplusObjectplusFactory
+    Models* Models::factory = nullptr;
+    Models* Models::instance()
+    {
+        // manages the singleton
+        if (factory == nullptr)
+            factory = new Models();
+        return factory;
+    }
+    void Models::registerFactoryFunction(const string& name,
+        function<bayesnet::BaseClassifier* (void)> classFactoryFunction)
+    {
+        // register the class factory function
+        functionRegistry[name] = classFactoryFunction;
+    }
+    shared_ptr<bayesnet::BaseClassifier> Models::create(const string& name)
+    {
+        bayesnet::BaseClassifier* instance = nullptr;
+
+        // find name in the registry and call the factory method
+        auto it = functionRegistry.find(name);
+        if (it != functionRegistry.end())
+            instance = it->second();
+        // wrap the instance in a shared_ptr and return it
+        if (instance != nullptr)
+            return shared_ptr<bayesnet::BaseClassifier>(instance);
+        else
+            return nullptr;
+    }
+    vector<string> Models::getNames()
+    {
+        vector<string> names;
+        transform(functionRegistry.begin(), functionRegistry.end(), back_inserter(names),
+            [](const pair<string, function<bayesnet::BaseClassifier* (void)>>& pair) { return pair.first; });
+        return names;
+    }
+    string Models::toString()
+    {
+        string result = "";
+        for (const auto& pair : functionRegistry) {
+            result += pair.first + ", ";
+        }
+        return "{" + result.substr(0, result.size() - 2) + "}";
+    }
+
+    Registrar::Registrar(const string& name, function<bayesnet::BaseClassifier* (void)> classFactoryFunction)
+    {
+        // register the class factory function
+        Models::instance()->registerFactoryFunction(name, classFactoryFunction);
+    }
+}
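
With this change the model list is no longer a hard-coded map: each classifier registers a factory lambda under a name, and create looks the name up and returns the product wrapped in a shared_ptr (or nullptr if the name is unknown). A minimal round-trip sketch in isolation, assuming the classifier headers are available; this snippet is illustrative, not part of the diff:

    auto models = platform::Models::instance();
    models->registerFactoryFunction("TAN",
        [](void) -> bayesnet::BaseClassifier* { return new bayesnet::TAN(); });
    shared_ptr<bayesnet::BaseClassifier> clf = models->create("TAN");     // non-null
    shared_ptr<bayesnet::BaseClassifier> missing = models->create("XYZ"); // nullptr
    cout << models->toString() << endl; // e.g. "{TAN}" if only TAN is registered
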
@@ -9,25 +9,24 @@
 namespace platform {
     class Models {
     private:
-        static map<string, bayesnet::BaseClassifier*> classifiers;
+        map<string, function<bayesnet::BaseClassifier* (void)>> functionRegistry;
+        static Models* factory; // singleton
+        Models() {};
     public:
-        static bayesnet::BaseClassifier* get(string name) { return classifiers[name]; }
-        static vector<string> getNames()
-        {
-            vector<string> names;
-            for (auto& [name, classifier] : classifiers) {
-                names.push_back(name);
-            }
-            return names;
-        }
-        static string toString()
-        {
-            string names = "";
-            for (auto& [name, classifier] : classifiers) {
-                names += name + ", ";
-            }
-            return "{" + names.substr(0, names.size() - 2) + "}";
-        }
+        Models(Models&) = delete;
+        void operator=(const Models&) = delete;
+        // Idea from: https://www.codeproject.com/Articles/567242/AplusC-2b-2bplusObjectplusFactory
+        static Models* instance();
+        shared_ptr<bayesnet::BaseClassifier> create(const string& name);
+        void registerFactoryFunction(const string& name,
+            function<bayesnet::BaseClassifier* (void)> classFactoryFunction);
+        vector<string> getNames();
+        string toString();
+    };
+    class Registrar {
+    public:
+        Registrar(const string& className, function<bayesnet::BaseClassifier* (void)> classFactoryFunction);
     };
 }
 #endif
@@ -4,9 +4,8 @@
 #include "Experiment.h"
 #include "Datasets.h"
 #include "DotEnv.h"
-#include "CrossValidation.h"
 #include "Models.h"
+#include "modelRegister.h"

 using namespace std;
 const string PATH_RESULTS = "results";
@@ -14,7 +13,7 @@ const string PATH_DATASETS = "datasets";

 argparse::ArgumentParser manageArguments(int argc, char** argv)
 {
-    auto env = DotEnv();
+    auto env = platform::DotEnv();
     argparse::ArgumentParser program("BayesNetSample");
     program.add_argument("-d", "--dataset").default_value("").help("Dataset file name");
     program.add_argument("-p", "--path")
@@ -22,13 +21,13 @@ argparse::ArgumentParser manageArguments(int argc, char** argv)
         .default_value(string{ PATH_DATASETS }
     );
     program.add_argument("-m", "--model")
-        .help("Model to use " + platform::Models::toString())
+        .help("Model to use " + platform::Models::instance()->toString())
         .action([](const std::string& value) {
-            static const vector<string> choices = platform::Models::getNames();
+            static const vector<string> choices = platform::Models::instance()->getNames();
             if (find(choices.begin(), choices.end(), value) != choices.end()) {
                 return value;
             }
-            throw runtime_error("Model must be one of " + platform::Models::toString());
+            throw runtime_error("Model must be one of " + platform::Models::instance()->toString());
         }
     );
     program.add_argument("--title").default_value("").help("Experiment title");
@@ -84,7 +83,7 @@ int main(int argc, char** argv)
     auto stratified = program.get<bool>("stratified");
     auto n_folds = program.get<int>("folds");
     auto seeds = program.get<vector<int>>("seeds");
-    vector<string> filesToProcess;
+    vector<string> filesToTest;
     auto datasets = platform::Datasets(path, true, platform::ARFF);
     auto title = program.get<string>("title");
     if (file_name != "") {
@@ -95,10 +94,10 @@ int main(int argc, char** argv)
         if (title == "") {
             title = "Test " + file_name + " " + model_name + " " + to_string(n_folds) + " folds";
         }
-        filesToProcess.push_back(file_name);
+        filesToTest.push_back(file_name);
     } else {
-        filesToProcess = platform::Datasets(path, true, platform::ARFF).getNames();
-        saveResults = true; // Only save results if all datasets are processed
+        filesToTest = platform::Datasets(path, true, platform::ARFF).getNames();
+        saveResults = true;
     }

     /*
@@ -112,20 +111,8 @@ int main(int argc, char** argv)
         experiment.addRandomSeed(seed);
     }
     platform::Timer timer;
-    cout << "*** Starting experiment: " << title << " ***" << endl;
     timer.start();
-    auto validation = platform::CrossValidation(model_name, stratified, n_folds, seeds, datasets);
-    for (auto fileName : filesToProcess) {
-        cout << "- " << setw(20) << left << fileName << " " << right << flush;
-        auto [X, y] = datasets.getTensors(fileName);
-        auto features = datasets.getFeatures(fileName);
-        auto samples = datasets.getNSamples(fileName);
-        cout << " (" << setw(5) << samples << "," << setw(3) << features.size() << ") " << flush;
-        auto result = validation.crossValidate(fileName);
-        result.setDataset(fileName);
-        experiment.setModelVersion(result.getModelVersion());
-        experiment.addResult(result);
-    }
+    experiment.go(filesToTest, path);
     experiment.setDuration(timer.getDuration());
     if (saveResults)
         experiment.save(PATH_RESULTS);
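
The per-dataset cross-validation loop no longer lives in main; together with the go and cross_validation declarations added to Experiment, the driver shrinks to a single call. A plausible shape of Experiment::go, inferred from the loop deleted above (the actual body is not part of this diff):

    void Experiment::go(vector<string> filesToProcess, const string& path)
    {
        for (auto fileName : filesToProcess) {
            cross_validation(path, fileName);
        }
    }
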
|
11
src/Platform/modelRegister.h
Normal file
11
src/Platform/modelRegister.h
Normal file
@@ -0,0 +1,11 @@
|
|||||||
|
#ifndef MODEL_REGISTER_H
|
||||||
|
#define MODEL_REGISTER_H
|
||||||
|
static platform::Registrar registrarT("TAN",
|
||||||
|
[](void) -> bayesnet::BaseClassifier* { return new bayesnet::TAN();});
|
||||||
|
static platform::Registrar registrarS("SPODE",
|
||||||
|
[](void) -> bayesnet::BaseClassifier* { return new bayesnet::SPODE(2);});
|
||||||
|
static platform::Registrar registrarK("KDB",
|
||||||
|
[](void) -> bayesnet::BaseClassifier* { return new bayesnet::KDB(2);});
|
||||||
|
static platform::Registrar registrarA("AODE",
|
||||||
|
[](void) -> bayesnet::BaseClassifier* { return new bayesnet::AODE();});
|
||||||
|
#endif
|
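
Each static platform::Registrar here is constructed before main runs, and its constructor calls Models::instance()->registerFactoryFunction(...), so merely including this header populates the registry with the four classifiers. Adding a model becomes a two-line edit; a sketch with a placeholder class (MyModel is hypothetical, not a real classifier in this repo):

    static platform::Registrar registrarM("MyModel",
        [](void) -> bayesnet::BaseClassifier* { return new bayesnet::MyModel(); });
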
@@ -2,6 +2,17 @@

 using namespace torch;

+vector<string> split(const string& text, char delimiter)
+{
+    vector<string> result;
+    stringstream ss(text);
+    string token;
+    while (getline(ss, token, delimiter)) {
+        result.push_back(token);
+    }
+    return result;
+}
+
 pair<vector<mdlp::labels_t>, map<string, int>> discretize(vector<mdlp::samples_t>& X, mdlp::labels_t& y, vector<string> features)
 {
     vector<mdlp::labels_t> Xd;
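
The new split helper tokenizes a string on a single-character delimiter via getline over a stringstream; note that consecutive delimiters produce empty tokens. A self-contained usage sketch (the function body is copied from the hunk above):

    #include <iostream>
    #include <sstream>
    #include <string>
    #include <vector>
    using namespace std;

    vector<string> split(const string& text, char delimiter)
    {
        vector<string> result;
        stringstream ss(text);
        string token;
        while (getline(ss, token, delimiter)) {
            result.push_back(token);
        }
        return result;
    }

    int main()
    {
        for (const auto& token : split("271,314,171", ','))
            cout << token << endl; // prints 271, 314, 171 on separate lines
    }
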
@@ -28,7 +39,7 @@ vector<mdlp::labels_t> discretizeDataset(vector<mdlp::samples_t>& X, mdlp::label
     return Xd;
 }

-bool file_exists(const std::string& name)
+bool file_exists(const string& name)
 {
     if (FILE* file = fopen(name.c_str(), "r")) {
         fclose(file);
@@ -49,7 +60,7 @@ tuple<Tensor, Tensor, vector<string>, string, map<string, vector<int>>> loadData
     auto className = handler.getClassName();
     vector<string> features;
     auto attributes = handler.getAttributes();
-    transform(attributes.begin(), attributes.end(), back_inserter(features), [](const auto& f) { return f.first; });
+    transform(attributes.begin(), attributes.end(), back_inserter(features), [](const auto& pair) { return pair.first; });
     Tensor Xd;
     auto states = map<string, vector<int>>();
     if (discretize_dataset) {
@@ -82,7 +93,7 @@ tuple<vector<vector<int>>, vector<int>, vector<string>, string, map<string, vect
     auto className = handler.getClassName();
     vector<string> features;
     auto attributes = handler.getAttributes();
-    transform(attributes.begin(), attributes.end(), back_inserter(features), [](const auto& f) { return f.first; });
+    transform(attributes.begin(), attributes.end(), back_inserter(features), [](const auto& pair) { return pair.first; });
     // Discretize Dataset
     vector<mdlp::labels_t> Xd;
     map<string, int> maxes;
@@ -11,9 +11,10 @@ using namespace std;
 const string PATH = "../../data/";

 bool file_exists(const std::string& name);
+vector<string> split(const string& text, char delimiter);
 pair<vector<mdlp::labels_t>, map<string, int>> discretize(vector<mdlp::samples_t>& X, mdlp::labels_t& y, vector<string> features);
 vector<mdlp::labels_t> discretizeDataset(vector<mdlp::samples_t>& X, mdlp::labels_t& y);
-// pair<torch::Tensor, map<string, vector<int>>> discretizeTorch(torch::Tensor& X, torch::Tensor& y, vector<string>& features, const string& className);
+pair<torch::Tensor, map<string, vector<int>>> discretizeTorch(torch::Tensor& X, torch::Tensor& y, vector<string>& features, const string& className);
 tuple<vector<vector<int>>, vector<int>, vector<string>, string, map<string, vector<int>>> loadFile(const string& name);
 tuple<torch::Tensor, torch::Tensor, vector<string>, string, map<string, vector<int>>> loadDataset(const string& path, const string& name, bool class_last, bool discretize_dataset);
 map<string, vector<int>> get_states(vector<string>& features, string className, map<string, int>& maxes);