Refactor library to include Platform/Experiments
parent 2f5bd0ea7e
commit 5f70449091
@@ -37,9 +37,14 @@ set(CMAKE_MODULE_PATH ${CMAKE_CURRENT_SOURCE_DIR}/cmake/modules ${CMAKE_MODULE_P
# Subdirectories
# --------------
add_subdirectory(config)
add_subdirectory(src)
add_subdirectory(${BayesNet_SOURCE_DIR}/src/BayesNet)
add_subdirectory(${BayesNet_SOURCE_DIR}/src/Platform)
add_subdirectory(sample)

file(GLOB BayesNet_HEADERS CONFIGURE_DEPENDS ${BayesNet_SOURCE_DIR}/src/BayesNet/*.h ${BayesNet_SOURCE_DIR}/BayesNet/*.hpp)
file(GLOB BayesNet_SOURCES CONFIGURE_DEPENDS ${BayesNet_SOURCE_DIR}/src/BayesNet/*.cc ${BayesNet_SOURCE_DIR}/src/BayesNet/*.cpp)
file(GLOB Platform_SOURCES CONFIGURE_DEPENDS ${BayesNet_SOURCE_DIR}/src/Platform/*.cc ${BayesNet_SOURCE_DIR}/src/Platform/*.cpp)

# Testing
# -------
if (ENABLE_TESTING)
@@ -1,3 +1,4 @@
include_directories(${BayesNet_SOURCE_DIR}/src)
add_executable(main main.cc ArffFiles.cc CPPFImdlp.cpp Metrics.cpp)
target_link_libraries(main BayesNet "${TORCH_LIBRARIES}")
include_directories(${BayesNet_SOURCE_DIR}/src/Platform)
include_directories(${BayesNet_SOURCE_DIR}/src/BayesNet)
add_executable(sample sample.cc ${BayesNet_SOURCE_DIR}/src/Platform/ArffFiles.cc ${BayesNet_SOURCE_DIR}/src/Platform/CPPFImdlp.cpp ${BayesNet_SOURCE_DIR}/src/Platform/Metrics.cpp ${BayesNet_SOURCE_DIR}/src/Platform/typesFImdlp.h ${BayesNet_HEADERS})
target_link_libraries(sample BayesNet "${TORCH_LIBRARIES}")
@@ -1,5 +1,5 @@
#include "BaseClassifier.h"
#include "utils.h"
#include "bayesnetUtils.h"

namespace bayesnet {
    using namespace std;
src/BayesNet/CMakeLists.txt (Normal file, 2 lines)
@@ -0,0 +1,2 @@
add_library(BayesNet bayesnetUtils.cc Network.cc Node.cc Metrics.cc BaseClassifier.cc KDB.cc TAN.cc SPODE.cc Ensemble.cc AODE.cc Mst.cc)
target_link_libraries(BayesNet "${TORCH_LIBRARIES}")
@@ -3,7 +3,7 @@
#include <torch/torch.h>
#include "BaseClassifier.h"
#include "Metrics.hpp"
#include "utils.h"
#include "bayesnetUtils.h"
using namespace std;
using namespace torch;
@@ -1,7 +1,7 @@
#ifndef KDB_H
#define KDB_H
#include "BaseClassifier.h"
#include "utils.h"
#include "bayesnetUtils.h"
namespace bayesnet {
    using namespace std;
    using namespace torch;
@@ -1,5 +1,5 @@
#include <torch/torch.h>
#include <vector>

#include "bayesnetUtils.h"
namespace bayesnet {
    using namespace std;
    using namespace torch;
@@ -1,8 +1,11 @@
#ifndef BAYESNET_UTILS_H
#define BAYESNET_UTILS_H
#include <torch/torch.h>
#include <vector>
namespace bayesnet {
    using namespace std;
    using namespace torch;
    vector<int> argsort(vector<float>& nums);

    vector<vector<int>> tensorToVector(const Tensor& tensor);

}
}
#endif //BAYESNET_UTILS_H
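The header above only declares argsort and tensorToVector; the matching bayesnetUtils.cc is not part of this diff. The following is a hedged sketch of what plausible implementations could look like (the sort order and the assumption of a 2-D integer tensor are guesses, not taken from the project):

// Illustrative sketch only; not part of this commit. The real bayesnetUtils.cc may differ.
#include <algorithm>
#include <numeric>
#include "bayesnetUtils.h"
namespace bayesnet {
    using namespace std;
    using namespace torch;
    // Indices that would sort nums (descending here; ascending is equally plausible).
    vector<int> argsort(vector<float>& nums)
    {
        vector<int> indices(nums.size());
        iota(indices.begin(), indices.end(), 0);
        sort(indices.begin(), indices.end(),
            [&nums](int a, int b) { return nums[a] > nums[b]; });
        return indices;
    }
    // Copy a 2-D integer tensor into nested vectors, element by element.
    vector<vector<int>> tensorToVector(const Tensor& tensor)
    {
        vector<vector<int>> result;
        for (int i = 0; i < tensor.size(0); ++i) {
            vector<int> row;
            for (int j = 0; j < tensor.size(1); ++j) {
                row.push_back(tensor[i][j].item<int>());
            }
            result.push_back(row);
        }
        return result;
    }
}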
@@ -1,2 +0,0 @@
add_library(BayesNet utils.cc Network.cc Node.cc Metrics.cc BaseClassifier.cc KDB.cc TAN.cc SPODE.cc Ensemble.cc AODE.cc Mst.cc)
target_link_libraries(BayesNet "${TORCH_LIBRARIES}")
src/Platform/CMakeLists.txt (Normal file, 4 lines)
@@ -0,0 +1,4 @@
include_directories(${BayesNet_SOURCE_DIR}/src/BayesNet)
include_directories(${BayesNet_SOURCE_DIR}/src/Platform)
add_executable(main Experiment.cc ArffFiles.cc CPPFImdlp.cpp Metrics.cpp platformUtils.cc)
target_link_libraries(main BayesNet "${TORCH_LIBRARIES}")
src/Platform/Experiment.cc (Normal file, 201 lines)
@@ -0,0 +1,201 @@
#include <iostream>
#include <string>
#include <torch/torch.h>
#include <thread>
#include <getopt.h>
#include "ArffFiles.h"
#include "Network.h"
#include "Metrics.hpp"
#include "CPPFImdlp.h"
#include "KDB.h"
#include "SPODE.h"
#include "AODE.h"
#include "TAN.h"
#include "platformUtils.h"


using namespace std;

/* print a description of all supported options */
void usage(const char* path)
{
    /* take only the last portion of the path */
    const char* basename = strrchr(path, '/');
    basename = basename ? basename + 1 : path;

    cout << "usage: " << basename << "[OPTION]" << endl;
    cout << " -h, --help\t\t Print this help and exit." << endl;
    cout
        << " -f, --file[=FILENAME]\t {diabetes, glass, iris, kdd_JapaneseVowels, letter, liver-disorders, mfeat-factors}."
        << endl;
    cout << " -p, --path[=FILENAME]\t folder where the data files are located, default " << PATH << endl;
    cout << " -m, --model={AODE, KDB, SPODE, TAN}\t " << endl;
}

tuple<string, string, string> parse_arguments(int argc, char** argv)
{
    string file_name;
    string model_name;
    string path = PATH;
    const vector<struct option> long_options = {
        {"help", no_argument, nullptr, 'h'},
        {"file", required_argument, nullptr, 'f'},
        {"path", required_argument, nullptr, 'p'},
        {"model", required_argument, nullptr, 'm'},
        {nullptr, no_argument, nullptr, 0}
    };
    while (true) {
        const auto c = getopt_long(argc, argv, "hf:p:m:", long_options.data(), nullptr);
        if (c == -1)
            break;
        switch (c) {
            case 'h':
                usage(argv[0]);
                exit(0);
            case 'f':
                file_name = string(optarg);
                break;
            case 'm':
                model_name = string(optarg);
                break;
            case 'p':
                path = optarg;
                if (path.back() != '/')
                    path += '/';
                break;
            case '?':
                usage(argv[0]);
                exit(1);
            default:
                abort();
        }
    }
    if (file_name.empty()) {
        usage(argv[0]);
        exit(1);
    }
    return make_tuple(file_name, path, model_name);
}

inline constexpr auto hash_conv(const std::string_view sv)
{
    unsigned long hash{ 5381 };
    for (unsigned char c : sv) {
        hash = ((hash << 5) + hash) ^ c;
    }
    return hash;
}

inline constexpr auto operator"" _sh(const char* str, size_t len)
{
    return hash_conv(std::string_view{ str, len });
}


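These two helpers are what let main (further down) switch on the model name: the case labels and the runtime argument are reduced to the same djb2-style hash. A minimal illustration of the pattern, relying only on the two definitions above (the function name dispatch_example is made up for this sketch):

// Illustrative sketch only; not part of this commit.
// Distinct strings can in principle hash to the same value, so the pattern
// relies on the fixed set of model names not colliding.
void dispatch_example(const std::string& model_name)
{
    switch (hash_conv(model_name)) {       // runtime hash of the argument
        case "AODE"_sh:                    // compile-time hash as a case label
            std::cout << "would build an AODE ensemble" << std::endl;
            break;
        case "TAN"_sh:
            std::cout << "would build a TAN classifier" << std::endl;
            break;
        default:
            std::cout << "unknown model" << std::endl;
            break;
    }
}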
tuple<string, string, string> get_options(int argc, char** argv)
{
    map<string, bool> datasets = {
        {"diabetes", true},
        {"ecoli", true},
        {"glass", true},
        {"iris", true},
        {"kdd_JapaneseVowels", false},
        {"letter", true},
        {"liver-disorders", true},
        {"mfeat-factors", true},
    };
    vector <string> models = { "AODE", "KDB", "SPODE", "TAN" };
    string file_name;
    string path;
    string model_name;
    tie(file_name, path, model_name) = parse_arguments(argc, argv);
    if (datasets.find(file_name) == datasets.end()) {
        cout << "Invalid file name: " << file_name << endl;
        usage(argv[0]);
        exit(1);
    }
    if (!file_exists(path + file_name + ".arff")) {
        cout << "Data File " << path + file_name + ".arff" << " does not exist" << endl;
        usage(argv[0]);
        exit(1);
    }
    if (find(models.begin(), models.end(), model_name) == models.end()) {
        cout << "Invalid model name: " << model_name << endl;
        usage(argv[0]);
        exit(1);
    }
    return { file_name, path, model_name };
}

int main(int argc, char** argv)
{
    string file_name, path, model_name;
    tie(file_name, path, model_name) = get_options(argc, argv);
    auto handler = ArffFiles();
    handler.load(path + file_name + ".arff");
    // Get Dataset X, y
    vector<mdlp::samples_t>& X = handler.getX();
    mdlp::labels_t& y = handler.getY();
    // Get className & Features
    auto className = handler.getClassName();
    vector<string> features;
    for (auto feature : handler.getAttributes()) {
        features.push_back(feature.first);
    }
    // Discretize Dataset
    vector<mdlp::labels_t> Xd;
    map<string, int> maxes;
    tie(Xd, maxes) = discretize(X, y, features);
    maxes[className] = *max_element(y.begin(), y.end()) + 1;
    map<string, vector<int>> states;
    for (auto feature : features) {
        states[feature] = vector<int>(maxes[feature]);
    }
    states[className] = vector<int>(maxes[className]);
    double score;
    vector<string> lines;
    vector<string> graph;
    auto kdb = bayesnet::KDB(2);
    auto aode = bayesnet::AODE();
    auto spode = bayesnet::SPODE(2);
    auto tan = bayesnet::TAN();
    switch (hash_conv(model_name)) {
        case "AODE"_sh:
            aode.fit(Xd, y, features, className, states);
            lines = aode.show();
            score = aode.score(Xd, y);
            graph = aode.graph();
            break;
        case "KDB"_sh:
            kdb.fit(Xd, y, features, className, states);
            lines = kdb.show();
            score = kdb.score(Xd, y);
            graph = kdb.graph();
            break;
        case "SPODE"_sh:
            spode.fit(Xd, y, features, className, states);
            lines = spode.show();
            score = spode.score(Xd, y);
            graph = spode.graph();
            break;
        case "TAN"_sh:
            tan.fit(Xd, y, features, className, states);
            lines = tan.show();
            score = tan.score(Xd, y);
            graph = tan.graph();
            break;
    }
    for (auto line : lines) {
        cout << line << endl;
    }
    cout << "Score: " << score << endl;
    auto dot_file = model_name + "_" + file_name;
    ofstream file(dot_file + ".dot");
    file << graph;
    file.close();
    cout << "Graph saved in " << model_name << "_" << file_name << ".dot" << endl;
    cout << "dot -Tpng -o " + dot_file + ".png " + dot_file + ".dot " << endl;
    return 0;
}
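One detail worth noting: graph is a vector<string>, and a plain std::ofstream has no operator<< for that type, so `file << graph;` presumably depends on an overload defined elsewhere in the project (not visible in this diff). If no such overload exists, the conventional alternative is to write the DOT lines one by one, as in this sketch:

// Illustrative sketch only; not part of this commit.
ofstream file(dot_file + ".dot");
for (const auto& line : graph) {   // graph holds the DOT description line by line
    file << line << endl;
}
file.close();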
src/Platform/Experiment.h (Normal file, 0 lines)
@@ -1,40 +1,51 @@
#include "utils.h"
#include "platformUtils.h"

pair<vector<mdlp::labels_t>, map<string, int>> discretize(vector<mdlp::samples_t> &X, mdlp::labels_t &y, vector<string> features) {
pair<vector<mdlp::labels_t>, map<string, int>> discretize(vector<mdlp::samples_t>& X, mdlp::labels_t& y, vector<string> features)
{
    vector<mdlp::labels_t> Xd;
    map<string, int> maxes;
    auto fimdlp = mdlp::CPPFImdlp();
    for (int i = 0; i < X.size(); i++) {
        fimdlp.fit(X[i], y);
        mdlp::labels_t &xd = fimdlp.transform(X[i]);
        mdlp::labels_t& xd = fimdlp.transform(X[i]);
        maxes[features[i]] = *max_element(xd.begin(), xd.end()) + 1;
        Xd.push_back(xd);
    }
    return {Xd, maxes};
    return { Xd, maxes };
}

tuple<vector<vector<int>>, vector<int>, vector<string>, string, map<string, vector<int>>>
loadFile(string name) {
bool file_exists(const std::string& name)
{
    if (FILE* file = fopen(name.c_str(), "r")) {
        fclose(file);
        return true;
    } else {
        return false;
    }
}

tuple<vector<vector<int>>, vector<int>, vector<string>, string, map<string, vector<int>>> loadFile(string name)
{
    auto handler = ArffFiles();
    handler.load(PATH + static_cast<string>(name) + ".arff");
    // Get Dataset X, y
    vector<mdlp::samples_t> &X = handler.getX();
    mdlp::labels_t &y = handler.getY();
    vector<mdlp::samples_t>& X = handler.getX();
    mdlp::labels_t& y = handler.getY();
    // Get className & Features
    auto className = handler.getClassName();
    vector<string> features;
    for (auto feature: handler.getAttributes()) {
    for (auto feature : handler.getAttributes()) {
        features.push_back(feature.first);
    }
    // Discretize Dataset
    vector<mdlp::labels_t> Xd;
    map<string, int> maxes;
    tie(Xd, maxes) = discretize(X, y, features);
    maxes[className] = *max_element(y.begin(), y. end()) + 1;
    maxes[className] = *max_element(y.begin(), y.end()) + 1;
    map<string, vector<int>> states;
    for (auto feature: features) {
    for (auto feature : features) {
        states[feature] = vector<int>(maxes[feature]);
    }
    states[className] = vector<int>(maxes[className]);
    return {Xd, y, features, className, states};
    return { Xd, y, features, className, states };
}
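For callers, the five-element tuple returned by loadFile can be unpacked with tie or, since the project already uses C++17 features such as std::string_view, with structured bindings. A small usage sketch ("iris" is only an example dataset name):

// Illustrative sketch only; not part of this commit.
auto [Xd, y, features, className, states] = loadFile("iris");
cout << "Loaded " << features.size() << " features and "
     << y.size() << " instances for class " << className << endl;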
@@ -1,13 +1,15 @@
#ifndef PLATFORM_UTILS_H
#define PLATFORM_UTILS_H
#include <string>
#include <vector>
#include <map>
#include <tuple>
#include "../sample/ArffFiles.h"
#include "../sample/CPPFImdlp.h"
#ifndef BAYESNET_UTILS_H
#define BAYESNET_UTILS_H
#include "ArffFiles.h"
#include "CPPFImdlp.h"
using namespace std;
const string PATH = "../../data/";
pair<vector<mdlp::labels_t>, map<string, int>> discretize(vector<mdlp::samples_t> &X, mdlp::labels_t &y, vector<string> features);

bool file_exists(const std::string& name);
pair<vector<mdlp::labels_t>, map<string, int>> discretize(vector<mdlp::samples_t>& X, mdlp::labels_t& y, vector<string> features);
tuple<vector<vector<int>>, vector<int>, vector<string>, string, map<string, vector<int>>> loadFile(string name);
#endif //BAYESNET_UTILS_H
#endif //PLATFORM_UTILS_H
@@ -5,11 +5,11 @@
#include <vector>
#include <map>
#include <string>
#include "../src/KDB.h"
#include "../src/TAN.h"
#include "../src/SPODE.h"
#include "../src/AODE.h"
#include "utils.h"
#include "KDB.h"
#include "TAN.h"
#include "SPODE.h"
#include "AODE.h"
#include "platformUtils.h"

TEST_CASE("Test Bayesian Classifiers score", "[BayesNet]")
{
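The hunk above stops at the opening brace of the test case; its body is not part of this diff. Purely for orientation, a hedged sketch of how a Catch2 score check in this style might look (dataset name and expected score are placeholders, not values from the project):

// Illustrative sketch only; the real TEST_CASE body is not shown in this diff.
TEST_CASE("Sketch: score a TAN classifier", "[Sketch]")
{
    auto [Xd, y, features, className, states] = loadFile("iris");      // placeholder dataset
    auto clf = bayesnet::TAN();
    clf.fit(Xd, y, features, className, states);
    REQUIRE(clf.score(Xd, y) == Catch::Approx(0.95).margin(0.05));     // placeholder expectation
}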
@@ -2,8 +2,8 @@
#include <catch2/catch_approx.hpp>
#include <catch2/generators/catch_generators.hpp>
#include <string>
#include "../src/KDB.h"
#include "utils.h"
#include "KDB.h"
#include "platformUtils.h"

TEST_CASE("Test Bayesian Network")
{
@@ -1,8 +1,8 @@
if(ENABLE_TESTING)
    set(TEST_MAIN "unit_tests")
    set(TEST_SOURCES BayesModels.cc BayesNetwork.cc ../sample/ArffFiles.cc ../sample/CPPFImdlp.cpp ../sample/Metrics.cpp
        ../src/utils.cc ../src/Network.cc ../src/Node.cc ../src/Metrics.cc ../src/BaseClassifier.cc ../src/KDB.cc
        ../src/TAN.cc ../src/SPODE.cc ../src/Ensemble.cc ../src/AODE.cc ../src/Mst.cc utils.cc utils.h)
    include_directories(${BayesNet_SOURCE_DIR}/src/BayesNet)
    include_directories(${BayesNet_SOURCE_DIR}/src/Platform)
    set(TEST_SOURCES BayesModels.cc BayesNetwork.cc ${BayesNet_SOURCES} ${Platform_SOURCES})
    add_executable(${TEST_MAIN} ${TEST_SOURCES})
    target_link_libraries(${TEST_MAIN} PUBLIC "${TORCH_LIBRARIES}" Catch2::Catch2WithMain)
    add_test(NAME ${TEST_MAIN} COMMAND ${TEST_MAIN})