@@ -22,7 +22,8 @@
 const std::string ACTUAL_VERSION = "1.0.6";
 
-TEST_CASE("Test Bayesian Classifiers score & version", "[Models]") {
+TEST_CASE("Test Bayesian Classifiers score & version", "[Models]")
+{
     map<pair<std::string, std::string>, float> scores{// Diabetes
         {{"diabetes", "AODE"}, 0.82161},
         {{"diabetes", "KDB"}, 0.852865},
@@ -66,8 +67,8 @@ TEST_CASE("Test Bayesian Classifiers score & version", "[Models]") {
         {{"iris", "KDBLd"}, 0.973333},
         {{"iris", "SPODELd"}, 0.96f},
         {{"iris", "TANLd"}, 0.97333f},
-        {{"iris", "BoostAODE"}, 0.98f}};
-    std::map<std::string, bayesnet::BaseClassifier *> models{{"AODE", new bayesnet::AODE()},
+        {{"iris", "BoostAODE"}, 0.98f} };
+    std::map<std::string, bayesnet::BaseClassifier*> models{ {"AODE", new bayesnet::AODE()},
         {"AODELd", new bayesnet::AODELd()},
         {"BoostAODE", new bayesnet::BoostAODE()},
         {"KDB", new bayesnet::KDB(2)},
@@ -76,12 +77,13 @@ TEST_CASE("Test Bayesian Classifiers score & version", "[Models]") {
         {"SPODE", new bayesnet::SPODE(1)},
         {"SPODELd", new bayesnet::SPODELd(1)},
         {"TAN", new bayesnet::TAN()},
-        {"TANLd", new bayesnet::TANLd()}};
+        {"TANLd", new bayesnet::TANLd()} };
     std::string name = GENERATE("AODE", "AODELd", "KDB", "KDBLd", "SPODE", "XSPODE", "SPODELd", "TAN", "TANLd");
     auto clf = models[name];
 
-    SECTION("Test " + name + " classifier") {
-        for (const std::string &file_name : {"glass", "iris", "ecoli", "diabetes"}) {
+    SECTION("Test " + name + " classifier")
+    {
+        for (const std::string& file_name : { "glass", "iris", "ecoli", "diabetes" }) {
             auto clf = models[name];
             auto discretize = name.substr(name.length() - 2) != "Ld";
             auto raw = RawDatasets(file_name, discretize);
@@ -94,21 +96,24 @@ TEST_CASE("Test Bayesian Classifiers score & version", "[Models]") {
             REQUIRE(clf->getStatus() == bayesnet::NORMAL);
         }
     }
-    SECTION("Library check version") {
+    SECTION("Library check version")
+    {
         INFO("Checking version of " << name << " classifier");
         REQUIRE(clf->getVersion() == ACTUAL_VERSION);
     }
     delete clf;
 }
-TEST_CASE("Models features & Graph", "[Models]") {
+TEST_CASE("Models features & Graph", "[Models]")
+{
     auto graph = std::vector<std::string>(
-        {"digraph BayesNet {\nlabel=<BayesNet Test>\nfontsize=30\nfontcolor=blue\nlabelloc=t\nlayout=circo\n",
+        { "digraph BayesNet {\nlabel=<BayesNet Test>\nfontsize=30\nfontcolor=blue\nlabelloc=t\nlayout=circo\n",
         "\"class\" [shape=circle, fontcolor=red, fillcolor=lightblue, style=filled ] \n",
         "\"class\" -> \"sepallength\"", "\"class\" -> \"sepalwidth\"", "\"class\" -> \"petallength\"",
         "\"class\" -> \"petalwidth\"", "\"petallength\" [shape=circle] \n", "\"petallength\" -> \"sepallength\"",
         "\"petalwidth\" [shape=circle] \n", "\"sepallength\" [shape=circle] \n", "\"sepallength\" -> \"sepalwidth\"",
-        "\"sepalwidth\" [shape=circle] \n", "\"sepalwidth\" -> \"petalwidth\"", "}\n"});
-    SECTION("Test TAN") {
+        "\"sepalwidth\" [shape=circle] \n", "\"sepalwidth\" -> \"petalwidth\"", "}\n" });
+    SECTION("Test TAN")
+    {
         auto raw = RawDatasets("iris", true);
         auto clf = bayesnet::TAN();
         clf.fit(raw.Xv, raw.yv, raw.features, raw.className, raw.states, raw.smoothing);
@@ -117,11 +122,12 @@ TEST_CASE("Models features & Graph", "[Models]") {
         REQUIRE(clf.getNumberOfStates() == 19);
         REQUIRE(clf.getClassNumStates() == 3);
         REQUIRE(clf.show() == std::vector<std::string>{"class -> sepallength, sepalwidth, petallength, petalwidth, ",
             "petallength -> sepallength, ", "petalwidth -> ",
             "sepallength -> sepalwidth, ", "sepalwidth -> petalwidth, "});
         REQUIRE(clf.graph("Test") == graph);
     }
-    SECTION("Test TANLd") {
+    SECTION("Test TANLd")
+    {
         auto clf = bayesnet::TANLd();
         auto raw = RawDatasets("iris", false);
         clf.fit(raw.Xt, raw.yt, raw.features, raw.className, raw.states, raw.smoothing);
@@ -130,12 +136,13 @@ TEST_CASE("Models features & Graph", "[Models]") {
         REQUIRE(clf.getNumberOfStates() == 27);
         REQUIRE(clf.getClassNumStates() == 3);
         REQUIRE(clf.show() == std::vector<std::string>{"class -> sepallength, sepalwidth, petallength, petalwidth, ",
             "petallength -> sepallength, ", "petalwidth -> ",
             "sepallength -> sepalwidth, ", "sepalwidth -> petalwidth, "});
         REQUIRE(clf.graph("Test") == graph);
     }
 }
-TEST_CASE("Get num features & num edges", "[Models]") {
+TEST_CASE("Get num features & num edges", "[Models]")
+{
     auto raw = RawDatasets("iris", true);
     auto clf = bayesnet::KDB(2);
     clf.fit(raw.Xv, raw.yv, raw.features, raw.className, raw.states, raw.smoothing);
@@ -143,9 +150,10 @@ TEST_CASE("Get num features & num edges", "[Models]") {
     REQUIRE(clf.getNumberOfEdges() == 8);
 }
 
-TEST_CASE("Model predict_proba", "[Models]") {
+TEST_CASE("Model predict_proba", "[Models]")
+{
     std::string model = GENERATE("TAN", "SPODE", "BoostAODEproba", "BoostAODEvoting");
-    auto res_prob_tan = std::vector<std::vector<double>>({{0.00375671, 0.994457, 0.00178621},
+    auto res_prob_tan = std::vector<std::vector<double>>({ {0.00375671, 0.994457, 0.00178621},
         {0.00137462, 0.992734, 0.00589123},
         {0.00137462, 0.992734, 0.00589123},
         {0.00137462, 0.992734, 0.00589123},
@@ -153,8 +161,8 @@ TEST_CASE("Model predict_proba", "[Models]") {
         {0.00494209, 0.0978534, 0.897205},
         {0.0054192, 0.974275, 0.0203054},
         {0.00433012, 0.985054, 0.0106159},
-        {0.000860806, 0.996922, 0.00221698}});
-    auto res_prob_spode = std::vector<std::vector<double>>({{0.00419032, 0.994247, 0.00156265},
+        {0.000860806, 0.996922, 0.00221698} });
+    auto res_prob_spode = std::vector<std::vector<double>>({ {0.00419032, 0.994247, 0.00156265},
         {0.00172808, 0.993433, 0.00483862},
         {0.00172808, 0.993433, 0.00483862},
         {0.00172808, 0.993433, 0.00483862},
@@ -162,8 +170,8 @@ TEST_CASE("Model predict_proba", "[Models]") {
         {0.0120674, 0.357909, 0.630024},
         {0.00386239, 0.913919, 0.0822185},
         {0.0244389, 0.966447, 0.00911374},
-        {0.003135, 0.991799, 0.0050661}});
-    auto res_prob_baode = std::vector<std::vector<double>>({{0.0112349, 0.962274, 0.0264907},
+        {0.003135, 0.991799, 0.0050661} });
+    auto res_prob_baode = std::vector<std::vector<double>>({ {0.0112349, 0.962274, 0.0264907},
         {0.00371025, 0.950592, 0.0456973},
         {0.00371025, 0.950592, 0.0456973},
         {0.00371025, 0.950592, 0.0456973},
@@ -171,21 +179,22 @@ TEST_CASE("Model predict_proba", "[Models]") {
         {0.0252205, 0.113564, 0.861215},
         {0.0284828, 0.770524, 0.200993},
         {0.0213182, 0.857189, 0.121493},
-        {0.00868436, 0.949494, 0.0418215}});
+        {0.00868436, 0.949494, 0.0418215} });
     auto res_prob_voting = std::vector<std::vector<double>>(
-        {{0, 1, 0}, {0, 1, 0}, {0, 1, 0}, {0, 1, 0}, {0, 1, 0}, {0, 0, 1}, {0, 1, 0}, {0, 1, 0}, {0, 1, 0}});
-    std::map<std::string, std::vector<std::vector<double>>> res_prob{{"TAN", res_prob_tan},
+        { {0, 1, 0}, {0, 1, 0}, {0, 1, 0}, {0, 1, 0}, {0, 1, 0}, {0, 0, 1}, {0, 1, 0}, {0, 1, 0}, {0, 1, 0} });
+    std::map<std::string, std::vector<std::vector<double>>> res_prob{ {"TAN", res_prob_tan},
         {"SPODE", res_prob_spode},
         {"BoostAODEproba", res_prob_baode},
-        {"BoostAODEvoting", res_prob_voting}};
-    std::map<std::string, bayesnet::BaseClassifier *> models{{"TAN", new bayesnet::TAN()},
+        {"BoostAODEvoting", res_prob_voting} };
+    std::map<std::string, bayesnet::BaseClassifier*> models{ {"TAN", new bayesnet::TAN()},
         {"SPODE", new bayesnet::SPODE(0)},
         {"BoostAODEproba", new bayesnet::BoostAODE(false)},
-        {"BoostAODEvoting", new bayesnet::BoostAODE(true)}};
+        {"BoostAODEvoting", new bayesnet::BoostAODE(true)} };
     int init_index = 78;
     auto raw = RawDatasets("iris", true);
 
-    SECTION("Test " + model + " predict_proba") {
+    SECTION("Test " + model + " predict_proba")
+    {
         auto clf = models[model];
         clf->fit(raw.Xv, raw.yv, raw.features, raw.className, raw.states, raw.smoothing);
         auto y_pred_proba = clf->predict_proba(raw.Xv);
@@ -214,14 +223,15 @@ TEST_CASE("Model predict_proba", "[Models]") {
             for (int j = 0; j < 3; j++) {
                 REQUIRE(res_prob[model][i][j] == Catch::Approx(y_pred_proba[i + init_index][j]).epsilon(raw.epsilon));
                 REQUIRE(res_prob[model][i][j] ==
                     Catch::Approx(yt_pred_proba[i + init_index][j].item<double>()).epsilon(raw.epsilon));
             }
         }
         delete clf;
     }
 }
 
-TEST_CASE("AODE voting-proba", "[Models]") {
+TEST_CASE("AODE voting-proba", "[Models]")
+{
     auto raw = RawDatasets("glass", true);
     auto clf = bayesnet::AODE(false);
     clf.fit(raw.Xv, raw.yv, raw.features, raw.className, raw.states, raw.smoothing);
@@ -229,7 +239,7 @@ TEST_CASE("AODE voting-proba", "[Models]") {
     auto pred_proba = clf.predict_proba(raw.Xv);
     clf.setHyperparameters({
         {"predict_voting", true},
     });
     auto score_voting = clf.score(raw.Xv, raw.yv);
     auto pred_voting = clf.predict_proba(raw.Xv);
     REQUIRE(score_proba == Catch::Approx(0.79439f).epsilon(raw.epsilon));
@@ -238,7 +248,8 @@ TEST_CASE("AODE voting-proba", "[Models]") {
     REQUIRE(pred_proba[67][0] == Catch::Approx(0.702184).epsilon(raw.epsilon));
     REQUIRE(clf.topological_order() == std::vector<std::string>());
 }
-TEST_CASE("SPODELd dataset", "[Models]") {
+TEST_CASE("SPODELd dataset", "[Models]")
+{
     auto raw = RawDatasets("iris", false);
     auto clf = bayesnet::SPODELd(0);
     // raw.dataset.to(torch::kFloat32);
@@ -249,7 +260,8 @@ TEST_CASE("SPODELd dataset", "[Models]") {
     REQUIRE(score == Catch::Approx(0.97333f).epsilon(raw.epsilon));
     REQUIRE(scoret == Catch::Approx(0.97333f).epsilon(raw.epsilon));
 }
-TEST_CASE("KDB with hyperparameters", "[Models]") {
+TEST_CASE("KDB with hyperparameters", "[Models]")
+{
     auto raw = RawDatasets("glass", true);
     auto clf = bayesnet::KDB(2);
     clf.fit(raw.Xv, raw.yv, raw.features, raw.className, raw.states, raw.smoothing);
@@ -257,18 +269,20 @@ TEST_CASE("KDB with hyperparameters", "[Models]") {
     clf.setHyperparameters({
         {"k", 3},
         {"theta", 0.7},
     });
     clf.fit(raw.Xv, raw.yv, raw.features, raw.className, raw.states, raw.smoothing);
     auto scoret = clf.score(raw.Xv, raw.yv);
     REQUIRE(score == Catch::Approx(0.827103).epsilon(raw.epsilon));
     REQUIRE(scoret == Catch::Approx(0.761682).epsilon(raw.epsilon));
 }
-TEST_CASE("Incorrect type of data for SPODELd", "[Models]") {
+TEST_CASE("Incorrect type of data for SPODELd", "[Models]")
+{
     auto raw = RawDatasets("iris", true);
     auto clf = bayesnet::SPODELd(0);
     REQUIRE_THROWS_AS(clf.fit(raw.dataset, raw.features, raw.className, raw.states, raw.smoothing), std::runtime_error);
 }
-TEST_CASE("Predict, predict_proba & score without fitting", "[Models]") {
+TEST_CASE("Predict, predict_proba & score without fitting", "[Models]")
+{
     auto clf = bayesnet::AODE();
     auto raw = RawDatasets("iris", true);
     std::string message = "Ensemble has not been fitted";
@@ -285,55 +299,83 @@ TEST_CASE("Predict, predict_proba & score without fitting", "[Models]") {
     REQUIRE_THROWS_WITH(clf.score(raw.Xv, raw.yv), message);
     REQUIRE_THROWS_WITH(clf.score(raw.Xt, raw.yt), message);
 }
-TEST_CASE("TAN & SPODE with hyperparameters", "[Models]") {
+TEST_CASE("TAN & SPODE with hyperparameters", "[Models]")
+{
     auto raw = RawDatasets("iris", true);
     auto clf = bayesnet::TAN();
     clf.setHyperparameters({
         {"parent", 1},
     });
     clf.fit(raw.Xv, raw.yv, raw.features, raw.className, raw.states, raw.smoothing);
     auto score = clf.score(raw.Xv, raw.yv);
     REQUIRE(score == Catch::Approx(0.973333).epsilon(raw.epsilon));
     auto clf2 = bayesnet::SPODE(0);
     clf2.setHyperparameters({
         {"parent", 1},
     });
     clf2.fit(raw.Xv, raw.yv, raw.features, raw.className, raw.states, raw.smoothing);
     auto score2 = clf2.score(raw.Xv, raw.yv);
     REQUIRE(score2 == Catch::Approx(0.973333).epsilon(raw.epsilon));
 }
-TEST_CASE("TAN & SPODE with invalid hyperparameters", "[Models]") {
+TEST_CASE("TAN & SPODE with invalid hyperparameters", "[Models]")
+{
     auto raw = RawDatasets("iris", true);
     auto clf = bayesnet::TAN();
     clf.setHyperparameters({
         {"parent", 5},
     });
     REQUIRE_THROWS_AS(clf.fit(raw.Xv, raw.yv, raw.features, raw.className, raw.states, raw.smoothing),
         std::invalid_argument);
     auto clf2 = bayesnet::SPODE(0);
     clf2.setHyperparameters({
         {"parent", 5},
     });
     REQUIRE_THROWS_AS(clf2.fit(raw.Xv, raw.yv, raw.features, raw.className, raw.states, raw.smoothing),
         std::invalid_argument);
 }
-TEST_CASE("Check proposal checkInput", "[Models]") {
+TEST_CASE("Check proposal checkInput", "[Models]")
+{
     class testProposal : public bayesnet::Proposal {
     public:
-        testProposal(torch::Tensor &dataset_, std::vector<std::string> &features_, std::string &className_)
-            : Proposal(dataset_, features_, className_) {}
-        void test_X_y(const torch::Tensor &X, const torch::Tensor &y) { checkInput(X, y); }
+        testProposal(torch::Tensor& dataset_, std::vector<std::string>& features_, std::string& className_)
+            : Proposal(dataset_, features_, className_)
+        {
+        }
+        void test_X_y(const torch::Tensor& X, const torch::Tensor& y) { checkInput(X, y); }
     };
     auto raw = RawDatasets("iris", true);
     auto clf = testProposal(raw.dataset, raw.features, raw.className);
-    torch::Tensor X = torch::randint(0, 3, {10, 4});
-    torch::Tensor y = torch::rand({10});
+    torch::Tensor X = torch::randint(0, 3, { 10, 4 });
+    torch::Tensor y = torch::rand({ 10 });
     INFO("Check X is not float");
     REQUIRE_THROWS_AS(clf.test_X_y(X, y), std::invalid_argument);
-    X = torch::rand({10, 4});
+    X = torch::rand({ 10, 4 });
     INFO("Check y is not integer");
     REQUIRE_THROWS_AS(clf.test_X_y(X, y), std::invalid_argument);
-    y = torch::randint(0, 3, {10});
+    y = torch::randint(0, 3, { 10 });
     INFO("X and y are correct");
     REQUIRE_NOTHROW(clf.test_X_y(X, y));
 }
+TEST_CASE("Check KDB loop detection", "[Models]")
+{
+    class testKDB : public bayesnet::KDB {
+    public:
+        testKDB() : KDB(2, 0) {}
+        void test_add_m_edges(std::vector<std::string> features_, int idx, std::vector<int>& S, torch::Tensor& weights)
+        {
+            features = features_;
+            add_m_edges(idx, S, weights);
+        }
+    };
+    auto clf = testKDB();
+    auto features = std::vector<std::string>{ "A", "B", "C" };
+    int idx = 0;
+    std::vector<int> S = { 0 };
+    torch::Tensor weights = torch::tensor({
+        { 1.0, 10.0, 0.0 }, // row0 -> picks col1
+        { 0.0, 1.0, 10.0 }, // row1 -> picks col2
+        { 10.0, 0.0, 1.0 }, // row2 -> picks col0
+    });
+    REQUIRE_NOTHROW(clf.test_add_m_edges(features, 0, S, weights));
+    REQUIRE_NOTHROW(clf.test_add_m_edges(features, 1, S, weights));
+}