Update tests to 99.1% coverage

2024-11-23 18:14:15 +01:00
parent 3728bcb7d3
commit 5d01eccf1b
11 changed files with 5108 additions and 206 deletions

2
.vscode/launch.json vendored

@@ -16,7 +16,7 @@
"name": "test", "name": "test",
"program": "${workspaceFolder}/build_Debug/tests/TestBayesNet", "program": "${workspaceFolder}/build_Debug/tests/TestBayesNet",
"args": [ "args": [
"Test Node computeCPT" "No features selected"
], ],
"cwd": "${workspaceFolder}/build_Debug/tests" "cwd": "${workspaceFolder}/build_Debug/tests"
}, },


@@ -7,9 +7,9 @@
[![Security Rating](https://sonarcloud.io/api/project_badges/measure?project=rmontanana_BayesNet&metric=security_rating)](https://sonarcloud.io/summary/new_code?id=rmontanana_BayesNet)
[![Reliability Rating](https://sonarcloud.io/api/project_badges/measure?project=rmontanana_BayesNet&metric=reliability_rating)](https://sonarcloud.io/summary/new_code?id=rmontanana_BayesNet)
![Gitea Last Commit](https://img.shields.io/gitea/last-commit/rmontanana/bayesnet?gitea_url=https://gitea.rmontanana.es:3000&logo=gitea)
-[![Coverage Badge](https://img.shields.io/badge/Coverage-97,0%25-green)](html/index.html)
-Bayesian Network Classifiers using libtorch from scratch
+[![Coverage Badge](https://img.shields.io/badge/Coverage-99,1%25-green)](html/index.html)
+Bayesian Network Classifiers library
## Dependencies
@@ -71,6 +71,8 @@ make sample fname=tests/data/glass.arff
#### - AODE
+#### - A2DE
#### - [BoostAODE](docs/BoostAODE.md)
#### - BoostA2DE


@@ -59,6 +59,9 @@ namespace bayesnet {
std::vector<int> featuresUsed;
if (selectFeatures) {
featuresUsed = initializeModels(smoothing);
+if (featuresUsed.size() == 0) {
+return;
+}
auto ypred = predict(X_train);
std::tie(weights_, alpha_t, finished) = update_weights(y_train, ypred, weights_);
// Update significance of the models


@@ -209,7 +209,7 @@ namespace bayesnet {
pthread_setname_np(threadName.c_str());
#endif
double numStates = static_cast<double>(node.second->getNumStates());
-double smoothing_factor = 0.0;
+double smoothing_factor;
switch (smoothing) {
case Smoothing_t::ORIGINAL:
smoothing_factor = 1.0 / n_samples;
@@ -221,7 +221,7 @@
smoothing_factor = 1 / numStates;
break;
default:
-throw std::invalid_argument("Smoothing method not recognized " + std::to_string(static_cast<int>(smoothing)));
+smoothing_factor = 0.0; // No smoothing
}
node.second->computeCPT(samples, features, smoothing_factor, weights);
semaphore.release();
@@ -234,16 +234,6 @@ namespace bayesnet {
for (auto& thread : threads) {
thread.join();
}
-// std::fstream file;
-// file.open("cpt.txt", std::fstream::out | std::fstream::app);
-// file << std::string(80, '*') << std::endl;
-// for (const auto& item : graph("Test")) {
-// file << item << std::endl;
-// }
-// file << std::string(80, '-') << std::endl;
-// file << dump_cpt() << std::endl;
-// file << std::string(80, '=') << std::endl;
-// file.close();
fitted = true;
}
torch::Tensor Network::predict_tensor(const torch::Tensor& samples, const bool proba)
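Editor's note on the change above: the smoothing_factor selected by the switch (for example 1.0 / n_samples for Smoothing_t::ORIGINAL, and now 0.0 in the default case, i.e. no smoothing) is handed to each node's computeCPT. A minimal, hypothetical sketch of how an additive smoothing factor of this kind is commonly applied to a single CPT entry; this is not the library's computeCPT, whose body is not part of this diff:

// Hypothetical helper (not BayesNet code): additive smoothing of one CPT entry.
// alpha is the smoothing factor selected above; with alpha == 0 (no smoothing)
// this reduces to the maximum-likelihood estimate count_xp / count_p.
double smoothedProbability(double count_xp, double count_p, double alpha, double numStates)
{
    return (count_xp + alpha) / (count_p + alpha * numStates);
}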


@@ -53,14 +53,14 @@ namespace bayesnet {
}
}
-void insertElement(std::list<int>& variables, int variable)
+void MST::insertElement(std::list<int>& variables, int variable)
{
if (std::find(variables.begin(), variables.end(), variable) == variables.end()) {
variables.push_front(variable);
}
}
-std::vector<std::pair<int, int>> reorder(std::vector<std::pair<float, std::pair<int, int>>> T, int root_original)
+std::vector<std::pair<int, int>> MST::reorder(std::vector<std::pair<float, std::pair<int, int>>> T, int root_original)
{
// Create the edges of a DAG from the MST
// replacing unordered_set with list because unordered_set cannot guarantee the order of the elements inserted


@@ -14,6 +14,8 @@ namespace bayesnet {
public:
MST() = default;
MST(const std::vector<std::string>& features, const torch::Tensor& weights, const int root);
+void insertElement(std::list<int>& variables, int variable);
+std::vector<std::pair<int, int>> reorder(std::vector<std::pair<float, std::pair<int, int>>> T, int root_original);
std::vector<std::pair<int, int>> maximumSpanningTree();
private:
torch::Tensor weights;


@@ -10,7 +10,7 @@ if(ENABLE_TESTING)
file(GLOB_RECURSE BayesNet_SOURCES "${BayesNet_SOURCE_DIR}/bayesnet/*.cc")
add_executable(TestBayesNet TestBayesNetwork.cc TestBayesNode.cc TestBayesClassifier.cc
TestBayesModels.cc TestBayesMetrics.cc TestFeatureSelection.cc TestBoostAODE.cc TestA2DE.cc
-TestUtils.cc TestBayesEnsemble.cc TestModulesVersions.cc TestBoostA2DE.cc ${BayesNet_SOURCES})
+TestUtils.cc TestBayesEnsemble.cc TestModulesVersions.cc TestBoostA2DE.cc TestMST.cc ${BayesNet_SOURCES})
target_link_libraries(TestBayesNet PUBLIC "${TORCH_LIBRARIES}" fimdlp PRIVATE Catch2::Catch2WithMain)
add_test(NAME BayesNetworkTest COMMAND TestBayesNet)
add_test(NAME A2DE COMMAND TestBayesNet "[A2DE]")
@@ -24,4 +24,5 @@ if(ENABLE_TESTING)
add_test(NAME Modules COMMAND TestBayesNet "[Modules]")
add_test(NAME Network COMMAND TestBayesNet "[Network]")
add_test(NAME Node COMMAND TestBayesNet "[Node]")
+add_test(NAME MST COMMAND TestBayesNet "[MST]")
endif(ENABLE_TESTING)
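Usage note: with TestMST.cc added to the TestBayesNet target and the MST test registered, the new [MST] cases can be run either through CTest or by passing the Catch2 tag filter directly to the runner (paths assume the build_Debug tree referenced in launch.json above):

cd build_Debug && ctest -R MST
cd build_Debug/tests && ./TestBayesNet "[MST]"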


@@ -257,9 +257,9 @@ TEST_CASE("Test Bayesian Network", "[Network]")
REQUIRE(node->getCPT().equal(node2->getCPT()));
}
}
-SECTION("Test oddities")
+SECTION("Network oddities")
{
-INFO("Test oddities");
+INFO("Network oddities");
buildModel(net, raw.features, raw.className);
// predict without fitting
std::vector<std::vector<int>> test = { {1, 2, 0, 1, 1}, {0, 1, 2, 0, 1}, {0, 0, 0, 0, 1}, {2, 2, 2, 2, 1} };
@@ -329,6 +329,14 @@
std::string invalid_state = "Feature sepallength not found in states";
REQUIRE_THROWS_AS(net4.fit(raw.Xv, raw.yv, raw.weightsv, raw.features, raw.className, std::map<std::string, std::vector<int>>(), raw.smoothing), std::invalid_argument);
REQUIRE_THROWS_WITH(net4.fit(raw.Xv, raw.yv, raw.weightsv, raw.features, raw.className, std::map<std::string, std::vector<int>>(), raw.smoothing), invalid_state);
+// Try to add node or edge to a fitted network
+auto net5 = bayesnet::Network();
+buildModel(net5, raw.features, raw.className);
+net5.fit(raw.Xv, raw.yv, raw.weightsv, raw.features, raw.className, raw.states, raw.smoothing);
+REQUIRE_THROWS_AS(net5.addNode("A"), std::logic_error);
+REQUIRE_THROWS_WITH(net5.addNode("A"), "Cannot add node to a fitted network. Initialize first.");
+REQUIRE_THROWS_AS(net5.addEdge("A", "B"), std::logic_error);
+REQUIRE_THROWS_WITH(net5.addEdge("A", "B"), "Cannot add edge to a fitted network. Initialize first.");
}
}
@@ -525,6 +533,7 @@ TEST_CASE("Test Smoothing A", "[Network]")
}
}
}
TEST_CASE("Test Smoothing B", "[Network]")
{
auto net = bayesnet::Network();
@@ -577,4 +586,13 @@ TEST_CASE("Test Smoothing B", "[Network]")
REQUIRE(cestnik_score.at(i).at(j) == Catch::Approx(cestnik_values.at(i).at(j)).margin(threshold));
}
}
+INFO("Test Smoothing B - No smoothing");
+net.fit(Data, C, weights, { "X", "Y", "Z" }, "C", states, bayesnet::Smoothing_t::NONE);
+auto nosmooth_values = std::vector<std::vector<float>>({ {0.342465753, 0.65753424}, {0.0, 1.0} });
+auto nosmooth_score = net.predict_proba({ {0, 1}, {1, 2}, {2, 3} });
+for (auto i = 0; i < 2; ++i) {
+for (auto j = 0; j < 2; ++j) {
+REQUIRE(nosmooth_score.at(i).at(j) == Catch::Approx(nosmooth_values.at(i).at(j)).margin(threshold));
+}
+}
}
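The new assertions above fix both the exception type and the message expected when addNode or addEdge is called on an already fitted network. A minimal sketch of the guard these tests exercise, for illustration only (the actual checks live in the Network class, whose implementation is not shown in this diff):

// Illustrative sketch, not the library code: the behaviour the new
// REQUIRE_THROWS assertions expect from Network::addNode after fit().
void addNode(const std::string& name)
{
    if (fitted) {
        throw std::logic_error("Cannot add node to a fitted network. Initialize first.");
    }
    // ... normal node creation ...
}
// addEdge applies the same guard with the message
// "Cannot add edge to a fitted network. Initialize first."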


@@ -27,189 +27,192 @@ TEST_CASE("Build basic model", "[BoostA2DE]")
auto score = clf.score(raw.Xv, raw.yv);
REQUIRE(score == Catch::Approx(0.919271).epsilon(raw.epsilon));
}
-// TEST_CASE("Feature_select IWSS", "[BoostAODE]")
-// {
-// auto raw = RawDatasets("glass", true);
-// auto clf = bayesnet::BoostAODE();
-// clf.setHyperparameters({ {"select_features", "IWSS"}, {"threshold", 0.5 } });
-// clf.fit(raw.Xv, raw.yv, raw.features, raw.className, raw.states, raw.smoothing);
-// REQUIRE(clf.getNumberOfNodes() == 90);
-// REQUIRE(clf.getNumberOfEdges() == 153);
-// REQUIRE(clf.getNotes().size() == 2);
-// REQUIRE(clf.getNotes()[0] == "Used features in initialization: 4 of 9 with IWSS");
-// REQUIRE(clf.getNotes()[1] == "Number of models: 9");
-// }
-// TEST_CASE("Feature_select FCBF", "[BoostAODE]")
-// {
-// auto raw = RawDatasets("glass", true);
-// auto clf = bayesnet::BoostAODE();
-// clf.setHyperparameters({ {"select_features", "FCBF"}, {"threshold", 1e-7 } });
-// clf.fit(raw.Xv, raw.yv, raw.features, raw.className, raw.states, raw.smoothing);
-// REQUIRE(clf.getNumberOfNodes() == 90);
-// REQUIRE(clf.getNumberOfEdges() == 153);
-// REQUIRE(clf.getNotes().size() == 2);
-// REQUIRE(clf.getNotes()[0] == "Used features in initialization: 4 of 9 with FCBF");
-// REQUIRE(clf.getNotes()[1] == "Number of models: 9");
-// }
-// TEST_CASE("Test used features in train note and score", "[BoostAODE]")
-// {
-// auto raw = RawDatasets("diabetes", true);
-// auto clf = bayesnet::BoostAODE(true);
-// clf.setHyperparameters({
-// {"order", "asc"},
-// {"convergence", true},
-// {"select_features","CFS"},
-// });
-// clf.fit(raw.Xv, raw.yv, raw.features, raw.className, raw.states, raw.smoothing);
-// REQUIRE(clf.getNumberOfNodes() == 72);
-// REQUIRE(clf.getNumberOfEdges() == 120);
-// REQUIRE(clf.getNotes().size() == 2);
-// REQUIRE(clf.getNotes()[0] == "Used features in initialization: 6 of 8 with CFS");
-// REQUIRE(clf.getNotes()[1] == "Number of models: 8");
-// auto score = clf.score(raw.Xv, raw.yv);
-// auto scoret = clf.score(raw.Xt, raw.yt);
-// REQUIRE(score == Catch::Approx(0.809895813).epsilon(raw.epsilon));
-// REQUIRE(scoret == Catch::Approx(0.809895813).epsilon(raw.epsilon));
-// }
-// TEST_CASE("Voting vs proba", "[BoostAODE]")
-// {
-// auto raw = RawDatasets("iris", true);
-// auto clf = bayesnet::BoostAODE(false);
-// clf.fit(raw.Xv, raw.yv, raw.features, raw.className, raw.states, raw.smoothing);
-// auto score_proba = clf.score(raw.Xv, raw.yv);
-// auto pred_proba = clf.predict_proba(raw.Xv);
-// clf.setHyperparameters({
-// {"predict_voting",true},
-// });
-// auto score_voting = clf.score(raw.Xv, raw.yv);
-// auto pred_voting = clf.predict_proba(raw.Xv);
-// REQUIRE(score_proba == Catch::Approx(0.97333).epsilon(raw.epsilon));
-// REQUIRE(score_voting == Catch::Approx(0.98).epsilon(raw.epsilon));
-// REQUIRE(pred_voting[83][2] == Catch::Approx(1.0).epsilon(raw.epsilon));
-// REQUIRE(pred_proba[83][2] == Catch::Approx(0.86121525).epsilon(raw.epsilon));
-// REQUIRE(clf.dump_cpt() == "");
-// REQUIRE(clf.topological_order() == std::vector<std::string>());
-// }
-// TEST_CASE("Order asc, desc & random", "[BoostAODE]")
-// {
-// auto raw = RawDatasets("glass", true);
-// std::map<std::string, double> scores{
-// {"asc", 0.83645f }, { "desc", 0.84579f }, { "rand", 0.84112 }
-// };
-// for (const std::string& order : { "asc", "desc", "rand" }) {
-// auto clf = bayesnet::BoostAODE();
-// clf.setHyperparameters({
-// {"order", order},
-// {"bisection", false},
-// {"maxTolerance", 1},
-// {"convergence", false},
-// });
-// clf.fit(raw.Xv, raw.yv, raw.features, raw.className, raw.states, raw.smoothing);
-// auto score = clf.score(raw.Xv, raw.yv);
-// auto scoret = clf.score(raw.Xt, raw.yt);
-// INFO("BoostAODE order: " + order);
-// REQUIRE(score == Catch::Approx(scores[order]).epsilon(raw.epsilon));
-// REQUIRE(scoret == Catch::Approx(scores[order]).epsilon(raw.epsilon));
-// }
-// }
-// TEST_CASE("Oddities", "[BoostAODE]")
-// {
-// auto clf = bayesnet::BoostAODE();
-// auto raw = RawDatasets("iris", true);
-// auto bad_hyper = nlohmann::json{
-// { { "order", "duck" } },
-// { { "select_features", "duck" } },
-// { { "maxTolerance", 0 } },
-// { { "maxTolerance", 5 } },
-// };
-// for (const auto& hyper : bad_hyper.items()) {
-// INFO("BoostAODE hyper: " + hyper.value().dump());
-// REQUIRE_THROWS_AS(clf.setHyperparameters(hyper.value()), std::invalid_argument);
-// }
-// REQUIRE_THROWS_AS(clf.setHyperparameters({ {"maxTolerance", 0 } }), std::invalid_argument);
-// auto bad_hyper_fit = nlohmann::json{
-// { { "select_features","IWSS" }, { "threshold", -0.01 } },
-// { { "select_features","IWSS" }, { "threshold", 0.51 } },
-// { { "select_features","FCBF" }, { "threshold", 1e-8 } },
-// { { "select_features","FCBF" }, { "threshold", 1.01 } },
-// };
-// for (const auto& hyper : bad_hyper_fit.items()) {
-// INFO("BoostAODE hyper: " + hyper.value().dump());
-// clf.setHyperparameters(hyper.value());
-// REQUIRE_THROWS_AS(clf.fit(raw.Xv, raw.yv, raw.features, raw.className, raw.states, raw.smoothing, std::invalid_argument);
-// }
-// }
-
-// TEST_CASE("Bisection Best", "[BoostAODE]")
-// {
-// auto clf = bayesnet::BoostAODE();
-// auto raw = RawDatasets("kdd_JapaneseVowels", true, 1200, true, false);
-// clf.setHyperparameters({
-// {"bisection", true},
-// {"maxTolerance", 3},
-// {"convergence", true},
-// {"block_update", false},
-// {"convergence_best", false},
-// });
-// clf.fit(raw.X_train, raw.y_train, raw.features, raw.className, raw.states, raw.smoothing);
-// REQUIRE(clf.getNumberOfNodes() == 210);
-// REQUIRE(clf.getNumberOfEdges() == 378);
-// REQUIRE(clf.getNotes().size() == 1);
-// REQUIRE(clf.getNotes().at(0) == "Number of models: 14");
-// auto score = clf.score(raw.X_test, raw.y_test);
-// auto scoret = clf.score(raw.X_test, raw.y_test);
-// REQUIRE(score == Catch::Approx(0.991666675f).epsilon(raw.epsilon));
-// REQUIRE(scoret == Catch::Approx(0.991666675f).epsilon(raw.epsilon));
-// }
-// TEST_CASE("Bisection Best vs Last", "[BoostAODE]")
-// {
-// auto raw = RawDatasets("kdd_JapaneseVowels", true, 1500, true, false);
-// auto clf = bayesnet::BoostAODE(true);
-// auto hyperparameters = nlohmann::json{
-// {"bisection", true},
-// {"maxTolerance", 3},
-// {"convergence", true},
-// {"convergence_best", true},
-// };
-// clf.setHyperparameters(hyperparameters);
-// clf.fit(raw.X_train, raw.y_train, raw.features, raw.className, raw.states, raw.smoothing);
-// auto score_best = clf.score(raw.X_test, raw.y_test);
-// REQUIRE(score_best == Catch::Approx(0.980000019f).epsilon(raw.epsilon));
-// // Now we will set the hyperparameter to use the last accuracy
-// hyperparameters["convergence_best"] = false;
-// clf.setHyperparameters(hyperparameters);
-// clf.fit(raw.X_train, raw.y_train, raw.features, raw.className, raw.states, raw.smoothing);
-// auto score_last = clf.score(raw.X_test, raw.y_test);
-// REQUIRE(score_last == Catch::Approx(0.976666689f).epsilon(raw.epsilon));
-// }
-
-// TEST_CASE("Block Update", "[BoostAODE]")
-// {
-// auto clf = bayesnet::BoostAODE();
-// auto raw = RawDatasets("mfeat-factors", true, 500);
-// clf.setHyperparameters({
-// {"bisection", true},
-// {"block_update", true},
-// {"maxTolerance", 3},
-// {"convergence", true},
-// });
-// clf.fit(raw.X_train, raw.y_train, raw.features, raw.className, raw.states, raw.smoothing);
-// REQUIRE(clf.getNumberOfNodes() == 868);
-// REQUIRE(clf.getNumberOfEdges() == 1724);
-// REQUIRE(clf.getNotes().size() == 3);
-// REQUIRE(clf.getNotes()[0] == "Convergence threshold reached & 15 models eliminated");
-// REQUIRE(clf.getNotes()[1] == "Used features in train: 19 of 216");
-// REQUIRE(clf.getNotes()[2] == "Number of models: 4");
-// auto score = clf.score(raw.X_test, raw.y_test);
-// auto scoret = clf.score(raw.X_test, raw.y_test);
-// REQUIRE(score == Catch::Approx(0.99f).epsilon(raw.epsilon));
-// REQUIRE(scoret == Catch::Approx(0.99f).epsilon(raw.epsilon));
-// //
-// // std::cout << "Number of nodes " << clf.getNumberOfNodes() << std::endl;
-// // std::cout << "Number of edges " << clf.getNumberOfEdges() << std::endl;
-// // std::cout << "Notes size " << clf.getNotes().size() << std::endl;
-// // for (auto note : clf.getNotes()) {
-// // std::cout << note << std::endl;
-// // }
-// // std::cout << "Score " << score << std::endl;
-// }
+TEST_CASE("Feature_select IWSS", "[BoostA2DE]")
+{
+auto raw = RawDatasets("glass", true);
+auto clf = bayesnet::BoostA2DE();
+clf.setHyperparameters({ {"select_features", "IWSS"}, {"threshold", 0.5 } });
+clf.fit(raw.Xv, raw.yv, raw.features, raw.className, raw.states, raw.smoothing);
+REQUIRE(clf.getNumberOfNodes() == 140);
+REQUIRE(clf.getNumberOfEdges() == 294);
+REQUIRE(clf.getNotes().size() == 4);
+REQUIRE(clf.getNotes()[0] == "Used features in initialization: 4 of 9 with IWSS");
+REQUIRE(clf.getNotes()[1] == "Convergence threshold reached & 15 models eliminated");
+REQUIRE(clf.getNotes()[2] == "Pairs not used in train: 2");
+REQUIRE(clf.getNotes()[3] == "Number of models: 14");
+}
+TEST_CASE("Feature_select FCBF", "[BoostA2DE]")
+{
+auto raw = RawDatasets("glass", true);
+auto clf = bayesnet::BoostA2DE();
+clf.setHyperparameters({ {"select_features", "FCBF"}, {"threshold", 1e-7 } });
+clf.fit(raw.Xv, raw.yv, raw.features, raw.className, raw.states, raw.smoothing);
+REQUIRE(clf.getNumberOfNodes() == 110);
+REQUIRE(clf.getNumberOfEdges() == 231);
+REQUIRE(clf.getNotes()[0] == "Used features in initialization: 4 of 9 with FCBF");
+REQUIRE(clf.getNotes()[1] == "Convergence threshold reached & 15 models eliminated");
+REQUIRE(clf.getNotes()[2] == "Pairs not used in train: 2");
+REQUIRE(clf.getNotes()[3] == "Number of models: 11");
+}
+TEST_CASE("Test used features in train note and score", "[BoostA2DE]")
+{
+auto raw = RawDatasets("diabetes", true);
+auto clf = bayesnet::BoostA2DE(true);
+clf.setHyperparameters({
+{"order", "asc"},
+{"convergence", true},
+{"select_features","CFS"},
+});
+clf.fit(raw.Xv, raw.yv, raw.features, raw.className, raw.states, raw.smoothing);
+REQUIRE(clf.getNumberOfNodes() == 144);
+REQUIRE(clf.getNumberOfEdges() == 288);
+REQUIRE(clf.getNotes().size() == 2);
+REQUIRE(clf.getNotes()[0] == "Used features in initialization: 6 of 8 with CFS");
+REQUIRE(clf.getNotes()[1] == "Number of models: 16");
+auto score = clf.score(raw.Xv, raw.yv);
+auto scoret = clf.score(raw.Xt, raw.yt);
+REQUIRE(score == Catch::Approx(0.856771).epsilon(raw.epsilon));
+REQUIRE(scoret == Catch::Approx(0.856771).epsilon(raw.epsilon));
+}
+TEST_CASE("Voting vs proba", "[BoostA2DE]")
+{
+auto raw = RawDatasets("iris", true);
+auto clf = bayesnet::BoostA2DE(false);
+clf.fit(raw.Xv, raw.yv, raw.features, raw.className, raw.states, raw.smoothing);
+auto score_proba = clf.score(raw.Xv, raw.yv);
+auto pred_proba = clf.predict_proba(raw.Xv);
+clf.setHyperparameters({
+{"predict_voting",true},
+});
+auto score_voting = clf.score(raw.Xv, raw.yv);
+auto pred_voting = clf.predict_proba(raw.Xv);
+REQUIRE(score_proba == Catch::Approx(0.98).epsilon(raw.epsilon));
+REQUIRE(score_voting == Catch::Approx(0.946667).epsilon(raw.epsilon));
+REQUIRE(pred_voting[83][2] == Catch::Approx(0.53508).epsilon(raw.epsilon));
+REQUIRE(pred_proba[83][2] == Catch::Approx(0.48394).epsilon(raw.epsilon));
+REQUIRE(clf.dump_cpt() == "");
+REQUIRE(clf.topological_order() == std::vector<std::string>());
+}
+TEST_CASE("Order asc, desc & random", "[BoostA2DE]")
+{
+auto raw = RawDatasets("glass", true);
+std::map<std::string, double> scores{
+{"asc", 0.752336f }, { "desc", 0.813084f }, { "rand", 0.850467 }
+};
+for (const std::string& order : { "asc", "desc", "rand" }) {
+auto clf = bayesnet::BoostA2DE();
+clf.setHyperparameters({
+{"order", order},
+{"bisection", false},
+{"maxTolerance", 1},
+{"convergence", false},
+});
+clf.fit(raw.Xv, raw.yv, raw.features, raw.className, raw.states, raw.smoothing);
+auto score = clf.score(raw.Xv, raw.yv);
+auto scoret = clf.score(raw.Xt, raw.yt);
+INFO("BoostA2DE order: " + order);
+REQUIRE(score == Catch::Approx(scores[order]).epsilon(raw.epsilon));
+REQUIRE(scoret == Catch::Approx(scores[order]).epsilon(raw.epsilon));
+}
+}
+TEST_CASE("Oddities2", "[BoostA2DE]")
+{
+auto clf = bayesnet::BoostA2DE();
+auto raw = RawDatasets("iris", true);
+auto bad_hyper = nlohmann::json{
+{ { "order", "duck" } },
+{ { "select_features", "duck" } },
+{ { "maxTolerance", 0 } },
+{ { "maxTolerance", 5 } },
+};
+for (const auto& hyper : bad_hyper.items()) {
+INFO("BoostA2DE hyper: " + hyper.value().dump());
+REQUIRE_THROWS_AS(clf.setHyperparameters(hyper.value()), std::invalid_argument);
+}
+REQUIRE_THROWS_AS(clf.setHyperparameters({ {"maxTolerance", 0 } }), std::invalid_argument);
+auto bad_hyper_fit = nlohmann::json{
+{ { "select_features","IWSS" }, { "threshold", -0.01 } },
+{ { "select_features","IWSS" }, { "threshold", 0.51 } },
+{ { "select_features","FCBF" }, { "threshold", 1e-8 } },
+{ { "select_features","FCBF" }, { "threshold", 1.01 } },
+};
+for (const auto& hyper : bad_hyper_fit.items()) {
+INFO("BoostA2DE hyper: " + hyper.value().dump());
+clf.setHyperparameters(hyper.value());
+REQUIRE_THROWS_AS(clf.fit(raw.Xv, raw.yv, raw.features, raw.className, raw.states, raw.smoothing), std::invalid_argument);
+}
+}
+TEST_CASE("No features selected", "[BoostA2DE]")
+{
+// Check that the note "No features selected in initialization" is added
+//
+auto raw = RawDatasets("iris", true);
+auto clf = bayesnet::BoostA2DE();
+clf.setHyperparameters({ {"select_features","FCBF"}, {"threshold", 1 } });
+clf.fit(raw.Xv, raw.yv, raw.features, raw.className, raw.states, raw.smoothing);
+REQUIRE(clf.getNotes().size() == 1);
+REQUIRE(clf.getNotes()[0] == "No features selected in initialization");
+}
+TEST_CASE("Bisection Best", "[BoostA2DE]")
+{
+auto clf = bayesnet::BoostA2DE();
+auto raw = RawDatasets("kdd_JapaneseVowels", true, 1200, true, false);
+clf.setHyperparameters({
+{"bisection", true},
+{"maxTolerance", 3},
+{"convergence", true},
+{"block_update", false},
+{"convergence_best", false},
+});
+clf.fit(raw.X_train, raw.y_train, raw.features, raw.className, raw.states, raw.smoothing);
+REQUIRE(clf.getNumberOfNodes() == 480);
+REQUIRE(clf.getNumberOfEdges() == 1152);
+REQUIRE(clf.getNotes().size() == 3);
+REQUIRE(clf.getNotes().at(0) == "Convergence threshold reached & 15 models eliminated");
+REQUIRE(clf.getNotes().at(1) == "Pairs not used in train: 83");
+REQUIRE(clf.getNotes().at(2) == "Number of models: 32");
+auto score = clf.score(raw.X_test, raw.y_test);
+auto scoret = clf.score(raw.X_test, raw.y_test);
+REQUIRE(score == Catch::Approx(0.966667f).epsilon(raw.epsilon));
+REQUIRE(scoret == Catch::Approx(0.966667f).epsilon(raw.epsilon));
+}
+TEST_CASE("Block Update", "[BoostA2DE]")
+{
+auto clf = bayesnet::BoostA2DE();
+auto raw = RawDatasets("spambase", true, 500);
+clf.setHyperparameters({
+{"bisection", true},
+{"block_update", true},
+{"maxTolerance", 3},
+{"convergence", true},
+});
+clf.fit(raw.X_train, raw.y_train, raw.features, raw.className, raw.states, raw.smoothing);
+REQUIRE(clf.getNumberOfNodes() == 58);
+REQUIRE(clf.getNumberOfEdges() == 165);
+REQUIRE(clf.getNotes().size() == 3);
+REQUIRE(clf.getNotes()[0] == "Convergence threshold reached & 15 models eliminated");
+REQUIRE(clf.getNotes()[1] == "Pairs not used in train: 1588");
+REQUIRE(clf.getNotes()[2] == "Number of models: 1");
+auto score = clf.score(raw.X_test, raw.y_test);
+auto scoret = clf.score(raw.X_test, raw.y_test);
+REQUIRE(score == Catch::Approx(1.0f).epsilon(raw.epsilon));
+REQUIRE(scoret == Catch::Approx(1.0f).epsilon(raw.epsilon));
+//
+// std::cout << "Number of nodes " << clf.getNumberOfNodes() << std::endl;
+// std::cout << "Number of edges " << clf.getNumberOfEdges() << std::endl;
+// std::cout << "Notes size " << clf.getNotes().size() << std::endl;
+// for (auto note : clf.getNotes()) {
+// std::cout << note << std::endl;
+// }
+// std::cout << "Score " << score << std::endl;
+}
+TEST_CASE("Test graph b2a2de", "[BoostA2DE]")
+{
+auto raw = RawDatasets("iris", true);
+auto clf = bayesnet::BoostA2DE();
+clf.fit(raw.Xv, raw.yv, raw.features, raw.className, raw.states, raw.smoothing);
+auto graph = clf.graph();
+REQUIRE(graph.size() == 26);
+REQUIRE(graph[0] == "digraph BayesNet {\nlabel=<BayesNet BoostA2DE_0>\nfontsize=30\nfontcolor=blue\nlabelloc=t\nlayout=circo\n");
+REQUIRE(graph[1] == "\"class\" [shape=circle, fontcolor=red, fillcolor=lightblue, style=filled ] \n");
+}

72
tests/TestMST.cc Normal file

@@ -0,0 +1,72 @@
// ***************************************************************
// SPDX-FileCopyrightText: Copyright 2024 Ricardo Montañana Gómez
// SPDX-FileType: SOURCE
// SPDX-License-Identifier: MIT
// ***************************************************************
#include <catch2/catch_test_macros.hpp>
#include <catch2/catch_approx.hpp>
#include <catch2/generators/catch_generators.hpp>
#include <catch2/matchers/catch_matchers.hpp>
#include <string>
#include <vector>
#include "TestUtils.h"
#include "bayesnet/utils/Mst.h"
TEST_CASE("MST::insertElement tests", "[MST]")
{
bayesnet::MST mst({}, torch::tensor({}), 0);
SECTION("Insert into an empty list")
{
std::list<int> variables;
mst.insertElement(variables, 5);
REQUIRE(variables == std::list<int>{5});
}
SECTION("Insert a non-duplicate element")
{
std::list<int> variables = { 1, 2, 3 };
mst.insertElement(variables, 4);
REQUIRE(variables == std::list<int>{4, 1, 2, 3});
}
SECTION("Insert a duplicate element")
{
std::list<int> variables = { 1, 2, 3 };
mst.insertElement(variables, 2);
REQUIRE(variables == std::list<int>{1, 2, 3});
}
}
TEST_CASE("MST::reorder tests", "[MST]")
{
bayesnet::MST mst({}, torch::tensor({}), 0);
SECTION("Reorder simple graph")
{
std::vector<std::pair<float, std::pair<int, int>>> T = { {2.0, {1, 2}}, {1.0, {0, 1}} };
auto result = mst.reorder(T, 0);
REQUIRE(result == std::vector<std::pair<int, int>>{{0, 1}, { 1, 2 }});
}
SECTION("Reorder with disconnected graph")
{
std::vector<std::pair<float, std::pair<int, int>>> T = { {2.0, {1, 2}}, {1.0, {0, 1}} };
auto result = mst.reorder(T, 0);
REQUIRE(result == std::vector<std::pair<int, int>>{{0, 1}, { 2, 3 }});
}
}
TEST_CASE("MST::maximumSpanningTree tests", "[MST]")
{
std::vector<std::string> features = { "A", "B", "C" };
auto weights = torch::tensor({
{0.0, 1.0, 2.0},
{1.0, 0.0, 3.0},
{2.0, 3.0, 0.0}
});
bayesnet::MST mst(features, weights, 0);
SECTION("MST of a complete graph")
{
auto result = mst.maximumSpanningTree();
REQUIRE(result.size() == 2); // An MST for 3 nodes has 2 edges
}
}

4811
tests/data/spambase.arff Executable file

File diff suppressed because it is too large