Tests XSpode & XBAODE

This commit is contained in:
2025-03-12 13:46:04 +01:00
parent 71b05cc1a7
commit 3bdb14bd65
12 changed files with 450 additions and 644 deletions

View File

@@ -10,14 +10,14 @@ if(ENABLE_TESTING)
)
file(GLOB_RECURSE BayesNet_SOURCES "${BayesNet_SOURCE_DIR}/bayesnet/*.cc")
add_executable(TestBayesNet TestBayesNetwork.cc TestBayesNode.cc TestBayesClassifier.cc
TestBayesModels.cc TestBayesMetrics.cc TestFeatureSelection.cc TestBoostAODE.cc TestXBAODE.cc TestA2DE.cc TestWA2DE.cc
TestUtils.cc TestBayesEnsemble.cc TestModulesVersions.cc TestBoostA2DE.cc TestMST.cc ${BayesNet_SOURCES})
TestBayesModels.cc TestBayesMetrics.cc TestFeatureSelection.cc TestBoostAODE.cc TestXBAODE.cc TestA2DE.cc
TestUtils.cc TestBayesEnsemble.cc TestModulesVersions.cc TestBoostA2DE.cc TestMST.cc TestXSPODE.cc ${BayesNet_SOURCES})
target_link_libraries(TestBayesNet PUBLIC "${TORCH_LIBRARIES}" fimdlp PRIVATE Catch2::Catch2WithMain)
add_test(NAME BayesNetworkTest COMMAND TestBayesNet)
add_test(NAME A2DE COMMAND TestBayesNet "[A2DE]")
add_test(NAME WA2DE COMMAND TestBayesNet "[WA2DE]")
add_test(NAME BoostA2DE COMMAND TestBayesNet "[BoostA2DE]")
add_test(NAME BoostAODE COMMAND TestBayesNet "[BoostAODE]")
add_test(NAME XSPODE COMMAND TestBayesNet "[XSPODE]")
add_test(NAME XBAODE COMMAND TestBayesNet "[XBAODE]")
add_test(NAME Classifier COMMAND TestBayesNet "[Classifier]")
add_test(NAME Ensemble COMMAND TestBayesNet "[Ensemble]")

View File

@@ -1,31 +0,0 @@
// ***************************************************************
// SPDX-FileCopyrightText: Copyright 2024 Ricardo Montañana Gómez
// SPDX-FileType: SOURCE
// SPDX-License-Identifier: MIT
// ***************************************************************
#include <type_traits>
#include <catch2/catch_test_macros.hpp>
#include <catch2/catch_approx.hpp>
#include <catch2/generators/catch_generators.hpp>
#include "bayesnet/ensembles/WA2DE.h"
#include "TestUtils.h"
TEST_CASE("Fit and Score", "[WA2DE]")
{
// Fit WA2DE on the discretized iris dataset using the std::vector interface,
// then verify the accuracy obtained on the tensor copies of the same data.
auto raw = RawDatasets("iris", true);
auto clf = bayesnet::WA2DE();
clf.fit(raw.Xv, raw.yv, raw.features, raw.className, raw.states, raw.smoothing);
// Expected accuracy; tolerance comes from the shared dataset fixture.
REQUIRE(clf.score(raw.Xt, raw.yt) == Catch::Approx(0.6333333333333333).epsilon(raw.epsilon));
}
TEST_CASE("Test graph", "[WA2DE]")
{
// graph() is expected to return a two-line placeholder: the supplied title
// followed by a "not implemented" notice.
auto raw = RawDatasets("iris", true);
auto clf = bayesnet::WA2DE();
clf.fit(raw.Xv, raw.yv, raw.features, raw.className, raw.states, raw.smoothing);
auto graph = clf.graph("BayesNet WA2DE");
REQUIRE(graph.size() == 2);
REQUIRE(graph[0] == "BayesNet WA2DE");
REQUIRE(graph[1] == "Graph visualization not implemented.");
}

View File

@@ -4,88 +4,94 @@
// SPDX-License-Identifier: MIT
// ***************************************************************
#include <type_traits>
#include <catch2/catch_test_macros.hpp>
#include <catch2/catch_approx.hpp>
#include <catch2/generators/catch_generators.hpp>
#include <catch2/catch_test_macros.hpp>
#include <catch2/generators/catch_generators.hpp>
#include <catch2/matchers/catch_matchers.hpp>
#include "bayesnet/ensembles/XBAODE.h"
#include "TestUtils.h"
#include "bayesnet/ensembles/XBAODE.h"
TEST_CASE("Normal test", "[XBAODE]")
{
auto raw = RawDatasets("iris", true);
auto clf = bayesnet::XBAODE();
clf.fit(raw.Xv, raw.yv, raw.features, raw.className, raw.states, raw.smoothing);
REQUIRE(clf.getNumberOfNodes() == 20);
REQUIRE(clf.getNumberOfEdges() == 112);
REQUIRE(clf.getNotes().size() == 1);
TEST_CASE("Normal test", "[XBAODE]") {
// Basic XBAODE training on iris: check graph size, training notes,
// library version and test-set accuracy.
auto raw = RawDatasets("iris", true);
auto clf = bayesnet::XBAODE();
clf.fit(raw.Xv, raw.yv, raw.features, raw.className, raw.states,
raw.smoothing);
REQUIRE(clf.getNumberOfNodes() == 20);
REQUIRE(clf.getNumberOfEdges() == 36);
REQUIRE(clf.getNotes().size() == 1);
// NOTE(review): version pin — this assertion must be bumped on every release.
REQUIRE(clf.getVersion() == "0.9.7");
REQUIRE(clf.getNotes()[0] == "Number of models: 4");
REQUIRE(clf.getNumberOfStates() == 256);
REQUIRE(clf.score(raw.X_test, raw.y_test) == Catch::Approx(0.933333));
}
//TEST_CASE("Feature_select CFS", "[XBAODE]")
//{
// auto raw = RawDatasets("glass", true);
// auto clf = bayesnet::XBAODE();
// clf.setHyperparameters({ {"select_features", "CFS"} });
// clf.fit(raw.Xv, raw.yv, raw.features, raw.className, raw.states, raw.smoothing);
// REQUIRE(clf.getNumberOfNodes() == 97);
// REQUIRE(clf.getNumberOfEdges() == 153);
// REQUIRE(clf.getNotes().size() == 2);
// REQUIRE(clf.getNotes()[0] == "Used features in initialization: 6 of 9 with CFS");
// REQUIRE(clf.getNotes()[1] == "Number of models: 9");
//}
// TEST_CASE("Feature_select IWSS", "[XBAODE]")
// {
// auto raw = RawDatasets("glass", true);
// auto clf = bayesnet::XBAODE();
// clf.setHyperparameters({ {"select_features", "IWSS"}, {"threshold", 0.5 } });
// clf.fit(raw.Xv, raw.yv, raw.features, raw.className, raw.states, raw.smoothing);
// REQUIRE(clf.getNumberOfNodes() == 90);
// REQUIRE(clf.getNumberOfEdges() == 153);
// REQUIRE(clf.getNotes().size() == 2);
// REQUIRE(clf.getNotes()[0] == "Used features in initialization: 4 of 9 with IWSS");
// REQUIRE(clf.getNotes()[1] == "Number of models: 9");
// }
// TEST_CASE("Feature_select FCBF", "[XBAODE]")
// {
// auto raw = RawDatasets("glass", true);
// auto clf = bayesnet::XBAODE();
// clf.setHyperparameters({ {"select_features", "FCBF"}, {"threshold", 1e-7 } });
// clf.fit(raw.Xv, raw.yv, raw.features, raw.className, raw.states, raw.smoothing);
// REQUIRE(clf.getNumberOfNodes() == 90);
// REQUIRE(clf.getNumberOfEdges() == 153);
// REQUIRE(clf.getNotes().size() == 2);
// REQUIRE(clf.getNotes()[0] == "Used features in initialization: 4 of 9 with FCBF");
// REQUIRE(clf.getNotes()[1] == "Number of models: 9");
// }
// TEST_CASE("Test used features in train note and score", "[XBAODE]")
// {
// auto raw = RawDatasets("diabetes", true);
// auto clf = bayesnet::XBAODE(true);
// clf.setHyperparameters({
// {"order", "asc"},
// {"convergence", true},
// {"select_features","CFS"},
// });
// clf.fit(raw.Xv, raw.yv, raw.features, raw.className, raw.states, raw.smoothing);
// REQUIRE(clf.getNumberOfNodes() == 72);
// REQUIRE(clf.getNumberOfEdges() == 120);
// REQUIRE(clf.getNotes().size() == 2);
// REQUIRE(clf.getNotes()[0] == "Used features in initialization: 6 of 8 with CFS");
// REQUIRE(clf.getNotes()[1] == "Number of models: 8");
// auto score = clf.score(raw.Xv, raw.yv);
// auto scoret = clf.score(raw.Xt, raw.yt);
// REQUIRE(score == Catch::Approx(0.809895813).epsilon(raw.epsilon));
// REQUIRE(scoret == Catch::Approx(0.809895813).epsilon(raw.epsilon));
// }
TEST_CASE("Feature_select CFS", "[XBAODE]") {
// XBAODE on glass with CFS feature selection: the initialization note
// reports how many features CFS kept, and the model count follows.
auto raw = RawDatasets("glass", true);
auto clf = bayesnet::XBAODE();
clf.setHyperparameters({{"select_features", "CFS"}});
clf.fit(raw.Xv, raw.yv, raw.features, raw.className, raw.states,
raw.smoothing);
REQUIRE(clf.getNumberOfNodes() == 90);
REQUIRE(clf.getNumberOfEdges() == 171);
REQUIRE(clf.getNotes().size() == 2);
REQUIRE(clf.getNotes()[0] ==
"Used features in initialization: 6 of 9 with CFS");
REQUIRE(clf.getNotes()[1] == "Number of models: 9");
REQUIRE(clf.score(raw.X_test, raw.y_test) == Catch::Approx(0.720930219));
}
TEST_CASE("Feature_select IWSS", "[XBAODE]") {
// Same scenario as the CFS test but with IWSS selection and a 0.5 threshold;
// IWSS keeps fewer features (4 of 9) and yields a slightly lower score.
auto raw = RawDatasets("glass", true);
auto clf = bayesnet::XBAODE();
clf.setHyperparameters({{"select_features", "IWSS"}, {"threshold", 0.5}});
clf.fit(raw.Xv, raw.yv, raw.features, raw.className, raw.states,
raw.smoothing);
REQUIRE(clf.getNumberOfNodes() == 90);
REQUIRE(clf.getNumberOfEdges() == 171);
REQUIRE(clf.getNotes().size() == 2);
REQUIRE(clf.getNotes()[0] ==
"Used features in initialization: 4 of 9 with IWSS");
REQUIRE(clf.getNotes()[1] == "Number of models: 9");
REQUIRE(clf.score(raw.X_test, raw.y_test) == Catch::Approx(0.697674394));
}
TEST_CASE("Feature_select FCBF", "[XBAODE]") {
// FCBF selection with a near-zero threshold; structure matches the other
// selection tests, and 4 of 9 features survive initialization.
auto raw = RawDatasets("glass", true);
auto clf = bayesnet::XBAODE();
clf.setHyperparameters({{"select_features", "FCBF"}, {"threshold", 1e-7}});
clf.fit(raw.Xv, raw.yv, raw.features, raw.className, raw.states,
raw.smoothing);
REQUIRE(clf.getNumberOfNodes() == 90);
REQUIRE(clf.getNumberOfEdges() == 171);
REQUIRE(clf.getNotes().size() == 2);
REQUIRE(clf.getNotes()[0] ==
"Used features in initialization: 4 of 9 with FCBF");
REQUIRE(clf.getNotes()[1] == "Number of models: 9");
REQUIRE(clf.score(raw.X_test, raw.y_test) == Catch::Approx(0.720930219));
}
TEST_CASE("Test used features in train note and score", "[XBAODE]")
{
  // Fit XBAODE on the diabetes dataset with CFS feature selection enabled,
  // then check model size, the training notes, and that the vector- and
  // tensor-interface scores agree.
  auto data = RawDatasets("diabetes", true);
  auto model = bayesnet::XBAODE();
  model.setHyperparameters({
      {"order", "asc"},
      {"convergence", true},
      {"select_features","CFS"},
  });
  model.fit(data.Xv, data.yv, data.features, data.className, data.states, data.smoothing);
  REQUIRE(model.getNumberOfNodes() == 72);
  REQUIRE(model.getNumberOfEdges() == 136);
  REQUIRE(model.getNotes().size() == 2);
  REQUIRE(model.getNotes()[0] == "Used features in initialization: 6 of 8 with CFS");
  REQUIRE(model.getNotes()[1] == "Number of models: 8");
  // Score through both the std::vector and the tensor interfaces.
  auto score_vector = model.score(data.Xv, data.yv);
  auto score_tensor = model.score(data.Xt, data.yt);
  REQUIRE(score_vector == Catch::Approx(0.819010437f).epsilon(data.epsilon));
  REQUIRE(score_tensor == Catch::Approx(0.819010437f).epsilon(data.epsilon));
}
// TEST_CASE("Voting vs proba", "[XBAODE]")
// {
// auto raw = RawDatasets("iris", true);
// auto clf = bayesnet::XBAODE(false);
// clf.fit(raw.Xv, raw.yv, raw.features, raw.className, raw.states, raw.smoothing);
// auto score_proba = clf.score(raw.Xv, raw.yv);
// auto pred_proba = clf.predict_proba(raw.Xv);
// clf.setHyperparameters({
// clf.fit(raw.Xv, raw.yv, raw.features, raw.className, raw.states,
// raw.smoothing); auto score_proba = clf.score(raw.Xv, raw.yv); auto
// pred_proba = clf.predict_proba(raw.Xv); clf.setHyperparameters({
// {"predict_voting",true},
// });
// auto score_voting = clf.score(raw.Xv, raw.yv);
@@ -93,9 +99,9 @@ TEST_CASE("Normal test", "[XBAODE]")
// REQUIRE(score_proba == Catch::Approx(0.97333).epsilon(raw.epsilon));
// REQUIRE(score_voting == Catch::Approx(0.98).epsilon(raw.epsilon));
// REQUIRE(pred_voting[83][2] == Catch::Approx(1.0).epsilon(raw.epsilon));
// REQUIRE(pred_proba[83][2] == Catch::Approx(0.86121525).epsilon(raw.epsilon));
// REQUIRE(clf.dump_cpt() == "");
// REQUIRE(clf.topological_order() == std::vector<std::string>());
// REQUIRE(pred_proba[83][2] ==
// Catch::Approx(0.86121525).epsilon(raw.epsilon)); REQUIRE(clf.dump_cpt()
// == ""); REQUIRE(clf.topological_order() == std::vector<std::string>());
// }
// TEST_CASE("Order asc, desc & random", "[XBAODE]")
// {
@@ -111,10 +117,9 @@ TEST_CASE("Normal test", "[XBAODE]")
// {"maxTolerance", 1},
// {"convergence", false},
// });
// clf.fit(raw.Xv, raw.yv, raw.features, raw.className, raw.states, raw.smoothing);
// auto score = clf.score(raw.Xv, raw.yv);
// auto scoret = clf.score(raw.Xt, raw.yt);
// INFO("XBAODE order: " << order);
// clf.fit(raw.Xv, raw.yv, raw.features, raw.className, raw.states,
// raw.smoothing); auto score = clf.score(raw.Xv, raw.yv); auto scoret =
// clf.score(raw.Xt, raw.yt); INFO("XBAODE order: " << order);
// REQUIRE(score == Catch::Approx(scores[order]).epsilon(raw.epsilon));
// REQUIRE(scoret == Catch::Approx(scores[order]).epsilon(raw.epsilon));
// }
@@ -131,10 +136,11 @@ TEST_CASE("Normal test", "[XBAODE]")
// };
// for (const auto& hyper : bad_hyper.items()) {
// INFO("XBAODE hyper: " << hyper.value().dump());
// REQUIRE_THROWS_AS(clf.setHyperparameters(hyper.value()), std::invalid_argument);
// REQUIRE_THROWS_AS(clf.setHyperparameters(hyper.value()),
// std::invalid_argument);
// }
// REQUIRE_THROWS_AS(clf.setHyperparameters({ {"maxTolerance", 0 } }), std::invalid_argument);
// auto bad_hyper_fit = nlohmann::json{
// REQUIRE_THROWS_AS(clf.setHyperparameters({ {"maxTolerance", 0 } }),
// std::invalid_argument); auto bad_hyper_fit = nlohmann::json{
// { { "select_features","IWSS" }, { "threshold", -0.01 } },
// { { "select_features","IWSS" }, { "threshold", 0.51 } },
// { { "select_features","FCBF" }, { "threshold", 1e-8 } },
@@ -143,7 +149,8 @@ TEST_CASE("Normal test", "[XBAODE]")
// for (const auto& hyper : bad_hyper_fit.items()) {
// INFO("XBAODE hyper: " << hyper.value().dump());
// clf.setHyperparameters(hyper.value());
// REQUIRE_THROWS_AS(clf.fit(raw.Xv, raw.yv, raw.features, raw.className, raw.states, raw.smoothing), std::invalid_argument);
// REQUIRE_THROWS_AS(clf.fit(raw.Xv, raw.yv, raw.features,
// raw.className, raw.states, raw.smoothing), std::invalid_argument);
// }
// auto bad_hyper_fit2 = nlohmann::json{
@@ -152,7 +159,8 @@ TEST_CASE("Normal test", "[XBAODE]")
// };
// for (const auto& hyper : bad_hyper_fit2.items()) {
// INFO("XBAODE hyper: " << hyper.value().dump());
// REQUIRE_THROWS_AS(clf.setHyperparameters(hyper.value()), std::invalid_argument);
// REQUIRE_THROWS_AS(clf.setHyperparameters(hyper.value()),
// std::invalid_argument);
// }
// }
// TEST_CASE("Bisection Best", "[XBAODE]")
@@ -165,8 +173,8 @@ TEST_CASE("Normal test", "[XBAODE]")
// {"convergence", true},
// {"convergence_best", false},
// });
// clf.fit(raw.X_train, raw.y_train, raw.features, raw.className, raw.states, raw.smoothing);
// REQUIRE(clf.getNumberOfNodes() == 210);
// clf.fit(raw.X_train, raw.y_train, raw.features, raw.className,
// raw.states, raw.smoothing); REQUIRE(clf.getNumberOfNodes() == 210);
// REQUIRE(clf.getNumberOfEdges() == 378);
// REQUIRE(clf.getNotes().size() == 1);
// REQUIRE(clf.getNotes().at(0) == "Number of models: 14");
@@ -186,15 +194,17 @@ TEST_CASE("Normal test", "[XBAODE]")
// {"convergence_best", true},
// };
// clf.setHyperparameters(hyperparameters);
// clf.fit(raw.X_train, raw.y_train, raw.features, raw.className, raw.states, raw.smoothing);
// auto score_best = clf.score(raw.X_test, raw.y_test);
// REQUIRE(score_best == Catch::Approx(0.980000019f).epsilon(raw.epsilon));
// clf.fit(raw.X_train, raw.y_train, raw.features, raw.className,
// raw.states, raw.smoothing); auto score_best = clf.score(raw.X_test,
// raw.y_test); REQUIRE(score_best ==
// Catch::Approx(0.980000019f).epsilon(raw.epsilon));
// // Now we will set the hyperparameter to use the last accuracy
// hyperparameters["convergence_best"] = false;
// clf.setHyperparameters(hyperparameters);
// clf.fit(raw.X_train, raw.y_train, raw.features, raw.className, raw.states, raw.smoothing);
// auto score_last = clf.score(raw.X_test, raw.y_test);
// REQUIRE(score_last == Catch::Approx(0.976666689f).epsilon(raw.epsilon));
// clf.fit(raw.X_train, raw.y_train, raw.features, raw.className,
// raw.states, raw.smoothing); auto score_last = clf.score(raw.X_test,
// raw.y_test); REQUIRE(score_last ==
// Catch::Approx(0.976666689f).epsilon(raw.epsilon));
// }
// TEST_CASE("Block Update", "[XBAODE]")
// {
@@ -206,20 +216,21 @@ TEST_CASE("Normal test", "[XBAODE]")
// {"maxTolerance", 3},
// {"convergence", true},
// });
// clf.fit(raw.X_train, raw.y_train, raw.features, raw.className, raw.states, raw.smoothing);
// REQUIRE(clf.getNumberOfNodes() == 868);
// clf.fit(raw.X_train, raw.y_train, raw.features, raw.className,
// raw.states, raw.smoothing); REQUIRE(clf.getNumberOfNodes() == 868);
// REQUIRE(clf.getNumberOfEdges() == 1724);
// REQUIRE(clf.getNotes().size() == 3);
// REQUIRE(clf.getNotes()[0] == "Convergence threshold reached & 15 models eliminated");
// REQUIRE(clf.getNotes()[1] == "Used features in train: 19 of 216");
// REQUIRE(clf.getNotes()[2] == "Number of models: 4");
// auto score = clf.score(raw.X_test, raw.y_test);
// auto scoret = clf.score(raw.X_test, raw.y_test);
// REQUIRE(score == Catch::Approx(0.99f).epsilon(raw.epsilon));
// REQUIRE(clf.getNotes()[0] == "Convergence threshold reached & 15 models
// eliminated"); REQUIRE(clf.getNotes()[1] == "Used features in train: 19 of
// 216"); REQUIRE(clf.getNotes()[2] == "Number of models: 4"); auto score =
// clf.score(raw.X_test, raw.y_test); auto scoret = clf.score(raw.X_test,
// raw.y_test); REQUIRE(score == Catch::Approx(0.99f).epsilon(raw.epsilon));
// REQUIRE(scoret == Catch::Approx(0.99f).epsilon(raw.epsilon));
// //
// // std::cout << "Number of nodes " << clf.getNumberOfNodes() << std::endl;
// // std::cout << "Number of edges " << clf.getNumberOfEdges() << std::endl;
// // std::cout << "Number of nodes " << clf.getNumberOfNodes() <<
// std::endl;
// // std::cout << "Number of edges " << clf.getNumberOfEdges() <<
// std::endl;
// // std::cout << "Notes size " << clf.getNotes().size() << std::endl;
// // for (auto note : clf.getNotes()) {
// // std::cout << note << std::endl;
@@ -234,10 +245,11 @@ TEST_CASE("Normal test", "[XBAODE]")
// clf_alpha.setHyperparameters({
// {"alpha_block", true},
// });
// clf_alpha.fit(raw.X_train, raw.y_train, raw.features, raw.className, raw.states, raw.smoothing);
// clf_no_alpha.fit(raw.X_train, raw.y_train, raw.features, raw.className, raw.states, raw.smoothing);
// auto score_alpha = clf_alpha.score(raw.X_test, raw.y_test);
// auto score_no_alpha = clf_no_alpha.score(raw.X_test, raw.y_test);
// REQUIRE(score_alpha == Catch::Approx(0.720779f).epsilon(raw.epsilon));
// REQUIRE(score_no_alpha == Catch::Approx(0.733766f).epsilon(raw.epsilon));
// clf_alpha.fit(raw.X_train, raw.y_train, raw.features, raw.className,
// raw.states, raw.smoothing); clf_no_alpha.fit(raw.X_train, raw.y_train,
// raw.features, raw.className, raw.states, raw.smoothing); auto score_alpha
// = clf_alpha.score(raw.X_test, raw.y_test); auto score_no_alpha =
// clf_no_alpha.score(raw.X_test, raw.y_test); REQUIRE(score_alpha ==
// Catch::Approx(0.720779f).epsilon(raw.epsilon)); REQUIRE(score_no_alpha ==
// Catch::Approx(0.733766f).epsilon(raw.epsilon));
// }

126
tests/TestXSPODE.cc Normal file
View File

@@ -0,0 +1,126 @@
// ***************************************************************
// SPDX-FileCopyrightText: Copyright 2024 Ricardo Montañana Gómez
// SPDX-FileType: SOURCE
// SPDX-License-Identifier: MIT
// ***************************************************************
#include <catch2/catch_test_macros.hpp>
#include <catch2/catch_approx.hpp>
#include <catch2/matchers/catch_matchers.hpp>
#include <stdexcept>
#include "bayesnet/classifiers/XSPODE.h"
#include "TestUtils.h"
TEST_CASE("fit vector test", "[XSPODE]") {
// Train one XSpode per super-parent index (iris has 4 features) through the
// std::vector fit interface and check structure and per-parent accuracy.
auto raw = RawDatasets("iris", true);
// Expected test accuracy for super-parent 0..3.
auto scores = std::vector<float>({0.966667, 0.9333333, 0.966667, 0.966667});
for (int i = 0; i < 4; ++i) {
auto clf = bayesnet::XSpode(i);
clf.fit(raw.Xv, raw.yv, raw.features, raw.className, raw.states,
raw.smoothing);
REQUIRE(clf.getNumberOfNodes() == 5);
REQUIRE(clf.getNumberOfEdges() == 9);
REQUIRE(clf.getNotes().size() == 0);
REQUIRE(clf.score(raw.X_test, raw.y_test) == Catch::Approx(scores.at(i)));
}
}
TEST_CASE("fit dataset test", "[XSPODE]") {
// Same expectations as the vector test, but training through the combined
// dataset fit overload instead of separate X/y vectors.
auto raw = RawDatasets("iris", true);
// Expected test accuracy for super-parent 0..3.
auto scores = std::vector<float>({0.966667, 0.9333333, 0.966667, 0.966667});
for (int i = 0; i < 4; ++i) {
auto clf = bayesnet::XSpode(i);
clf.fit(raw.dataset, raw.features, raw.className, raw.states,
raw.smoothing);
REQUIRE(clf.getNumberOfNodes() == 5);
REQUIRE(clf.getNumberOfEdges() == 9);
REQUIRE(clf.getNotes().size() == 0);
REQUIRE(clf.score(raw.X_test, raw.y_test) == Catch::Approx(scores.at(i)));
}
}
TEST_CASE("tensors dataset predict & predict_proba", "[XSPODE]") {
// Tensor fit interface: besides the overall score, check predict_proba on a
// reduced test slice against expected class probabilities per super-parent.
auto raw = RawDatasets("iris", true);
auto scores = std::vector<float>({0.966667, 0.9333333, 0.966667, 0.966667});
// One row of expected probabilities (3 iris classes, first test sample)
// per super-parent index 0..3.
auto probs_expected = std::vector<std::vector<float>>({
{0.999017, 0.000306908, 0.000676449},
{0.99831, 0.00119304, 0.000497099},
{0.998432, 0.00078416, 0.00078416},
{0.998801, 0.000599438, 0.000599438}
});
for (int i = 0; i < 4; ++i) {
auto clf = bayesnet::XSpode(i);
clf.fit(raw.Xt, raw.yt, raw.features, raw.className, raw.states,
raw.smoothing);
REQUIRE(clf.getNumberOfNodes() == 5);
REQUIRE(clf.getNumberOfEdges() == 9);
REQUIRE(clf.getNotes().size() == 0);
REQUIRE(clf.score(raw.X_test, raw.y_test) == Catch::Approx(scores.at(i)));
// Get the first 4 lines of X_test to do predict_proba
auto X_reduced = raw.X_test.slice(1, 0, 4);
auto proba = clf.predict_proba(X_reduced);
// Only the first sample's 3 class probabilities are pinned.
for (int p = 0; p < 3; ++p) {
REQUIRE(proba[0][p].item<double>() == Catch::Approx(probs_expected.at(i).at(p)));
}
}
}
TEST_CASE("mfeat-factors dataset test", "[XSPODE]") {
// Larger dataset (216 features): validates node/edge counts, the total
// number of states, and per-super-parent accuracy on the first 4 parents.
auto raw = RawDatasets("mfeat-factors", true);
auto scores = std::vector<float>({0.9825, 0.9775, 0.9775, 0.99});
for (int i = 0; i < 4; ++i) {
auto clf = bayesnet::XSpode(i);
clf.fit(raw.Xt, raw.yt, raw.features, raw.className, raw.states, raw.smoothing);
REQUIRE(clf.getNumberOfNodes() == 217);
REQUIRE(clf.getNumberOfEdges() == 433);
REQUIRE(clf.getNotes().size() == 0);
REQUIRE(clf.getNumberOfStates() == 652320);
REQUIRE(clf.score(raw.X_test, raw.y_test) == Catch::Approx(scores.at(i)));
}
}
TEST_CASE("Laplace predict", "[XSPODE]") {
// Like the fit tests, but the super-parent is selected via the "parent"
// hyperparameter (constructor always gets 0) and LAPLACE smoothing is used.
auto raw = RawDatasets("iris", true);
auto scores = std::vector<float>({0.966666639, 1.0f, 0.933333337, 1.0f});
for (int i = 0; i < 4; ++i) {
auto clf = bayesnet::XSpode(0);
// Hyperparameter overrides the parent passed to the constructor.
clf.setHyperparameters({ {"parent", i} });
clf.fit(raw.Xt, raw.yt, raw.features, raw.className, raw.states, bayesnet::Smoothing_t::LAPLACE);
REQUIRE(clf.getNumberOfNodes() == 5);
REQUIRE(clf.getNumberOfEdges() == 9);
REQUIRE(clf.getNotes().size() == 0);
REQUIRE(clf.getNumberOfStates() == 64);
REQUIRE(clf.getNFeatures() == 4);
REQUIRE(clf.score(raw.X_test, raw.y_test) == Catch::Approx(scores.at(i)));
}
}
TEST_CASE("Not fitted model predict", "[XSPODE]")
{
// Calling predict() before fit() must throw std::logic_error.
// The RawDatasets fixture was removed: it was constructed but never used,
// so it only added dataset-loading overhead to this test.
auto clf = bayesnet::XSpode(0);
REQUIRE_THROWS_AS(clf.predict(std::vector<int>({1,2,3})), std::logic_error);
}
TEST_CASE("Test instance predict", "[XSPODE]")
{
// Single-instance predict with ORIGINAL smoothing, then refit with CESTNIK
// smoothing and verify the predicted class changes while the score does not.
auto raw = RawDatasets("iris", true);
auto clf = bayesnet::XSpode(0);
clf.fit(raw.Xt, raw.yt, raw.features, raw.className, raw.states, bayesnet::Smoothing_t::ORIGINAL);
REQUIRE(clf.predict(std::vector<int>({1,2,3,4})) == 1);
REQUIRE(clf.score(raw.Xv, raw.yv) == Catch::Approx(0.973333359f));
// Cestnik is not defined in the classifier so it should imply alpha_ = 0
clf.fit(raw.Xt, raw.yt, raw.features, raw.className, raw.states, bayesnet::Smoothing_t::CESTNIK);
REQUIRE(clf.predict(std::vector<int>({1,2,3,4})) == 0);
REQUIRE(clf.score(raw.Xv, raw.yv) == Catch::Approx(0.973333359f));
}
TEST_CASE("Test to_string and fitx", "[XSPODE]")
{
// fitx() trains directly from tensors plus an explicit per-sample weight
// vector; here uniform weights 1/N are used.
auto raw = RawDatasets("iris", true);
auto clf = bayesnet::XSpode(0);
auto weights = torch::full({raw.Xt.size(1)}, 1.0 / raw.Xt.size(1), torch::kFloat64);
clf.fitx(raw.Xt, raw.yt, weights, bayesnet::Smoothing_t::ORIGINAL);
REQUIRE(clf.getNumberOfNodes() == 5);
REQUIRE(clf.getNumberOfEdges() == 9);
REQUIRE(clf.getNotes().size() == 0);
REQUIRE(clf.getNumberOfStates() == 64);
REQUIRE(clf.getNFeatures() == 4);
REQUIRE(clf.score(raw.X_test, raw.y_test) == Catch::Approx(0.966666639f));
// NOTE(review): pinning the exact serialized length (1966) is brittle —
// any formatting change in to_string() will break this assertion.
REQUIRE(clf.to_string().size() == 1966);
// graph() currently echoes its title as a single-element placeholder.
REQUIRE(clf.graph("Not yet implemented") == std::vector<std::string>({"Not yet implemented"}));
}