update to BayesNet 1.0.3

2024-02-25 18:02:36 +01:00
parent 5a6d38e900
commit bd6f6f5837
21 changed files with 37 additions and 48 deletions

View File

@@ -1,7 +1,7 @@
 cmake_minimum_required(VERSION 3.20)
 project(PyClassifiers
-    VERSION 1.0.0
+    VERSION 1.0.1
     DESCRIPTION "Python Classifiers Wrapper."
     HOMEPAGE_URL "https://github.com/rmontanana/pyclassifiers"
     LANGUAGES CXX
@@ -68,9 +68,9 @@ add_git_submodule("lib/BayesNet")
 # Subdirectories
 # --------------
-file(GLOB PyClassifiers_SOURCES CONFIGURE_DEPENDS ${PyClassifiers_SOURCE_DIR}/src/PyClassifiers/*.cc ${PyClassifiers_SOURCE_DIR}/src/PyClassifiers/*.hpp)
+file(GLOB PyClassifiers_SOURCES CONFIGURE_DEPENDS ${PyClassifiers_SOURCE_DIR}/src/*.cc ${PyClassifiers_SOURCE_DIR}/src/*.hpp)
 add_subdirectory(config)
-add_subdirectory(src/PyClassifiers)
+add_subdirectory(src)
 # Testing
 # -------

View File

@@ -1,7 +1,7 @@
 include_directories(
     ${PyClassifiers_SOURCE_DIR}/lib/Files
     ${PyClassifiers_SOURCE_DIR}/lib/BayesNet/lib/json/include
-    ${PyClassifiers_SOURCE_DIR}/lib/BayesNet/src/BayesNet
+    ${PyClassifiers_SOURCE_DIR}/lib/BayesNet/src
     ${CMAKE_BINARY_DIR}/configured_files/include
     ${Python3_INCLUDE_DIRS}
     ${TORCH_INCLUDE_DIRS}

View File

@@ -24,9 +24,12 @@ namespace pywrap {
         PyClassifier& fit(torch::Tensor& dataset, const std::vector<std::string>& features, const std::string& className, std::map<std::string, std::vector<int>>& states) override { return *this; };
         PyClassifier& fit(torch::Tensor& dataset, const std::vector<std::string>& features, const std::string& className, std::map<std::string, std::vector<int>>& states, const torch::Tensor& weights) override { return *this; };
         torch::Tensor predict(torch::Tensor& X) override;
-        std::vector<int> predict(std::vector<std::vector<int >>& X) override { return std::vector<int>(); };
-        float score(std::vector<std::vector<int>>& X, std::vector<int>& y) override { return 0.0; };
+        std::vector<int> predict(std::vector<std::vector<int >>& X) override { return std::vector<int>(); }; // Not implemented
+        torch::Tensor predict_proba(torch::Tensor& X) override { return torch::zeros({ 0, 0 }); } // Not implemented
+        std::vector<std::vector<double>> predict_proba(std::vector<std::vector<int >>& X) override { return std::vector<std::vector<double>>(); }; // Not implemented
+        float score(std::vector<std::vector<int>>& X, std::vector<int>& y) override { return 0.0; }; // Not implemented
         float score(torch::Tensor& X, torch::Tensor& y) override;
         int getClassNumStates() const override { return 0; };
         std::string version();
         std::string callMethodString(const std::string& method);
         int callMethodSumOfItems(const std::string& method) const;
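
Note (not part of the commit): a minimal usage sketch of driving a concrete wrapper through the interface above, mirroring the calls the unit tests below make. The header names, and the availability of libtorch and the wrapped Python estimators, are assumptions.

    #include <torch/torch.h>
    #include "STree.h"      // assumed header name for the pywrap::STree wrapper
    #include "TestUtils.h"  // assumed header providing RawDatasets, as used by the tests

    int main()
    {
        auto raw = RawDatasets("iris", false);   // dataset helper used by the unit tests
        auto clf = pywrap::STree();
        // fit and score exactly as the unit tests do
        clf.fit(raw.Xt, raw.yt, raw.featurest, raw.classNamet, raw.statest);
        float accuracy = clf.score(raw.Xt, raw.yt);
        // predict_proba is the overload added in this header; the base-class stub
        // returns an empty tensor unless the concrete wrapper overrides it
        torch::Tensor proba = clf.predict_proba(raw.Xt);
        return accuracy > 0.0 ? 0 : 1;
    }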

View File

@@ -1,11 +1,11 @@
 if(ENABLE_TESTING)
     set(TEST_PYCLASSIFIERS "unit_tests_pyclassifiers")
     include_directories(
-        ${PyClassifiers_SOURCE_DIR}/src/PyClassifiers
+        ${PyClassifiers_SOURCE_DIR}/src/
         ${PyClassifiers_SOURCE_DIR}/lib/BayesNet/lib/json/include
         ${PyClassifiers_SOURCE_DIR}/lib/BayesNet/lib/Files
         ${PyClassifiers_SOURCE_DIR}/lib/BayesNet/lib/mdlp
-        ${PyClassifiers_SOURCE_DIR}/lib/BayesNet/src/BayesNet
+        ${PyClassifiers_SOURCE_DIR}/lib/BayesNet/src
         ${Python3_INCLUDE_DIRS}
         ${TORCH_INCLUDE_DIRS}
         ${CMAKE_BINARY_DIR}/configured_files/include

View File

@@ -26,36 +26,22 @@ TEST_CASE("Test Python Classifiers score", "[PyClassifiers]")
         {{"iris", "STree"}, 0.99333}, {{"iris", "ODTE"}, 0.98667}, {{"iris", "SVC"}, 0.97333}, {{"iris", "RandomForest"}, 1.0},
     };
-    std::string file_name = GENERATE("glass", "iris", "ecoli", "diabetes");
-    auto raw = RawDatasets(file_name, false);
-    SECTION("Test STree classifier (" + file_name + ")")
+    std::string name = GENERATE("ODTE", "STree", "SVC", "RandomForest");
+    map<std::string, pywrap::PyClassifier*> models = {
+        {"ODTE", new pywrap::ODTE()},
+        {"STree", new pywrap::STree()},
+        {"SVC", new pywrap::SVC()},
+        {"RandomForest", new pywrap::RandomForest()}
+    };
+    SECTION("Test Python Classifier " + name + " score ")
     {
-        auto clf = pywrap::STree();
-        clf.fit(raw.Xt, raw.yt, raw.featurest, raw.classNamet, raw.statest);
-        auto score = clf.score(raw.Xt, raw.yt);
-        REQUIRE(score == Catch::Approx(scores[{file_name, "STree"}]).epsilon(raw.epsilon));
-    }
-    SECTION("Test ODTE classifier (" + file_name + ")")
-    {
-        auto clf = pywrap::ODTE();
-        clf.fit(raw.Xt, raw.yt, raw.featurest, raw.classNamet, raw.statest);
-        auto score = clf.score(raw.Xt, raw.yt);
-        REQUIRE(score == Catch::Approx(scores[{file_name, "ODTE"}]).epsilon(raw.epsilon));
-    }
-    SECTION("Test SVC classifier (" + file_name + ")")
-    {
-        auto clf = pywrap::SVC();
-        clf.fit(raw.Xt, raw.yt, raw.featurest, raw.classNamet, raw.statest);
-        auto score = clf.score(raw.Xt, raw.yt);
-        REQUIRE(score == Catch::Approx(scores[{file_name, "SVC"}]).epsilon(raw.epsilon));
-    }
-    SECTION("Test RandomForest classifier (" + file_name + ")")
-    {
-        auto clf = pywrap::RandomForest();
-        clf.fit(raw.Xt, raw.yt, raw.featurest, raw.classNamet, raw.statest);
-        auto score = clf.score(raw.Xt, raw.yt);
-        REQUIRE(score == Catch::Approx(scores[{file_name, "RandomForest"}]).epsilon(raw.epsilon));
+        for (auto file_name : { "glass", "iris", "ecoli", "diabetes" }) {
+            auto raw = RawDatasets(file_name, false);
+            auto clf = models[name];
+            clf->fit(raw.Xt, raw.yt, raw.featurest, raw.classNamet, raw.statest);
+            auto score = clf->score(raw.Xt, raw.yt);
+            REQUIRE(score == Catch::Approx(scores[{file_name, name}]).epsilon(raw.epsilon));
+        }
     }
 }
 TEST_CASE("Classifiers features", "[PyClassifiers]")
@@ -74,13 +60,13 @@ TEST_CASE("Get num features & num edges", "[PyClassifiers]")
     REQUIRE(clf.getNumberOfNodes() == 10);
     REQUIRE(clf.getNumberOfEdges() == 10);
 }
-TEST_CASE("XGBoost", "[PyClassifiers]")
-{
-    auto raw = RawDatasets("iris", true);
-    auto clf = pywrap::XGBoost();
-    clf.fit(raw.Xt, raw.yt, raw.featurest, raw.classNamet, raw.statest);
-    nlohmann::json hyperparameters = { "n_jobs=1" };
-    clf.setHyperparameters(hyperparameters);
-    auto score = clf.score(raw.Xt, raw.yt);
-    REQUIRE(score == Catch::Approx(0.98).epsilon(raw.epsilon));
-}
+// TEST_CASE("XGBoost", "[PyClassifiers]")
+// {
+//     auto raw = RawDatasets("iris", true);
+//     auto clf = pywrap::XGBoost();
+//     clf.fit(raw.Xt, raw.yt, raw.featurest, raw.classNamet, raw.statest);
+//     nlohmann::json hyperparameters = { "n_jobs=1" };
+//     clf.setHyperparameters(hyperparameters);
+//     auto score = clf.score(raw.Xt, raw.yt);
+//     REQUIRE(score == Catch::Approx(0.98).epsilon(raw.epsilon));
+// }
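
Note (not part of the commit): the refactored score test above keeps its classifiers in a map of raw new-allocated pointers and relies on process teardown to reclaim them, which is acceptable in a short-lived test binary. A hedged alternative sketch with std::unique_ptr, assuming the header names below and a virtual destructor on pywrap::PyClassifier, would release the wrappers automatically:

    #include <map>
    #include <memory>
    #include <string>
    #include "PyClassifier.h"   // assumed header names for the pywrap base and wrappers
    #include "ODTE.h"
    #include "STree.h"
    #include "SVC.h"
    #include "RandomForest.h"

    // Builds the same name -> classifier map as the test, but owned by unique_ptr,
    // so each wrapper is destroyed when the map goes out of scope.
    std::map<std::string, std::unique_ptr<pywrap::PyClassifier>> makeModels()
    {
        std::map<std::string, std::unique_ptr<pywrap::PyClassifier>> models;
        models["ODTE"] = std::make_unique<pywrap::ODTE>();
        models["STree"] = std::make_unique<pywrap::STree>();
        models["SVC"] = std::make_unique<pywrap::SVC>();
        models["RandomForest"] = std::make_unique<pywrap::RandomForest>();
        return models;
    }

With this variant the test body would look up the classifier with models.at(name).get() (or use a reference) before calling fit and score, instead of indexing with operator[], which would default-construct an empty entry for an unknown name.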