Begin implementing predict_single hyperparameter in BoostAODE

This commit is contained in:
Ricardo Montañana Gómez 2024-02-26 20:29:08 +01:00
parent 2e325cd114
commit d39a17089e
Signed by: rmontanana
GPG Key ID: 46064262FD9A7ADE
4 changed files with 56 additions and 8 deletions

View File

@@ -9,7 +9,8 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
### Added
- Change _ascending_ hyperparameter to _order_ with these possible values _{"asc", "desc", "rand"}_
- Change _ascending_ hyperparameter to _order_ with these possible values _{"asc", "desc", "rand"}_. The default is _"desc"_.
- Add the _predict_single_ hyperparameter to control whether only the last model created or the whole ensemble (all the models built so far) is used to predict during boost training. The default is _true_.
## [1.0.3]
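
For reference, a minimal usage sketch of the new hyperparameter (assuming the nlohmann::json-based setHyperparameters interface exercised in the tests of this commit; the data variables are placeholders):

auto clf = bayesnet::BoostAODE();
clf.setHyperparameters({
    { "order", "desc" },         // process the KBest features in descending order
    { "predict_single", false }  // evaluate each new model with the whole ensemble built so far
});
clf.fit(Xv, yv, featuresv, classNamev, statesv); // placeholder data, features, class name and states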

View File

@@ -10,7 +10,10 @@
namespace bayesnet {
BoostAODE::BoostAODE(bool predict_voting) : Ensemble(predict_voting)
{
validHyperparameters = { "repeatSparent", "maxModels", "order", "convergence", "threshold", "select_features", "tolerance", "predict_voting" };
validHyperparameters = {
"repeatSparent", "maxModels", "order", "convergence", "threshold",
"select_features", "tolerance", "predict_voting", "predict_single"
};
}
void BoostAODE::buildModel(const torch::Tensor& weights)
@@ -69,6 +72,10 @@ namespace bayesnet {
convergence = hyperparameters["convergence"];
hyperparameters.erase("convergence");
}
if (hyperparameters.contains("predict_single")) {
predict_single = hyperparameters["predict_single"];
hyperparameters.erase("predict_single");
}
if (hyperparameters.contains("threshold")) {
threshold = hyperparameters["threshold"];
hyperparameters.erase("threshold");
@@ -116,7 +123,6 @@ namespace bayesnet {
featureSelector->fit();
auto cfsFeatures = featureSelector->getFeatures();
for (const int& feature : cfsFeatures) {
// std::cout << "Feature: [" << feature << "] " << feature << " " << features.at(feature) << std::endl;
featuresUsed.insert(feature);
std::unique_ptr<Classifier> model = std::make_unique<SPODE>(feature);
model->fit(dataset, features, className, states, weights_);
@@ -128,8 +134,22 @@ namespace bayesnet {
delete featureSelector;
return featuresUsed;
}
torch::Tensor BoostAODE::ensemble_predict(torch::Tensor& X, SPODE* model)
{
if (initialize_prob_table) {
initialize_prob_table = false;
prob_table = model->predict_proba(X) * 1.0;
} else {
prob_table += model->predict_proba(X) * 1.0;
}
// prob_table doesn't store probabilities but their sum; to obtain actual
// probabilities we would have to divide by the sum of the significances,
// but that is not needed just to predict label values
return prob_table.argmax(1);
}
void BoostAODE::trainModel(const torch::Tensor& weights)
{
initialize_prob_table = true;
fitted = true;
// Algorithm based on the adaboost algorithm for classification
// as explained in Ensemble methods (Zhi-Hua Zhou, 2012)
@@ -181,7 +201,12 @@ namespace bayesnet {
std::unique_ptr<Classifier> model;
model = std::make_unique<SPODE>(feature);
model->fit(dataset, features, className, states, weights_);
auto ypred = model->predict(X_train);
torch::Tensor ypred;
if (predict_single) {
ypred = model->predict(X_train);
} else {
ypred = ensemble_predict(X_train, dynamic_cast<SPODE*>(model.get()));
}
// Step 3.1: Compute the classifier amount of say
auto mask_wrong = ypred != y_train;
auto mask_right = ypred == y_train;
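
A standalone sketch (not part of the commit) of the comment inside ensemble_predict: dividing the accumulated prob_table by the sum of the significances is unnecessary for label prediction, because argmax over the classes is invariant under division by a positive scalar.

#include <torch/torch.h>
#include <cassert>

int main() {
    auto prob_table = torch::rand({ 5, 3 });  // accumulated, unnormalized per-class scores for 5 samples
    double significance_sum = 2.7;            // hypothetical sum of the model significances
    auto labels = prob_table.argmax(1);       // what ensemble_predict returns
    auto labels_normalized = (prob_table / significance_sum).argmax(1);
    assert(torch::equal(labels, labels_normalized)); // same predicted labels either way
}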

View File

@@ -15,17 +15,21 @@ namespace bayesnet {
void buildModel(const torch::Tensor& weights) override;
void trainModel(const torch::Tensor& weights) override;
private:
std::unordered_set<int> initializeModels();
torch::Tensor ensemble_predict(torch::Tensor& X, SPODE* model);
torch::Tensor dataset_;
torch::Tensor X_train, y_train, X_test, y_test;
std::unordered_set<int> initializeModels();
// Hyperparameters
bool repeatSparent = false; // if true, a feature can be selected more than once
int maxModels = 0;
int tolerance = 0;
bool predict_single = true; // whether only the last model or the whole ensemble is used to predict in training
std::string order_algorithm; // order to process the KBest features: asc, desc or rand
bool convergence = false; // if true, stop when the model does not improve
bool selectFeatures = false; // if true, use feature selection
std::string select_features_algorithm = ""; // Selected feature selection algorithm
std::string select_features_algorithm = "desc"; // Selected feature selection algorithm
bool initialize_prob_table; // if true, initialize the prob_table with the first model (used in train)
torch::Tensor prob_table; // Table of probabilities for ensemble predicting if predict_single is false
FeatureSelect* featureSelector = nullptr;
double threshold = -1;
};

View File

@@ -240,10 +240,28 @@ TEST_CASE("BoostAODE order asc, desc & random", "[BayesNet]")
clf.fit(raw.Xv, raw.yv, raw.featuresv, raw.classNamev, raw.statesv);
auto score = clf.score(raw.Xv, raw.yv);
auto scoret = clf.score(raw.Xt, raw.yt);
auto score2 = clf.score(raw.Xv, raw.yv);
auto scoret2 = clf.score(raw.Xt, raw.yt);
INFO("order: " + order);
REQUIRE(score == Catch::Approx(scores[order]).epsilon(raw.epsilon));
REQUIRE(scoret == Catch::Approx(scores[order]).epsilon(raw.epsilon));
}
}
TEST_CASE("BoostAODE predict_single", "[BayesNet]")
{
auto raw = RawDatasets("glass", true);
std::map<bool, double> scores{
{true, 0.84579f }, { false, 0.81308f }
};
for (const bool kind : { true, false }) {
auto clf = bayesnet::BoostAODE();
clf.setHyperparameters({
{"predict_single", kind}, {"order", "desc" },
});
clf.fit(raw.Xv, raw.yv, raw.featuresv, raw.classNamev, raw.statesv);
auto score = clf.score(raw.Xv, raw.yv);
auto scoret = clf.score(raw.Xt, raw.yt);
INFO("kind: " + std::string(kind ? "true" : "false"));
REQUIRE(score == Catch::Approx(scores[kind]).epsilon(raw.epsilon));
REQUIRE(scoret == Catch::Approx(scores[kind]).epsilon(raw.epsilon));
}
}
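
To run only this test case with a Catch2 binary, the test name can be passed as a filter (the executable name below is an assumption, not taken from this commit):

./TestBayesNet "BoostAODE predict_single"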