Compare commits
2 Commits
43ceefd2c9 ... 3d6b4f0614
Author | SHA1 | Date
---|---|---
 | 3d6b4f0614 |
 | 18844c7da7 |
@@ -7,6 +7,10 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 ## [Unreleased]
 
+### Added
+
+- Add a new hyperparameter to the BoostAODE class, *alpha_block*, to control the way α is computed: with the last model or with the ensemble built so far. Default value is *false*.
+
 ## [1.0.6] 2024-11-23
 
 ### Fixed
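For context, the option travels through the classifier's JSON hyperparameter interface. A minimal sketch of turning it on, modelled on the test case added at the end of this comparison; the include path is an assumption, and the `RawDatasets` helper belongs to the project's test suite, not the library API:

```cpp
// Sketch only: mirrors the "Alphablock" test added in this comparison.
#include <bayesnet/ensembles/BoostAODE.h>   // header path is an assumption

void alpha_block_example()
{
    auto clf = bayesnet::BoostAODE();
    clf.setHyperparameters({
        { "alpha_block", true }   // default is false: alpha computed from the last model only
    });
    auto raw = RawDatasets("diabetes", true);   // test-suite helper, shown here for illustration
    clf.fit(raw.X_train, raw.y_train, raw.features, raw.className, raw.states, raw.smoothing);
    auto score = clf.score(raw.X_test, raw.y_test);
    (void)score;
}
```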
@@ -12,7 +12,7 @@
 namespace bayesnet {
     Boost::Boost(bool predict_voting) : Ensemble(predict_voting)
     {
-        validHyperparameters = { "order", "convergence", "convergence_best", "bisection", "threshold", "maxTolerance",
+        validHyperparameters = { "alpha_block", "order", "convergence", "convergence_best", "bisection", "threshold", "maxTolerance",
             "predict_voting", "select_features", "block_update" };
     }
     void Boost::setHyperparameters(const nlohmann::json& hyperparameters_)
@@ -26,6 +26,10 @@ namespace bayesnet {
             }
             hyperparameters.erase("order");
         }
+        if (hyperparameters.contains("alpha_block")) {
+            alpha_block = hyperparameters["alpha_block"];
+            hyperparameters.erase("alpha_block");
+        }
         if (hyperparameters.contains("convergence")) {
             convergence = hyperparameters["convergence"];
             hyperparameters.erase("convergence");
@@ -66,6 +70,12 @@ namespace bayesnet {
             block_update = hyperparameters["block_update"];
             hyperparameters.erase("block_update");
         }
+        if (block_update && alpha_block) {
+            throw std::invalid_argument("alpha_block and block_update cannot be true at the same time");
+        }
+        if (block_update && !bisection) {
+            throw std::invalid_argument("block_update needs bisection to be true");
+        }
         Classifier::setHyperparameters(hyperparameters);
     }
     void Boost::buildModel(const torch::Tensor& weights)
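The two new guards reject contradictory settings up front instead of ignoring them. A minimal sketch of what a caller sees; the combinations and the exception type come straight from the guards above, while the surrounding scaffolding and the include path are assumptions:

```cpp
// Illustrative only: exercises the two guards added to Boost::setHyperparameters above.
#include <bayesnet/ensembles/BoostAODE.h>   // header path is an assumption
#include <nlohmann/json.hpp>
#include <iostream>
#include <stdexcept>

void show_rejected_combinations()
{
    auto clf1 = bayesnet::BoostAODE();
    try {
        // alpha_block and block_update are mutually exclusive
        clf1.setHyperparameters({ { "alpha_block", true }, { "block_update", true } });
    } catch (const std::invalid_argument& e) {
        std::cout << e.what() << '\n';   // "alpha_block and block_update cannot be true at the same time"
    }

    auto clf2 = bayesnet::BoostAODE();
    try {
        // block_update requires bisection
        clf2.setHyperparameters({ { "bisection", false }, { "block_update", true } });
    } catch (const std::invalid_argument& e) {
        std::cout << e.what() << '\n';   // "block_update needs bisection to be true"
    }
}
```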
@@ -45,8 +45,8 @@ namespace bayesnet {
         std::string select_features_algorithm = Orders.DESC; // Selected feature selection algorithm
         FeatureSelect* featureSelector = nullptr;
         double threshold = -1;
-        bool block_update = false;
+        bool block_update = false; // if true, use block update algorithm, only meaningful if bisection is true
+        bool alpha_block = false; // if true, the alpha is computed with the ensemble built so far and the new model
     };
 }
 #endif
@@ -92,7 +92,25 @@ namespace bayesnet {
         model->fit(dataset, features, className, states, weights_, smoothing);
         alpha_t = 0.0;
         if (!block_update) {
-            auto ypred = model->predict(X_train);
+            torch::Tensor ypred;
+            if (alpha_block) {
+                //
+                // Compute the prediction with the current ensemble + model
+                //
+                // Add the model to the ensemble
+                n_models++;
+                models.push_back(std::move(model));
+                significanceModels.push_back(1);
+                // Compute the prediction
+                ypred = predict(X_train);
+                // Remove the model from the ensemble
+                model = std::move(models.back());
+                models.pop_back();
+                significanceModels.pop_back();
+                n_models--;
+            } else {
+                ypred = model->predict(X_train);
+            }
             // Step 3.1: Compute the classifier amount of say
             std::tie(weights_, alpha_t, finished) = update_weights(y_train, ypred, weights_);
         }
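Why the temporary push: with alpha_block enabled, the candidate model is added to the ensemble with significance 1 just long enough for predict(X_train) to reflect the ensemble built so far plus the candidate, so that joint prediction, not the candidate's own, is what update_weights sees when computing the amount of say alpha_t and the new sample weights; the candidate is then removed again so the training loop continues as before. For orientation only, here is a SAMME-style boosting update of the kind update_weights presumably performs; the function name, signature, and formula are assumptions, not BayesNet's actual implementation:

```cpp
#include <torch/torch.h>
#include <tuple>
#include <cmath>
#include <algorithm>

// Hypothetical stand-in for update_weights: SAMME-style multiclass AdaBoost step.
// K is the number of classes; names and formula are assumptions for illustration.
std::tuple<torch::Tensor, double, bool> samme_update(const torch::Tensor& y_true,
                                                     const torch::Tensor& y_pred,
                                                     torch::Tensor weights, int K)
{
    // Weighted error of the prediction (ensemble + candidate when alpha_block is on)
    auto wrong = (y_pred != y_true).to(torch::kDouble);
    double eps = std::max((weights * wrong).sum().item<double>() / weights.sum().item<double>(), 1e-12);
    // No better than random guessing over K classes: stop boosting
    bool finished = eps >= 1.0 - 1.0 / K;
    double alpha = finished ? 0.0 : std::log((1.0 - eps) / eps) + std::log(K - 1.0);
    if (!finished) {
        weights = weights * torch::exp(alpha * wrong);   // boost misclassified samples
        weights = weights / weights.sum();               // renormalize
    }
    return { weights, alpha, finished };
}
```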
@@ -130,6 +130,8 @@ TEST_CASE("Oddities", "[BoostAODE]")
         { { "select_features","IWSS" }, { "threshold", 0.51 } },
         { { "select_features","FCBF" }, { "threshold", 1e-8 } },
         { { "select_features","FCBF" }, { "threshold", 1.01 } },
+        { { "alpha_block", true }, { "block_update", true } },
+        { { "bisection", false }, { "block_update", true } },
     };
     for (const auto& hyper : bad_hyper_fit.items()) {
         INFO("BoostAODE hyper: " << hyper.value().dump());
@@ -137,7 +139,6 @@ TEST_CASE("Oddities", "[BoostAODE]")
         REQUIRE_THROWS_AS(clf.fit(raw.Xv, raw.yv, raw.features, raw.className, raw.states, raw.smoothing), std::invalid_argument);
     }
 }
-
 TEST_CASE("Bisection Best", "[BoostAODE]")
 {
     auto clf = bayesnet::BoostAODE();
@@ -180,7 +181,6 @@ TEST_CASE("Bisection Best vs Last", "[BoostAODE]")
     auto score_last = clf.score(raw.X_test, raw.y_test);
     REQUIRE(score_last == Catch::Approx(0.976666689f).epsilon(raw.epsilon));
 }
-
 TEST_CASE("Block Update", "[BoostAODE]")
 {
     auto clf = bayesnet::BoostAODE();
@@ -211,3 +211,18 @@ TEST_CASE("Block Update", "[BoostAODE]")
     // }
     // std::cout << "Score " << score << std::endl;
 }
+TEST_CASE("Alphablock", "[BoostAODE]")
+{
+    auto clf_alpha = bayesnet::BoostAODE();
+    auto clf_no_alpha = bayesnet::BoostAODE();
+    auto raw = RawDatasets("diabetes", true);
+    clf_alpha.setHyperparameters({
+        {"alpha_block", true},
+    });
+    clf_alpha.fit(raw.X_train, raw.y_train, raw.features, raw.className, raw.states, raw.smoothing);
+    clf_no_alpha.fit(raw.X_train, raw.y_train, raw.features, raw.className, raw.states, raw.smoothing);
+    auto score_alpha = clf_alpha.score(raw.X_test, raw.y_test);
+    auto score_no_alpha = clf_no_alpha.score(raw.X_test, raw.y_test);
+    REQUIRE(score_alpha == Catch::Approx(0.720779f).epsilon(raw.epsilon));
+    REQUIRE(score_no_alpha == Catch::Approx(0.733766f).epsilon(raw.epsilon));
+}