Add namespace to Smoothing.h

This commit is contained in:
2025-03-09 11:21:31 +01:00
parent b987dcbcc4
commit a70ac3e883
2 changed files with 19 additions and 17 deletions

View File

@@ -37,9 +37,9 @@ namespace bayesnet {
// //
// Logging setup // Logging setup
// //
loguru::set_thread_name("BoostAODE"); // loguru::set_thread_name("BoostAODE");
loguru::g_stderr_verbosity = loguru::Verbosity_OFF; // loguru::g_stderr_verbosity = loguru::Verbosity_OFF;
loguru::add_file("boostAODE.log", loguru::Truncate, loguru::Verbosity_MAX); // loguru::add_file("boostAODE.log", loguru::Truncate, loguru::Verbosity_MAX);
// Algorithm based on the adaboost algorithm for classification // Algorithm based on the adaboost algorithm for classification
// as explained in Ensemble methods (Zhi-Hua Zhou, 2012) // as explained in Ensemble methods (Zhi-Hua Zhou, 2012)
@@ -57,7 +57,7 @@ namespace bayesnet {
for (int i = 0; i < n_models; ++i) { for (int i = 0; i < n_models; ++i) {
significanceModels.push_back(alpha_t); significanceModels.push_back(alpha_t);
} }
VLOG_SCOPE_F(1, "SelectFeatures. alpha_t: %f n_models: %d", alpha_t, n_models); // VLOG_SCOPE_F(1, "SelectFeatures. alpha_t: %f n_models: %d", alpha_t, n_models);
if (finished) { if (finished) {
return; return;
} }
@@ -87,7 +87,7 @@ namespace bayesnet {
); );
int k = bisection ? pow(2, tolerance) : 1; int k = bisection ? pow(2, tolerance) : 1;
int counter = 0; // The model counter of the current pack int counter = 0; // The model counter of the current pack
VLOG_SCOPE_F(1, "counter=%d k=%d featureSelection.size: %zu", counter, k, featureSelection.size()); // VLOG_SCOPE_F(1, "counter=%d k=%d featureSelection.size: %zu", counter, k, featureSelection.size());
while (counter++ < k && featureSelection.size() > 0) { while (counter++ < k && featureSelection.size() > 0) {
auto feature = featureSelection[0]; auto feature = featureSelection[0];
featureSelection.erase(featureSelection.begin()); featureSelection.erase(featureSelection.begin());
@@ -124,7 +124,7 @@ namespace bayesnet {
models.push_back(std::move(model)); models.push_back(std::move(model));
significanceModels.push_back(alpha_t); significanceModels.push_back(alpha_t);
n_models++; n_models++;
VLOG_SCOPE_F(2, "finished: %d numItemsPack: %d n_models: %d featuresUsed: %zu", finished, numItemsPack, n_models, featuresUsed.size()); // VLOG_SCOPE_F(2, "finished: %d numItemsPack: %d n_models: %d featuresUsed: %zu", finished, numItemsPack, n_models, featuresUsed.size());
} }
if (block_update) { if (block_update) {
std::tie(weights_, alpha_t, finished) = update_weights_block(k, y_train, weights_); std::tie(weights_, alpha_t, finished) = update_weights_block(k, y_train, weights_);
@@ -138,10 +138,10 @@ namespace bayesnet {
improvement = accuracy - priorAccuracy; improvement = accuracy - priorAccuracy;
} }
if (improvement < convergence_threshold) { if (improvement < convergence_threshold) {
VLOG_SCOPE_F(3, " (improvement<threshold) tolerance: %d numItemsPack: %d improvement: %f prior: %f current: %f", tolerance, numItemsPack, improvement, priorAccuracy, accuracy); // VLOG_SCOPE_F(3, " (improvement<threshold) tolerance: %d numItemsPack: %d improvement: %f prior: %f current: %f", tolerance, numItemsPack, improvement, priorAccuracy, accuracy);
tolerance++; tolerance++;
} else { } else {
VLOG_SCOPE_F(3, "* (improvement>=threshold) Reset. tolerance: %d numItemsPack: %d improvement: %f prior: %f current: %f", tolerance, numItemsPack, improvement, priorAccuracy, accuracy); // VLOG_SCOPE_F(3, "* (improvement>=threshold) Reset. tolerance: %d numItemsPack: %d improvement: %f prior: %f current: %f", tolerance, numItemsPack, improvement, priorAccuracy, accuracy);
tolerance = 0; // Reset the counter if the model performs better tolerance = 0; // Reset the counter if the model performs better
numItemsPack = 0; numItemsPack = 0;
} }
@@ -153,13 +153,13 @@ namespace bayesnet {
priorAccuracy = accuracy; priorAccuracy = accuracy;
} }
} }
VLOG_SCOPE_F(1, "tolerance: %d featuresUsed.size: %zu features.size: %zu", tolerance, featuresUsed.size(), features.size()); // VLOG_SCOPE_F(1, "tolerance: %d featuresUsed.size: %zu features.size: %zu", tolerance, featuresUsed.size(), features.size());
finished = finished || tolerance > maxTolerance || featuresUsed.size() == features.size(); finished = finished || tolerance > maxTolerance || featuresUsed.size() == features.size();
} }
if (tolerance > maxTolerance) { if (tolerance > maxTolerance) {
if (numItemsPack < n_models) { if (numItemsPack < n_models) {
notes.push_back("Convergence threshold reached & " + std::to_string(numItemsPack) + " models eliminated"); notes.push_back("Convergence threshold reached & " + std::to_string(numItemsPack) + " models eliminated");
VLOG_SCOPE_F(4, "Convergence threshold reached & %d models eliminated of %d", numItemsPack, n_models); // VLOG_SCOPE_F(4, "Convergence threshold reached & %d models eliminated of %d", numItemsPack, n_models);
for (int i = 0; i < numItemsPack; ++i) { for (int i = 0; i < numItemsPack; ++i) {
significanceModels.pop_back(); significanceModels.pop_back();
models.pop_back(); models.pop_back();
@@ -167,7 +167,7 @@ namespace bayesnet {
} }
} else { } else {
notes.push_back("Convergence threshold reached & 0 models eliminated"); notes.push_back("Convergence threshold reached & 0 models eliminated");
VLOG_SCOPE_F(4, "Convergence threshold reached & 0 models eliminated n_models=%d numItemsPack=%d", n_models, numItemsPack); // VLOG_SCOPE_F(4, "Convergence threshold reached & 0 models eliminated n_models=%d numItemsPack=%d", n_models, numItemsPack);
} }
} }
if (featuresUsed.size() != features.size()) { if (featuresUsed.size() != features.size()) {

View File

@@ -6,10 +6,12 @@
#ifndef SMOOTHING_H #ifndef SMOOTHING_H
#define SMOOTHING_H #define SMOOTHING_H
enum class Smoothing_t { namespace bayesnet {
enum class Smoothing_t {
NONE = -1, NONE = -1,
ORIGINAL = 0, ORIGINAL = 0,
LAPLACE, LAPLACE,
CESTNIK CESTNIK
}; };
}
#endif // SMOOTHING_H #endif // SMOOTHING_H