From a6bb22dfb5e4f6fffc43bae9cd3d5959382835ba Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Ricardo=20Montan=CC=83ana?=
Date: Fri, 18 Aug 2023 11:50:34 +0200
Subject: [PATCH] Complete first BoostAODE

---
 sample/sample.cc              | 199 ++++++++++++++++++----------------
 src/BayesNet/AODE.cc          |   2 +
 src/BayesNet/BayesMetrics.cc  |   2 +-
 src/BayesNet/BoostAODE.cc     |  87 +++++++++++----
 src/BayesNet/Ensemble.cc      |   4 +-
 src/BayesNet/Ensemble.h       |   1 +
 src/BayesNet/KDB.cc           |   2 +-
 src/BayesNet/bayesnetUtils.cc |   2 +-
 src/BayesNet/bayesnetUtils.h  |   2 +-
 9 files changed, 184 insertions(+), 117 deletions(-)

diff --git a/sample/sample.cc b/sample/sample.cc
index ecf76be..1045c2f 100644
--- a/sample/sample.cc
+++ b/sample/sample.cc
@@ -141,96 +141,111 @@ int main(int argc, char** argv)
     /*
     * Begin Processing
     */
-    auto handler = ArffFiles();
-    handler.load(complete_file_name, class_last);
-    // Get Dataset X, y
-    vector<mdlp::samples_t>& X = handler.getX();
-    mdlp::labels_t& y = handler.getY();
-    // Get className & Features
-    auto className = handler.getClassName();
-    vector<string> features;
-    auto attributes = handler.getAttributes();
-    transform(attributes.begin(), attributes.end(), back_inserter(features),
-        [](const pair<string, string>& item) { return item.first; });
-    // Discretize Dataset
-    auto [Xd, maxes] = discretize(X, y, features);
-    maxes[className] = *max_element(y.begin(), y.end()) + 1;
-    map<string, vector<int>> states;
-    for (auto feature : features) {
-        states[feature] = vector<int>(maxes[feature]);
-    }
-    states[className] = vector<int>(maxes[className]);
-    auto clf = platform::Models::instance()->create(model_name);
-    clf->fit(Xd, y, features, className, states);
-    if (dump_cpt) {
-        cout << "--- CPT Tables ---" << endl;
-        clf->dump_cpt();
-    }
-    auto lines = clf->show();
-    for (auto line : lines) {
-        cout << line << endl;
-    }
-    cout << "--- Topological Order ---" << endl;
-    auto order = clf->topological_order();
-    for (auto name : order) {
-        cout << name << ", ";
-    }
-    cout << "end." << endl;
-    auto score = clf->score(Xd, y);
-    cout << "Score: " << score << endl;
-    auto graph = clf->graph();
-    auto dot_file = model_name + "_" + file_name;
-    ofstream file(dot_file + ".dot");
-    file << graph;
-    file.close();
-    cout << "Graph saved in " << model_name << "_" << file_name << ".dot" << endl;
-    cout << "dot -Tpng -o " + dot_file + ".png " + dot_file + ".dot " << endl;
-    string stratified_string = stratified ? " Stratified" : "";
-    cout << nFolds << " Folds" << stratified_string << " Cross validation" << endl;
-    cout << "==========================================" << endl;
-    torch::Tensor Xt = torch::zeros({ static_cast<int>(Xd.size()), static_cast<int>(Xd[0].size()) }, torch::kInt32);
-    torch::Tensor yt = torch::tensor(y, torch::kInt32);
-    for (int i = 0; i < features.size(); ++i) {
-        Xt.index_put_({ i, "..." }, torch::tensor(Xd[i], torch::kInt32));
-    }
-    float total_score = 0, total_score_train = 0, score_train, score_test;
-    Fold* fold;
-    if (stratified)
-        fold = new StratifiedKFold(nFolds, y, seed);
-    else
-        fold = new KFold(nFolds, y.size(), seed);
-    for (auto i = 0; i < nFolds; ++i) {
-        auto [train, test] = fold->getFold(i);
-        cout << "Fold: " << i + 1 << endl;
-        if (tensors) {
-            auto ttrain = torch::tensor(train, torch::kInt64);
-            auto ttest = torch::tensor(test, torch::kInt64);
-            torch::Tensor Xtraint = torch::index_select(Xt, 1, ttrain);
-            torch::Tensor ytraint = yt.index({ ttrain });
-            torch::Tensor Xtestt = torch::index_select(Xt, 1, ttest);
-            torch::Tensor ytestt = yt.index({ ttest });
-            clf->fit(Xtraint, ytraint, features, className, states);
-            auto temp = clf->predict(Xtraint);
-            score_train = clf->score(Xtraint, ytraint);
-            score_test = clf->score(Xtestt, ytestt);
-        } else {
-            auto [Xtrain, ytrain] = extract_indices(train, Xd, y);
-            auto [Xtest, ytest] = extract_indices(test, Xd, y);
-            clf->fit(Xtrain, ytrain, features, className, states);
-            score_train = clf->score(Xtrain, ytrain);
-            score_test = clf->score(Xtest, ytest);
-        }
-        if (dump_cpt) {
-            cout << "--- CPT Tables ---" << endl;
-            clf->dump_cpt();
-        }
-        total_score_train += score_train;
-        total_score += score_test;
-        cout << "Score Train: " << score_train << endl;
-        cout << "Score Test : " << score_test << endl;
-        cout << "-------------------------------------------------------------------------------" << endl;
-    }
-    cout << "**********************************************************************************" << endl;
-    cout << "Average Score Train: " << total_score_train / nFolds << endl;
-    cout << "Average Score Test : " << total_score / nFolds << endl;return 0;
+    auto ypred = torch::tensor({ 1,2,3,2,2,3,4,5,2,1 });
+    auto y = torch::tensor({ 0,0,0,0,2,3,4,0,0,0 });
+    auto weights = torch::ones({ 10 }, kDouble);
+    auto mask = ypred == y;
+    cout << "ypred:" << ypred << endl;
+    cout << "y:" << y << endl;
+    cout << "weights:" << weights << endl;
+    cout << "mask:" << mask << endl;
+    double value_to_add = 0.5;
+    weights += mask.to(torch::kDouble) * value_to_add;
+    cout << "New weights:" << weights << endl;
+    auto masked_weights = weights * mask.to(weights.dtype());
+    double sum_of_weights = masked_weights.sum().item<double>();
+    cout << "Sum of weights: " << sum_of_weights << endl;
+    //weights.index_put_({ mask }, weights + 10);
+    // auto handler = ArffFiles();
+    // handler.load(complete_file_name, class_last);
+    // // Get Dataset X, y
+    // vector<mdlp::samples_t>& X = handler.getX();
+    // mdlp::labels_t& y = handler.getY();
+    // // Get className & Features
+    // auto className = handler.getClassName();
+    // vector<string> features;
+    // auto attributes = handler.getAttributes();
+    // transform(attributes.begin(), attributes.end(), back_inserter(features),
+    //     [](const pair<string, string>& item) { return item.first; });
+    // // Discretize Dataset
+    // auto [Xd, maxes] = discretize(X, y, features);
+    // maxes[className] = *max_element(y.begin(), y.end()) + 1;
+    // map<string, vector<int>> states;
+    // for (auto feature : features) {
+    //     states[feature] = vector<int>(maxes[feature]);
+    // }
+    // states[className] = vector<int>(maxes[className]);
+    // auto clf = platform::Models::instance()->create(model_name);
+    // clf->fit(Xd, y, features, className, states);
+    // if (dump_cpt) {
+    //     cout << "--- CPT Tables ---" << endl;
+    //     clf->dump_cpt();
+    // }
+    // auto lines = clf->show();
+    // for (auto line : lines) {
+    //     cout << line << endl;
+    // }
+    // cout << "--- Topological Order ---" << endl;
+    // auto order = clf->topological_order();
+    // for (auto name : order) {
+    //     cout << name << ", ";
+    // }
+    // cout << "end." << endl;
+    // auto score = clf->score(Xd, y);
+    // cout << "Score: " << score << endl;
+    // auto graph = clf->graph();
+    // auto dot_file = model_name + "_" + file_name;
+    // ofstream file(dot_file + ".dot");
+    // file << graph;
+    // file.close();
+    // cout << "Graph saved in " << model_name << "_" << file_name << ".dot" << endl;
+    // cout << "dot -Tpng -o " + dot_file + ".png " + dot_file + ".dot " << endl;
+    // string stratified_string = stratified ? " Stratified" : "";
+    // cout << nFolds << " Folds" << stratified_string << " Cross validation" << endl;
+    // cout << "==========================================" << endl;
+    // torch::Tensor Xt = torch::zeros({ static_cast<int>(Xd.size()), static_cast<int>(Xd[0].size()) }, torch::kInt32);
+    // torch::Tensor yt = torch::tensor(y, torch::kInt32);
+    // for (int i = 0; i < features.size(); ++i) {
+    //     Xt.index_put_({ i, "..." }, torch::tensor(Xd[i], torch::kInt32));
+    // }
+    // float total_score = 0, total_score_train = 0, score_train, score_test;
+    // Fold* fold;
+    // if (stratified)
+    //     fold = new StratifiedKFold(nFolds, y, seed);
+    // else
+    //     fold = new KFold(nFolds, y.size(), seed);
+    // for (auto i = 0; i < nFolds; ++i) {
+    //     auto [train, test] = fold->getFold(i);
+    //     cout << "Fold: " << i + 1 << endl;
+    //     if (tensors) {
+    //         auto ttrain = torch::tensor(train, torch::kInt64);
+    //         auto ttest = torch::tensor(test, torch::kInt64);
+    //         torch::Tensor Xtraint = torch::index_select(Xt, 1, ttrain);
+    //         torch::Tensor ytraint = yt.index({ ttrain });
+    //         torch::Tensor Xtestt = torch::index_select(Xt, 1, ttest);
+    //         torch::Tensor ytestt = yt.index({ ttest });
+    //         clf->fit(Xtraint, ytraint, features, className, states);
+    //         auto temp = clf->predict(Xtraint);
+    //         score_train = clf->score(Xtraint, ytraint);
+    //         score_test = clf->score(Xtestt, ytestt);
+    //     } else {
+    //         auto [Xtrain, ytrain] = extract_indices(train, Xd, y);
+    //         auto [Xtest, ytest] = extract_indices(test, Xd, y);
+    //         clf->fit(Xtrain, ytrain, features, className, states);
+    //         score_train = clf->score(Xtrain, ytrain);
+    //         score_test = clf->score(Xtest, ytest);
+    //     }
+    //     if (dump_cpt) {
+    //         cout << "--- CPT Tables ---" << endl;
+    //         clf->dump_cpt();
+    //     }
+    //     total_score_train += score_train;
+    //     total_score += score_test;
+    //     cout << "Score Train: " << score_train << endl;
+    //     cout << "Score Test : " << score_test << endl;
+    //     cout << "-------------------------------------------------------------------------------" << endl;
+    // }
+    // cout << "**********************************************************************************" << endl;
+    // cout << "Average Score Train: " << total_score_train / nFolds << endl;
+    // cout << "Average Score Test : " << total_score / nFolds << endl;return 0;
 }
\ No newline at end of file
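Note on the sample/sample.cc change: the cross-validation driver is parked behind comments and replaced, for now, by a scratch exploration of masked tensor arithmetic, which is the same pattern BoostAODE::trainModel uses below to accumulate the weight of misclassified samples. A minimal standalone sketch of that idea, assuming only libtorch (the file and build setup are illustrative, not part of the patch):

    #include <torch/torch.h>
    #include <iostream>

    int main()
    {
        auto ypred = torch::tensor({ 1, 2, 3, 2, 2, 3, 4, 5, 2, 1 });
        auto y = torch::tensor({ 0, 0, 0, 0, 2, 3, 4, 0, 0, 0 });
        auto weights = torch::ones({ 10 }, torch::kDouble);
        auto mask = ypred == y;                                    // true where the prediction is correct
        weights += mask.to(torch::kDouble) * 0.5;                  // bump only the matching positions
        auto masked_weights = weights * mask.to(weights.dtype());  // zero out the rest
        std::cout << "Sum of masked weights: " << masked_weights.sum().item<double>() << std::endl;
        return 0;
    }

The masked sum is the quantity trainModel needs: with the mask inverted (predictions different from the labels) it is the weighted error of the current classifier.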
diff --git a/src/BayesNet/AODE.cc b/src/BayesNet/AODE.cc
index d90c495..6db843e 100644
--- a/src/BayesNet/AODE.cc
+++ b/src/BayesNet/AODE.cc
@@ -8,6 +8,8 @@ namespace bayesnet {
         for (int i = 0; i < features.size(); ++i) {
             models.push_back(std::make_unique<SPODE>(i));
         }
+        n_models = models.size();
+        significanceModels = vector<double>(n_models, 1.0);
     }
     vector<string> AODE::graph(const string& title) const
     {
diff --git a/src/BayesNet/BayesMetrics.cc b/src/BayesNet/BayesMetrics.cc
index 88f0306..2c08836 100644
--- a/src/BayesNet/BayesMetrics.cc
+++ b/src/BayesNet/BayesMetrics.cc
@@ -23,7 +23,7 @@ namespace bayesnet {
     }
     vector<int> Metrics::SelectKBestWeighted(const torch::Tensor& weights, unsigned k)
     {
-        auto n = samples.size(1);
+        auto n = samples.size(0) - 1;
         if (k == 0) {
             k = n;
         }
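The change in SelectKBestWeighted from samples.size(1) to samples.size(0) - 1 matches the dataset layout used everywhere else in this patch: dataset is (n_features + 1) x n_samples, with the class stored as the last row, so the number of predictive features is size(0) - 1. A small layout sketch with the same slicing BoostAODE performs, assuming libtorch (the values are placeholders):

    #include <torch/torch.h>
    #include <iostream>

    int main()
    {
        // 3 feature rows + 1 class row, 5 samples
        auto dataset = torch::randint(0, 3, { 4, 5 }, torch::kInt32);
        auto X = dataset.index({ torch::indexing::Slice(0, dataset.size(0) - 1), "..." }); // feature rows
        auto y = dataset.index({ -1, "..." });                                             // class row
        std::cout << "features: " << dataset.size(0) - 1
                  << ", samples: " << dataset.size(1) << std::endl;
        std::cout << "X: " << X.sizes() << ", y: " << y.sizes() << std::endl;
        return 0;
    }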
diff --git a/src/BayesNet/BoostAODE.cc b/src/BayesNet/BoostAODE.cc
index e9b5e62..b6a535d 100644
--- a/src/BayesNet/BoostAODE.cc
+++ b/src/BayesNet/BoostAODE.cc
@@ -5,30 +5,79 @@ namespace bayesnet {
     BoostAODE::BoostAODE() : Ensemble() {}
     void BoostAODE::buildModel(const torch::Tensor& weights)
    {
-        models.clear();
-        for (int i = 0; i < features.size(); ++i) {
-            models.push_back(std::make_unique<SPODE>(i));
-        }
+        // models.clear();
+        // for (int i = 0; i < features.size(); ++i) {
+        //     models.push_back(std::make_unique<SPODE>(i));
+        // }
+        // n_models = models.size();
     }
     void BoostAODE::trainModel(const torch::Tensor& weights)
     {
-        // End building vectors
-        Tensor weights_ = torch::full({ m }, 1.0 / m, torch::kDouble);
+        models.clear();
+        n_models = 0;
+        int max_models = .1 * n > 10 ? .1 * n : n;
+        Tensor weights_ = torch::full({ m }, 1.0 / m, torch::kFloat64);
         auto X_ = dataset.index({ torch::indexing::Slice(0, dataset.size(0) - 1), "..." });
-        auto featureSelection = metrics.SelectKBestWeighted(weights_, n); // Get all the features sorted
-        for (int i = 0; i < features.size(); ++i) {
-            models[i].fit(dataset, features, className, states, weights_);
-            auto ypred = models[i].predict(X_);
-            // em = np.sum(weights * (y_pred != self.y_)) / np.sum(weights)
-            // am = np.log((1 - em) / em) + np.log(estimator.n_classes_ - 1)
-            // # Step 3.2: Update weights for next classifier
-            // weights = [
-            //     wm * np.exp(am * (ym != yp))
-            //     for wm, ym, yp in zip(weights, self.y_, y_pred)
-            // ]
-            // # Step 4: Add the new model
-            // self.estimators_.append(estimator)
+        auto y_ = dataset.index({ -1, "..." });
+        bool exitCondition = false;
+        bool repeatSparent = true;
+        vector<int> featuresUsed;
+        // Step 0: Set the finish conditions:
+        //   - if not repeatSparent, having run out of features
+        //   - n_models == max_models
+        int numClasses = states[className].size();
+        while (!exitCondition) {
+            // Step 1: Build ranking with mutual information
+            auto featureSelection = metrics.SelectKBestWeighted(weights_, n); // Get all the features sorted
+            auto feature = featureSelection[0];
+            unique_ptr<SPODE> model;
+            if (!repeatSparent) {
+                if (n_models == 0) {
+                    models.resize(n); // Resize for n == nfeatures SPODEs
+                    significanceModels.resize(n);
+                }
+                bool found = false;
+                for (int i = 0; i < featureSelection.size(); ++i) {
+                    if (find(featuresUsed.begin(), featuresUsed.end(), i) != featuresUsed.end()) {
+                        continue;
+                    }
+                    found = true;
+                    feature = i;
+                    featuresUsed.push_back(feature);
+                    n_models++;
+                    break;
+                }
+                if (!found) {
+                    exitCondition = true;
+                    continue;
+                }
+            }
+            model = std::make_unique<SPODE>(feature);
+            model->fit(dataset, features, className, states, weights_);
+            auto ypred = model->predict(X_);
+            // Step 3.1: Compute the classifier amount of say
+            auto mask_wrong = ypred != y_;
+            auto masked_weights = weights_ * mask_wrong.to(weights_.dtype());
+            double wrongWeights = masked_weights.sum().item<double>();
+            double significance = wrongWeights == 0 ? 1 : 0.5 * log((1 - wrongWeights) / wrongWeights);
+            // Step 3.2: Update weights for next classifier
+            // Step 3.2.1: Update weights of wrong samples
+            weights_ += mask_wrong.to(weights_.dtype()) * exp(significance) * weights_;
+            // Step 3.3: Normalise the weights
+            double totalWeights = torch::sum(weights_).item<double>();
+            weights_ = weights_ / totalWeights;
+            // Step 3.4: Store classifier and its accuracy to weigh its future vote
+            if (!repeatSparent) {
+                models[feature] = std::move(model);
+                significanceModels[feature] = significance;
+            } else {
+                models.push_back(std::move(model));
+                significanceModels.push_back(significance);
+                n_models++;
+            }
+            exitCondition = n_models == max_models;
         }
+        weights.copy_(weights_);
     }
     vector<string> BoostAODE::graph(const string& title) const
     {
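The loop above is an AdaBoost-style update: each round fits one SPODE, measures its weighted error, converts it into an amount of say of 0.5 * log((1 - err) / err), increases the weights of the misclassified samples in proportion to exp(significance), and renormalises. A condensed, self-contained sketch of just that arithmetic, assuming libtorch (labels and predictions here are random placeholders, not real SPODE output):

    #include <torch/torch.h>
    #include <cmath>
    #include <iostream>

    int main()
    {
        int m = 10;                                              // number of samples
        auto weights = torch::full({ m }, 1.0 / m, torch::kFloat64);
        auto y = torch::randint(0, 2, { m }, torch::kInt32);     // true labels
        auto ypred = torch::randint(0, 2, { m }, torch::kInt32); // current round's predictions

        auto mask_wrong = ypred != y;
        // Weighted error of this round (weights are kept normalised, so no division is needed)
        double err = (weights * mask_wrong.to(weights.dtype())).sum().item<double>();
        // Amount of say: positive and growing as the weighted error drops below 0.5
        double significance = err == 0 ? 1 : 0.5 * std::log((1 - err) / err);
        // Up-weight the misclassified samples and renormalise for the next round
        weights += mask_wrong.to(weights.dtype()) * std::exp(significance) * weights;
        weights = weights / weights.sum();
        std::cout << "err=" << err << " significance=" << significance
                  << " sum(w)=" << weights.sum().item<double>() << std::endl;
        return 0;
    }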
diff --git a/src/BayesNet/Ensemble.cc b/src/BayesNet/Ensemble.cc
index 926fa5b..33a11a2 100644
--- a/src/BayesNet/Ensemble.cc
+++ b/src/BayesNet/Ensemble.cc
@@ -18,9 +18,9 @@ namespace bayesnet {
         auto y_pred_ = y_pred.accessor<int, 2>();
         vector<int> y_pred_final;
         for (int i = 0; i < y_pred.size(0); ++i) {
-            vector<int> votes(y_pred.size(1), 0);
+            vector<double> votes(y_pred.size(1), 0);
             for (int j = 0; j < y_pred.size(1); ++j) {
-                votes[y_pred_[i][j]] += 1;
+                votes[y_pred_[i][j]] += significanceModels[j];
             }
             // argsort in descending order
             auto indices = argsort(votes);
diff --git a/src/BayesNet/Ensemble.h b/src/BayesNet/Ensemble.h
index 95c1da6..58a1d63 100644
--- a/src/BayesNet/Ensemble.h
+++ b/src/BayesNet/Ensemble.h
@@ -14,6 +14,7 @@ namespace bayesnet {
     protected:
         unsigned n_models;
         vector<unique_ptr<Classifier>> models;
+        vector<double> significanceModels;
         void trainModel(const torch::Tensor& weights) override;
         vector<int> voting(Tensor& y_pred);
     public:
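With significanceModels in place, Ensemble::voting becomes a weighted vote instead of a plain majority: every model adds its amount of say to the class it predicts, and the class with the largest accumulated weight wins. A small sketch of that rule with hypothetical numbers:

    #include <vector>
    #include <algorithm>
    #include <iostream>

    int main()
    {
        std::vector<int> predictions = { 1, 0, 1, 1 };              // class predicted by each model
        std::vector<double> significance = { 0.2, 0.9, 0.1, 0.15 }; // per-model amount of say
        std::vector<double> votes(2, 0.0);                          // one accumulator per class
        for (size_t j = 0; j < predictions.size(); ++j) {
            votes[predictions[j]] += significance[j];
        }
        auto winner = std::distance(votes.begin(), std::max_element(votes.begin(), votes.end()));
        std::cout << "class 0: " << votes[0] << ", class 1: " << votes[1]
                  << " -> winner: " << winner << std::endl;        // 0.9 vs 0.45 -> class 0
        return 0;
    }

Three weak models still lose to one confident one here, which is exactly the behaviour the significance weighting introduces.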
diff --git a/src/BayesNet/KDB.cc b/src/BayesNet/KDB.cc
index 471f3fd..cfbbca1 100644
--- a/src/BayesNet/KDB.cc
+++ b/src/BayesNet/KDB.cc
@@ -29,7 +29,7 @@ namespace bayesnet {
         // where C is the class.
         addNodes();
         const Tensor& y = dataset.index({ -1, "..." });
-        vector<float> mi;
+        vector<double> mi;
         for (auto i = 0; i < features.size(); i++) {
             Tensor firstFeature = dataset.index({ i, "..." });
             mi.push_back(metrics.mutualInformation(firstFeature, y, weights));
diff --git a/src/BayesNet/bayesnetUtils.cc b/src/BayesNet/bayesnetUtils.cc
index 8b69006..480034b 100644
--- a/src/BayesNet/bayesnetUtils.cc
+++ b/src/BayesNet/bayesnetUtils.cc
@@ -4,7 +4,7 @@ namespace bayesnet {
     using namespace std;
     using namespace torch;
     // Return the indices in descending order
-    vector<int> argsort(vector<int>& nums)
+    vector<int> argsort(vector<double>& nums)
     {
         int n = nums.size();
         vector<int> indices(n);
diff --git a/src/BayesNet/bayesnetUtils.h b/src/BayesNet/bayesnetUtils.h
index adfa8d7..b5811f7 100644
--- a/src/BayesNet/bayesnetUtils.h
+++ b/src/BayesNet/bayesnetUtils.h
@@ -5,7 +5,7 @@ namespace bayesnet {
     using namespace std;
     using namespace torch;
-    vector<int> argsort(vector<int>& nums);
+    vector<int> argsort(vector<double>& nums);
     vector<vector<int>> tensorToVector(Tensor& tensor);
 }
 #endif //BAYESNET_UTILS_H
\ No newline at end of file
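Since the votes are now doubles, argsort has to rank a vector<double> rather than integer counts. A sketch of such a helper returning indices in descending order (the repository's own implementation may differ in detail):

    #include <vector>
    #include <numeric>
    #include <algorithm>
    #include <iostream>

    std::vector<int> argsort(std::vector<double>& nums)
    {
        std::vector<int> indices(nums.size());
        std::iota(indices.begin(), indices.end(), 0);             // 0, 1, 2, ...
        std::sort(indices.begin(), indices.end(),
            [&nums](int a, int b) { return nums[a] > nums[b]; }); // descending by value
        return indices;
    }

    int main()
    {
        std::vector<double> votes = { 0.1, 0.7, 0.2 };
        for (int i : argsort(votes)) std::cout << i << " ";       // prints: 1 2 0
        std::cout << std::endl;
        return 0;
    }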