Test AdaBoost fine but unoptimized
This commit is contained in:
@@ -19,6 +19,7 @@
|
||||
using namespace bayesnet;
|
||||
using namespace Catch::Matchers;
|
||||
|
||||
static const bool DEBUG = false;
|
||||
|
||||
TEST_CASE("AdaBoost Construction", "[AdaBoost]")
|
||||
{
|
||||
@@ -141,6 +142,7 @@ TEST_CASE("AdaBoost Basic Functionality", "[AdaBoost]")
|
||||
SECTION("Prediction with vector interface")
|
||||
{
|
||||
AdaBoost ada(10, 3);
|
||||
ada.setDebug(DEBUG); // Enable debug to investigate
|
||||
ada.fit(X, y, features, className, states, Smoothing_t::NONE);
|
||||
|
||||
auto predictions = ada.predict(X);
|
||||
@@ -159,6 +161,7 @@ TEST_CASE("AdaBoost Basic Functionality", "[AdaBoost]")
|
||||
SECTION("Probability predictions with vector interface")
|
||||
{
|
||||
AdaBoost ada(10, 3);
|
||||
ada.setDebug(DEBUG); // ENABLE DEBUG HERE TOO
|
||||
ada.fit(X, y, features, className, states, Smoothing_t::NONE);
|
||||
|
||||
auto proba = ada.predict_proba(X);
|
||||
@@ -183,8 +186,16 @@ TEST_CASE("AdaBoost Basic Functionality", "[AdaBoost]")
|
||||
correct++;
|
||||
}
|
||||
|
||||
// Check that predict_proba matches the expected predict value
|
||||
REQUIRE(pred == (p[0] > p[1] ? 0 : 1));
|
||||
INFO("Probability test - Sample " << i << ": pred=" << pred << ", probs=[" << p[0] << "," << p[1] << "], expected_from_probs=" << predicted_class);
|
||||
|
||||
// Handle ties
|
||||
if (std::abs(p[0] - p[1]) < 1e-10) {
|
||||
INFO("Tie detected in probabilities");
|
||||
// Either prediction is valid in case of tie
|
||||
} else {
|
||||
// Check that predict_proba matches the expected predict value
|
||||
REQUIRE(pred == predicted_class);
|
||||
}
|
||||
}
|
||||
double accuracy = static_cast<double>(correct) / n_samples;
|
||||
REQUIRE(accuracy > 0.99); // Should achieve good accuracy on this simple dataset
|
||||
@@ -230,103 +241,50 @@ TEST_CASE("AdaBoost Tensor Interface", "[AdaBoost]")
|
||||
}
|
||||
}
|
||||
|
||||
TEST_CASE("AdaBoost on Iris Dataset", "[AdaBoost][iris]")
|
||||
TEST_CASE("AdaBoost SAMME Algorithm Validation", "[AdaBoost]")
|
||||
{
|
||||
auto raw = RawDatasets("iris", true);
|
||||
|
||||
SECTION("Training with vector interface")
|
||||
SECTION("Prediction consistency with probabilities")
|
||||
{
|
||||
AdaBoost ada(30, 3);
|
||||
|
||||
REQUIRE_NOTHROW(ada.fit(raw.Xv, raw.yv, raw.featuresv, raw.classNamev, raw.statesv, Smoothing_t::NONE));
|
||||
|
||||
auto predictions = ada.predict(raw.Xv);
|
||||
REQUIRE(predictions.size() == raw.yv.size());
|
||||
|
||||
// Calculate accuracy
|
||||
int correct = 0;
|
||||
for (size_t i = 0; i < predictions.size(); i++) {
|
||||
if (predictions[i] == raw.yv[i]) correct++;
|
||||
}
|
||||
double accuracy = static_cast<double>(correct) / raw.yv.size();
|
||||
REQUIRE(accuracy > 0.85); // Should achieve good accuracy
|
||||
|
||||
// Test probability predictions
|
||||
auto proba = ada.predict_proba(raw.Xv);
|
||||
REQUIRE(proba.size() == raw.yv.size());
|
||||
REQUIRE(proba[0].size() == 3); // Three classes
|
||||
|
||||
// Verify estimator weights and errors
|
||||
auto weights = ada.getEstimatorWeights();
|
||||
auto errors = ada.getTrainingErrors();
|
||||
|
||||
REQUIRE(weights.size() == errors.size());
|
||||
REQUIRE(weights.size() > 0);
|
||||
|
||||
// All weights should be positive (for non-zero error estimators)
|
||||
for (double w : weights) {
|
||||
REQUIRE(w >= 0.0);
|
||||
}
|
||||
|
||||
// All errors should be less than 0.5 (better than random)
|
||||
for (double e : errors) {
|
||||
REQUIRE(e < 0.5);
|
||||
REQUIRE(e >= 0.0);
|
||||
}
|
||||
}
|
||||
|
||||
SECTION("Different number of estimators")
|
||||
{
|
||||
std::vector<int> n_estimators = { 5, 15, 25 };
|
||||
|
||||
for (int n_est : n_estimators) {
|
||||
AdaBoost ada(n_est, 2);
|
||||
ada.fit(raw.dataset, raw.featurest, raw.classNamet, raw.statest, Smoothing_t::NONE);
|
||||
|
||||
auto predictions = ada.predict(raw.Xt);
|
||||
REQUIRE(predictions.size(0) == raw.yt.size(0));
|
||||
|
||||
// Check that we don't exceed the specified number of estimators
|
||||
auto weights = ada.getEstimatorWeights();
|
||||
REQUIRE(static_cast<int>(weights.size()) <= n_est);
|
||||
}
|
||||
}
|
||||
|
||||
SECTION("Different base estimator depths")
|
||||
{
|
||||
std::vector<int> depths = { 1, 2, 4 };
|
||||
|
||||
for (int depth : depths) {
|
||||
AdaBoost ada(15, depth);
|
||||
ada.fit(raw.dataset, raw.featurest, raw.classNamet, raw.statest, Smoothing_t::NONE);
|
||||
|
||||
auto predictions = ada.predict(raw.Xt);
|
||||
REQUIRE(predictions.size(0) == raw.yt.size(0));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
TEST_CASE("AdaBoost Edge Cases", "[AdaBoost]")
|
||||
{
|
||||
auto raw = RawDatasets("iris", true);
|
||||
|
||||
SECTION("Single estimator (depth 1 stump)")
|
||||
{
|
||||
AdaBoost ada(1, 1); // Single decision stump
|
||||
AdaBoost ada(15, 3);
|
||||
ada.setDebug(DEBUG); // Enable debug for ALL instances
|
||||
ada.fit(raw.dataset, raw.featurest, raw.classNamet, raw.statest, Smoothing_t::NONE);
|
||||
|
||||
auto predictions = ada.predict(raw.Xt);
|
||||
REQUIRE(predictions.size(0) == raw.yt.size(0));
|
||||
auto probabilities = ada.predict_proba(raw.Xt);
|
||||
|
||||
auto weights = ada.getEstimatorWeights();
|
||||
REQUIRE(weights.size() == 1);
|
||||
REQUIRE(predictions.size(0) == probabilities.size(0));
|
||||
REQUIRE(probabilities.size(1) == 3); // Three classes in Iris
|
||||
|
||||
// For each sample, predicted class should correspond to highest probability
|
||||
for (int i = 0; i < predictions.size(0); i++) {
|
||||
int predicted_class = predictions[i].item<int>();
|
||||
auto probs = probabilities[i];
|
||||
|
||||
// Find class with highest probability
|
||||
auto max_prob_idx = torch::argmax(probs).item<int>();
|
||||
|
||||
// Predicted class should match class with highest probability
|
||||
REQUIRE(predicted_class == max_prob_idx);
|
||||
|
||||
// Probabilities should sum to 1
|
||||
double sum_probs = torch::sum(probs).item<double>();
|
||||
REQUIRE(sum_probs == Catch::Approx(1.0).epsilon(1e-6));
|
||||
|
||||
// All probabilities should be non-negative
|
||||
for (int j = 0; j < 3; j++) {
|
||||
REQUIRE(probs[j].item<double>() >= 0.0);
|
||||
REQUIRE(probs[j].item<double>() <= 1.0);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
SECTION("Perfect classifier scenario")
|
||||
SECTION("Weighted voting verification")
|
||||
{
|
||||
// Create a perfectly separable dataset
|
||||
// Simple dataset where we can verify the weighted voting
|
||||
std::vector<std::vector<int>> X = { {0,0,1,1}, {0,1,0,1} };
|
||||
std::vector<int> y = { 0, 0, 1, 1 };
|
||||
std::vector<int> y = { 0, 1, 1, 0 };
|
||||
std::vector<std::string> features = { "f1", "f2" };
|
||||
std::string className = "class";
|
||||
std::map<std::string, std::vector<int>> states;
|
||||
@@ -334,191 +292,61 @@ TEST_CASE("AdaBoost Edge Cases", "[AdaBoost]")
|
||||
states["f2"] = { 0, 1 };
|
||||
states["class"] = { 0, 1 };
|
||||
|
||||
AdaBoost ada(10, 3);
|
||||
ada.fit(X, y, features, className, states, Smoothing_t::NONE);
|
||||
|
||||
auto predictions = ada.predict(X);
|
||||
REQUIRE(predictions.size() == 4);
|
||||
|
||||
// Should achieve perfect accuracy
|
||||
int correct = 0;
|
||||
for (size_t i = 0; i < predictions.size(); i++) {
|
||||
if (predictions[i] == y[i]) correct++;
|
||||
}
|
||||
REQUIRE(correct == 4);
|
||||
|
||||
// Should stop early due to perfect classification
|
||||
auto errors = ada.getTrainingErrors();
|
||||
if (errors.size() > 0) {
|
||||
REQUIRE(errors.back() < 1e-10); // Very low error
|
||||
}
|
||||
}
|
||||
|
||||
SECTION("Small dataset")
|
||||
{
|
||||
// Very small dataset
|
||||
std::vector<std::vector<int>> X = { {0,1}, {1,0} };
|
||||
std::vector<int> y = { 0, 1 };
|
||||
std::vector<std::string> features = { "f1", "f2" };
|
||||
std::string className = "class";
|
||||
std::map<std::string, std::vector<int>> states;
|
||||
states["f1"] = { 0, 1 };
|
||||
states["f2"] = { 0, 1 };
|
||||
states["class"] = { 0, 1 };
|
||||
|
||||
AdaBoost ada(5, 1);
|
||||
REQUIRE_NOTHROW(ada.fit(X, y, features, className, states, Smoothing_t::NONE));
|
||||
|
||||
auto predictions = ada.predict(X);
|
||||
REQUIRE(predictions.size() == 2);
|
||||
}
|
||||
}
|
||||
|
||||
TEST_CASE("AdaBoost Graph Visualization", "[AdaBoost]")
|
||||
{
|
||||
// Simple dataset for visualization
|
||||
std::vector<std::vector<int>> X = { {0,0,1,1}, {0,1,0,1} };
|
||||
std::vector<int> y = { 0, 1, 1, 0 }; // XOR pattern
|
||||
std::vector<std::string> features = { "x1", "x2" };
|
||||
std::string className = "xor";
|
||||
std::map<std::string, std::vector<int>> states;
|
||||
states["x1"] = { 0, 1 };
|
||||
states["x2"] = { 0, 1 };
|
||||
states["xor"] = { 0, 1 };
|
||||
|
||||
SECTION("Graph generation")
|
||||
{
|
||||
AdaBoost ada(5, 2);
|
||||
ada.setDebug(DEBUG); // Enable debug for detailed logging
|
||||
ada.fit(X, y, features, className, states, Smoothing_t::NONE);
|
||||
|
||||
auto graph_lines = ada.graph();
|
||||
INFO("=== Final test verification ===");
|
||||
auto predictions = ada.predict(X);
|
||||
auto probabilities = ada.predict_proba(X);
|
||||
auto alphas = ada.getEstimatorWeights();
|
||||
|
||||
REQUIRE(graph_lines.size() > 2);
|
||||
REQUIRE(graph_lines.front() == "digraph AdaBoost {");
|
||||
REQUIRE(graph_lines.back() == "}");
|
||||
|
||||
// Should contain base estimator references
|
||||
bool has_estimators = false;
|
||||
for (const auto& line : graph_lines) {
|
||||
if (line.find("Estimator") != std::string::npos) {
|
||||
has_estimators = true;
|
||||
break;
|
||||
}
|
||||
INFO("Training info:");
|
||||
for (size_t i = 0; i < alphas.size(); i++) {
|
||||
INFO(" Model " << i << ": alpha=" << alphas[i]);
|
||||
}
|
||||
REQUIRE(has_estimators);
|
||||
|
||||
// Should contain alpha values
|
||||
bool has_alpha = false;
|
||||
for (const auto& line : graph_lines) {
|
||||
if (line.find("α") != std::string::npos || line.find("alpha") != std::string::npos) {
|
||||
has_alpha = true;
|
||||
break;
|
||||
}
|
||||
REQUIRE(predictions.size() == 4);
|
||||
REQUIRE(probabilities.size() == 4);
|
||||
REQUIRE(probabilities[0].size() == 2); // Two classes
|
||||
REQUIRE(alphas.size() > 0);
|
||||
|
||||
// Verify that estimator weights are reasonable
|
||||
for (double alpha : alphas) {
|
||||
REQUIRE(alpha >= 0.0); // Alphas should be non-negative
|
||||
}
|
||||
REQUIRE(has_alpha);
|
||||
}
|
||||
|
||||
SECTION("Graph with title")
|
||||
{
|
||||
AdaBoost ada(3, 1);
|
||||
ada.fit(X, y, features, className, states, Smoothing_t::NONE);
|
||||
// Verify prediction-probability consistency with detailed logging
|
||||
for (size_t i = 0; i < predictions.size(); i++) {
|
||||
int pred = predictions[i];
|
||||
auto probs = probabilities[i];
|
||||
|
||||
auto graph_lines = ada.graph("XOR AdaBoost");
|
||||
INFO("Final check - Sample " << i << ": predicted=" << pred << ", probabilities=[" << probs[0] << "," << probs[1] << "]");
|
||||
|
||||
bool has_title = false;
|
||||
for (const auto& line : graph_lines) {
|
||||
if (line.find("label=\"XOR AdaBoost\"") != std::string::npos) {
|
||||
has_title = true;
|
||||
break;
|
||||
// Handle the case where probabilities are exactly equal (tie)
|
||||
if (std::abs(probs[0] - probs[1]) < 1e-10) {
|
||||
INFO("Tie detected in probabilities - either prediction is valid");
|
||||
REQUIRE((pred == 0 || pred == 1));
|
||||
} else {
|
||||
// Normal case - prediction should match max probability
|
||||
int expected_pred = (probs[0] > probs[1]) ? 0 : 1;
|
||||
INFO("Expected prediction based on probs: " << expected_pred);
|
||||
REQUIRE(pred == expected_pred);
|
||||
}
|
||||
|
||||
REQUIRE(probs[0] + probs[1] == Catch::Approx(1.0).epsilon(1e-6));
|
||||
}
|
||||
REQUIRE(has_title);
|
||||
}
|
||||
}
|
||||
|
||||
TEST_CASE("AdaBoost with Weights", "[AdaBoost]")
|
||||
{
|
||||
auto raw = RawDatasets("iris", true);
|
||||
|
||||
SECTION("Uniform weights")
|
||||
{
|
||||
AdaBoost ada(20, 3);
|
||||
ada.fit(raw.dataset, raw.featurest, raw.classNamet, raw.statest, raw.weights, Smoothing_t::NONE);
|
||||
|
||||
auto predictions = ada.predict(raw.Xt);
|
||||
REQUIRE(predictions.size(0) == raw.yt.size(0));
|
||||
|
||||
auto weights = ada.getEstimatorWeights();
|
||||
REQUIRE(weights.size() > 0);
|
||||
}
|
||||
|
||||
SECTION("Non-uniform weights")
|
||||
SECTION("Empty models edge case")
|
||||
{
|
||||
auto weights = torch::ones({ raw.nSamples });
|
||||
weights.index({ torch::indexing::Slice(0, 50) }) *= 3.0; // Emphasize first class
|
||||
weights = weights / weights.sum();
|
||||
AdaBoost ada(1, 1);
|
||||
ada.setDebug(DEBUG); // Enable debug for ALL instances
|
||||
|
||||
AdaBoost ada(15, 2);
|
||||
ada.fit(raw.dataset, raw.featurest, raw.classNamet, raw.statest, weights, Smoothing_t::NONE);
|
||||
|
||||
auto predictions = ada.predict(raw.Xt);
|
||||
REQUIRE(predictions.size(0) == raw.yt.size(0));
|
||||
|
||||
// Check that training completed successfully
|
||||
auto estimator_weights = ada.getEstimatorWeights();
|
||||
auto errors = ada.getTrainingErrors();
|
||||
|
||||
REQUIRE(estimator_weights.size() == errors.size());
|
||||
REQUIRE(estimator_weights.size() > 0);
|
||||
}
|
||||
}
|
||||
|
||||
TEST_CASE("AdaBoost Input Dimension Validation", "[AdaBoost]")
|
||||
{
|
||||
auto raw = RawDatasets("iris", true);
|
||||
|
||||
SECTION("Correct input dimensions")
|
||||
{
|
||||
AdaBoost ada(10, 2);
|
||||
ada.fit(raw.dataset, raw.featurest, raw.classNamet, raw.statest, Smoothing_t::NONE);
|
||||
|
||||
// Test with correct tensor dimensions (features x samples)
|
||||
REQUIRE_NOTHROW(ada.predict(raw.Xt));
|
||||
REQUIRE_NOTHROW(ada.predict_proba(raw.Xt));
|
||||
|
||||
// Test with correct vector dimensions (features x samples)
|
||||
REQUIRE_NOTHROW(ada.predict(raw.Xv));
|
||||
REQUIRE_NOTHROW(ada.predict_proba(raw.Xv));
|
||||
}
|
||||
|
||||
SECTION("Dimension consistency between interfaces")
|
||||
{
|
||||
AdaBoost ada(10, 2);
|
||||
ada.fit(raw.dataset, raw.featurest, raw.classNamet, raw.statest, Smoothing_t::NONE);
|
||||
|
||||
// Get predictions from both interfaces
|
||||
auto tensor_predictions = ada.predict(raw.Xt);
|
||||
auto vector_predictions = ada.predict(raw.Xv);
|
||||
|
||||
// Should have same number of predictions
|
||||
REQUIRE(tensor_predictions.size(0) == static_cast<int>(vector_predictions.size()));
|
||||
|
||||
// Test probability predictions
|
||||
auto tensor_proba = ada.predict_proba(raw.Xt);
|
||||
auto vector_proba = ada.predict_proba(raw.Xv);
|
||||
|
||||
REQUIRE(tensor_proba.size(0) == static_cast<int>(vector_proba.size()));
|
||||
REQUIRE(tensor_proba.size(1) == static_cast<int>(vector_proba[0].size()));
|
||||
|
||||
// Verify predictions match between interfaces
|
||||
for (int i = 0; i < tensor_predictions.size(0); i++) {
|
||||
REQUIRE(tensor_predictions[i].item<int>() == vector_predictions[i]);
|
||||
|
||||
// Verify probabilities match between interfaces
|
||||
for (int j = 0; j < tensor_proba.size(1); j++) {
|
||||
REQUIRE(tensor_proba[i][j].item<double>() == Catch::Approx(vector_proba[i][j]).epsilon(1e-10));
|
||||
}
|
||||
}
|
||||
// Try to predict before fitting
|
||||
std::vector<std::vector<int>> X = { {0}, {1} };
|
||||
REQUIRE_THROWS_WITH(ada.predict(X), ContainsSubstring("not been fitted"));
|
||||
REQUIRE_THROWS_WITH(ada.predict_proba(X), ContainsSubstring("not been fitted"));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -548,6 +376,7 @@ TEST_CASE("AdaBoost Debug - Simple Dataset Analysis", "[AdaBoost][debug]")
|
||||
SECTION("Debug training process")
|
||||
{
|
||||
AdaBoost ada(5, 3); // Few estimators for debugging
|
||||
ada.setDebug(DEBUG);
|
||||
|
||||
// This should work perfectly on this simple dataset
|
||||
REQUIRE_NOTHROW(ada.fit(X, y, features, className, states, Smoothing_t::NONE));
|
||||
@@ -603,7 +432,14 @@ TEST_CASE("AdaBoost Debug - Simple Dataset Analysis", "[AdaBoost][debug]")
|
||||
|
||||
// Predicted class should match highest probability
|
||||
int pred_class = predictions[i];
|
||||
REQUIRE(pred_class == (p[0] > p[1] ? 0 : 1));
|
||||
|
||||
// Handle ties
|
||||
if (std::abs(p[0] - p[1]) < 1e-10) {
|
||||
INFO("Tie detected - probabilities are equal");
|
||||
REQUIRE((pred_class == 0 || pred_class == 1));
|
||||
} else {
|
||||
REQUIRE(pred_class == (p[0] > p[1] ? 0 : 1));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -621,6 +457,7 @@ TEST_CASE("AdaBoost Debug - Simple Dataset Analysis", "[AdaBoost][debug]")
|
||||
double tree_accuracy = static_cast<double>(tree_correct) / n_samples;
|
||||
|
||||
AdaBoost ada(5, 3);
|
||||
ada.setDebug(DEBUG);
|
||||
ada.fit(X, y, features, className, states, Smoothing_t::NONE);
|
||||
auto ada_predictions = ada.predict(X);
|
||||
|
||||
@@ -639,95 +476,6 @@ TEST_CASE("AdaBoost Debug - Simple Dataset Analysis", "[AdaBoost][debug]")
|
||||
}
|
||||
}
|
||||
|
||||
TEST_CASE("AdaBoost SAMME Algorithm Validation", "[AdaBoost]")
|
||||
{
|
||||
auto raw = RawDatasets("iris", true);
|
||||
|
||||
SECTION("Prediction consistency with probabilities")
|
||||
{
|
||||
AdaBoost ada(15, 3);
|
||||
ada.fit(raw.dataset, raw.featurest, raw.classNamet, raw.statest, Smoothing_t::NONE);
|
||||
|
||||
auto predictions = ada.predict(raw.Xt);
|
||||
auto probabilities = ada.predict_proba(raw.Xt);
|
||||
|
||||
REQUIRE(predictions.size(0) == probabilities.size(0));
|
||||
REQUIRE(probabilities.size(1) == 3); // Three classes in Iris
|
||||
|
||||
// For each sample, predicted class should correspond to highest probability
|
||||
for (int i = 0; i < predictions.size(0); i++) {
|
||||
int predicted_class = predictions[i].item<int>();
|
||||
auto probs = probabilities[i];
|
||||
|
||||
// Find class with highest probability
|
||||
auto max_prob_idx = torch::argmax(probs).item<int>();
|
||||
|
||||
// Predicted class should match class with highest probability
|
||||
REQUIRE(predicted_class == max_prob_idx);
|
||||
|
||||
// Probabilities should sum to 1
|
||||
double sum_probs = torch::sum(probs).item<double>();
|
||||
REQUIRE(sum_probs == Catch::Approx(1.0).epsilon(1e-6));
|
||||
|
||||
// All probabilities should be non-negative
|
||||
for (int j = 0; j < 3; j++) {
|
||||
REQUIRE(probs[j].item<double>() >= 0.0);
|
||||
REQUIRE(probs[j].item<double>() <= 1.0);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
SECTION("Weighted voting verification")
|
||||
{
|
||||
// Simple dataset where we can verify the weighted voting
|
||||
std::vector<std::vector<int>> X = { {0,0,1,1}, {0,1,0,1} };
|
||||
std::vector<int> y = { 0, 1, 1, 0 };
|
||||
std::vector<std::string> features = { "f1", "f2" };
|
||||
std::string className = "class";
|
||||
std::map<std::string, std::vector<int>> states;
|
||||
states["f1"] = { 0, 1 };
|
||||
states["f2"] = { 0, 1 };
|
||||
states["class"] = { 0, 1 };
|
||||
|
||||
AdaBoost ada(5, 2);
|
||||
ada.fit(X, y, features, className, states, Smoothing_t::NONE);
|
||||
|
||||
auto predictions = ada.predict(X);
|
||||
auto probabilities = ada.predict_proba(X);
|
||||
auto alphas = ada.getEstimatorWeights();
|
||||
|
||||
REQUIRE(predictions.size() == 4);
|
||||
REQUIRE(probabilities.size() == 4);
|
||||
REQUIRE(probabilities[0].size() == 2); // Two classes
|
||||
REQUIRE(alphas.size() > 0);
|
||||
|
||||
// Verify that estimator weights are reasonable
|
||||
for (double alpha : alphas) {
|
||||
REQUIRE(alpha >= 0.0); // Alphas should be non-negative
|
||||
}
|
||||
|
||||
// Verify prediction-probability consistency
|
||||
for (size_t i = 0; i < predictions.size(); i++) {
|
||||
int pred = predictions[i];
|
||||
auto probs = probabilities[i];
|
||||
INFO("Sample " << i << ": predicted=" << pred
|
||||
<< ", probabilities=[" << probs[0] << ", " << probs[1] << "]");
|
||||
|
||||
REQUIRE(pred == (probs[0] > probs[1] ? 0 : 1));
|
||||
REQUIRE(probs[0] + probs[1] == Catch::Approx(1.0).epsilon(1e-6));
|
||||
}
|
||||
}
|
||||
|
||||
SECTION("Empty models edge case")
|
||||
{
|
||||
AdaBoost ada(1, 1);
|
||||
|
||||
// Try to predict before fitting
|
||||
std::vector<std::vector<int>> X = { {0}, {1} };
|
||||
REQUIRE_THROWS_WITH(ada.predict(X), ContainsSubstring("not been fitted"));
|
||||
REQUIRE_THROWS_WITH(ada.predict_proba(X), ContainsSubstring("not been fitted"));
|
||||
}
|
||||
}
|
||||
TEST_CASE("AdaBoost Predict-Proba Consistency Fix", "[AdaBoost][consistency]")
|
||||
{
|
||||
// Simple binary classification dataset
|
||||
@@ -743,20 +491,31 @@ TEST_CASE("AdaBoost Predict-Proba Consistency Fix", "[AdaBoost][consistency]")
|
||||
SECTION("Binary classification consistency")
|
||||
{
|
||||
AdaBoost ada(3, 2);
|
||||
ada.setDebug(true); // Enable debug output
|
||||
ada.setDebug(DEBUG); // Enable debug output
|
||||
ada.fit(X, y, features, className, states, Smoothing_t::NONE);
|
||||
|
||||
INFO("=== Debugging predict vs predict_proba consistency ===");
|
||||
|
||||
// Get training info
|
||||
auto alphas = ada.getEstimatorWeights();
|
||||
auto errors = ada.getTrainingErrors();
|
||||
|
||||
INFO("Training completed:");
|
||||
INFO(" Number of models: " << alphas.size());
|
||||
for (size_t i = 0; i < alphas.size(); i++) {
|
||||
INFO(" Model " << i << ": alpha=" << alphas[i] << ", error=" << errors[i]);
|
||||
}
|
||||
|
||||
auto predictions = ada.predict(X);
|
||||
auto probabilities = ada.predict_proba(X);
|
||||
|
||||
INFO("=== Debugging predict vs predict_proba consistency ===");
|
||||
|
||||
// Verify consistency for each sample
|
||||
for (size_t i = 0; i < predictions.size(); i++) {
|
||||
int predicted_class = predictions[i];
|
||||
auto probs = probabilities[i];
|
||||
|
||||
INFO("Sample " << i << ":");
|
||||
INFO(" Features: [" << X[0][i] << ", " << X[1][i] << "]");
|
||||
INFO(" True class: " << y[i]);
|
||||
INFO(" Predicted class: " << predicted_class);
|
||||
INFO(" Probabilities: [" << probs[0] << ", " << probs[1] << "]");
|
||||
@@ -765,7 +524,14 @@ TEST_CASE("AdaBoost Predict-Proba Consistency Fix", "[AdaBoost][consistency]")
|
||||
int max_prob_class = (probs[0] > probs[1]) ? 0 : 1;
|
||||
INFO(" Max prob class: " << max_prob_class);
|
||||
|
||||
REQUIRE(predicted_class == max_prob_class);
|
||||
// Handle tie case (when probabilities are equal)
|
||||
if (std::abs(probs[0] - probs[1]) < 1e-10) {
|
||||
INFO(" Tie detected - probabilities are equal");
|
||||
// In case of tie, either prediction is valid
|
||||
REQUIRE((predicted_class == 0 || predicted_class == 1));
|
||||
} else {
|
||||
REQUIRE(predicted_class == max_prob_class);
|
||||
}
|
||||
|
||||
// Probabilities should sum to 1
|
||||
double sum_probs = probs[0] + probs[1];
|
||||
@@ -778,37 +544,4 @@ TEST_CASE("AdaBoost Predict-Proba Consistency Fix", "[AdaBoost][consistency]")
|
||||
REQUIRE(probs[1] <= 1.0);
|
||||
}
|
||||
}
|
||||
|
||||
SECTION("Multi-class consistency")
|
||||
{
|
||||
auto raw = RawDatasets("iris", true);
|
||||
|
||||
AdaBoost ada(5, 2);
|
||||
ada.fit(raw.dataset, raw.featurest, raw.classNamet, raw.statest, Smoothing_t::NONE);
|
||||
|
||||
auto predictions = ada.predict(raw.Xt);
|
||||
auto probabilities = ada.predict_proba(raw.Xt);
|
||||
|
||||
// Check consistency for first 10 samples
|
||||
for (int i = 0; i < std::min(static_cast<int64_t>(10), predictions.size(0)); i++) {
|
||||
int predicted_class = predictions[i].item<int>();
|
||||
auto probs = probabilities[i];
|
||||
|
||||
// Find class with maximum probability
|
||||
auto max_prob_idx = torch::argmax(probs).item<int>();
|
||||
|
||||
INFO("Sample " << i << ":");
|
||||
INFO(" Predicted class: " << predicted_class);
|
||||
INFO(" Max prob class: " << max_prob_idx);
|
||||
INFO(" Probabilities: [" << probs[0].item<float>() << ", "
|
||||
<< probs[1].item<float>() << ", " << probs[2].item<float>() << "]");
|
||||
|
||||
// They must match
|
||||
REQUIRE(predicted_class == max_prob_idx);
|
||||
|
||||
// Probabilities should sum to 1
|
||||
double sum_probs = torch::sum(probs).item<double>();
|
||||
REQUIRE(sum_probs == Catch::Approx(1.0).epsilon(1e-6));
|
||||
}
|
||||
}
|
||||
}
|
Reference in New Issue
Block a user