diff --git a/sample/main.cc b/sample/main.cc
index 0973c27..8034ba4 100644
--- a/sample/main.cc
+++ b/sample/main.cc
@@ -221,41 +221,44 @@ int main(int argc, char** argv)
     cout << endl;
     cout << "Class name: " << className << endl;
     // Build Network
-    auto network = bayesnet::Network(1.0);
-    build_network(network, network_name, maxes);
-    network.fit(Xd, y, features, className);
-    cout << "Hello, Bayesian Networks!" << endl;
-    showNodesInfo(network, className);
-    //showCPDS(network);
-    cout << "Score: " << network.score(Xd, y) << endl;
-    cout << "PyTorch version: " << TORCH_VERSION << endl;
-    cout << "BayesNet version: " << network.version() << endl;
-    unsigned int nthreads = std::thread::hardware_concurrency();
-    cout << "Computer has " << nthreads << " cores." << endl;
-    cout << "****************** First ******************" << endl;
-    auto metrics = bayesnet::Metrics(network.getSamples(), features, className, network.getClassNumStates());
-    cout << "conditionalEdgeWeight " << endl;
-    auto conditional = metrics.conditionalEdgeWeights();
-    cout << conditional << endl;
-    long m = features.size() + 1;
-    auto matrix = torch::from_blob(conditional.data(), { m, m });
-    cout << matrix << endl;
-    cout << "****************** Second ******************" << endl;
-    auto metrics2 = bayesnet::Metrics(Xd, y, features, className, network.getClassNumStates());
-    cout << "conditionalEdgeWeight " << endl;
-    auto conditional2 = metrics2.conditionalEdgeWeights();
-    cout << conditional2 << endl;
-    long m2 = features.size() + 1;
-    auto matrix2 = torch::from_blob(conditional2.data(), { m, m });
-    cout << matrix2 << endl;
+    // auto network = bayesnet::Network(1.0);
+    // build_network(network, network_name, maxes);
+    // network.fit(Xd, y, features, className);
+    // cout << "Hello, Bayesian Networks!" << endl;
+    // showNodesInfo(network, className);
+    // //showCPDS(network);
+    // cout << "Score: " << network.score(Xd, y) << endl;
+    // cout << "PyTorch version: " << TORCH_VERSION << endl;
+    // cout << "BayesNet version: " << network.version() << endl;
+    // unsigned int nthreads = std::thread::hardware_concurrency();
+    // cout << "Computer has " << nthreads << " cores." << endl;
+    // cout << "****************** First ******************" << endl;
+    // auto metrics = bayesnet::Metrics(network.getSamples(), features, className, network.getClassNumStates());
+    // cout << "conditionalEdgeWeight " << endl;
+    // auto conditional = metrics.conditionalEdgeWeights();
+    // cout << conditional << endl;
+    // long m = features.size() + 1;
+    // auto matrix = torch::from_blob(conditional.data(), { m, m });
+    // cout << matrix << endl;
+    // cout << "****************** Second ******************" << endl;
+    // auto metrics2 = bayesnet::Metrics(Xd, y, features, className, network.getClassNumStates());
+    // cout << "conditionalEdgeWeight " << endl;
+    // auto conditional2 = metrics2.conditionalEdgeWeights();
+    // cout << conditional2 << endl;
+    // long m2 = features.size() + 1;
+    // auto matrix2 = torch::from_blob(conditional2.data(), { m, m });
+    // cout << matrix2 << endl;
     cout << "****************** KDB ******************" << endl;
     map<string, vector<int>> states;
     for (auto feature : features) {
         states[feature] = vector<int>(maxes[feature]);
     }
     states[className] = vector<int>(maxes[className]);
-    auto kdb = bayesnet::KDB(1);
+    auto kdb = bayesnet::KDB(2);
     kdb.fit(Xd, y, features, className, states);
+    for (auto line : kdb.show()) {
+        cout << line << endl;
+    }
     cout << "****************** KDB ******************" << endl;
     return 0;
 }
\ No newline at end of file
diff --git a/src/BaseClassifier.cc b/src/BaseClassifier.cc
index 0673b0d..57d02d3 100644
--- a/src/BaseClassifier.cc
+++ b/src/BaseClassifier.cc
@@ -8,7 +8,7 @@ namespace bayesnet {
     BaseClassifier& BaseClassifier::build(vector<string>& features, string className, map<string, vector<int>>& states)
     {
-        dataset = torch::cat({ X, y.view({150, 1}) }, 1);
+        dataset = torch::cat({ X, y.view({y.size(0), 1}) }, 1);
         this->features = features;
         this->className = className;
         this->states = states;
@@ -86,4 +86,8 @@ namespace bayesnet {
         Tensor y_pred = predict(X);
         return (y_pred == y).sum().item<float>() / y.size(0);
     }
+    void BaseClassifier::show()
+    {
+        model.show();
+    }
 }
\ No newline at end of file
diff --git a/src/BaseClassifier.h b/src/BaseClassifier.h
index 0a6b744..15395d3 100644
--- a/src/BaseClassifier.h
+++ b/src/BaseClassifier.h
@@ -28,8 +28,8 @@ namespace bayesnet {
         BaseClassifier& fit(vector<vector<int>>& X, vector<int>& y, vector<string>& features, string className, map<string, vector<int>>& states);
         Tensor predict(Tensor& X);
         float score(Tensor& X, Tensor& y);
+        void show();
     };
-
 }
 #endif
diff --git a/src/KDB.cc b/src/KDB.cc
index 152c4e0..5a8f11f 100644
--- a/src/KDB.cc
+++ b/src/KDB.cc
@@ -12,7 +12,7 @@ namespace bayesnet {
         sort(indices.begin(), indices.end(), [&nums](int i, int j) {return nums[i] > nums[j];});
         return indices;
     }
-    KDB::KDB(int k, float theta = 0.03) : BaseClassifier(Network()), k(k), theta(theta) {}
+    KDB::KDB(int k, float theta) : BaseClassifier(Network()), k(k), theta(theta) {}
     void KDB::train()
     {
         /*
@@ -76,23 +76,39 @@
     {
         auto n_edges = min(k, static_cast<int>(S.size()));
         auto cond_w = clone(weights);
+        cout << "Conditional edge weights cloned for idx " << idx << endl;
+        cout << cond_w << endl;
         bool exit_cond = k == 0;
         int num = 0;
         while (!exit_cond) {
-            auto max_minfo = argmax(cond_w.index({ "...", idx })).item<int>();
+            auto max_minfo = argmax(cond_w.index({ idx, "..." })).item<int>();
             auto belongs = find(S.begin(), S.end(), max_minfo) != S.end();
             if (belongs && cond_w.index({ idx, max_minfo }).item<float>() > theta) {
                 try {
-                    model.addEdge(features[idx], features[max_minfo]);
+                    model.addEdge(features[max_minfo], features[idx]);
                     num++;
                 }
                 catch (const invalid_argument& e) {
                     // Loops are not allowed
                 }
             }
-            cond_w.index_put_({ "...", max_minfo }, -1);
-            auto candidates = cond_w.gt(theta);
+            cond_w.index_put_({ idx, max_minfo }, -1);
+            cout << "Conditional edge weights cloned for idx " << idx << " After -1" << endl;
+            cout << cond_w << endl;
+            cout << "cond_w.index({ idx, '...'})" << endl;
+            cout << cond_w.index({ idx, "..." }) << endl;
+            auto candidates_mask = cond_w.index({ idx, "..." }).gt(theta);
+            auto candidates = candidates_mask.nonzero();
+            cout << "Candidates mask" << endl;
+            cout << candidates_mask << endl;
+            cout << "Candidates: " << endl;
+            cout << candidates << endl;
+            cout << "Candidates size: " << candidates.size(0) << endl;
             exit_cond = num == n_edges || candidates.size(0) == 0;
         }
     }
+    vector<string> KDB::show()
+    {
+        return model.show();
+    }
 }
\ No newline at end of file
diff --git a/src/KDB.h b/src/KDB.h
index 5830140..930a125 100644
--- a/src/KDB.h
+++ b/src/KDB.h
@@ -10,9 +10,10 @@ namespace bayesnet {
         float theta;
         void add_m_edges(int idx, vector<int>& S, Tensor& weights);
     protected:
-        void train();
+        void train() override;
    public:
-        KDB(int k, float theta);
+        KDB(int k, float theta = 0.03);
+        vector<string> show();
     };
 }
 #endif
\ No newline at end of file
diff --git a/src/Network.cc b/src/Network.cc
index b1094c3..52414c3 100644
--- a/src/Network.cc
+++ b/src/Network.cc
@@ -245,5 +245,18 @@ namespace bayesnet {
         }
         return result;
     }
+    vector<string> Network::show()
+    {
+        vector<string> result;
+        // Draw the network
+        for (auto node : nodes) {
+            string line = node.first + " -> ";
+            for (auto child : node.second->getChildren()) {
+                line += child->getName() + ", ";
+            }
+            result.push_back(line);
+        }
+        return result;
+    }
 }
diff --git a/src/Network.h b/src/Network.h
index b8a75f4..0459bd1 100644
--- a/src/Network.h
+++ b/src/Network.h
@@ -44,6 +44,7 @@ namespace bayesnet {
         torch::Tensor conditionalEdgeWeight();
         vector<vector<double>> predict_proba(const vector<vector<int>>&);
         double score(const vector<vector<int>>&, const vector<int>&);
+        vector<string> show();
         inline string version() { return "0.1.0"; }
     };
 }
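
For reference, a minimal sketch of how the show() API introduced in this diff can be driven from client code, assuming the discretized data (Xd, y), the feature names, the class name and the states map are already prepared as in sample/main.cc. The helper name printModel and the include are illustrative only, not part of the change:

#include <iostream>
#include <map>
#include <string>
#include <vector>
#include "KDB.h"

// Illustrative helper (assumption, not in the diff): fits a KDB classifier and
// prints one "parent -> child, child, " line per node, as built by Network::show().
void printModel(std::vector<std::vector<int>>& Xd, std::vector<int>& y,
                std::vector<std::string>& features, std::string className,
                std::map<std::string, std::vector<int>>& states)
{
    auto kdb = bayesnet::KDB(2);                  // k = 2; theta defaults to 0.03
    kdb.fit(Xd, y, features, className, states);  // learns structure and parameters
    for (auto line : kdb.show()) {                // vector<string> from KDB::show()
        std::cout << line << std::endl;
    }
}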