Complete and fix KDB
parent 3fcf1e40c9
commit e52fdc718f
@@ -221,41 +221,44 @@ int main(int argc, char** argv)
     cout << endl;
     cout << "Class name: " << className << endl;
     // Build Network
-    auto network = bayesnet::Network(1.0);
-    build_network(network, network_name, maxes);
-    network.fit(Xd, y, features, className);
-    cout << "Hello, Bayesian Networks!" << endl;
-    showNodesInfo(network, className);
-    //showCPDS(network);
-    cout << "Score: " << network.score(Xd, y) << endl;
-    cout << "PyTorch version: " << TORCH_VERSION << endl;
-    cout << "BayesNet version: " << network.version() << endl;
-    unsigned int nthreads = std::thread::hardware_concurrency();
-    cout << "Computer has " << nthreads << " cores." << endl;
-    cout << "****************** First ******************" << endl;
-    auto metrics = bayesnet::Metrics(network.getSamples(), features, className, network.getClassNumStates());
-    cout << "conditionalEdgeWeight " << endl;
-    auto conditional = metrics.conditionalEdgeWeights();
-    cout << conditional << endl;
-    long m = features.size() + 1;
-    auto matrix = torch::from_blob(conditional.data(), { m, m });
-    cout << matrix << endl;
-    cout << "****************** Second ******************" << endl;
-    auto metrics2 = bayesnet::Metrics(Xd, y, features, className, network.getClassNumStates());
-    cout << "conditionalEdgeWeight " << endl;
-    auto conditional2 = metrics2.conditionalEdgeWeights();
-    cout << conditional2 << endl;
-    long m2 = features.size() + 1;
-    auto matrix2 = torch::from_blob(conditional2.data(), { m, m });
-    cout << matrix2 << endl;
+    // auto network = bayesnet::Network(1.0);
+    // build_network(network, network_name, maxes);
+    // network.fit(Xd, y, features, className);
+    // cout << "Hello, Bayesian Networks!" << endl;
+    // showNodesInfo(network, className);
+    // //showCPDS(network);
+    // cout << "Score: " << network.score(Xd, y) << endl;
+    // cout << "PyTorch version: " << TORCH_VERSION << endl;
+    // cout << "BayesNet version: " << network.version() << endl;
+    // unsigned int nthreads = std::thread::hardware_concurrency();
+    // cout << "Computer has " << nthreads << " cores." << endl;
+    // cout << "****************** First ******************" << endl;
+    // auto metrics = bayesnet::Metrics(network.getSamples(), features, className, network.getClassNumStates());
+    // cout << "conditionalEdgeWeight " << endl;
+    // auto conditional = metrics.conditionalEdgeWeights();
+    // cout << conditional << endl;
+    // long m = features.size() + 1;
+    // auto matrix = torch::from_blob(conditional.data(), { m, m });
+    // cout << matrix << endl;
+    // cout << "****************** Second ******************" << endl;
+    // auto metrics2 = bayesnet::Metrics(Xd, y, features, className, network.getClassNumStates());
+    // cout << "conditionalEdgeWeight " << endl;
+    // auto conditional2 = metrics2.conditionalEdgeWeights();
+    // cout << conditional2 << endl;
+    // long m2 = features.size() + 1;
+    // auto matrix2 = torch::from_blob(conditional2.data(), { m, m });
+    // cout << matrix2 << endl;
     cout << "****************** KDB ******************" << endl;
     map<string, vector<int>> states;
     for (auto feature : features) {
         states[feature] = vector<int>(maxes[feature]);
     }
     states[className] = vector<int>(maxes[className]);
-    auto kdb = bayesnet::KDB(1);
+    auto kdb = bayesnet::KDB(2);
     kdb.fit(Xd, y, features, className, states);
+    for (auto line : kdb.show()) {
+        cout << line << endl;
+    }
     cout << "****************** KDB ******************" << endl;
     return 0;
 }
@@ -8,7 +8,7 @@ namespace bayesnet {
     BaseClassifier& BaseClassifier::build(vector<string>& features, string className, map<string, vector<int>>& states)
     {

-        dataset = torch::cat({ X, y.view({150, 1}) }, 1);
+        dataset = torch::cat({ X, y.view({y.size(0), 1}) }, 1);
         this->features = features;
         this->className = className;
         this->states = states;
@@ -86,4 +86,8 @@ namespace bayesnet {
         Tensor y_pred = predict(X);
         return (y_pred == y).sum().item<float>() / y.size(0);
     }
+    void BaseClassifier::show()
+    {
+        model.show();
+    }
 }
@@ -28,8 +28,8 @@ namespace bayesnet {
         BaseClassifier& fit(vector<vector<int>>& X, vector<int>& y, vector<string>& features, string className, map<string, vector<int>>& states);
         Tensor predict(Tensor& X);
         float score(Tensor& X, Tensor& y);
+        void show();
     };
 }
 #endif
src/KDB.cc
@@ -12,7 +12,7 @@ namespace bayesnet {
         sort(indices.begin(), indices.end(), [&nums](int i, int j) {return nums[i] > nums[j];});
         return indices;
     }
-    KDB::KDB(int k, float theta = 0.03) : BaseClassifier(Network()), k(k), theta(theta) {}
+    KDB::KDB(int k, float theta) : BaseClassifier(Network()), k(k), theta(theta) {}
     void KDB::train()
     {
         /*
@@ -76,23 +76,39 @@
     {
         auto n_edges = min(k, static_cast<int>(S.size()));
         auto cond_w = clone(weights);
+        cout << "Conditional edge weights cloned for idx " << idx << endl;
+        cout << cond_w << endl;
         bool exit_cond = k == 0;
         int num = 0;
         while (!exit_cond) {
-            auto max_minfo = argmax(cond_w.index({ "...", idx })).item<int>();
+            auto max_minfo = argmax(cond_w.index({ idx, "..." })).item<int>();
             auto belongs = find(S.begin(), S.end(), max_minfo) != S.end();
             if (belongs && cond_w.index({ idx, max_minfo }).item<float>() > theta) {
                 try {
-                    model.addEdge(features[idx], features[max_minfo]);
+                    model.addEdge(features[max_minfo], features[idx]);
                     num++;
                 }
                 catch (const invalid_argument& e) {
                     // Loops are not allowed
                 }
             }
-            cond_w.index_put_({ "...", max_minfo }, -1);
-            auto candidates = cond_w.gt(theta);
+            cond_w.index_put_({ idx, max_minfo }, -1);
+            cout << "Conditional edge weights cloned for idx " << idx << " After -1" << endl;
+            cout << cond_w << endl;
+            cout << "cond_w.index({ idx, '...'})" << endl;
+            cout << cond_w.index({ idx, "..." }) << endl;
+            auto candidates_mask = cond_w.index({ idx, "..." }).gt(theta);
+            auto candidates = candidates_mask.nonzero();
+            cout << "Candidates mask" << endl;
+            cout << candidates_mask << endl;
+            cout << "Candidates: " << endl;
+            cout << candidates << endl;
+            cout << "Candidates size: " << candidates.size(0) << endl;
            exit_cond = num == n_edges || candidates.size(0) == 0;
         }
     }
+    vector<string> KDB::show()
+    {
+        return model.show();
+    }
 }
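The add_m_edges hunk above switches from column indexing (cond_w.index({ "...", idx })) to row indexing (cond_w.index({ idx, "..." })) when picking the strongest remaining candidate parent, and it now masks only the single entry { idx, max_minfo } before recomputing the candidate set from that row. A minimal libtorch sketch of those indexing idioms follows; the 3x3 matrix and the 4.0 threshold are made up purely for illustration:

#include <torch/torch.h>
#include <iostream>

int main()
{
    // Toy 3x3 weight matrix with values 0..8 (illustration only).
    auto w = torch::arange(9, torch::kFloat).reshape({ 3, 3 });
    int idx = 1;
    auto row = w.index({ idx, "..." });            // row idx    -> tensor of shape {3}
    auto col = w.index({ "...", idx });            // column idx -> tensor of shape {3}
    w.index_put_({ idx, 2 }, -1);                  // overwrite the single element w[idx][2]
    auto mask = w.index({ idx, "..." }).gt(4.0);   // boolean mask over the updated row
    auto candidates = mask.nonzero();              // positions in the row where the mask is true
    std::cout << row << std::endl << col << std::endl << candidates << std::endl;
    return 0;
}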
@@ -10,9 +10,10 @@ namespace bayesnet {
         float theta;
         void add_m_edges(int idx, vector<int>& S, Tensor& weights);
     protected:
-        void train();
+        void train() override;
     public:
-        KDB(int k, float theta);
+        KDB(int k, float theta = 0.03);
+        vector<string> show();
     };
 }
 #endif
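The paired constructor edits in src/KDB.cc and KDB.h move the theta = 0.03 default from the out-of-line definition to the declaration. A default argument that appears only on the definition is usable only inside that translation unit, and restating it in both places is ill-formed, so the header declaration is where it belongs. A minimal sketch of the rule; the Foo class is hypothetical and not part of BayesNet:

// foo.h -- hypothetical minimal example
class Foo {
public:
    Foo(int k, float theta = 0.03);   // the default argument lives on the declaration
private:
    int k;
    float theta;
};

// foo.cc
// The definition must not restate "= 0.03" (redefining a default argument is
// ill-formed), and putting the default *only* here would hide it from callers
// in other translation units.
Foo::Foo(int k, float theta) : k(k), theta(theta) {}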
@@ -245,5 +245,18 @@ namespace bayesnet {
         }
         return result;
     }
+    vector<string> Network::show()
+    {
+        vector<string> result;
+        // Draw the network
+        for (auto node : nodes) {
+            string line = node.first + " -> ";
+            for (auto child : node.second->getChildren()) {
+                line += child->getName() + ", ";
+            }
+            result.push_back(line);
+        }
+        return result;
+    }

 }
@@ -44,6 +44,7 @@ namespace bayesnet {
         torch::Tensor conditionalEdgeWeight();
         vector<vector<double>> predict_proba(const vector<vector<int>>&);
         double score(const vector<vector<int>>&, const vector<int>&);
+        vector<string> show();
         inline string version() { return "0.1.0"; }
     };
 }
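Taken together, the new show() methods on Network, BaseClassifier and KDB let a fitted classifier print one "parent -> children" line per node, as exercised in the main() hunk at the top. A minimal standalone sketch of that flow; the toy dataset, its one-inner-vector-per-feature layout, and the include path are assumptions:

#include <iostream>
#include <map>
#include <string>
#include <vector>
#include "KDB.h"   // assumed include path

using namespace std;

int main()
{
    // Toy discretized dataset: one inner vector per feature (assumed layout),
    // four samples, binary class.
    vector<vector<int>> Xd = { { 0, 1, 1, 0 }, { 1, 1, 0, 0 } };
    vector<int> y = { 0, 1, 1, 0 };
    vector<string> features = { "f1", "f2" };
    string className = "class";
    map<string, vector<int>> states;
    for (auto feature : features) {
        states[feature] = vector<int>(2);   // two discrete states per feature
    }
    states[className] = vector<int>(2);     // two class states

    auto kdb = bayesnet::KDB(2);
    kdb.fit(Xd, y, features, className, states);
    for (auto line : kdb.show()) {          // one "parent -> children, ..." line per node
        cout << line << endl;
    }
    return 0;
}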