Begin with parameter estimation

This commit is contained in:
2023-06-30 21:24:12 +02:00
parent 0a31aa2ff1
commit 71d730d228
8 changed files with 236 additions and 87 deletions

View File

@@ -1,7 +1,7 @@
#include "Network.h"
namespace bayesnet {
// Default-constructed network: Laplace smoothing factor of 1, no root
// node yet (set when the first node is added). Delegates to the
// parameterized constructor to avoid duplicating the init list.
Network::Network() : Network(1) {}
// @param smoothing  Laplace smoothing count added during CPT estimation.
Network::Network(int smoothing) : laplaceSmoothing(smoothing), root(nullptr), features(vector<string>()), className("") {}
Network::~Network()
{
for (auto& pair : nodes) {
@@ -10,6 +10,9 @@ namespace bayesnet {
}
void Network::addNode(string name, int numStates)
{
if (nodes.find(name) != nodes.end()) {
throw invalid_argument("Node " + name + " already exists");
}
nodes[name] = new Node(name, numStates);
if (root == nullptr) {
root = nodes[name];
@@ -32,7 +35,6 @@ namespace bayesnet {
{
visited.insert(nodeId);
recStack.insert(nodeId);
for (Node* child : nodes[nodeId]->getChildren()) {
if (visited.find(child->getName()) == visited.end() && isCyclic(child->getName(), visited, recStack))
return true;
@@ -55,13 +57,11 @@ namespace bayesnet {
nodes[parent]->addChild(nodes[child]);
nodes[child]->addParent(nodes[parent]);
// temporarily add edge
unordered_set<string> visited;
unordered_set<string> recStack;
if (isCyclic(nodes[child]->getName(), visited, recStack)) // if adding this edge forms a cycle
{
// remove edge
// remove problematic edge
nodes[parent]->removeChild(nodes[child]);
nodes[child]->removeParent(nodes[parent]);
throw invalid_argument("Adding this edge forms a cycle in the graph.");
@@ -72,71 +72,162 @@ namespace bayesnet {
{
return nodes;
}
void Network::buildNetwork(const vector<vector<int>>& dataset, const vector<int>& labels, const vector<string>& featureNames, const string& className)
void Network::buildNetwork()
{
// Add features as nodes to the network
for (int i = 0; i < featureNames.size(); ++i) {
addNode(featureNames[i], *max_element(dataset[i].begin(), dataset[i].end()) + 1);
for (int i = 0; i < features.size(); ++i) {
addNode(features[i], *max_element(dataset[features[i]].begin(), dataset[features[i]].end()) + 1);
}
// Add class as node to the network
addNode(className, *max_element(labels.begin(), labels.end()) + 1);
addNode(className, *max_element(dataset[className].begin(), dataset[className].end()) + 1);
// Add edges from class to features => naive Bayes
for (auto feature : featureNames) {
for (auto feature : features) {
addEdge(className, feature);
}
addEdge("petalwidth", "petallength");
}
// Learn the network from data: store each column of the dataset under
// its feature/class name, build the naive-Bayes structure, then
// estimate the conditional probability tables.
// @param dataset       one vector of discrete values per feature (row i = column for feature i)
// @param labels        class value for each sample
// @param featureNames  name of each feature, parallel to `dataset`
// @param className     name of the class variable
void Network::fit(const vector<vector<int>>& dataset, const vector<int>& labels, const vector<string>& featureNames, const string& className)
{
    features = featureNames;
    this->className = className;
    // Build dataset: key every column by its node name so that later
    // stages (buildNetwork / estimateParameters) can address columns
    // uniformly by name.
    for (size_t i = 0; i < featureNames.size(); ++i) {
        this->dataset[featureNames[i]] = dataset[i];
    }
    this->dataset[className] = labels;
    buildNetwork();
    estimateParameters();
}
torch::Tensor& Network::getCPD(const string& key)
// void Network::estimateParameters()
// {
// auto dimensions = vector<int64_t>();
// for (auto [name, node] : nodes) {
// // Get dimensions of the CPT
// dimensions.clear();
// dimensions.push_back(node->getNumStates());
// for (auto father : node->getParents()) {
// dimensions.push_back(father->getNumStates());
// }
// auto length = dimensions.size();
// // Create a tensor of zeros with the dimensions of the CPT
// torch::Tensor cpt = torch::zeros(dimensions, torch::kFloat);
// // Fill table with counts
// for (int n_sample = 0; n_sample < dataset[name].size(); ++n_sample) {
// torch::List<c10::optional<torch::Tensor>> coordinates;
// coordinates.push_back(torch::tensor(dataset[name][n_sample]));
// for (auto father : node->getParents()) {
// coordinates.push_back(torch::tensor(dataset[father->getName()][n_sample]));
// }
// // Increment the count of the corresponding coordinate
// cpt.index_put_({ coordinates }, cpt.index({ coordinates }) + 1);
// }
// // store the resulting cpt in the node
// node->setCPT(cpt);
// }
// }
// void Network::estimateParameters()
// {
// // Lambda function to compute joint counts of states
// auto jointCounts = [this](const vector<string>& nodeNames) {
// int size = nodeNames.size();
// std::vector<int64_t> sizes(size);
// for (int i = 0; i < size; ++i) {
// sizes[i] = this->nodes[nodeNames[i]]->getNumStates();
// }
// torch::Tensor counts = torch::zeros(sizes, torch::kLong);
// int dataSize = this->dataset[nodeNames[0]].size();
// for (int dataIdx = 0; dataIdx < dataSize; ++dataIdx) {
// std::vector<torch::Tensor> idx(size);
// for (int i = 0; i < size; ++i) {
// idx[i] = torch::tensor(this->dataset[nodeNames[i]][dataIdx], torch::kLong);
// }
// torch::Tensor indices = torch::stack(idx);
// counts.index_put_({ indices }, counts.index({ indices }) + 1);
// }
// return counts;
// };
// // Lambda function to compute marginal counts of states
// auto marginalCounts = [](const torch::Tensor& jointCounts) {
// return jointCounts.sum(-1);
// };
// for (auto& pair : nodes) {
// Node* node = pair.second;
// // Create a list of names of the node and its parents
// std::vector<string> nodeNames;
// nodeNames.push_back(node->getName());
// for (Node* parent : node->getParents()) {
// nodeNames.push_back(parent->getName());
// }
// // Compute counts and normalize to get probabilities
// torch::Tensor counts = jointCounts(nodeNames) + laplaceSmoothing;
// torch::Tensor parentCounts = marginalCounts(counts);
// parentCounts = parentCounts.unsqueeze(-1);
// // The CPT is represented as a tensor and stored in the Node
// node->setCPT((counts.to(torch::kDouble) / parentCounts.to(torch::kDouble)));
// }
// }
// Estimate every node's CPT by counting joint occurrences in the stored
// dataset and normalizing with Laplace smoothing.
void Network::estimateParameters()
{
    // Lambda function to compute joint counts of states: the resulting
    // tensor has one dimension per variable in `nodeNames`, in order
    // (node first, then its parents).
    auto jointCounts = [this](const vector<string>& nodeNames) {
        int size = nodeNames.size();
        std::vector<int64_t> sizes(size);
        for (int i = 0; i < size; ++i) {
            sizes[i] = this->nodes[nodeNames[i]]->getNumStates();
        }
        torch::Tensor counts = torch::zeros(sizes, torch::kLong);
        int dataSize = this->dataset[nodeNames[0]].size();
        // Index list reused across samples; cleared each iteration
        // instead of reconstructing the container.
        torch::List<c10::optional<torch::Tensor>> indices;
        for (int dataIdx = 0; dataIdx < dataSize; ++dataIdx) {
            indices.clear();
            for (int i = 0; i < size; ++i) {
                indices.push_back(torch::tensor(this->dataset[nodeNames[i]][dataIdx], torch::kLong));
            }
            // Increment the count at this sample's coordinates.
            counts.index_put_({ indices }, counts.index({ indices }) + 1);
        }
        return counts;
    };
    // Lambda function to compute marginal counts of states.
    // NOTE(review): sum(-1) marginalizes over the LAST dimension, which
    // is a parent when the node has parents; for P(node | parents) the
    // normalization would normally be over the node's own dimension
    // (dim 0) — confirm the intended CPT layout before relying on this.
    auto marginalCounts = [](const torch::Tensor& jointCounts) {
        return jointCounts.sum(-1);
    };
    for (auto& pair : nodes) {
        Node* node = pair.second;
        // Create a list of names of the node and its parents
        std::vector<string> nodeNames;
        nodeNames.push_back(node->getName());
        for (Node* parent : node->getParents()) {
            nodeNames.push_back(parent->getName());
        }
        // Compute counts and normalize to get probabilities
        torch::Tensor counts = jointCounts(nodeNames) + laplaceSmoothing;
        torch::Tensor parentCounts = marginalCounts(counts);
        parentCounts = parentCounts.unsqueeze(-1);
        // The CPT is represented as a tensor and stored in the Node
        node->setCPT((counts.to(torch::kDouble) / parentCounts.to(torch::kDouble)));
    }
}
// Register the conditional probability table `cpt` under `key`,
// overwriting any table previously stored for that key.
void Network::setCPD(const string& key, const torch::Tensor& cpt)
{
    cpds.insert_or_assign(key, cpt);
}
}