Complete fixing the linter warnings

Ricardo Montañana Gómez 2023-07-30 00:16:58 +02:00
parent b882569169
commit 4ebc9c2013
Signed by: rmontanana
GPG Key ID: 46064262FD9A7ADE
4 changed files with 6 additions and 13 deletions

View File

@@ -125,7 +125,6 @@ namespace bayesnet {
     }
     void Classifier::addNodes()
     {
-        auto test = model.getEdges();
         // Add all nodes to the network
         for (auto feature : features) {
            model.addNode(feature, states[feature].size());

View File

@@ -148,10 +148,10 @@ namespace bayesnet {
     }
     int Ensemble::getNumberOfStates()
     {
-        int states = 0;
+        int nstates = 0;
         for (auto i = 0; i < n_models; ++i) {
-            states += models[i]->getNumberOfStates();
+            nstates += models[i]->getNumberOfStates();
         }
-        return states;
+        return nstates;
     }
 }

View File

@@ -7,9 +7,8 @@
 namespace bayesnet {
     using namespace std;
-    Graph::Graph(int V) : V(V)
+    Graph::Graph(int V) : V(V), parent(vector<int>(V))
     {
-        parent = vector<int>(V);
         for (int i = 0; i < V; i++)
             parent[i] = i;
         G.clear();

View File

@@ -8,7 +8,7 @@ namespace bayesnet {
     Network::Network(float maxT, int smoothing) : laplaceSmoothing(smoothing), features(vector<string>()), className(""), classNumStates(0), maxThreads(maxT), fitted(false) {}
     Network::Network(Network& other) : laplaceSmoothing(other.laplaceSmoothing), features(other.features), className(other.className), classNumStates(other.getClassNumStates()), maxThreads(other.getmaxThreads()), fitted(other.fitted)
     {
-        for (auto& pair : other.nodes) {
+        for (const auto& pair : other.nodes) {
             nodes[pair.first] = std::make_unique<Node>(*pair.second);
         }
     }
@@ -145,9 +145,6 @@ namespace bayesnet {
         while (nextNodeIndex < nodes.size()) {
             unique_lock<mutex> lock(mtx);
             cv.wait(lock, [&activeThreads, &maxThreadsRunning]() { return activeThreads < maxThreadsRunning; });
-            if (nextNodeIndex >= nodes.size()) {
-                break; // No more work remaining
-            }
             threads.emplace_back([this, &nextNodeIndex, &mtx, &cv, &activeThreads]() {
                 while (true) {
                     unique_lock<mutex> lock(mtx);
@@ -262,9 +259,7 @@ namespace bayesnet {
         // Normalize result
         double sum = accumulate(result.begin(), result.end(), 0.0);
-        for (double& value : result) {
-            value /= sum;
-        }
+        transform(result.begin(), result.end(), result.begin(), [sum](double& value) { return value / sum; });
         return result;
     }
     vector<string> Network::show()
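
A minimal standalone sketch of the accumulate + transform normalization pattern that the last hunk switches to. The vector contents and variable names below are illustrative only and are not taken from the repository:

#include <algorithm>
#include <iostream>
#include <numeric>
#include <vector>

int main()
{
    // Unnormalized non-negative scores (illustrative values).
    std::vector<double> result = { 0.2, 0.3, 0.5, 1.0 };

    // Sum all elements, then divide each one by the sum in place,
    // mirroring the accumulate + transform pair in the hunk above.
    double sum = std::accumulate(result.begin(), result.end(), 0.0);
    std::transform(result.begin(), result.end(), result.begin(),
        [sum](double value) { return value / sum; });

    for (double value : result) {
        std::cout << value << " "; // prints 0.1 0.15 0.25 0.5
    }
    std::cout << std::endl;
    return 0;
}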