boostAode #5
.vscode/launch.json (vendored): 8 additions

@@ -46,6 +46,14 @@
 ],
 "cwd": "/Users/rmontanana/Code/discretizbench",
 },
+{
+"type": "lldb",
+"request": "launch",
+"name": "list",
+"program": "${workspaceFolder}/build/src/Platform/list",
+"args": [],
+"cwd": "/Users/rmontanana/Code/discretizbench",
+},
 {
 "name": "Build & debug active file",
 "type": "cppdbg",
.vscode/tasks.json (vendored): 23 additions

@@ -32,6 +32,29 @@
 ],
 "group": "build",
 "detail": "Task generated by Debugger."
+},
+{
+"type": "cppbuild",
+"label": "C/C++: g++ build active file",
+"command": "/usr/bin/g++",
+"args": [
+"-fdiagnostics-color=always",
+"-g",
+"${file}",
+"-o",
+"${fileDirname}/${fileBasenameNoExtension}"
+],
+"options": {
+"cwd": "${fileDirname}"
+},
+"problemMatcher": [
+"$gcc"
+],
+"group": {
+"kind": "build",
+"isDefault": true
+},
+"detail": "Task generated by Debugger."
 }
 ]
 }
Makefile: 4 changes

@@ -15,7 +15,7 @@ dependency: ## Create a dependency graph diagram of the project (build/dependenc
 cd build && cmake .. --graphviz=dependency.dot && dot -Tpng dependency.dot -o dependency.png

 build: ## Build the main and BayesNetSample
-cmake --build build -t main -t BayesNetSample -t manage -j 32
+cmake --build build -t main -t BayesNetSample -t manage -t list -j 32

 clean: ## Clean the debug info
 @echo ">>> Cleaning Debug BayesNet ...";
@@ -35,7 +35,7 @@ release: ## Build a Release version of the project
 @if [ -d ./build ]; then rm -rf ./build; fi
 @mkdir build;
 cmake -S . -B build -D CMAKE_BUILD_TYPE=Release; \
-cmake --build build -t main -t BayesNetSample -t manage -j 32;
+cmake --build build -t main -t BayesNetSample -t manage -t list -j 32;
 @echo ">>> Done";

 test: ## Run tests
src/Platform/CMakeLists.txt

@@ -6,5 +6,7 @@ include_directories(${BayesNet_SOURCE_DIR}/lib/argparse/include)
 include_directories(${BayesNet_SOURCE_DIR}/lib/json/include)
 add_executable(main main.cc Folding.cc platformUtils.cc Experiment.cc Datasets.cc Models.cc Report.cc)
 add_executable(manage manage.cc Results.cc Report.cc)
+add_executable(list list.cc platformUtils Datasets.cc)
 target_link_libraries(main BayesNet ArffFiles mdlp "${TORCH_LIBRARIES}")
 target_link_libraries(manage "${TORCH_LIBRARIES}")
+target_link_libraries(list ArffFiles mdlp "${TORCH_LIBRARIES}")
src/Platform/Datasets.cc

@@ -24,75 +24,110 @@ namespace platform {
 transform(datasets.begin(), datasets.end(), back_inserter(result), [](const auto& d) { return d.first; });
 return result;
 }
-vector<string> Datasets::getFeatures(string name)
+vector<string> Datasets::getFeatures(const string& name) const
 {
-if (datasets[name]->isLoaded()) {
+if (datasets.at(name)->isLoaded()) {
-return datasets[name]->getFeatures();
+return datasets.at(name)->getFeatures();
 } else {
 throw invalid_argument("Dataset not loaded.");
 }
 }
-map<string, vector<int>> Datasets::getStates(string name)
+map<string, vector<int>> Datasets::getStates(const string& name) const
 {
-if (datasets[name]->isLoaded()) {
+if (datasets.at(name)->isLoaded()) {
-return datasets[name]->getStates();
+return datasets.at(name)->getStates();
 } else {
 throw invalid_argument("Dataset not loaded.");
 }
 }
-string Datasets::getClassName(string name)
+void Datasets::loadDataset(const string& name) const
 {
-if (datasets[name]->isLoaded()) {
+if (datasets.at(name)->isLoaded()) {
-return datasets[name]->getClassName();
+return;
+} else {
+datasets.at(name)->load();
+}
+}
+string Datasets::getClassName(const string& name) const
+{
+if (datasets.at(name)->isLoaded()) {
+return datasets.at(name)->getClassName();
 } else {
 throw invalid_argument("Dataset not loaded.");
 }
 }
-int Datasets::getNSamples(string name)
+int Datasets::getNSamples(const string& name) const
 {
-if (datasets[name]->isLoaded()) {
+if (datasets.at(name)->isLoaded()) {
-return datasets[name]->getNSamples();
+return datasets.at(name)->getNSamples();
 } else {
 throw invalid_argument("Dataset not loaded.");
 }
 }
-pair<vector<vector<float>>&, vector<int>&> Datasets::getVectors(string name)
+int Datasets::getNClasses(const string& name)
+{
+if (datasets.at(name)->isLoaded()) {
+auto className = datasets.at(name)->getClassName();
+if (discretize) {
+auto states = getStates(name);
+return states.at(className).size();
+}
+auto [Xv, yv] = getVectors(name);
+return *max_element(yv.begin(), yv.end()) + 1;
+} else {
+throw invalid_argument("Dataset not loaded.");
+}
+}
+vector<int> Datasets::getClassesCounts(const string& name) const
+{
+if (datasets.at(name)->isLoaded()) {
+auto [Xv, yv] = datasets.at(name)->getVectors();
+vector<int> counts(*max_element(yv.begin(), yv.end()) + 1);
+for (auto y : yv) {
+counts[y]++;
+}
+return counts;
+} else {
+throw invalid_argument("Dataset not loaded.");
+}
+}
+pair<vector<vector<float>>&, vector<int>&> Datasets::getVectors(const string& name)
 {
 if (!datasets[name]->isLoaded()) {
 datasets[name]->load();
 }
 return datasets[name]->getVectors();
 }
-pair<vector<vector<int>>&, vector<int>&> Datasets::getVectorsDiscretized(string name)
+pair<vector<vector<int>>&, vector<int>&> Datasets::getVectorsDiscretized(const string& name)
 {
 if (!datasets[name]->isLoaded()) {
 datasets[name]->load();
 }
 return datasets[name]->getVectorsDiscretized();
 }
-pair<torch::Tensor&, torch::Tensor&> Datasets::getTensors(string name)
+pair<torch::Tensor&, torch::Tensor&> Datasets::getTensors(const string& name)
 {
 if (!datasets[name]->isLoaded()) {
 datasets[name]->load();
 }
 return datasets[name]->getTensors();
 }
-bool Datasets::isDataset(const string& name)
+bool Datasets::isDataset(const string& name) const
 {
 return datasets.find(name) != datasets.end();
 }
 Dataset::Dataset(const Dataset& dataset) : path(dataset.path), name(dataset.name), className(dataset.className), n_samples(dataset.n_samples), n_features(dataset.n_features), features(dataset.features), states(dataset.states), loaded(dataset.loaded), discretize(dataset.discretize), X(dataset.X), y(dataset.y), Xv(dataset.Xv), Xd(dataset.Xd), yv(dataset.yv), fileType(dataset.fileType)
 {
 }
-string Dataset::getName()
+string Dataset::getName() const
 {
 return name;
 }
-string Dataset::getClassName()
+string Dataset::getClassName() const
 {
 return className;
 }
-vector<string> Dataset::getFeatures()
+vector<string> Dataset::getFeatures() const
 {
 if (loaded) {
 return features;
@@ -100,7 +135,7 @@ namespace platform {
 throw invalid_argument("Dataset not loaded.");
 }
 }
-int Dataset::getNFeatures()
+int Dataset::getNFeatures() const
 {
 if (loaded) {
 return n_features;
@@ -108,7 +143,7 @@ namespace platform {
 throw invalid_argument("Dataset not loaded.");
 }
 }
-int Dataset::getNSamples()
+int Dataset::getNSamples() const
 {
 if (loaded) {
 return n_samples;
@@ -116,7 +151,7 @@ namespace platform {
 throw invalid_argument("Dataset not loaded.");
 }
 }
-map<string, vector<int>> Dataset::getStates()
+map<string, vector<int>> Dataset::getStates() const
 {
 if (loaded) {
 return states;
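
Note (reviewer sketch, not part of the diff): the new Datasets::getNClasses and Datasets::getClassesCounts both assume integer class labels in the range 0..k-1, so the number of classes is the largest label plus one. A minimal standalone program with an invented label vector shows the same arithmetic:

    #include <algorithm>
    #include <iostream>
    #include <vector>

    int main()
    {
        // Invented label vector; stands in for the yv returned by getVectors().
        std::vector<int> yv = { 0, 1, 1, 2, 2, 2 };
        // Number of classes = highest label + 1, as in getNClasses().
        int nClasses = *std::max_element(yv.begin(), yv.end()) + 1;
        // Per-class frequencies, as in getClassesCounts().
        std::vector<int> counts(nClasses);
        for (auto y : yv) {
            counts[y]++;
        }
        for (int c = 0; c < nClasses; ++c) {
            std::cout << "class " << c << ": " << counts[c] << std::endl;
        }
        return 0;   // prints class 0: 1, class 1: 2, class 2: 3
    }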
src/Platform/Datasets.h

@@ -29,15 +29,15 @@ namespace platform {
 public:
 Dataset(const string& path, const string& name, const string& className, bool discretize, fileType_t fileType) : path(path), name(name), className(className), discretize(discretize), loaded(false), fileType(fileType) {};
 explicit Dataset(const Dataset&);
-string getName();
+string getName() const;
-string getClassName();
+string getClassName() const;
-vector<string> getFeatures();
+vector<string> getFeatures() const;
-map<string, vector<int>> getStates();
+map<string, vector<int>> getStates() const;
 pair<vector<vector<float>>&, vector<int>&> getVectors();
 pair<vector<vector<int>>&, vector<int>&> getVectorsDiscretized();
 pair<torch::Tensor&, torch::Tensor&> getTensors();
-int getNFeatures();
+int getNFeatures() const;
-int getNSamples();
+int getNSamples() const;
 void load();
 const bool inline isLoaded() const { return loaded; };
 };
@@ -51,14 +51,17 @@ namespace platform {
 public:
 explicit Datasets(const string& path, bool discretize = false, fileType_t fileType = ARFF) : path(path), discretize(discretize), fileType(fileType) { load(); };
 vector<string> getNames();
-vector<string> getFeatures(string name);
+vector<string> getFeatures(const string& name) const;
-int getNSamples(string name);
+int getNSamples(const string& name) const;
-string getClassName(string name);
+string getClassName(const string& name) const;
-map<string, vector<int>> getStates(string name);
+int getNClasses(const string& name);
-pair<vector<vector<float>>&, vector<int>&> getVectors(string name);
+vector<int> getClassesCounts(const string& name) const;
-pair<vector<vector<int>>&, vector<int>&> getVectorsDiscretized(string name);
+map<string, vector<int>> getStates(const string& name) const;
-pair<torch::Tensor&, torch::Tensor&> getTensors(string name);
+pair<vector<vector<float>>&, vector<int>&> getVectors(const string& name);
-bool isDataset(const string& name);
+pair<vector<vector<int>>&, vector<int>&> getVectorsDiscretized(const string& name);
+pair<torch::Tensor&, torch::Tensor&> getTensors(const string& name);
+bool isDataset(const string& name) const;
+void loadDataset(const string& name) const;
 };
 };

src/Platform/Paths.h

@@ -1,5 +1,6 @@
 #ifndef PATHS_H
 #define PATHS_H
+#include <string>
 namespace platform {
 class Paths {
 public:
src/Platform/Report.cc

@@ -1,6 +1,9 @@
+#include <sstream>
+#include <locale>
 #include "Report.h"
 #include "BestResult.h"

+
 namespace platform {
 string headerLine(const string& text)
 {
@@ -31,21 +34,31 @@ namespace platform {
 body();
 footer();
 }
+struct separated : numpunct<char> {
+char do_decimal_point() const { return ','; }
+char do_thousands_sep() const { return '.'; }
+string do_grouping() const { return "\03"; }
+};
 void Report::header()
 {
+locale mylocale(cout.getloc(), new separated);
+locale::global(mylocale);
+cout.imbue(mylocale);
+stringstream oss;
 cout << Colors::MAGENTA() << string(MAXL, '*') << endl;
 cout << headerLine("Report " + data["model"].get<string>() + " ver. " + data["version"].get<string>() + " with " + to_string(data["folds"].get<int>()) + " Folds cross validation and " + to_string(data["seeds"].size()) + " random seeds. " + data["date"].get<string>() + " " + data["time"].get<string>());
 cout << headerLine(data["title"].get<string>());
 cout << headerLine("Random seeds: " + fromVector("seeds") + " Stratified: " + (data["stratified"].get<bool>() ? "True" : "False"));
-cout << headerLine("Execution took " + to_string(data["duration"].get<float>()) + " seconds, " + to_string(data["duration"].get<float>() / 3600) + " hours, on " + data["platform"].get<string>());
+oss << "Execution took " << setprecision(2) << fixed << data["duration"].get<float>() << " seconds, " << data["duration"].get<float>() / 3600 << " hours, on " << data["platform"].get<string>();
+cout << headerLine(oss.str());
 cout << headerLine("Score is " + data["score_name"].get<string>());
 cout << string(MAXL, '*') << endl;
 cout << endl;
 }
 void Report::body()
 {
 cout << Colors::GREEN() << "Dataset Sampl. Feat. Cls Nodes Edges States Score Time Hyperparameters" << endl;
-cout << "============================== ====== ===== === ======= ======= ======= =============== ================== ===============" << endl;
+cout << "============================== ====== ===== === ========= ========= ========= =============== ================== ===============" << endl;
 json lastResult;
 totalScore = 0;
 bool odd = true;
@@ -55,9 +68,9 @@ namespace platform {
 cout << setw(6) << right << r["samples"].get<int>() << " ";
 cout << setw(5) << right << r["features"].get<int>() << " ";
 cout << setw(3) << right << r["classes"].get<int>() << " ";
-cout << setw(7) << setprecision(2) << fixed << r["nodes"].get<float>() << " ";
+cout << setw(9) << setprecision(2) << fixed << r["nodes"].get<float>() << " ";
-cout << setw(7) << setprecision(2) << fixed << r["leaves"].get<float>() << " ";
+cout << setw(9) << setprecision(2) << fixed << r["leaves"].get<float>() << " ";
-cout << setw(7) << setprecision(2) << fixed << r["depth"].get<float>() << " ";
+cout << setw(9) << setprecision(2) << fixed << r["depth"].get<float>() << " ";
 cout << setw(8) << right << setprecision(6) << fixed << r["score"].get<double>() << "±" << setw(6) << setprecision(4) << fixed << r["score_std"].get<double>() << " ";
 cout << setw(11) << right << setprecision(6) << fixed << r["time"].get<double>() << "±" << setw(6) << setprecision(4) << fixed << r["time_std"].get<double>() << " ";
 try {
@@ -85,7 +98,9 @@ namespace platform {
 cout << Colors::MAGENTA() << string(MAXL, '*') << endl;
 auto score = data["score_name"].get<string>();
 if (score == BestResult::scoreName()) {
-cout << headerLine(score + " compared to " + BestResult::title() + " .: " + to_string(totalScore / BestResult::score()));
+stringstream oss;
+oss << score << " compared to " << BestResult::title() << " .: " << totalScore / BestResult::score();
+cout << headerLine(oss.str());
 }
 cout << string(MAXL, '*') << endl << Colors::RESET();

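
Note (reviewer sketch, not part of the diff): the separated facet added above only changes how numbers are rendered once it is imbued on cout: '.' becomes the thousands separator, ',' the decimal point, and digits are grouped in threes. A self-contained example with made-up values:

    #include <iostream>
    #include <locale>
    #include <string>

    // Same overrides as the `separated` struct in Report.cc.
    struct separated : std::numpunct<char> {
        char do_decimal_point() const override { return ','; }
        char do_thousands_sep() const override { return '.'; }
        std::string do_grouping() const override { return "\03"; }
    };

    int main()
    {
        // The locale takes ownership of the facet pointer.
        std::cout.imbue(std::locale(std::cout.getloc(), new separated));
        std::cout << 1234567 << std::endl;               // prints 1.234.567
        std::cout << std::fixed << 1234.5 << std::endl;  // prints 1.234,500000
        return 0;
    }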
src/Platform/list.cc (new file): 57 additions

@@ -0,0 +1,57 @@
+#include <iostream>
+#include <locale>
+#include "Paths.h"
+#include "Colors.h"
+#include "Datasets.h"
+
+using namespace std;
+const int BALANCE_LENGTH = 75;
+
+struct separated : numpunct<char> {
+char do_decimal_point() const { return ','; }
+char do_thousands_sep() const { return '.'; }
+string do_grouping() const { return "\03"; }
+};
+
+void outputBalance(const string& balance)
+{
+auto temp = string(balance);
+while (temp.size() > BALANCE_LENGTH - 1) {
+auto part = temp.substr(0, BALANCE_LENGTH);
+cout << part << endl;
+cout << setw(48) << " ";
+temp = temp.substr(BALANCE_LENGTH);
+}
+cout << temp << endl;
+}
+
+int main(int argc, char** argv)
+{
+auto data = platform::Datasets(platform::Paths().datasets(), false);
+locale mylocale(cout.getloc(), new separated);
+locale::global(mylocale);
+cout.imbue(mylocale);
+cout << Colors::GREEN() << "Dataset Sampl. Feat. Cls. Balance" << endl;
+string balanceBars = string(BALANCE_LENGTH, '=');
+cout << "============================== ====== ===== === " << balanceBars << endl;
+bool odd = true;
+for (const auto& dataset : data.getNames()) {
+auto color = odd ? Colors::CYAN() : Colors::BLUE();
+cout << color << setw(30) << left << dataset << " ";
+data.loadDataset(dataset);
+auto nSamples = data.getNSamples(dataset);
+cout << setw(6) << right << nSamples << " ";
+cout << setw(5) << right << data.getFeatures(dataset).size() << " ";
+cout << setw(3) << right << data.getNClasses(dataset) << " ";
+stringstream oss;
+string sep = "";
+for (auto number : data.getClassesCounts(dataset)) {
+oss << sep << setprecision(2) << fixed << (float)number / nSamples * 100.0 << "% (" << number << ")";
+sep = " / ";
+}
+outputBalance(oss.str());
+odd = !odd;
+}
+cout << Colors::RESET() << endl;
+return 0;
+}
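
Note (reviewer sketch, not part of the diff): outputBalance in list.cc wraps the class-balance string into slices of BALANCE_LENGTH characters and pads continuation lines with 48 spaces so they stay aligned under the Balance column. A small driver with an invented input shows the behavior in isolation:

    #include <iomanip>
    #include <iostream>
    #include <string>

    const int BALANCE_LENGTH = 75;

    // Mirrors outputBalance() from list.cc.
    void outputBalance(const std::string& balance)
    {
        auto temp = std::string(balance);
        while (temp.size() > BALANCE_LENGTH - 1) {
            std::cout << temp.substr(0, BALANCE_LENGTH) << std::endl;
            std::cout << std::setw(48) << " ";   // hanging indent under the Balance column
            temp = temp.substr(BALANCE_LENGTH);
        }
        std::cout << temp << std::endl;
    }

    int main()
    {
        // Invented 100-character balance string: forces exactly one wrap.
        outputBalance(std::string(100, 'x'));
        return 0;
    }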