Merge pull request 'list_results' (#3) from list_results into main
Reviewed-on: #3
@@ -4,8 +4,8 @@ diagrams:
Platform:
type: class
glob:
- src/*.cc
- src/modules/*.cc
- src/*.cpp
- src/modules/*.cpp
using_namespace: platform
include:
namespaces:
@@ -17,7 +17,7 @@ diagrams:
sequence:
type: sequence
glob:
- src/b_main.cc
- src/b_main.cpp
combine_free_functions_into_file_participants: true
using_namespace:
- std

@@ -85,7 +85,7 @@ add_subdirectory(lib/Files)
add_subdirectory(config)
add_subdirectory(src)
add_subdirectory(sample)
file(GLOB Platform_SOURCES CONFIGURE_DEPENDS ${Platform_SOURCE_DIR}/src/*.cc)
file(GLOB Platform_SOURCES CONFIGURE_DEPENDS ${Platform_SOURCE_DIR}/src/*.cpp)

# Testing
# -------

Doxyfile (4 changes)
@@ -976,7 +976,7 @@ INPUT_FILE_ENCODING =
# Note the list of default checked file patterns might differ from the list of
# default file extension mappings.
#
# If left blank the following patterns are tested:*.c, *.cc, *.cxx, *.cpp,
# If left blank the following patterns are tested:*.c, *.cpp, *.cxx, *.cpp,
# *.c++, *.java, *.ii, *.ixx, *.ipp, *.i++, *.inl, *.idl, *.ddl, *.odl, *.h,
# *.hh, *.hxx, *.hpp, *.h++, *.l, *.cs, *.d, *.php, *.php4, *.php5, *.phtml,
# *.inc, *.m, *.markdown, *.md, *.mm, *.dox (to be provided as doxygen C
@@ -984,7 +984,7 @@ INPUT_FILE_ENCODING =
# *.vhdl, *.ucf, *.qsf and *.ice.

FILE_PATTERNS = *.c \
*.cc \
*.cpp \
*.cxx \
*.cpp \
*.c++ \

Makefile (10 changes)
@@ -96,6 +96,14 @@ test: ## Run tests (opt="-s") to verbose output the tests, (opt="-c='Test Maximu
done
@echo ">>> Done";

fname = iris
example: ## Build sample
@echo ">>> Building Sample...";
@cmake --build build_debug -t sample
build_debug/sample/PlatformSample --model BoostAODE --dataset $(fname) --discretize --stratified
@echo ">>> Done";


coverage: ## Run tests and generate coverage report (build/index.html)
@echo ">>> Building tests with coverage..."
@$(MAKE) test
@@ -105,7 +113,7 @@ coverage: ## Run tests and generate coverage report (build/index.html)

help: ## Show help message
@IFS=$$'\n' ; \
help_lines=(`fgrep -h "##" $(MAKEFILE_LIST) | fgrep -v fgrep | sed -e 's/\\$$//' | sed -e 's/##/:/'`); \
help_lines=(`grep -Fh "##" $(MAKEFILE_LIST) | grep -Fv fgrep | sed -e 's/\\$$//' | sed -e 's/##/:/'`); \
printf "%s\n\n" "Usage: make [task]"; \
printf "%-20s %s\n" "task" "help" ; \
printf "%-20s %s\n" "------" "----" ; \

@@ -10,5 +10,5 @@ include_directories(
${CMAKE_BINARY_DIR}/configured_files/include
/usr/local/include
)
add_executable(PlatformSample sample.cc ${Platform_SOURCE_DIR}/src/main/Models.cc)
add_executable(PlatformSample sample.cpp ${Platform_SOURCE_DIR}/src/main/Models.cpp)
target_link_libraries(PlatformSample "${PyClassifiers}" "${BayesNet}" ArffFiles mdlp ${Python3_LIBRARIES} "${TORCH_LIBRARIES}" ${LIBTORCH_PYTHON} Boost::python Boost::numpy)

@@ -85,13 +85,13 @@ int main(int argc, char** argv)
.default_value(std::string{ PATH }
);
program.add_argument("-m", "--model")
.help("Model to use " + platform::Models::instance()->tostring())
.help("Model to use " + platform::Models::instance()->toString())
.action([](const std::string& value) {
static const std::vector<std::string> choices = platform::Models::instance()->getNames();
if (find(choices.begin(), choices.end(), value) != choices.end()) {
return value;
}
throw runtime_error("Model must be one of " + platform::Models::instance()->tostring());
throw runtime_error("Model must be one of " + platform::Models::instance()->toString());
}
);
program.add_argument("--discretize").help("Discretize input dataset").default_value(false).implicit_value(true);

@@ -16,37 +16,52 @@ include_directories(
)

# b_best
set(best_sources b_best.cc BestResults.cc Statistics.cc BestResultsExcel.cc)
set(best_sources b_best.cpp BestResults.cpp Statistics.cpp BestResultsExcel.cpp)
list(TRANSFORM best_sources PREPEND best/)
add_executable(
b_best ${best_sources} main/Result.cc
reports/ReportExcel.cc reports/ReportBase.cc reports/ExcelFile.cc common/Datasets.cc common/Dataset.cc)
target_link_libraries(b_best Boost::boost "${TORCH_LIBRARIES}" "${XLSXWRITER_LIB}" ArffFiles mdlp)
b_best ${best_sources}
common/Datasets.cpp common/Dataset.cpp
main/Result.cpp main/Models.cpp
reports/ReportExcel.cpp reports/ReportBase.cpp reports/ExcelFile.cpp
)
target_link_libraries(b_best Boost::boost "${PyClassifiers}" "${BayesNet}" ArffFiles mdlp ${Python3_LIBRARIES} "${TORCH_LIBRARIES}" ${LIBTORCH_PYTHON} Boost::python Boost::numpy "${XLSXWRITER_LIB}")

# b_grid
set(grid_sources b_grid.cc GridSearch.cc GridData.cc)
set(grid_sources b_grid.cpp GridSearch.cpp GridData.cpp)
list(TRANSFORM grid_sources PREPEND grid/)
add_executable(b_grid ${grid_sources} main/HyperParameters.cc main/Models.cc common/Datasets.cc common/Dataset.cc)
add_executable(b_grid ${grid_sources}
common/Datasets.cpp common/Dataset.cpp
main/HyperParameters.cpp main/Models.cpp
)
target_link_libraries(b_grid ${MPI_CXX_LIBRARIES} "${PyClassifiers}" "${BayesNet}" ArffFiles mdlp ${Python3_LIBRARIES} "${TORCH_LIBRARIES}" ${LIBTORCH_PYTHON} Boost::python Boost::numpy)

# b_list
set(list_sources b_list.cc DatasetsExcel.cc)
set(list_sources b_list.cpp DatasetsExcel.cpp ResultsDataset.cpp ResultsDatasetExcel.cpp)
list(TRANSFORM list_sources PREPEND list/)
add_executable(b_list ${list_sources} common/Datasets.cc common/Dataset.cc reports/ReportExcel.cc reports/ExcelFile.cc reports/ReportBase.cc)
target_link_libraries(b_list "${TORCH_LIBRARIES}" "${XLSXWRITER_LIB}" ArffFiles mdlp)
add_executable(b_list ${list_sources}
common/Datasets.cpp common/Dataset.cpp
main/Models.cpp
reports/ReportExcel.cpp reports/ExcelFile.cpp reports/ReportBase.cpp main/Result.cpp
)
target_link_libraries(b_list "${PyClassifiers}" "${BayesNet}" ArffFiles mdlp ${Python3_LIBRARIES} "${TORCH_LIBRARIES}" ${LIBTORCH_PYTHON} Boost::python Boost::numpy "${XLSXWRITER_LIB}")

# b_main
set(main_sources b_main.cc Experiment.cc Models.cc HyperParameters.cc)
set(main_sources b_main.cpp Experiment.cpp Models.cpp HyperParameters.cpp)
list(TRANSFORM main_sources PREPEND main/)
add_executable(b_main ${main_sources} common/Datasets.cc common/Dataset.cc reports/ReportConsole.cc reports/ReportBase.cc main/Result.cc)
add_executable(b_main ${main_sources}
common/Datasets.cpp common/Dataset.cpp
main/Result.cpp
reports/ReportConsole.cpp reports/ReportBase.cpp
)
target_link_libraries(b_main "${PyClassifiers}" "${BayesNet}" ArffFiles mdlp ${Python3_LIBRARIES} "${TORCH_LIBRARIES}" ${LIBTORCH_PYTHON} Boost::python Boost::numpy)

# b_manage
set(manage_sources b_manage.cc ManageResults.cc CommandParser.cc Results.cc)
set(manage_sources b_manage.cpp ManageResults.cpp CommandParser.cpp ResultsManager.cpp)
list(TRANSFORM manage_sources PREPEND manage/)
add_executable(
b_manage ${manage_sources} main/Result.cc
reports/ReportConsole.cc reports/ReportExcel.cc reports/ReportExcelCompared.cc reports/ReportBase.cc reports/ExcelFile.cc
common/Datasets.cc common/Dataset.cc
b_manage ${manage_sources}
common/Datasets.cpp common/Dataset.cpp
main/Result.cpp
reports/ReportConsole.cpp reports/ReportExcel.cpp reports/ReportExcelCompared.cpp reports/ReportBase.cpp reports/ExcelFile.cpp
)
target_link_libraries(b_manage "${TORCH_LIBRARIES}" "${XLSXWRITER_LIB}" ArffFiles mdlp)

@@ -170,10 +170,9 @@ namespace platform {
std::cout << Colors::GREEN() << " # " << std::setw(maxDatasetName + 1) << std::left << "Dataset" << "Score " << std::setw(maxFileName) << "File" << " Hyperparameters" << std::endl;
std::cout << "=== " << std::string(maxDatasetName, '=') << " =========== " << std::string(maxFileName, '=') << " " << std::string(maxHyper, '=') << std::endl;
auto i = 0;
bool odd = true;
double total = 0;
for (auto const& item : data.items()) {
auto color = odd ? Colors::BLUE() : Colors::CYAN();
auto color = (i % 2) ? Colors::BLUE() : Colors::CYAN();
double value = item.value().at(0).get<double>();
std::cout << color << std::setw(3) << std::fixed << std::right << i++ << " ";
std::cout << std::setw(maxDatasetName) << std::left << item.key() << " ";
@@ -182,7 +181,6 @@ namespace platform {
std::cout << item.value().at(1) << " ";
std::cout << std::endl;
total += value;
odd = !odd;
}
std::cout << Colors::GREEN() << "=== " << std::string(maxDatasetName, '=') << " ===========" << std::endl;
std::cout << Colors::GREEN() << " Total" << std::string(maxDatasetName - 5, '.') << " " << std::setw(11) << std::setprecision(8) << std::fixed << total << std::endl;

@@ -1,5 +1,5 @@
#ifndef BESTRESULTS_H
#define BESTRESULTS_H
#pragma once

#include <string>
#include <nlohmann/json.hpp>
using json = nlohmann::json;
@@ -34,4 +34,3 @@ namespace platform {
int maxDatasetName = 0;
};
}
#endif //BESTRESULTS_H

@@ -1,5 +1,5 @@
#ifndef BESTRESULTS_EXCEL_H
#define BESTRESULTS_EXCEL_H
#pragma once

#include <vector>
#include <map>
#include <nlohmann/json.hpp>
@@ -34,4 +34,3 @@ namespace platform {
int datasetNameSize = 25; // Min size of the column
};
}
#endif //BESTRESULTS_EXCEL_H

@@ -1,5 +1,5 @@
#ifndef BESTSCORE_H
#define BESTSCORE_H
#pragma once

#include <string>
#include <map>
#include <utility>
@@ -24,5 +24,3 @@ namespace platform {
}
};
}

#endif

@@ -1,5 +1,5 @@
#ifndef STATISTICS_H
#define STATISTICS_H
#pragma once

#include <iostream>
#include <vector>
#include <map>
@@ -60,4 +60,3 @@ namespace platform {
std::map<std::string, std::map<std::string, float>> ranksModels;
};
}
#endif // !STATISTICS_H

@@ -1,5 +1,7 @@
#include <iostream>
#include <argparse/argparse.hpp>
#include "main/Models.h"
#include "main/modelRegister.h"
#include "common/Paths.h"
#include "common/Colors.h"
#include "BestResults.h"
@@ -7,7 +9,18 @@

void manageArguments(argparse::ArgumentParser& program)
{
program.add_argument("-m", "--model").default_value("").help("Filter results of the selected model) (any for all models)");
program.add_argument("-m", "--model")
.help("Model to use: " + platform::Models::instance()->toString() + " or any")
.action([](const std::string& value) {
std::vector<std::string> valid(platform::Models::instance()->getNames());
valid.push_back("any");
static const std::vector<std::string> choices = valid;
if (find(choices.begin(), choices.end(), value) != choices.end()) {
return value;
}
throw std::runtime_error("Model must be one of " + platform::Models::instance()->toString() + " or any");
}
);
program.add_argument("-d", "--dataset").default_value("any").help("Filter results of the selected model) (any for all datasets)");
program.add_argument("-s", "--score").default_value("accuracy").help("Filter results of the score name supplied");
program.add_argument("--friedman").help("Friedman test").default_value(false).implicit_value(true);

@@ -1,5 +1,5 @@
#ifndef LOCALE_H
#define LOCALE_H
#pragma once

#include <locale>
#include <iostream>
#include <string>
@@ -19,4 +19,3 @@ namespace platform {
}
};
}
#endif

@@ -1,5 +1,5 @@
#ifndef COLORS_H
#define COLORS_H
#pragma once

#include <string>
class Colors {
public:
@@ -13,4 +13,3 @@ public:
static std::string IBLUE() { return "\033[0;94m"; }
static std::string RESET() { return "\033[0m"; }
};
#endif // COLORS_H

@@ -1,5 +1,5 @@
#ifndef DATASET_H
#define DATASET_H
#pragma once

#include <torch/torch.h>
#include <map>
#include <vector>
@@ -75,4 +75,3 @@ namespace platform {
};
};

#endif

@@ -126,4 +126,14 @@ namespace platform {
{
return datasets.find(name) != datasets.end();
}
std::string Datasets::toString() const
{
std::string result;
std::string sep = "";
for (const auto& d : datasets) {
result += sep + d.first;
sep = ", ";
}
return "{" + result + "}";
}
}

@@ -1,5 +1,5 @@
#ifndef DATASETS_H
#define DATASETS_H
#pragma once

#include "Dataset.h"
namespace platform {
class Datasets {
@@ -24,7 +24,6 @@ namespace platform {
std::pair<torch::Tensor&, torch::Tensor&> getTensors(const std::string& name);
bool isDataset(const std::string& name) const;
void loadDataset(const std::string& name) const;
std::string toString() const;
};
};

#endif

@@ -1,5 +1,5 @@
#ifndef DOTENV_H
#define DOTENV_H
#pragma once

#include <string>
#include <map>
#include <fstream>
@@ -52,4 +52,3 @@ namespace platform {
}
};
}
#endif

@@ -1,5 +1,5 @@
#ifndef PATHS_H
#define PATHS_H
#pragma once

#include <string>
#include <filesystem>
#include "DotEnv.h"
@@ -36,4 +36,3 @@ namespace platform {
}
};
}
#endif

@@ -1,5 +1,5 @@
#ifndef SYMBOLS_H
#define SYMBOLS_H
#pragma once

#include <string>
namespace platform {
class Symbols {
@@ -15,4 +15,3 @@ namespace platform {
inline static const std::string notebook{ "\U0001F5C8" };
};
}
#endif // !SYMBOLS_H

@@ -1,5 +1,5 @@
#ifndef TIMER_H
#define TIMER_H
#pragma once

#include <chrono>
#include <string>
#include <sstream>
@@ -40,4 +40,3 @@ namespace platform {
}
};
} /* namespace platform */
#endif /* TIMER_H */

@@ -1,5 +1,5 @@
#ifndef UTILS_H
#define UTILS_H
#pragma once

#include <sstream>
#include <string>
#include <vector>
@@ -27,4 +27,3 @@ namespace platform {
return result;
}
}
#endif

@@ -1,5 +1,5 @@
#ifndef GRIDDATA_H
#define GRIDDATA_H
#pragma once

#include <string>
#include <vector>
#include <map>
@@ -23,4 +23,3 @@ namespace platform {
std::map<std::string, json> grid;
};
} /* namespace platform */
#endif /* GRIDDATA_H */

@@ -1,5 +1,5 @@
#ifndef GRIDSEARCH_H
#define GRIDSEARCH_H
#pragma once

#include <string>
#include <map>
#include <mpi.h>
@@ -57,4 +57,3 @@ namespace platform {
Timer timer; // used to measure the time of the whole process
};
} /* namespace platform */
#endif /* GRIDSEARCH_H */

@@ -20,14 +20,14 @@ void assignModel(argparse::ArgumentParser& parser)
{
auto models = platform::Models::instance();
parser.add_argument("-m", "--model")
.help("Model to use " + models->tostring())
.help("Model to use " + models->toString())
.required()
.action([models](const std::string& value) {
static const std::vector<std::string> choices = models->getNames();
if (find(choices.begin(), choices.end(), value) != choices.end()) {
return value;
}
throw std::runtime_error("Model must be one of " + models->tostring());
throw std::runtime_error("Model must be one of " + models->toString());
}
);
}
@@ -259,7 +259,7 @@ int main(int argc, char** argv)
}
}
if (!found) {
throw std::runtime_error("You must specify one of the following commands: dump, report, compute, export\n");
throw std::runtime_error("You must specify one of the following commands: dump, report, compute\n");
}
}
catch (const exception& err) {

@@ -1,8 +1,5 @@
#include <sstream>
#include "common/Paths.h"
#include "DatasetsExcel.h"


namespace platform {
DatasetsExcel::DatasetsExcel()
{

@@ -1,7 +1,5 @@
#ifndef DATASETS_EXCEL_H
#define DATASETS_EXCEL_H
#include <vector>
#include <map>
#pragma once

#include <nlohmann/json.hpp>
#include "reports/ExcelFile.h"

@@ -16,4 +14,3 @@ namespace platform {
void report(json& data);
};
}
#endif //DATASETS_EXCEL_H

src/list/ResultsDataset.cpp (new file, 57 lines)
@@ -0,0 +1,57 @@
#include <algorithm>
#include "common/Paths.h"
#include "ResultsDataset.h"

namespace platform {
ResultsDataset::ResultsDataset(const std::string& dataset, const std::string& model, const std::string& score) :
path(Paths::results()), dataset(dataset), model(model), scoreName(score), maxModel(0), maxFile(0), maxHyper(15), maxResult(0)
{
}
void ResultsDataset::load()
{
using std::filesystem::directory_iterator;
for (const auto& file : directory_iterator(path)) {
auto filename = file.path().filename().string();
if (filename.find(".json") != std::string::npos && filename.find("results_") == 0) {
auto result = Result();
result.load(path, filename);
if (model != "any" && result.getModel() != model)
continue;
auto data = result.getData()["results"];
for (auto const& item : data) {
if (item["dataset"] == dataset) {
auto hyper_length = item["hyperparameters"].dump().size();
if (hyper_length > maxHyper)
maxHyper = hyper_length;
if (item["score"].get<double>() > maxResult)
maxResult = item["score"].get<double>();
files.push_back(result);
break;
}
}
}
}
maxModel = std::max(size_t(5), (*max_element(files.begin(), files.end(), [](const Result& a, const Result& b) { return a.getModel().size() < b.getModel().size(); })).getModel().size());
maxFile = std::max(size_t(4), (*max_element(files.begin(), files.end(), [](const Result& a, const Result& b) { return a.getFilename().size() < b.getFilename().size(); })).getFilename().size());
}
int ResultsDataset::size() const
{
return files.size();
}
void ResultsDataset::sortModel()
{
sort(files.begin(), files.end(), [](const Result& a, const Result& b) {
if (a.getModel() == b.getModel()) {
if (a.getDate() == b.getDate()) {
return a.getTime() > b.getTime();
}
return a.getDate() > b.getDate();
}
return a.getModel() < b.getModel();
});
}
bool ResultsDataset::empty() const
{
return files.empty();
}
}

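A minimal usage sketch of the new ResultsDataset class, mirroring how b_list.cpp drives it later in this diff; "iris" follows the Makefile example above and is only a placeholder filter, not project code:

#include <iostream>
#include "ResultsDataset.h"

int main()
{
    // Hypothetical filter values: one dataset, any model, accuracy scores.
    auto results = platform::ResultsDataset("iris", "any", "accuracy");
    results.load();       // scan the results folder for results_*.json files
    results.sortModel();  // order by model name, newest first within each model
    if (results.empty()) {
        std::cerr << "No results found" << std::endl;
        return 1;
    }
    for (auto& result : results) {
        std::cout << result.getModel() << " " << result.getDate() << " " << result.getTime() << std::endl;
    }
    return 0;
}
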
src/list/ResultsDataset.h (new file, 34 lines)
@@ -0,0 +1,34 @@
#pragma once

#include <vector>
#include <string>
#include <nlohmann/json.hpp>
#include "main/Result.h"
namespace platform {
using json = nlohmann::json;
class ResultsDataset {
public:
ResultsDataset(const std::string& dataset, const std::string& model, const std::string& score);
void load(); // Loads the list of results
void sortModel();
int maxModelSize() const { return maxModel; };
int maxFileSize() const { return maxFile; };
int maxHyperSize() const { return maxHyper; };
double maxResultScore() const { return maxResult; };
int size() const;
bool empty() const;
std::vector<Result>::iterator begin() { return files.begin(); };
std::vector<Result>::iterator end() { return files.end(); };
Result& at(int index) { return files.at(index); };
private:
std::string path;
std::string dataset;
std::string model;
std::string scoreName;
int maxModel;
int maxFile;
int maxHyper;
double maxResult;
std::vector<Result> files;
};
};

src/list/ResultsDatasetExcel.cpp (new file, 49 lines)
@@ -0,0 +1,49 @@
#include "ResultsDatasetExcel.h"
#include <iostream>
namespace platform {
ResultsDatasetExcel::ResultsDatasetExcel()
{
file_name = "some_results.xlsx";
workbook = workbook_new(getFileName().c_str());
createFormats();
setProperties("Results");
}
ResultsDatasetExcel::~ResultsDatasetExcel()
{
workbook_close(workbook);
}
void ResultsDatasetExcel::report(json& data)
{
worksheet = workbook_add_worksheet(workbook, data["dataset"].get<std::string>().c_str());
// Header
std::string title = "Results of dataset " + data["dataset"].get<std::string>() + " - for " + data["model"].get<std::string>() + " model";
worksheet_merge_range(worksheet, 0, 0, 0, 5, title.c_str(), styles["headerFirst"]);
// Body header
row = 2;
int col = 0;
for (const auto& name : { "Nº", "Model", "Date", "Time", "Score", "Hyperparameters" }) {
writeString(row, col++, name, "bodyHeader");
}
// Body
double maxResult = data["maxResult"].get<double>();
for (const auto& item : data["results"]) {
row++;
col = 0;
std::string style = item["score"] == data["maxResult"] ? "_bold" : "";
writeInt(row, col++, row - 3, "ints" + style);
writeString(row, col++, item["model"], "text" + style);
writeString(row, col++, item["date"], "text" + style);
writeString(row, col++, item["time"], "text" + style);
writeDouble(row, col++, item["score"], "result" + style);
writeString(row, col++, item["hyperparameters"].get<std::string>().c_str(), "text" + style);
}
// Format columns
worksheet_freeze_panes(worksheet, 3, 2);
auto modelSize = data["maxModel"].get<int>();
auto hyperSize = data["maxHyper"].get<int>();
std::vector<int> columns_sizes = { 5, modelSize + 3, 12, 9, 11, hyperSize + 10 };
for (int i = 0; i < columns_sizes.size(); ++i) {
worksheet_set_column(worksheet, i, i, columns_sizes.at(i), NULL);
}
}
}

src/list/ResultsDatasetExcel.h (new file, 16 lines)
@@ -0,0 +1,16 @@
#pragma once

#include <nlohmann/json.hpp>
#include "reports/ExcelFile.h"

using json = nlohmann::json;

namespace platform {

class ResultsDatasetExcel : public ExcelFile {
public:
ResultsDatasetExcel();
~ResultsDatasetExcel();
void report(json& data);
};
}

@@ -1,80 +0,0 @@
#include <iostream>
#include <locale>
#include <argparse/argparse.hpp>
#include <nlohmann/json.hpp>
#include "common/Paths.h"
#include "common/Colors.h"
#include "common/Datasets.h"
#include "DatasetsExcel.h"
#include "config.h"

const int BALANCE_LENGTH = 75;

struct separated : numpunct<char> {
char do_decimal_point() const { return ','; }
char do_thousands_sep() const { return '.'; }
std::string do_grouping() const { return "\03"; }
};

std::string outputBalance(const std::string& balance)
{
auto temp = std::string(balance);
while (temp.size() > BALANCE_LENGTH - 1) {
auto part = temp.substr(0, BALANCE_LENGTH);
std::cout << part << std::endl;
std::cout << setw(52) << " ";
temp = temp.substr(BALANCE_LENGTH);
}
return temp;
}

int main(int argc, char** argv)
{
auto datasets = platform::Datasets(false, platform::Paths::datasets());
argparse::ArgumentParser program("b_list", { platform_project_version.begin(), platform_project_version.end() });
program.add_argument("--excel")
.help("Output in Excel format")
.default_value(false)
.implicit_value(true);
program.parse_args(argc, argv);
auto excel = program.get<bool>("--excel");
locale mylocale(std::cout.getloc(), new separated);
locale::global(mylocale);
std::cout.imbue(mylocale);
std::cout << Colors::GREEN() << " # Dataset Sampl. Feat. Cls Balance" << std::endl;
std::string balanceBars = std::string(BALANCE_LENGTH, '=');
std::cout << "=== ============================== ====== ===== === " << balanceBars << std::endl;
int num = 0;
json data;
for (const auto& dataset : datasets.getNames()) {
auto color = num % 2 ? Colors::CYAN() : Colors::BLUE();
std::cout << color << setw(3) << right << num++ << " ";
std::cout << setw(30) << left << dataset << " ";
datasets.loadDataset(dataset);
auto nSamples = datasets.getNSamples(dataset);
std::cout << setw(6) << right << nSamples << " ";
std::cout << setw(5) << right << datasets.getFeatures(dataset).size() << " ";
std::cout << setw(3) << right << datasets.getNClasses(dataset) << " ";
std::stringstream oss;
std::string sep = "";
for (auto number : datasets.getClassesCounts(dataset)) {
oss << sep << std::setprecision(2) << fixed << (float)number / nSamples * 100.0 << "% (" << number << ")";
sep = " / ";
}
auto balance = outputBalance(oss.str());
std::cout << balance << std::endl;
// Store data for Excel report
data[dataset] = json::object();
data[dataset]["samples"] = nSamples;
data[dataset]["features"] = datasets.getFeatures(dataset).size();
data[dataset]["classes"] = datasets.getNClasses(dataset);
data[dataset]["balance"] = oss.str();
}
std::cout << Colors::RESET() << std::endl;
if (excel) {
auto report = platform::DatasetsExcel();
report.report(data);
std::cout << "Output saved in " << report.getFileName() << std::endl;
}
return 0;
}

src/list/b_list.cpp (new file, 211 lines)
@@ -0,0 +1,211 @@
#include <iostream>
#include <locale>
#include <map>
#include <argparse/argparse.hpp>
#include <nlohmann/json.hpp>
#include "main/Models.h"
#include "main/modelRegister.h"
#include "common/Paths.h"
#include "common/Colors.h"
#include "common/Datasets.h"
#include "DatasetsExcel.h"
#include "ResultsDataset.h"
#include "ResultsDatasetExcel.h"
#include "config.h"

const int BALANCE_LENGTH = 75;

struct separated : numpunct<char> {
char do_decimal_point() const { return ','; }
char do_thousands_sep() const { return '.'; }
std::string do_grouping() const { return "\03"; }
};

std::string outputBalance(const std::string& balance)
{
auto temp = std::string(balance);
while (temp.size() > BALANCE_LENGTH - 1) {
auto part = temp.substr(0, BALANCE_LENGTH);
std::cout << part << std::endl;
std::cout << setw(52) << " ";
temp = temp.substr(BALANCE_LENGTH);
}
return temp;
}

void list_datasets(argparse::ArgumentParser& program)
{
auto datasets = platform::Datasets(false, platform::Paths::datasets());
auto excel = program.get<bool>("excel");
locale mylocale(std::cout.getloc(), new separated);
locale::global(mylocale);
std::cout.imbue(mylocale);
std::cout << Colors::GREEN() << " # Dataset Sampl. Feat. Cls Balance" << std::endl;
std::string balanceBars = std::string(BALANCE_LENGTH, '=');
std::cout << "=== ============================== ====== ===== === " << balanceBars << std::endl;
int num = 0;
json data;
for (const auto& dataset : datasets.getNames()) {
auto color = num % 2 ? Colors::CYAN() : Colors::BLUE();
std::cout << color << setw(3) << right << num++ << " ";
std::cout << setw(30) << left << dataset << " ";
datasets.loadDataset(dataset);
auto nSamples = datasets.getNSamples(dataset);
std::cout << setw(6) << right << nSamples << " ";
std::cout << setw(5) << right << datasets.getFeatures(dataset).size() << " ";
std::cout << setw(3) << right << datasets.getNClasses(dataset) << " ";
std::stringstream oss;
std::string sep = "";
for (auto number : datasets.getClassesCounts(dataset)) {
oss << sep << std::setprecision(2) << fixed << (float)number / nSamples * 100.0 << "% (" << number << ")";
sep = " / ";
}
auto balance = outputBalance(oss.str());
std::cout << balance << std::endl;
// Store data for Excel report
data[dataset] = json::object();
data[dataset]["samples"] = nSamples;
data[dataset]["features"] = datasets.getFeatures(dataset).size();
data[dataset]["classes"] = datasets.getNClasses(dataset);
data[dataset]["balance"] = oss.str();
}
if (excel) {
auto report = platform::DatasetsExcel();
report.report(data);
std::cout << std::endl << Colors::GREEN() << "Output saved in " << report.getFileName() << std::endl;
}
}

void list_results(argparse::ArgumentParser& program)
{
auto dataset = program.get<string>("dataset");
auto score = program.get<string>("score");
auto model = program.get<string>("model");
auto excel = program.get<bool>("excel");
auto results = platform::ResultsDataset(dataset, model, score);
results.load();
results.sortModel();
if (results.empty()) {
std::cerr << Colors::RED() << "No results found for dataset " << dataset << " and model " << model << Colors::RESET() << std::endl;
exit(1);
}
//
// List data
//
int maxModel = results.maxModelSize();
int maxHyper = results.maxHyperSize();
double maxResult = results.maxResultScore();
std::cout << Colors::GREEN() << "Results of dataset " << dataset << " - for " << model << " model" << std::endl;
std::cout << "There are " << results.size() << " results" << std::endl;
std::cout << Colors::GREEN() << " # " << std::setw(maxModel + 1) << std::left << "Model" << "Date Time Score Hyperparameters" << std::endl;
std::cout << "=== " << std::string(maxModel, '=') << " ========== ======== =========== " << std::string(maxHyper, '=') << std::endl;
auto i = 0;
json data = json::object();
data["results"] = json::array();
for (const auto& result : results) {
auto results = result.getData();
for (const auto& item : results["results"]) {
if (item["dataset"] == dataset) {
auto color = (i % 2) ? Colors::BLUE() : Colors::CYAN();
color = item["score"].get<double>() == maxResult ? Colors::RED() : color;
std::cout << color << std::setw(3) << std::fixed << std::right << i++ << " ";
std::cout << std::setw(maxModel) << std::left << result.getModel() << " ";
std::cout << color << result.getDate() << " ";
std::cout << color << result.getTime() << " ";
std::cout << std::setw(11) << std::setprecision(9) << std::fixed << item["score"].get<double>() << " ";
std::cout << item["hyperparameters"].dump() << std::endl;
// Store data for Excel report
json res = json::object();
res["date"] = result.getDate();
res["time"] = result.getTime();
res["model"] = result.getModel();
res["score"] = item["score"].get<double>();
res["hyperparameters"] = item["hyperparameters"].dump();
data["results"].push_back(res);
break;
}
}
}
if (excel) {
data["dataset"] = dataset;
data["score"] = score;
data["model"] = model;
data["maxModel"] = maxModel;
data["maxHyper"] = maxHyper;
data["maxResult"] = maxResult;
auto report = platform::ResultsDatasetExcel();
report.report(data);
std::cout << std::endl << Colors::GREEN() << "Output saved in " << report.getFileName() << std::endl;
}
}

int main(int argc, char** argv)
{
argparse::ArgumentParser program("b_list", { platform_project_version.begin(), platform_project_version.end() });
//
// datasets subparser
//
argparse::ArgumentParser datasets_command("datasets");
datasets_command.add_description("List datasets available in the platform.");
datasets_command.add_argument("--excel").help("Output in Excel format").default_value(false).implicit_value(true);
//
// results subparser
//
argparse::ArgumentParser results_command("results");
results_command.add_description("List the results of a given dataset.");
auto datasets = platform::Datasets(false, platform::Paths::datasets());
results_command.add_argument("-d", "--dataset")
.help("Dataset to use " + datasets.toString())
.required()
.action([](const std::string& value) {
auto datasets = platform::Datasets(false, platform::Paths::datasets());
static const std::vector<std::string> choices = datasets.getNames();
if (find(choices.begin(), choices.end(), value) != choices.end()) {
return value;
}
throw std::runtime_error("Dataset must be one of " + datasets.toString());
}
);
results_command.add_argument("-m", "--model")
.help("Model to use: " + platform::Models::instance()->toString() + " or any")
.default_value("any")
.action([](const std::string& value) {
std::vector<std::string> valid(platform::Models::instance()->getNames());
valid.push_back("any");
static const std::vector<std::string> choices = valid;
if (find(choices.begin(), choices.end(), value) != choices.end()) {
return value;
}
throw std::runtime_error("Model must be one of " + platform::Models::instance()->toString() + " or any");
}
);
results_command.add_argument("--excel").help("Output in Excel format").default_value(false).implicit_value(true);
results_command.add_argument("-s", "--score").default_value("accuracy").help("Filter results of the score name supplied");

// Add subparsers
program.add_subparser(datasets_command);
program.add_subparser(results_command);
// Parse command line and execute
try {
program.parse_args(argc, argv);
bool found = false;
map<std::string, void(*)(argparse::ArgumentParser&)> commands = { {"datasets", &list_datasets}, {"results", &list_results} };
for (const auto& command : commands) {
if (program.is_subcommand_used(command.first)) {
std::invoke(command.second, program.at<argparse::ArgumentParser>(command.first));
found = true;
break;
}
}
if (!found) {
throw std::runtime_error("You must specify one of the following commands: datasets, results\n");
}
}
catch (const exception& err) {
cerr << err.what() << std::endl;
cerr << program;
exit(1);
}
std::cout << Colors::RESET() << std::endl;
return 0;
}

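A condensed sketch of the subcommand dispatch that the new b_list.cpp sets up above, using the same argparse calls (add_subparser, is_subcommand_used, at<ArgumentParser>); the program name and handlers here are placeholders, not project code:

#include <iostream>
#include <map>
#include <stdexcept>
#include <argparse/argparse.hpp>

// Placeholder handlers standing in for list_datasets / list_results.
void handle_datasets(argparse::ArgumentParser&) { std::cout << "datasets" << std::endl; }
void handle_results(argparse::ArgumentParser&) { std::cout << "results" << std::endl; }

int main(int argc, char** argv)
{
    argparse::ArgumentParser program("b_list_sketch", "1.0");
    argparse::ArgumentParser datasets_command("datasets");
    argparse::ArgumentParser results_command("results");
    program.add_subparser(datasets_command);
    program.add_subparser(results_command);
    std::map<std::string, void(*)(argparse::ArgumentParser&)> commands = {
        { "datasets", &handle_datasets }, { "results", &handle_results } };
    try {
        program.parse_args(argc, argv);
        for (const auto& command : commands) {
            if (program.is_subcommand_used(command.first)) {
                // Hand the matching subparser to its handler, as b_list.cpp does.
                command.second(program.at<argparse::ArgumentParser>(command.first));
                return 0;
            }
        }
        throw std::runtime_error("You must specify one of the following commands: datasets, results");
    }
    catch (const std::exception& err) {
        std::cerr << err.what() << std::endl << program;
        return 1;
    }
}
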
@@ -1,5 +1,5 @@
#ifndef EXPERIMENT_H
#define EXPERIMENT_H
#pragma once

#include <torch/torch.h>
#include <nlohmann/json.hpp>
#include <string>
@@ -42,5 +42,4 @@ namespace platform {
int nfolds{ 0 };
int max_name{ 7 }; // max length of dataset name for formatting (default 7)
};
}
#endif
}

@@ -1,5 +1,5 @@
#ifndef HYPERPARAMETERS_H
#define HYPERPARAMETERS_H
#pragma once

#include <string>
#include <map>
#include <vector>
@@ -20,4 +20,3 @@ namespace platform {
std::map<std::string, json> hyperparameters;
};
} /* namespace platform */
#endif /* HYPERPARAMETERS_H */

@@ -36,13 +36,15 @@ namespace platform {
[](const pair<std::string, function<bayesnet::BaseClassifier* (void)>>& pair) { return pair.first; });
return names;
}
std::string Models::tostring()
std::string Models::toString()
{
std::string result = "";
std::string sep = "";
for (const auto& pair : functionRegistry) {
result += pair.first + ", ";
result += sep + pair.first;
sep = ", ";
}
return "{" + result.substr(0, result.size() - 2) + "}";
return "{" + result + "}";
}
Registrar::Registrar(const std::string& name, function<bayesnet::BaseClassifier* (void)> classFactoryFunction)
{

@@ -1,5 +1,5 @@
#ifndef MODELS_H
#define MODELS_H
#pragma once

#include <map>
#include <bayesnet/BaseClassifier.h>
#include <bayesnet/ensembles/AODE.h>
@@ -31,7 +31,7 @@ namespace platform {
void registerFactoryFunction(const std::string& name,
function<bayesnet::BaseClassifier* (void)> classFactoryFunction);
std::vector<string> getNames();
std::string tostring();
std::string toString();

};
class Registrar {
@@ -39,4 +39,3 @@ namespace platform {
Registrar(const std::string& className, function<bayesnet::BaseClassifier* (void)> classFactoryFunction);
};
}
#endif

@@ -1,5 +1,5 @@
#ifndef RESULT_H
#define RESULT_H
#pragma once

#include <map>
#include <vector>
#include <string>
@@ -21,12 +21,14 @@ namespace platform {
std::string to_string(int maxModel) const;
std::string getFilename() const;
std::string getDate() const { return data["date"].get<std::string>(); };
std::string getTime() const { return data["time"].get<std::string>(); };
double getScore() const { return score; };
std::string getTitle() const { return data["title"].get<std::string>(); };
double getDuration() const { return data["duration"]; };
std::string getModel() const { return data["model"].get<std::string>(); };
std::string getScoreName() const { return data["score_name"].get<std::string>(); };
bool isComplete() const { return complete; };
json getData() const { return data; }
// Setters
void setTitle(const std::string& title) { data["title"] = title; };
void setLanguage(const std::string& language) { data["language"] = language; };
@@ -48,4 +50,3 @@ namespace platform {
double score = 0.0;
};
};
#endif

@@ -15,18 +15,29 @@ using json = nlohmann::json;
void manageArguments(argparse::ArgumentParser& program)
{
auto env = platform::DotEnv();
program.add_argument("-d", "--dataset").default_value("").help("Dataset file name");
auto datasets = platform::Datasets(false, platform::Paths::datasets());
program.add_argument("-d", "--dataset")
.help("Dataset file name: " + datasets.toString())
.action([](const std::string& value) {
auto datasets = platform::Datasets(false, platform::Paths::datasets());
static const std::vector<std::string> choices_datasets(datasets.getNames());
if (find(choices_datasets.begin(), choices_datasets.end(), value) != choices_datasets.end()) {
return value;
}
throw std::runtime_error("Dataset must be one of: " + datasets.toString());
}
);
program.add_argument("--hyperparameters").default_value("{}").help("Hyperparameters passed to the model in Experiment");
program.add_argument("--hyper-file").default_value("").help("Hyperparameters file name." \
"Mutually exclusive with hyperparameters. This file should contain hyperparameters for each dataset in json format.");
program.add_argument("-m", "--model")
.help("Model to use " + platform::Models::instance()->tostring())
.help("Model to use: " + platform::Models::instance()->toString())
.action([](const std::string& value) {
static const std::vector<std::string> choices = platform::Models::instance()->getNames();
if (find(choices.begin(), choices.end(), value) != choices.end()) {
return value;
}
throw std::runtime_error("Model must be one of " + platform::Models::instance()->tostring());
throw std::runtime_error("Model must be one of " + platform::Models::instance()->toString());
}
);
program.add_argument("--title").default_value("").help("Experiment title");

@@ -1,5 +1,5 @@
#ifndef MODEL_REGISTER_H
#define MODEL_REGISTER_H
#pragma once

static platform::Registrar registrarT("TAN",
[](void) -> bayesnet::BaseClassifier* { return new bayesnet::TAN();});
static platform::Registrar registrarTLD("TANLd",
@@ -27,5 +27,4 @@ static platform::Registrar registrarSvc("SVC",
static platform::Registrar registrarRaF("RandomForest",
[](void) -> bayesnet::BaseClassifier* { return new pywrap::RandomForest();});
static platform::Registrar registrarXGB("XGBoost",
[](void) -> bayesnet::BaseClassifier* { return new pywrap::XGBoost();});
#endif
[](void) -> bayesnet::BaseClassifier* { return new pywrap::XGBoost();});

@@ -1,5 +1,5 @@
#ifndef COMMAND_PARSER_H
#define COMMAND_PARSER_H
#pragma once

#include <string>
#include <vector>
#include <tuple>
@@ -17,4 +17,3 @@ namespace platform {
int index;
};
} /* namespace platform */
#endif /* COMMAND_PARSER_H */

@@ -12,8 +12,10 @@
namespace platform {

ManageResults::ManageResults(int numFiles, const std::string& model, const std::string& score, bool complete, bool partial, bool compare) :
numFiles{ numFiles }, complete{ complete }, partial{ partial }, compare{ compare }, results(Results(Paths::results(), model, score, complete, partial))
numFiles{ numFiles }, complete{ complete }, partial{ partial }, compare{ compare }, results(ResultsManager(model, score, complete, partial))
{
results.load();
results.sortDate();
indexList = true;
openExcel = false;
workbook = NULL;

@@ -1,7 +1,7 @@
#ifndef MANAGE_RESULTS_H
#define MANAGE_RESULTS_H
#pragma once

#include <xlsxwriter.h>
#include "Results.h"
#include "ResultsManager.h"

namespace platform {
class ManageResults {
@@ -23,9 +23,7 @@ namespace platform {
bool complete;
bool partial;
bool compare;
Results results;
ResultsManager results;
lxw_workbook* workbook;
};
}

#endif /* MANAGE_RESULTS_H */

@@ -1,18 +1,13 @@
#include <algorithm>
#include "Results.h"
#include "common/Paths.h"
#include "ResultsManager.h"

namespace platform {
Results::Results(const std::string& path, const std::string& model, const std::string& score, bool complete, bool partial) :
path(path), model(model), scoreName(score), complete(complete), partial(partial)
ResultsManager::ResultsManager(const std::string& model, const std::string& score, bool complete, bool partial) :
path(Paths::results()), model(model), scoreName(score), complete(complete), partial(partial), maxModel(0)
{
load();
if (!files.empty()) {
maxModel = (*max_element(files.begin(), files.end(), [](const Result& a, const Result& b) { return a.getModel().size() < b.getModel().size(); })).getModel().size();
} else {
maxModel = 0;
}
}
void Results::load()
void ResultsManager::load()
{
using std::filesystem::directory_iterator;
for (const auto& file : directory_iterator(path)) {
@@ -27,48 +22,58 @@ namespace platform {
files.push_back(result);
}
}
maxModel = std::max(size_t(5), (*max_element(files.begin(), files.end(), [](const Result& a, const Result& b) { return a.getModel().size() < b.getModel().size(); })).getModel().size());
}
void Results::hideResult(int index, const std::string& pathHidden)
void ResultsManager::hideResult(int index, const std::string& pathHidden)
{
auto filename = files.at(index).getFilename();
rename((path + "/" + filename).c_str(), (pathHidden + "/" + filename).c_str());
files.erase(files.begin() + index);
}
void Results::deleteResult(int index)
void ResultsManager::deleteResult(int index)
{
auto filename = files.at(index).getFilename();
remove((path + "/" + filename).c_str());
files.erase(files.begin() + index);
}
int Results::size() const
int ResultsManager::size() const
{
return files.size();
}
void Results::sortDate()
void ResultsManager::sortDate()
{
sort(files.begin(), files.end(), [](const Result& a, const Result& b) {
if (a.getDate() == b.getDate()) {
return a.getModel() < b.getModel();
}
return a.getDate() > b.getDate();
});
}
void Results::sortModel()
void ResultsManager::sortModel()
{
sort(files.begin(), files.end(), [](const Result& a, const Result& b) {
if (a.getModel() == b.getModel()) {
return a.getDate() > b.getDate();
}
return a.getModel() > b.getModel();
});
}
void Results::sortDuration()
void ResultsManager::sortDuration()
{
sort(files.begin(), files.end(), [](const Result& a, const Result& b) {
return a.getDuration() > b.getDuration();
});
}
void Results::sortScore()
void ResultsManager::sortScore()
{
sort(files.begin(), files.end(), [](const Result& a, const Result& b) {
if (a.getScore() == b.getScore()) {
return a.getDate() > b.getDate();
}
return a.getScore() > b.getScore();
});
}
bool Results::empty() const
bool ResultsManager::empty() const
{
return files.empty();
}

@@ -1,15 +1,15 @@
#ifndef RESULTS_H
#define RESULTS_H
#include <map>
#pragma once

#include <vector>
#include <string>
#include <nlohmann/json.hpp>
#include "main/Result.h"
namespace platform {
using json = nlohmann::json;
class Results {
class ResultsManager {
public:
Results(const std::string& path, const std::string& model, const std::string& score, bool complete, bool partial);
ResultsManager(const std::string& model, const std::string& score, bool complete, bool partial);
void load(); // Loads the list of results
void sortDate();
void sortScore();
void sortModel();
@@ -30,7 +30,5 @@ namespace platform {
bool partial;
int maxModel;
std::vector<Result> files;
void load(); // Loads the list of results
};
};
#endif
};

@@ -84,6 +84,7 @@ namespace platform {
void ExcelFile::createStyle(const std::string& name, lxw_format* style, bool odd)
{
addColor(style, odd);
auto color_bold = 0xFF0000;
if (name == "textCentered") {
format_set_align(style, LXW_ALIGN_CENTER);
format_set_font_size(style, normalSize);
@@ -94,6 +95,13 @@ namespace platform {
format_set_border(style, LXW_BORDER_THIN);
format_set_align(style, LXW_ALIGN_VERTICAL_CENTER);
format_set_text_wrap(style);
} else if (name == "text_bold") {
format_set_font_size(style, normalSize);
format_set_border(style, LXW_BORDER_THIN);
format_set_align(style, LXW_ALIGN_VERTICAL_CENTER);
format_set_font_color(style, lxw_color_t(color_bold));
format_set_bold(style);
format_set_text_wrap(style);
} else if (name == "bodyHeader") {
format_set_bold(style);
format_set_font_size(style, normalSize);
@@ -106,6 +114,13 @@ namespace platform {
format_set_align(style, LXW_ALIGN_VERTICAL_CENTER);
format_set_border(style, LXW_BORDER_THIN);
format_set_num_format(style, "0.0000000");
} else if (name == "result_bold") {
format_set_font_size(style, normalSize);
format_set_align(style, LXW_ALIGN_VERTICAL_CENTER);
format_set_border(style, LXW_BORDER_THIN);
format_set_bold(style);
format_set_font_color(style, lxw_color_t(color_bold));
format_set_num_format(style, "0.0000000");
} else if (name == "time") {
format_set_font_size(style, normalSize);
format_set_border(style, LXW_BORDER_THIN);
@@ -116,6 +131,13 @@ namespace platform {
format_set_num_format(style, "###,##0");
format_set_align(style, LXW_ALIGN_VERTICAL_CENTER);
format_set_border(style, LXW_BORDER_THIN);
} else if (name == "ints_bold") {
format_set_font_size(style, normalSize);
format_set_num_format(style, "###,##0");
format_set_align(style, LXW_ALIGN_VERTICAL_CENTER);
format_set_bold(style);
format_set_font_color(style, lxw_color_t(color_bold));
format_set_border(style, LXW_BORDER_THIN);
} else if (name == "floats") {
format_set_border(style, LXW_BORDER_THIN);
format_set_align(style, LXW_ALIGN_VERTICAL_CENTER);
@@ -131,7 +153,7 @@ namespace platform {

void ExcelFile::createFormats()
{
auto styleNames = { "text", "textCentered", "bodyHeader", "result", "time", "ints", "floats", "percentage" };
auto styleNames = { "text", "text_bold", "textCentered", "bodyHeader", "result", "result_bold", "time", "ints", "ints_bold", "floats", "percentage" };
lxw_format* style;
for (std::string name : styleNames) {
lxw_format* style = workbook_add_format(workbook);

@@ -1,5 +1,5 @@
#ifndef EXCELFILE_H
#define EXCELFILE_H
#pragma once

#include <locale>
#include <string>
#include <map>
@@ -42,4 +42,3 @@ namespace platform {
void setDefault();
};
}
#endif // !EXCELFILE_H

@@ -1,5 +1,5 @@
#ifndef REPORTBASE_H
#define REPORTBASE_H
#pragma once

#include <string>
#include <iostream>
#include <nlohmann/json.hpp>
@@ -33,4 +33,3 @@ namespace platform {
bool existBestFile = true;
};
};
#endif

@@ -1,5 +1,5 @@
#ifndef REPORTCONSOLE_H
#define REPORTCONSOLE_H
#pragma once

#include <string>
#include "common/Colors.h"
#include "ReportBase.h"
@@ -19,4 +19,3 @@ namespace platform {
void showSummary() override;
};
};
#endif

@@ -1,5 +1,5 @@
#ifndef REPORTEXCEL_H
#define REPORTEXCEL_H
#pragma once

#include <map>
#include <xlsxwriter.h>
#include "common/Colors.h"
@@ -22,4 +22,3 @@ namespace platform {
void header_notes(int row);
};
};
#endif // !REPORTEXCEL_H

@@ -8,7 +8,7 @@ if(ENABLE_TESTING)
${CMAKE_BINARY_DIR}/configured_files/include
/usr/local/include
)
set(TEST_SOURCES_PLATFORM TestUtils.cc TestPlatform.cc)
set(TEST_SOURCES_PLATFORM TestUtils.cpp TestPlatform.cpp)
add_executable(${TEST_PLATFORM} ${TEST_SOURCES_PLATFORM})
target_link_libraries(${TEST_PLATFORM} PUBLIC "${TORCH_LIBRARIES}" ArffFiles mdlp Catch2::Catch2WithMain)
add_test(NAME ${TEST_PLATFORM} COMMAND ${TEST_PLATFORM})

@@ -1,5 +1,5 @@
#ifndef TEST_UTILS_H
#define TEST_UTILS_H
#pragma once

#include <torch/torch.h>
#include <string>
#include <vector>
@@ -40,4 +40,3 @@ public:
double epsilon = 1e-5;
};

#endif //TEST_UTILS_H