Rename BestResult to BestScore
Parent: 5fa0b957dd
Commit: 337b6f7e79
@@ -1,7 +1,7 @@
-#ifndef BESTRESULT_H
-#define BESTRESULT_H
+#ifndef BESTSCORE_H
+#define BESTSCORE_H
 #include <string>
-class BestResult {
+class BestScore {
 public:
 static std::string title() { return "STree_default (linear-ovo)"; }
 static double score() { return 22.109799; }
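For reference, the renamed header would read roughly as follows after this hunk. The guard, class name, title() and score() come directly from the diff; scoreName() (used by the callers updated later in this commit) and the closing lines are not shown here, so they are only an assumption:

#ifndef BESTSCORE_H
#define BESTSCORE_H
#include <string>
class BestScore {
public:
    static std::string title() { return "STree_default (linear-ovo)"; }
    static double score() { return 22.109799; }
    static std::string scoreName() { return "accuracy"; } // assumed return value; not shown in this hunk
};
#endif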
@@ -8,6 +8,7 @@ include_directories(${BayesNet_SOURCE_DIR}/lib/libxlsxwriter/include)
 add_executable(main main.cc Folding.cc platformUtils.cc Experiment.cc Datasets.cc Models.cc ReportConsole.cc ReportBase.cc)
 add_executable(manage manage.cc Results.cc ReportConsole.cc ReportExcel.cc ReportBase.cc Datasets.cc platformUtils.cc)
 add_executable(list list.cc platformUtils Datasets.cc)
+add_executable(best list.cc platformUtils Datasets.cc)
 target_link_libraries(main BayesNet ArffFiles mdlp "${TORCH_LIBRARIES}")
 if (${CMAKE_HOST_SYSTEM_NAME} MATCHES "Linux")
 target_link_libraries(manage "${TORCH_LIBRARIES}" libxlsxwriter.so ArffFiles mdlp stdc++fs)
@@ -2,7 +2,7 @@
 #include <locale>
 #include "Datasets.h"
 #include "ReportBase.h"
-#include "BestResult.h"
+#include "BestScore.h"
 
 
 namespace platform {
@@ -1,7 +1,7 @@
 #include <sstream>
 #include <locale>
 #include "ReportConsole.h"
-#include "BestResult.h"
+#include "BestScore.h"
 
 
 namespace platform {
@@ -99,9 +99,9 @@ namespace platform {
 cout << Colors::MAGENTA() << string(MAXL, '*') << endl;
 showSummary();
 auto score = data["score_name"].get<string>();
-if (score == BestResult::scoreName()) {
+if (score == BestScore::scoreName()) {
 stringstream oss;
-oss << score << " compared to " << BestResult::title() << " .: " << totalScore / BestResult::score();
+oss << score << " compared to " << BestScore::title() << " .: " << totalScore / BestScore::score();
 cout << headerLine(oss.str());
 }
 if (!getExistBestFile() && compare) {
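To make the comparison printed above concrete (the totalScore value is hypothetical, the reference comes from BestScore): if totalScore accumulated to 21.0 across all datasets, the header line would report 21.0 / 22.109799 ≈ 0.9498, i.e. roughly 95% of the STree_default (linear-ovo) reference score.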
@@ -1,7 +1,7 @@
 #include <sstream>
 #include <locale>
 #include "ReportExcel.h"
-#include "BestResult.h"
+#include "BestScore.h"
 
 
 namespace platform {
@@ -322,9 +322,9 @@ namespace platform {
 showSummary();
 row += 4 + summary.size();
 auto score = data["score_name"].get<string>();
-if (score == BestResult::scoreName()) {
-worksheet_merge_range(worksheet, row, 1, row, 5, (score + " compared to " + BestResult::title() + " .:").c_str(), efectiveStyle("text"));
-writeDouble(row, 6, totalScore / BestResult::score(), "result");
+if (score == BestScore::scoreName()) {
+worksheet_merge_range(worksheet, row, 1, row, 5, (score + " compared to " + BestScore::title() + " .:").c_str(), efectiveStyle("text"));
+writeDouble(row, 6, totalScore / BestScore::score(), "result");
 }
 if (!getExistBestFile() && compare) {
 worksheet_write_string(worksheet, row + 1, 0, "*** Best Results File not found. Couldn't compare any result!", styles["summaryStyle"]);
@@ -3,7 +3,7 @@
 #include "Results.h"
 #include "ReportConsole.h"
 #include "ReportExcel.h"
-#include "BestResult.h"
+#include "BestScore.h"
 #include "Colors.h"
 namespace platform {
 Result::Result(const string& path, const string& filename)
@@ -17,8 +17,8 @@ namespace platform {
 score += result["score"].get<double>();
 }
 scoreName = data["score_name"];
-if (scoreName == BestResult::scoreName()) {
-score /= BestResult::score();
+if (scoreName == BestScore::scoreName()) {
+score /= BestScore::score();
 }
 title = data["title"];
 duration = data["duration"];
src/Platform/best.cc (new file, 52 lines)
@@ -0,0 +1,52 @@
+#include <iostream>
+#include <argparse/argparse.hpp>
+#include "platformUtils.h"
+#include "Paths.h"
+#include "Results.h"
+
+using namespace std;
+
+argparse::ArgumentParser manageArguments(int argc, char** argv)
+{
+    argparse::ArgumentParser program("best");
+    program.add_argument("-n", "--number").default_value(0).help("Number of results to show (0 = all)").scan<'i', int>();
+    program.add_argument("-m", "--model").default_value("any").help("Filter results of the selected model)");
+    program.add_argument("-s", "--score").default_value("any").help("Filter results of the score name supplied");
+    program.add_argument("--complete").help("Show only results with all datasets").default_value(false).implicit_value(true);
+    program.add_argument("--partial").help("Show only partial results").default_value(false).implicit_value(true);
+    program.add_argument("--compare").help("Compare with best results").default_value(false).implicit_value(true);
+    try {
+        program.parse_args(argc, argv);
+        auto number = program.get<int>("number");
+        if (number < 0) {
+            throw runtime_error("Number of results must be greater than or equal to 0");
+        }
+        auto model = program.get<string>("model");
+        auto score = program.get<string>("score");
+        auto complete = program.get<bool>("complete");
+        auto partial = program.get<bool>("partial");
+        auto compare = program.get<bool>("compare");
+    }
+    catch (const exception& err) {
+        cerr << err.what() << endl;
+        cerr << program;
+        exit(1);
+    }
+    return program;
+}
+
+int main(int argc, char** argv)
+{
+    auto program = manageArguments(argc, argv);
+    auto number = program.get<int>("number");
+    auto model = program.get<string>("model");
+    auto score = program.get<string>("score");
+    auto complete = program.get<bool>("complete");
+    auto partial = program.get<bool>("partial");
+    auto compare = program.get<bool>("compare");
+    if (complete)
+        partial = false;
+    auto results = platform::Results(platform::Paths::results(), number, model, score, complete, partial, compare);
+    results.manage();
+    return 0;
+}
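Assuming the best target builds with the sources added to CMakeLists.txt above, the new tool would presumably be invoked along the lines of best -n 10 -m STree -s accuracy --compare (the model and score values here are only illustrative); the filtering and the comparison against the BestScore reference are then handled inside platform::Results and the Result constructor shown in the earlier hunks.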