Mirror of https://github.com/Doctorado-ML/benchmark.git
Begin Summary
@@ -10,23 +10,22 @@ class Models:
     def get_model(name):
         if name == "STree":
             return Stree
-        elif name == "Cart":
+        if name == "Cart":
             return DecisionTreeClassifier
-        elif name == "ExtraTree":
+        if name == "ExtraTree":
             return ExtraTreeClassifier
-        elif name == "Wodt":
+        if name == "Wodt":
             return TreeClassifier
-        elif name == "SVC":
+        if name == "SVC":
             return SVC
-        elif name == "ODTE":
+        if name == "ODTE":
             return Odte
-        else:
-            msg = f"No model recognized {name}"
-            if name == "Stree" or name == "stree":
-                msg += ", did you mean STree?"
-            elif name == "odte" or name == "Odte":
-                msg += ", did you mean ODTE?"
-            raise ValueError(msg)
+        msg = f"No model recognized {name}"
+        if name in ("Stree", "stree"):
+            msg += ", did you mean STree?"
+        elif name in ("odte", "Odte"):
+            msg += ", did you mean ODTE?"
+        raise ValueError(msg)

     @staticmethod
     def get_complexity(name, result):
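A quick usage sketch of the refactored lookup (hedged: the import path is a guess, and get_model is assumed to be a static method like get_complexity below; only the method body appears in this hunk):

    from Models import Models  # hypothetical module path

    clf_class = Models.get_model("Cart")  # DecisionTreeClassifier
    clf = clf_class()
    Models.get_model("stree")  # ValueError: No model recognized stree, did you mean STree?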
@@ -34,7 +34,7 @@ class BaseReport(abc.ABC):
         best = BestResults(score, model, Datasets())
         self.best_results = best.load({})

-    def _compute_status(self, dataset, accuracy):
+    def _compute_status(self, dataset, accuracy: float):
         best = self.best_results[dataset][0]
         status = " "
         if accuracy == best:
@@ -57,15 +57,15 @@ class BaseReport(abc.ABC):
         return meaning[status]

     @abc.abstractmethod
-    def header(self):
+    def header(self) -> None:
         pass

     @abc.abstractmethod
-    def print_line(self, result):
+    def print_line(self, result) -> None:
         pass

     @abc.abstractmethod
-    def footer(self, accuracy):
+    def footer(self, accuracy: float) -> None:
         pass


@@ -84,18 +84,18 @@ class Report(BaseReport):
         "Hyperparameters",
     ]

-    def __init__(self, file_name, compare=False):
+    def __init__(self, file_name: str, compare: bool = False):
         super().__init__(file_name)
         self.compare = compare

-    def header_line(self, text):
+    def header_line(self, text: str) -> None:
         length = sum(self.header_lengths) + len(self.header_lengths) - 3
         if text == "*":
             print("*" * (length + 2))
         else:
             print(f"*{text:{length}s}*")

-    def print_line(self, result):
+    def print_line(self, result) -> None:
         hl = self.header_lengths
         i = 0
         print(f"{result['dataset']:{hl[i]}s} ", end="")
@@ -128,7 +128,7 @@ class Report(BaseReport):
         i += 1
         print(f"{str(result['hyperparameters']):{hl[i]}s} ")

-    def header(self):
+    def header(self) -> None:
         if self.compare:
             self._load_best_results(
                 self.data["score_name"], self.data["model"]
@@ -156,7 +156,7 @@ class Report(BaseReport):
             line_col += "=" * underscore + " "
         print(f"\n{line_col}")

-    def footer(self, accuracy):
+    def footer(self, accuracy: float) -> None:
         self.header_line("*")
         if self.compare:
             for key, value in self._compare_totals.items():
src/Summary.py (new file, +35)
@@ -0,0 +1,35 @@
+import os
+from Utils import Folders, Files
+from Results import BaseReport
+
+
+class StubReport(BaseReport):
+    def __init__(self, file_name):
+        super().__init__(file_name=file_name, best_file=False)
+
+    def print_line(self, line) -> None:
+        pass
+
+    def header(self) -> None:
+        pass
+
+    def footer(self, accuracy: float) -> None:
+        self.accuracy = accuracy
+
+
+class Summary:
+    def __init__(self) -> None:
+        self.results = Files().get_all_results()
+
+    def list(self) -> None:
+        """List all results"""
+        max_length = max([len(x) for x in self.results])
+        for result in self.results:
+            report = StubReport(os.path.join(Folders.results, result))
+            report.report()
+            print(f"{result:{max_length}s} {report.accuracy:7.3f}")
+        print("\n".join(self.results))
+
+
+if __name__ == "__main__":
+    Summary().list()
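A sketch of how the new script is meant to be invoked and what it prints (the result file name below is a made-up placeholder; real names follow the "results_<score>_<model>_..." pattern handled in Utils.py):

    $ python src/Summary.py
    results_accuracy_STree_..._0.json   0.957
    ...

Each line pairs a result file with the accuracy captured by StubReport.footer(), followed by the plain list of file names.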
src/Utils.py (30 changed lines)
@@ -13,7 +13,7 @@ class Folders:

 class Files:
     index = "all.txt"
-
+    report_ext = ".json"
     cmd_open_macos = "/usr/bin/open"
     cmd_open_linux = "/usr/bin/xdg-open"
     exreport_pdf = "Rplots.pdf"
@@ -47,15 +47,13 @@ class Files:
             f"{stratified}.json"
         )

-    @staticmethod
-    def results_suffixes(score="", model=""):
-        suffix = ".json"
+    def results_suffixes(self, score="", model=""):
+        suffix = self.report_ext
         if model == "" and score == "":
             return "results_", suffix
-        elif model == "":
+        if model == "":
             return f"results_{score}_", suffix
-        else:
-            return f"results_{score}_{model}_", suffix
+        return f"results_{score}_{model}_", suffix

     @staticmethod
     def dataset(name):
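For reference, the three shapes results_suffixes() can now return (follows directly from the code above; the score and model values are illustrative):

    files = Files()
    files.results_suffixes()                                 # ("results_", ".json")
    files.results_suffixes(score="accuracy")                 # ("results_accuracy_", ".json")
    files.results_suffixes(score="accuracy", model="STree")  # ("results_accuracy_STree_", ".json")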
@@ -75,6 +73,24 @@ class Files:
         )
         subprocess.run([command, name])

+    def get_all_results(self) -> list[str]:
+        first_path = "."
+        first_try = os.path.join(first_path, Folders.results)
+        second_path = ".."
+        second_try = os.path.join(second_path, first_try)
+        if os.path.isdir(first_try):
+            files_list = os.listdir(first_try)
+        elif os.path.isdir(second_try):
+            files_list = os.listdir(second_try)
+        else:
+            raise ValueError(f"{first_try} or {second_try} does not exist")
+        result = []
+        prefix, suffix = self.results_suffixes()
+        for result_file in files_list:
+            if result_file.startswith(prefix) and result_file.endswith(suffix):
+                result.append(result_file)
+        return result
+

 class Symbols:
     check_mark = "\N{heavy check mark}"
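A usage sketch for the new helper (hedged: Folders.results is defined earlier in Utils.py and is outside this diff):

    try:
        names = Files().get_all_results()  # only "results_*.json"-style files
    except ValueError as err:
        print(err)  # neither candidate results directory exists
    else:
        print(f"{len(names)} result files found")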