Begin refactor main scripts

This commit is contained in:
2022-04-09 19:12:47 +02:00
parent 300033ce4e
commit de90374960
6 changed files with 182 additions and 128 deletions

View File

@@ -1159,3 +1159,56 @@ class Summary:
input_data=self.best_results(score=score, n=n),
sort_key="metric",
)
class PairCheck:
    """Compare the best stored results of two models dataset-by-dataset.

    After ``compute()`` runs, each dataset is classified by whether
    ``model_a`` scored higher than, lower than, or equal to ``model_b``;
    ``print()`` then renders a summary table with win/tie/loose counts.
    """

    def __init__(self, score, model_a, model_b, winners=False, loosers=False):
        # Metric used to select each model's best stored result.
        self.score = score
        self.model_a = model_a
        self.model_b = model_b
        # Flags: when True, print() also lists the dataset names.
        # NOTE(review): "loosers" (sic) kept for backward compatibility.
        self.winners = winners
        self.loosers = loosers
        # Filled by compute(): dataset names where model_a wins/loses/ties.
        self.winners_data = []
        self.loosers_data = []
        self.tie_data = []
        # Filled by compute(): best summary rows and their parsed reports.
        # Kept on the instance so print() can use them (the original print()
        # referenced undefined locals such as report_1/model2 and crashed).
        self.best_a = None
        self.best_b = None
        self.report_a = None
        self.report_b = None

    def compute(self):
        """Load both models' best reports and classify every dataset."""
        summary = Summary()
        summary.acquire()
        self.best_a = summary.best_result(
            criterion="model", value=self.model_a, score=self.score
        )
        self.best_b = summary.best_result(
            criterion="model", value=self.model_b, score=self.score
        )
        self.report_a = StubReport(
            os.path.join(Folders.results, self.best_a["file"])
        )
        self.report_a.report()
        self.report_b = StubReport(
            os.path.join(Folders.results, self.best_b["file"])
        )
        self.report_b.report()
        # Reports are assumed to list the same datasets in the same order
        # (zip pairs them positionally) — TODO confirm against StubReport.
        for result_a, result_b in zip(self.report_a.lines, self.report_b.lines):
            delta = result_a["score"] - result_b["score"]
            if delta > 0:
                self.winners_data.append(result_a["dataset"])
            elif delta < 0:
                self.loosers_data.append(result_a["dataset"])
            else:
                self.tie_data.append(result_a["dataset"])

    def print(self):
        """Print the comparison table; ``compute()`` must have run first."""
        # Counts are derived from the classification done in compute().
        win = len(self.winners_data)
        tie = len(self.tie_data)
        loose = len(self.loosers_data)
        print(f"{'Model':<20} {'File':<70} {'Score':<10} Win Tie Loose")
        print("=" * 20 + " " + "=" * 70 + " " + "=" * 10 + " === === =====")
        print(
            f"{self.model_a:<20} {self.best_a['file']:<70} "
            f"{self.report_a.score:10.5f}"
        )
        print(
            f"{self.model_b:<20} {self.best_b['file']:<70} "
            f"{self.report_b.score:10.5f} "
            f"{TextColor.GREEN}{win:3d} {TextColor.YELLOW}{tie:3d} "
            f"{TextColor.RED}{loose:5d}"
        )
        if self.winners:
            print(TextColor.GREEN + "Winners:")
            print(self.winners_data)
        if self.loosers:
            print(TextColor.RED + "Loosers:")
            print(self.loosers_data)

View File

@@ -64,8 +64,8 @@ def parse_arguments():
)
if __name__ == "__main__":
    # Parse the CLI options, load the stored results, and list those
    # matching the requested score/model, sorted by the chosen key.
    (excel, score, model, key, number, hidden) = parse_arguments()
    summary = Summary(hidden=hidden)
    summary.acquire()
    summary.list_results(score=score, model=model, sort_key=key, number=number)

View File

@@ -117,6 +117,7 @@ def parse_arguments():
)
if __name__ == "__main__":
(
stratified,
score,

View File

@@ -59,6 +59,7 @@ def parse_arguments():
)
if __name__ == "__main__":
(
score,
model1,
@@ -66,7 +67,6 @@ def parse_arguments():
win_results,
loose_results,
) = parse_arguments()
summary = Summary()
summary.acquire()
win = tie = loose = 0

View File

@@ -114,6 +114,7 @@ def default_report():
)
if __name__ == "__main__":
(file, excel, sql, compare, best, grid, score, model) = parse_arguments()
if grid:
best = False

View File

@@ -40,22 +40,21 @@ def parse_arguments():
)
if __name__ == "__main__":
(
score,
model,
list_results,
) = parse_arguments()
all_metrics = ["accuracy", "f1-macro", "f1-micro"]
metrics = all_metrics if score == "all" else [score]
summary = Summary()
summary.acquire()
for metric in metrics:
title = f"BEST RESULT of {metric} for {model}"
best = summary.best_result(criterion="model", value=model, score=metric)
best = summary.best_result(
criterion="model", value=model, score=metric
)
summary.show_result(data=best, title=title)
summary.show_result(
summary.best_result(score=metric), title=f"BEST RESULT of {metric}"