4 Commits

SHA1 Message Date
f264c84209 Add first select table with data 2023-06-05 02:09:09 +02:00
fba919de8c Login required 2023-06-05 00:52:14 +02:00
ddd1ae7c5b Working with bootstrap-flask 2023-06-04 22:51:02 +02:00
848ecbd5be init refactor 2023-06-04 19:09:29 +02:00
63 changed files with 650 additions and 1340 deletions

View File

@@ -1,9 +1,12 @@
+[![CI](https://github.com/Doctorado-ML/benchmark/actions/workflows/main.yml/badge.svg)](https://github.com/Doctorado-ML/benchmark/actions/workflows/main.yml)
 [![codecov](https://codecov.io/gh/Doctorado-ML/benchmark/branch/main/graph/badge.svg?token=ZRP937NDSG)](https://codecov.io/gh/Doctorado-ML/benchmark)
+[![Quality Gate Status](https://sonar.rmontanana.es/api/project_badges/measure?project=benchmark&metric=alert_status&token=336a6e501988888543c3153baa91bad4b9914dd2)](https://sonar.rmontanana.es/dashboard?id=benchmark)
+[![Technical Debt](https://sonar.rmontanana.es/api/project_badges/measure?project=benchmark&metric=sqale_index&token=336a6e501988888543c3153baa91bad4b9914dd2)](https://sonar.rmontanana.es/dashboard?id=benchmark)
 ![https://img.shields.io/badge/python-3.8%2B-blue](https://img.shields.io/badge/python-3.8%2B-brightgreen)
 # benchmark
-Benchmarking Python models
+Benchmarking models

 ## Experimentation

View File

@@ -13,27 +13,21 @@ ALL_METRICS = (
 class EnvData:
-    def __init__(self):
-        self.args = {}
-
-    def load(self):
+    @staticmethod
+    def load():
+        args = {}
         try:
             with open(Files.dot_env) as f:
                 for line in f.read().splitlines():
                     if line == "" or line.startswith("#"):
                         continue
                     key, value = line.split("=")
-                    self.args[key] = value
+                    args[key] = value
         except FileNotFoundError:
             print(NO_ENV, file=sys.stderr)
             exit(1)
         else:
-            return self.args
-
-    def save(self):
-        with open(Files.dot_env, "w") as f:
-            for key, value in self.args.items():
-                f.write(f"{key}={value}\n")
+            return args


 class EnvDefault(argparse.Action):
@@ -41,7 +35,7 @@ class EnvDefault(argparse.Action):
     def __init__(
         self, envvar, required=True, default=None, mandatory=False, **kwargs
     ):
-        self._args = EnvData().load()
+        self._args = EnvData.load()
         self._overrides = {}
         if required and not mandatory:
             default = self._args[envvar]
@@ -160,15 +154,6 @@ class Arguments(argparse.ArgumentParser):
"help": "Ignore nan results", "help": "Ignore nan results",
}, },
], ],
"iwss": [
("--iwss",),
{
"default": False,
"action": "store_true",
"required": False,
"help": "Do IWSS with training set and then apply to test set",
},
],
"key": [ "key": [
("-k", "--key"), ("-k", "--key"),
{ {
@@ -244,19 +229,6 @@ class Arguments(argparse.ArgumentParser):
"help": "number of folds", "help": "number of folds",
}, },
], ],
"output": [
("-o", "--output"),
{
"type": str,
"default": "local",
"choices": ["local", "docker"],
"required": False,
"help": (
"in be_flask tells if it is running in local or "
"in docker {local, docker}"
),
},
],
"platform": [ "platform": [
("-P", "--platform"), ("-P", "--platform"),
{ {
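Note: after this refactor EnvData is used as a namespace only; load() is a staticmethod returning a plain dict, and the save() writer is gone, so the .env file is now read-only from the library's side. A minimal sketch of the new call site (the "seeds" value shown is hypothetical, not taken from the repo's actual .env):

    from benchmark.Arguments import EnvData

    env = EnvData.load()   # was: EnvData().load()
    seeds = env["seeds"]   # e.g. '[57, 31, 1714]', parsed with json.loads elsewhere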

View File

@@ -32,8 +32,6 @@ class DatasetsArff:
     def get_range_features(X, c_features):
         if c_features.strip() == "all":
             return list(range(X.shape[1]))
-        if c_features.strip() == "none":
-            return []
         return json.loads(c_features)

     def load(self, name, class_name):
@@ -111,7 +109,7 @@ class DatasetsSurcov:
 class Datasets:
     def __init__(self, dataset_name=None, discretize=None):
-        env_data = EnvData().load()
+        env_data = EnvData.load()
         # DatasetsSurcov, DatasetsTanveer, DatasetsArff,...
         source_name = getattr(
             __import__(__name__),
@@ -131,28 +129,29 @@ class Datasets:
     def _init_names(self, dataset_name):
         file_name = os.path.join(self.dataset.folder(), Files.index)
+        default_class = "class"
         self.continuous_features = {}
         with open(file_name) as f:
             sets = f.read().splitlines()
             sets = [x for x in sets if not x.startswith("#")]
-        results = []
-        class_names = []
-        for set_name in sets:
-            try:
-                name, class_name, features = set_name.split(";")
-            except ValueError:
-                class_name = "class"
-                features = "all"
-                name = set_name
-            results.append(name)
-            class_names.append(class_name)
-            features = features.strip()
-            self.continuous_features[name] = features
+        class_names = [default_class] * len(sets)
+        if "," in sets[0]:
+            result = []
+            class_names = []
+            for data in sets:
+                name, class_name, features = data.split(",", 2)
+                result.append(name)
+                class_names.append(class_name)
+                self.continuous_features[name] = features
+            sets = result
+        else:
+            for name in sets:
+                self.continuous_features[name] = None
         # Set as dataset list the dataset passed as argument
         if dataset_name is None:
-            return class_names, results
+            return class_names, sets
         try:
-            class_name = class_names[results.index(dataset_name)]
+            class_name = class_names[sets.index(dataset_name)]
         except ValueError:
             raise ValueError(f"Unknown dataset: {dataset_name}")
         return [class_name], [dataset_name]
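Note: _init_names now accepts two index layouts, chosen by whether the first line contains a comma; the old semicolon format is gone. A hedged sketch of an index file under each layout (dataset names are illustrative; Files.index is the repo's constant for the file name):

    # comma layout -- name,class_name,continuous_features; split(",", 2) keeps
    # a JSON feature list with embedded commas intact:
    iris,class,[0, 1, 2, 3]
    # plain layout -- one name per line; the class defaults to "class" and the
    # continuous feature spec to None:
    iris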

View File

@@ -7,17 +7,12 @@ import time
 from datetime import datetime
 from tqdm import tqdm
 import numpy as np
-from mufs import MUFS
 from sklearn.model_selection import (
     StratifiedKFold,
     KFold,
     GridSearchCV,
+    cross_validate,
 )
-from sklearn.svm import LinearSVC
-from sklearn.feature_selection import SelectFromModel
-from sklearn.preprocessing import label_binarize
-from sklearn.base import clone
-from sklearn.metrics import check_scoring, roc_auc_score
 from .Utils import Folders, Files, NO_RESULTS
 from .Datasets import Datasets
 from .Models import Models
@@ -27,7 +22,7 @@ from .Arguments import EnvData
 class Randomized:
     @staticmethod
     def seeds():
-        return json.loads(EnvData().load()["seeds"])
+        return json.loads(EnvData.load()["seeds"])


 class BestResults:
@@ -120,10 +115,9 @@ class Experiment:
         ignore_nan=True,
         fit_features=None,
         discretize=None,
-        iwss=False,
         folds=5,
     ):
-        env_data = EnvData().load()
+        env_data = EnvData.load()
         today = datetime.now()
         self.time = today.strftime("%H:%M:%S")
         self.date = today.strftime("%Y-%m-%d")
@@ -182,7 +176,6 @@ class Experiment:
         self.random_seeds = Randomized.seeds()
         self.results = []
         self.duration = 0
-        self.iwss = iwss
         self._init_experiment()

     def get_output_file(self):
@@ -219,362 +212,52 @@ class Experiment:
res["state_names"] = states res["state_names"] = states
return res return res
# def _n_fold_crossval(self, name, X, y, hyperparameters):
# if self.scores != []:
# raise ValueError("Must init experiment before!")
# loop = tqdm(
# self.random_seeds,
# position=1,
# leave=False,
# disable=not self.progress_bar,
# )
# for random_state in loop:
# loop.set_description(f"Seed({random_state:4d})")
# random.seed(random_state)
# np.random.seed(random_state)
# kfold = self.stratified_class(
# shuffle=True, random_state=random_state, n_splits=self.folds
# )
# clf = self._build_classifier(random_state, hyperparameters)
# fit_params = self._build_fit_params(name)
# self.version = Models.get_version(self.model_name, clf)
# with warnings.catch_warnings():
# warnings.filterwarnings("ignore")
# if self.iwss:
# # Manual cross-validation with IWSS feature selection
# fold_scores = []
# fold_times = []
# fold_estimators = []
# for train_idx, test_idx in kfold.split(X, y):
# # Split data
# X_train, X_test = X[train_idx], X[test_idx]
# y_train, y_test = y[train_idx], y[test_idx]
# # Apply IWSS feature selection
# transformer = MUFS()
# transformer.iwss(X_train, y_train, 0.5)
# X_train_selected = X_train[
# :, transformer.get_results()
# ]
# X_test_selected = X_test[:, transformer.get_results()]
# # print("Selected features:", transformer.get_results())
# # print(
# # f"Number of selected features: {X_train_selected.shape[1]}"
# # )
# # Clone classifier to avoid data leakage between folds
# clf_fold = clone(clf)
# # Fit the classifier
# start_time = time.time()
# clf_fold.fit(X_train_selected, y_train)
# fit_time = time.time() - start_time
# # Score on test set
# score_func = get_scorer(
# self.score_name.replace("-", "_")
# )
# # Handle scoring based on the metric type
# if self.score_name in [
# "roc_auc",
# "log_loss",
# "roc_auc_ovr",
# "roc_auc_ovo",
# ]:
# # These metrics need probabilities
# if hasattr(clf_fold, "predict_proba"):
# y_score = clf_fold.predict_proba(
# X_test_selected
# )
# # Handle missing classes in the fold
# if len(unique_train_classes) < len(
# unique_all_classes
# ):
# # Create a full probability matrix with zeros for missing classes
# y_score_full = np.zeros(
# (len(y_test), len(unique_all_classes))
# )
# for i, class_label in enumerate(
# unique_train_classes
# ):
# class_idx = np.where(
# unique_all_classes == class_label
# )[0][0]
# y_score_full[:, class_idx] = y_score[
# :, i
# ]
# y_score = y_score_full
# else:
# # Fallback to decision_function for SVM-like models
# y_score = clf_fold.decision_function(
# X_test_selected
# )
# test_score = score_func._score_func(
# y_test, y_score
# )
# else:
# # For metrics that use predictions (accuracy, f1, etc.)
# test_score = score_func(
# clf_fold, X_test_selected, y_test
# )
# fold_scores.append(test_score)
# fold_times.append(fit_time)
# fold_estimators.append(clf_fold)
# # Package results to match cross_validate output format
# res = {
# "test_score": np.array(fold_scores),
# "fit_time": np.array(fold_times),
# "estimator": fold_estimators,
# }
# else:
# # Original cross_validate approach
# res = cross_validate(
# clf,
# X,
# y,
# cv=kfold,
# fit_params=fit_params,
# return_estimator=True,
# scoring=self.score_name.replace("-", "_"),
# )
# # Handle NaN values
# if np.isnan(res["test_score"]).any():
# if not self.ignore_nan:
# print(res["test_score"])
# raise ValueError("NaN in results")
# results = res["test_score"][~np.isnan(res["test_score"])]
# else:
# results = res["test_score"]
# # Store results
# self.scores.extend(results)
# self.times.extend(res["fit_time"])
# for result_item in res["estimator"]:
# nodes_item, leaves_item, depth_item = (
# Models.get_complexity(self.model_name, result_item)
# )
# self.nodes.append(nodes_item)
# self.leaves.append(leaves_item)
# self.depths.append(depth_item)
# from sklearn.base import clone
# import numpy as np
# import time
# import warnings
# from tqdm import tqdm
def _n_fold_crossval(self, name, X, y, hyperparameters): def _n_fold_crossval(self, name, X, y, hyperparameters):
if self.scores != []: if self.scores != []:
raise ValueError("Must init experiment before!") raise ValueError("Must init experiment before!")
# Get all unique classes and check data
unique_all_classes = np.sort(np.unique(y))
n_classes = len(unique_all_classes)
# Check if we have enough samples per class for stratified k-fold
min_samples_per_class = np.min(np.bincount(y))
if min_samples_per_class < self.folds:
warnings.warn(
f"Class imbalance detected: minimum class has {min_samples_per_class} samples. "
f"Consider using fewer folds or handling imbalanced data."
)
loop = tqdm( loop = tqdm(
self.random_seeds, self.random_seeds,
position=1, position=1,
leave=False, leave=False,
disable=not self.progress_bar, disable=not self.progress_bar,
) )
for random_state in loop: for random_state in loop:
loop.set_description(f"Seed({random_state:4d})") loop.set_description(f"Seed({random_state:4d})")
random.seed(random_state)
np.random.seed(random_state) np.random.seed(random_state)
kfold = self.stratified_class( kfold = self.stratified_class(
shuffle=True, random_state=random_state, n_splits=self.folds shuffle=True, random_state=random_state, n_splits=self.folds
) )
clf = self._build_classifier(random_state, hyperparameters) clf = self._build_classifier(random_state, hyperparameters)
fit_params = self._build_fit_params(name) fit_params = self._build_fit_params(name)
self.version = Models.get_version(self.model_name, clf) self.version = Models.get_version(self.model_name, clf)
# Check if the classifier supports probability predictions
scorer = check_scoring(clf, scoring="roc_auc_ovr")
if not hasattr(clf, "predict_proba") and not hasattr(
clf, "decision_function"
):
raise ValueError(
f"Classifier {self.model_name} doesn't support probability predictions "
"required for ROC-AUC scoring"
)
with warnings.catch_warnings(): with warnings.catch_warnings():
warnings.filterwarnings("ignore") warnings.filterwarnings("ignore")
res = cross_validate(
fold_scores = [] clf,
fold_times = [] X,
fold_estimators = [] y,
cv=kfold,
for fold_idx, (train_idx, test_idx) in enumerate( fit_params=fit_params,
kfold.split(X, y) return_estimator=True,
): scoring=self.score_name.replace("-", "_"),
# Split data )
X_train, X_test = X[train_idx], X[test_idx] if np.isnan(res["test_score"]).any():
y_train, y_test = y[train_idx], y[test_idx] if not self.ignore_nan:
print(res["test_score"])
# Check classes in this fold raise ValueError("NaN in results")
unique_test_classes = np.unique(y_test) results = res["test_score"][~np.isnan(res["test_score"])]
n_test_classes = len(unique_test_classes) else:
results = res["test_score"]
# Skip fold if we don't have at least 2 classes in test set self.scores.extend(results)
if n_test_classes < 2: self.times.extend(res["fit_time"])
warnings.warn( for result_item in res["estimator"]:
f"Fold {fold_idx}: Test set has only {n_test_classes} class(es). " nodes_item, leaves_item, depth_item = Models.get_complexity(
f"Skipping this fold for ROC-AUC calculation." self.model_name, result_item
) )
fold_scores.append(np.nan) self.nodes.append(nodes_item)
fold_times.append(np.nan) self.leaves.append(leaves_item)
fold_estimators.append(None) self.depths.append(depth_item)
continue
# Apply IWSS feature selection if enabled
if self.iwss:
# transformer = (
# MUFS(discrete=False)
# if "cli_rad" in name
# else MUFS(discrete=True)
# )
# transformer.iwss(X_train, y_train, 0.5)
# selected_features = transformer.get_results()
# Apply L1-based feature selection
# Using LinearSVC with L1 penalty
lsvc = LinearSVC(
C=0.1, # Regularization parameter - adjust this for more/fewer features
penalty="l1",
dual=False,
max_iter=2000,
random_state=random_state,
)
selector = SelectFromModel(lsvc, prefit=False)
selector.fit(X_train, y_train)
# Transform the data
X_train_selected = selector.transform(X_train)
X_test_selected = selector.transform(X_test)
# Get information about selected features
selected_features = selector.get_support(indices=True)
n_selected = len(selected_features)
if len(selected_features) == 0:
warnings.warn(
f"Fold {fold_idx}: No features selected by IWSS. Using all features."
)
X_train_selected = X_train
X_test_selected = X_test
else:
X_train_selected = X_train[:, selected_features]
X_test_selected = X_test[:, selected_features]
else:
X_train_selected = X_train
X_test_selected = X_test
# Clone and fit classifier
clf_fold = clone(clf)
start_time = time.time()
clf_fold.fit(X_train_selected, y_train)
fit_time = time.time() - start_time
# Get probability predictions
y_proba = clf_fold.predict_proba(X_test_selected)
# Calculate ROC-AUC score
# Handle case where test set doesn't have all classes
if len(clf_fold.classes_) != len(unique_test_classes):
# Map probabilities to only test classes
test_class_indices = [
np.where(clf_fold.classes_ == c)[0][0]
for c in unique_test_classes
if c in clf_fold.classes_
]
y_proba = y_proba[:, test_class_indices]
# Binarize labels for multi-class ROC-AUC
y_test_binarized = label_binarize(
y_test, classes=unique_test_classes
)
# Calculate ROC-AUC with OVR strategy
if n_test_classes == 2:
# Binary classification
test_score = roc_auc_score(y_test, y_proba[:, 1])
else:
# Multi-class with macro-average
test_score = roc_auc_score(
y_test_binarized,
y_proba,
multi_class="ovr",
average="macro",
)
fold_scores.append(test_score)
fold_times.append(fit_time)
fold_estimators.append(clf_fold)
# Filter out NaN scores if ignore_nan is True
scores_array = np.array(fold_scores)
times_array = np.array(fold_times)
if np.isnan(scores_array).any():
if not self.ignore_nan:
nan_folds = np.where(np.isnan(scores_array))[0]
raise ValueError(
f"NaN scores in folds {nan_folds}. "
f"Set ignore_nan=True to skip these folds."
)
else:
# Filter out NaN values
valid_mask = ~np.isnan(scores_array)
scores_array = scores_array[valid_mask]
times_array = times_array[valid_mask]
fold_estimators = [
e
for e, valid in zip(fold_estimators, valid_mask)
if valid
]
if len(scores_array) == 0:
warnings.warn(
f"All folds resulted in NaN for seed {random_state}. Skipping."
)
continue
# Store results
self.scores.extend(scores_array)
self.times.extend(times_array)
# Store complexity metrics
for estimator in fold_estimators:
if estimator is not None:
nodes_item, leaves_item, depth_item = (
Models.get_complexity(self.model_name, estimator)
)
self.nodes.append(nodes_item)
self.leaves.append(leaves_item)
self.depths.append(depth_item)
def _add_results(self, name, hyperparameters, samples, features, classes): def _add_results(self, name, hyperparameters, samples, features, classes):
record = {} record = {}
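Note: the rewrite hands the whole fold loop back to scikit-learn. With return_estimator=True, cross_validate returns a dict carrying exactly the keys the new code reads; a minimal self-contained sketch with a stand-in classifier:

    from sklearn.model_selection import cross_validate, StratifiedKFold
    from sklearn.tree import DecisionTreeClassifier
    from sklearn.datasets import load_iris

    X, y = load_iris(return_X_y=True)
    res = cross_validate(
        DecisionTreeClassifier(random_state=0), X, y,
        cv=StratifiedKFold(n_splits=5, shuffle=True, random_state=0),
        return_estimator=True, scoring="accuracy",
    )
    # res["test_score"] and res["fit_time"] are arrays of length n_splits;
    # res["estimator"] is the list of fitted clones the complexity loop walks.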

View File

@@ -8,8 +8,7 @@ from sklearn.ensemble import (
 )
 from sklearn.svm import SVC
 from stree import Stree
-# from bayesclass.clfs import TAN, KDB, AODE, KDBNew, TANNew, AODENew, BoostAODE
+from bayesclass.clfs import TAN, KDB, AODE, KDBNew, TANNew, AODENew
 from wodt import Wodt
 from odte import Odte
 from xgboost import XGBClassifier
@@ -40,13 +39,12 @@ class Models:
     def define_models(random_state):
         return {
             "STree": Stree(random_state=random_state),
-            # "TAN": TAN(random_state=random_state),
-            # "KDB": KDB(k=2),
-            # "TANNew": TANNew(random_state=random_state),
-            # "KDBNew": KDBNew(k=2),
-            # "AODENew": AODENew(random_state=random_state),
-            # "AODE": AODE(random_state=random_state),
-            # "BoostAODE": BoostAODE(random_state=random_state),
+            "TAN": TAN(random_state=random_state),
+            "KDB": KDB(k=2),
+            "TANNew": TANNew(random_state=random_state),
+            "KDBNew": KDBNew(k=2),
+            "AODENew": AODENew(random_state=random_state),
+            "AODE": AODE(random_state=random_state),
             "Cart": DecisionTreeClassifier(random_state=random_state),
             "ExtraTree": ExtraTreeClassifier(random_state=random_state),
             "Wodt": Wodt(random_state=random_state),
@@ -71,7 +69,6 @@ class Models:
algorithm="SAMME", algorithm="SAMME",
random_state=random_state, random_state=random_state,
), ),
"AdaBoost": AdaBoostClassifier(random_state=random_state),
"GBC": GradientBoostingClassifier(random_state=random_state), "GBC": GradientBoostingClassifier(random_state=random_state),
"RandomForest": RandomForestClassifier(random_state=random_state), "RandomForest": RandomForestClassifier(random_state=random_state),
"Mock": MockModel(random_state=random_state), "Mock": MockModel(random_state=random_state),
@@ -100,13 +97,13 @@ class Models:
             nodes = 0
             leaves = result.get_n_leaves()
             depth = 0
-        elif name.startswith("Bagging") or name == "AdaBoostStree":
+        elif name.startswith("Bagging") or name.startswith("AdaBoost"):
             nodes, leaves = list(
                 zip(*[x.nodes_leaves() for x in result.estimators_])
             )
             nodes, leaves = mean(nodes), mean(leaves)
             depth = mean([x.depth_ for x in result.estimators_])
-        elif name == "RandomForest" or name == "AdaBoost":
+        elif name == "RandomForest":
             leaves = mean([x.get_n_leaves() for x in result.estimators_])
             depth = mean([x.get_depth() for x in result.estimators_])
             nodes = mean([x.tree_.node_count for x in result.estimators_])
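Note: after this change every model whose name starts with "AdaBoost" is assumed to wrap STree-style estimators exposing nodes_leaves() and depth_, which is why the plain "AdaBoost" entry had to go; sklearn ensembles take their complexity from the tree API instead, as in this sketch:

    from statistics import mean
    from sklearn.datasets import load_iris
    from sklearn.ensemble import RandomForestClassifier

    X, y = load_iris(return_X_y=True)
    clf = RandomForestClassifier(random_state=0).fit(X, y)
    nodes = mean(x.tree_.node_count for x in clf.estimators_)
    leaves = mean(x.get_n_leaves() for x in clf.estimators_)
    depth = mean(x.get_depth() for x in clf.estimators_)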

View File

@@ -71,6 +71,7 @@ class Report(BaseReport):
             self._load_best_results(
                 self.data["score_name"], self.data["model"]
             )
+        self._compare_totals = {}
         self.header_line("*")
         self.header_line(
             f" {self.data['model']} ver. {self.data['version']}"

View File

@@ -52,11 +52,10 @@ class BaseReport(abc.ABC):
         self.score_name = self.data["score_name"]
         self.__load_env_data()
         self.__compute_best_results_ever()
-        self._compare_totals = {}

     def __load_env_data(self):
         # Set the labels for nodes, leaves, depth
-        env_data = EnvData().load()
+        env_data = EnvData.load()
         self.nodes_label = env_data["nodes"]
         self.leaves_label = env_data["leaves"]
         self.depth_label = env_data["depth"]
@@ -108,11 +107,9 @@ class BaseReport(abc.ABC):
         status = (
             Symbols.cross
             if accuracy <= max_value
-            else (
-                Symbols.upward_arrow
-                if accuracy > max_value
-                else " "
-            )
+            else Symbols.upward_arrow
+            if accuracy > max_value
+            else " "
         )
         if status != " ":
             if status not in self._compare_totals:
@@ -151,11 +148,8 @@ class BaseReport(abc.ABC):
 class StubReport(BaseReport):
-    def __init__(self, file_name, compare=False):
-        self.compare = compare
+    def __init__(self, file_name):
         super().__init__(file_name=file_name, best_file=False)
-        if self.compare:
-            self._load_best_results(self.score_name, self.data["model"])

     def print_line(self, line) -> None:
         pass
@@ -163,11 +157,6 @@ class StubReport(BaseReport):
     def header(self) -> None:
         self.title = self.data["title"]
         self.duration = self.data["duration"]
-        self.model = self.data["model"]
-        self.date = self.data["date"]
-        self.time = self.data["time"]
-        self.metric = self.data["score_name"]
-        self.platform = self.data["platform"]

     def footer(self, accuracy: float) -> None:
         self.accuracy = accuracy
@@ -176,7 +165,7 @@ class StubReport(BaseReport):
 class Summary:
     def __init__(self, hidden=False, compare=False) -> None:
-        self.results = Files.get_all_results(hidden=hidden)
+        self.results = Files().get_all_results(hidden=hidden)
         self.data = []
         self.data_filtered = []
         self.datasets = {}
@@ -202,11 +191,9 @@ class Summary:
             self.models.add(model)
             report = StubReport(
                 os.path.join(
-                    (
-                        Folders.hidden_results
-                        if self.hidden
-                        else Folders.results
-                    ),
+                    Folders.hidden_results
+                    if self.hidden
+                    else Folders.results,
                     result,
                 )
             )

View File

@@ -299,11 +299,11 @@ class ReportDatasets:
     color2 = "#FDE9D9"
     color3 = "#B1A0C7"

-    def __init__(self, excel=False, book=None, output=True):
+    def __init__(self, excel=False, book=None):
         self.excel = excel
         self.env = EnvData().load()
         self.close = False
-        self.output = output
+        self.output = True
         self.header_text = f"Datasets used in benchmark ver. {__version__}"
         if excel:
             self.max_length = 0
@@ -620,7 +620,7 @@ class Benchmark:
         self.__compute_best_results_ever()

     def __compute_best_results_ever(self):
-        args = EnvData().load()
+        args = EnvData.load()
         key = args["source_data"]
         best = BestResultsEver()
         _, self.best_score_value = best.get_name_value(key, self._score)

View File

@@ -15,7 +15,6 @@ class Folders:
img = "img" img = "img"
excel = "excel" excel = "excel"
sql = "sql" sql = "sql"
current = os.getcwd()
@staticmethod @staticmethod
def src(): def src():

View File

@@ -10,7 +10,7 @@ from .Results import Report
 from ._version import __version__

 __author__ = "Ricardo Montañana Gómez"
-__copyright__ = "Copyright 2020-2024, Ricardo Montañana Gómez"
+__copyright__ = "Copyright 2020-2023, Ricardo Montañana Gómez"
 __license__ = "MIT License"
 __author_email__ = "ricardo.montanana@alu.uclm.es"

View File

@@ -1 +1 @@
__version__ = "1.0.1" __version__ = "0.5.0"

View File

@@ -1,20 +0,0 @@
#!/usr/bin/env python
from benchmark.Arguments import EnvData
from flask import Flask

from .main import main, OUTPUT

FRAMEWORK = "framework"
FRAMEWORKS = "frameworks"
TEST = "test"


def create_app(output="local"):
    app = Flask(__name__)
    config = EnvData().load()
    app.register_blueprint(main)
    app.config[FRAMEWORK] = config[FRAMEWORK]
    app.config[FRAMEWORKS] = ["bootstrap", "bulma"]
    app.config[OUTPUT] = output
    app.jinja_env.auto_reload = True
    app.config["TEMPLATES_AUTO_RELOAD"] = True
    return app

View File

@@ -1 +0,0 @@
*.xlsx

View File

@@ -1,20 +0,0 @@
<!DOCTYPE html>
<html>
<head>
<title>{{ title }}</title>
<link href="https://cdn.jsdelivr.net/npm/bootstrap@5.3.0-alpha3/dist/css/bootstrap.min.css" rel="stylesheet" integrity="sha384-KK94CHFLLe+nY2dmCWGMq91rCGa5gtU4mk92HdvYe+M/SXH301p5ILy+dN9+nJOZ" crossorigin="anonymous" />
<link href="https://fonts.googleapis.com/css?family=Montserrat:300,400,500,600" rel="stylesheet" />
<link rel="stylesheet" href="https://cdn.datatables.net/1.10.25/css/jquery.dataTables.min.css" />
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/MaterialDesign-Webfont/7.1.96/css/materialdesignicons.css" integrity="sha512-lD1LHcZ8tFHvMFNeo6qOLY/HjzSPCasPJOAoir22byDxlZI1R71S5lZel8zRL2TZ+Dut1wOHfYgSU2lHXuL00w==" crossorigin="anonymous" referrerpolicy="no-referrer" />
<link rel="stylesheet" href="{{ url_for('static', filename='css/main.css') }}" />
</head>
<body>
{% block content %}
{% endblock %}
</body>
<script src="https://code.jquery.com/jquery-3.6.0.min.js"></script>
{% block jscript %}
{% endblock %}
</html>

View File

@@ -1,19 +0,0 @@
<!DOCTYPE html>
<html>
<head>
<title>{{ title }}</title>
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/bulma/0.9.3/css/bulma.min.css" />
<link rel="stylesheet" href="https://cdn.datatables.net/1.10.25/css/jquery.dataTables.min.css" />
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/MaterialDesign-Webfont/7.1.96/css/materialdesignicons.css" integrity="sha512-lD1LHcZ8tFHvMFNeo6qOLY/HjzSPCasPJOAoir22byDxlZI1R71S5lZel8zRL2TZ+Dut1wOHfYgSU2lHXuL00w==" crossorigin="anonymous" referrerpolicy="no-referrer" />
<link rel="stylesheet" href="{{ url_for('static', filename='css/main.css') }}" />
</head>
<body>
{% block content %}
{% endblock %}
</body>
<script src="https://code.jquery.com/jquery-3.6.0.min.js"></script>
{% block jscript %}
{% endblock %}
</html>

View File

@@ -1,68 +0,0 @@
{% extends 'base_' ~ framework ~ '.html' %}
{% macro javascript(file) %}
<script src="{{ url_for('static', filename=file) }}"></script>
{% endmacro %}
{% if framework == 'bootstrap' %}
{% set button_class = 'btn btn-primary btn-small' %}
{% set h1_class = 'text-center' %}
{% set table_class = 'table table-striped table-hover table-bordered' %}
{% set head_class = 'bg-primary text-white' %}
{% set text_right = 'text-end' %}
{% set container = 'container' %}
{% set selected = 'selected' %}
{%- macro header(title, close, url) -%}
<div class="p-4 bg-primary text-white">
{%- if close -%}
<button type="button" class="btn-close" aria-label="Close" onclick="location.href = '{{ url }}'"></button>
{%- endif -%}
<h1 class="alternate-font">{{ title }}</h1>
</div>
{%- endmacro -%}
{% else %}
{% set button_class = 'button is-primary is-small' %}
{% set h1_class = 'title is-1 has-text-centered' %}
{% set table_class = 'table is-striped is-hoverable cell-border is-bordered' %}
{% set head_class = 'is-selected' %}
{% set text_right = 'has-text-right' %}
{% set container = 'container' %}
{% set selected = 'is-selected' %}
{%- macro header(title, close, url) -%}
<div class="hero is-info is-bold">
<div class="hero-body">
{%- if close -%}
<button class="delete is-large" onclick="location.href = '{{ url }}'"></button>
{%- endif -%}
<h1 class="is-size-3 alternate-font">{{ title }}</h1>
</div>
</div>
{%- endmacro -%}
{% endif %}
{% block content %}
<div class="{{ container }}">
{{ header('Benchmark Datasets Report', True, url_for('main.index', compare = compare)) }}
<button class="{{ button_class }}" onclick="excelFiles(['datasets'], false)"><i class="mdi mdi-file-excel"></i> Excel</button>
{% include 'partials/datasets_table.html' %}
</div>
{% endblock %}
{% block jscript %}
{{ javascript("js/excelFiles.js") }}
<script>
$(document).ready(function () {
$(document).ajaxStart(function(){
$("body").addClass('ajaxLoading');
});
$(document).ajaxStop(function(){
$("body").removeClass('ajaxLoading');
});
});
// Check if row is selected
$('#file-table tbody').on('click', 'tr', function () {
if ($(this).hasClass('{{ selected }}')) {
$(this).removeClass('{{ selected }}');
} else {
$('#file-table tbody tr.{{ selected }}').removeClass("{{ selected }}")
$(this).addClass('{{ selected }}');
}
});
</script>
{% endblock %}

View File

@@ -1,20 +0,0 @@
<!DOCTYPE html>
<html>
<head>
<title>Error</title>
<link href="https://cdn.jsdelivr.net/npm/bootstrap@5.3.0-alpha3/dist/css/bootstrap.min.css" rel="stylesheet"
integrity="sha384-KK94CHFLLe+nY2dmCWGMq91rCGa5gtU4mk92HdvYe+M/SXH301p5ILy+dN9+nJOZ" crossorigin="anonymous">
</head>
<body>
<div class="container">
<div class="alert alert-danger my-5" role="alert">
<h4 class="alert-heading"><button class="btn-close btn-sm" type="button"
onclick="location.href='/index/{{ compare }}';"></button>Error</h4>
<p>There was an error processing action, {{ message }}. Please try again later.</p>
<hr>
<p class="mb-0">If the problem persists, please contact support.</p>
</div>
</div>
</body>
</html>

View File

@@ -1,22 +0,0 @@
$(document).ready(function () {
    // Check if row is selected
    $('#report-table tbody').on('click', 'tr', function () {
        if ($(this).hasClass('{{ selected }}')) {
            $(this).removeClass('{{ selected }}');
        } else {
            $('#report-table tbody tr.{{ selected }}').removeClass("{{ selected }}")
            $(this).addClass('{{ selected }}');
        }
    });
    $(document).ajaxStart(function(){
        $("body").addClass('ajaxLoading');
    });
    $(document).ajaxStop(function(){
        $("body").removeClass('ajaxLoading');
    });
});

function excelFile() {
    var selectedFiles = ["{{ file }}"];
    var compare = "{{ compare }}" == "True";
    excelFiles(selectedFiles, compare)
}

View File

@@ -1,97 +0,0 @@
$(document).ready(function () {
    var table = $("#file-table").DataTable({
        paging: true,
        searching: true,
        ordering: true,
        info: true,
        "select.items": "row",
        pageLength: 25,
        columnDefs: [
            {
                targets: 8,
                orderable: false,
            },
        ],
        //"language": {
        //    "lengthMenu": "_MENU_"
        //}
    });
    $('#file-table').on('draw.dt', function () {
        enable_disable_best_buttons();
    });
    // Check if row is selected
    $("#file-table tbody").on("click", "tr", function () {
        if ($(this).hasClass("{{ select.selected() }}")) {
            $(this).removeClass("{{ select.selected() }}");
        } else {
            table
                .$("tr.{{ select.selected() }}")
                .removeClass("{{ select.selected() }}");
            $(this).addClass("{{ select.selected() }}");
        }
    });
    // Show file with doubleclick
    $("#file-table tbody").on("dblclick", "tr", function () {
        showFile($(this).attr("id"));
    });
    $(document).ajaxStart(function () {
        $("body").addClass("ajaxLoading");
    });
    $(document).ajaxStop(function () {
        $("body").removeClass("ajaxLoading");
    });
    $('#compare').change(function() {
        enable_disable_best_buttons();
    });
    enable_disable_best_buttons();
});

function enable_disable_best_buttons() {
    if ($('#compare').is(':checked')) {
        $("[name='best_buttons']").addClass("tag is-link is-normal");
        $("[name='best_buttons']").removeAttr("hidden");
    } else {
        $("[name='best_buttons']").removeClass("tag is-link is-normal");
        $("[name='best_buttons']").attr("hidden", true);
    }
}

function showFile(selectedFile) {
    var form = $(
        '<form action="/show" method="post">' +
            '<input type="hidden" name="selected-file" value="' +
            selectedFile +
            '" />' +
            '<input type="hidden" name="compare" value=' +
            $("#compare").is(":checked") +
            " />" +
            "</form>"
    );
    $("body").append(form);
    form.submit();
}

function excel() {
    var checkbox = document.getElementsByName("selected_files");
    var selectedFiles = [];
    for (var i = 0; i < checkbox.length; i++) {
        if (checkbox[i].checked) {
            selectedFiles.push(checkbox[i].value);
        }
    }
    if (selectedFiles.length == 0) {
        alert("Select at least one file");
        return;
    }
    var compare = $("#compare").is(":checked");
    excelFiles(selectedFiles, compare);
}

function setCheckBoxes(value) {
    var checkbox = document.getElementsByName("selected_files");
    for (i = 0; i < checkbox.length; i++) {
        checkbox[i].checked = value;
    }
}

function redirectDouble(route, parameter) {
    location.href = "/" + route + "/" + parameter + "/" + $("#compare").is(":checked");
}

function redirectSimple(route) {
    location.href = "/" + route + "/" + $("#compare").is(":checked");
}

View File

@@ -1,56 +0,0 @@
{%- macro header(title, close=False, url="") -%}
<div class="p-4 bg-primary text-white">
{%- if close -%}
<button type="button" class="btn-close" aria-label="Close" onclick="location.href = '{{url}}'"></button>
{%- endif -%}
<h1 class="alternate-font">{{ title }}</h1>
</div>
{%- endmacro -%}
{%- macro get_table_class() -%}
table table-striped table-hover table-bordered
{%- endmacro -%}
{%- macro icon(icon_name) -%}
<i class="mdi mdi-{{icon_name}}"></i>
{%- endmacro -%}
{%- macro get_button(text, action) -%}
<button class="btn btn-primary btn-small" onclick="{{ action }}">{{ text|safe }}</button>
{%- endmacro -%}
{%- macro get_button_class() -%}
button btn-primary btn-small
{%- endmacro %}
{%- macro get_button_tag(icon_name, method, visible=True, name="") -%}
<button class="btn btn-primary btn-small" onclick="{{ method }}" {{ "" if visible else "hidden='true'" }} {{ "" if name=="" else "name='" + name +"'"}}><i class="mdi mdi-{{ icon_name }}"></i></button>
{%- endmacro -%}
{%- macro get_button_reset() -%}
<button class="btn btn-primary btn-small btn-danger" onclick="setCheckBoxes(false)"><i class="mdi mdi-checkbox-multiple-blank"></i></button>
{%- endmacro -%}
{%- macro get_button_all() -%}
<button class="btn btn-primary btn-small btn-success" onclick="setCheckBoxes(true)"><i class="mdi mdi-checkbox-multiple-marked"></i></button>
{%- endmacro -%}
{%- macro get_tag_class() -%}
badge bg-info bg-small
{%- endmacro -%}
{%- macro get_container_class() -%}
container-fluid
{%- endmacro -%}
{%- macro selected() -%}
selected
{%- endmacro -%}
{%- macro get_level_class() -%}
navbar
{%- endmacro -%}
{%- macro get_align_right() -%}
text-end
{%- endmacro -%}
{%- macro get_left_position() -%}
float-left
{%- endmacro -%}
{%- macro get_right_position() -%}
float-right
{%- endmacro -%}
{%- macro get_row_head_class() -%}
bg-primary text-white
{%- endmacro -%}
{%- macro get_align_center() -%}
text-center
{%- endmacro -%}

View File

@@ -1,58 +0,0 @@
{%- macro header(title, close=False, url="") -%}
<div class="hero is-info is-bold">
<div class="hero-body">
{%- if close -%}
<button class="delete is-large" onclick="location.href = '{{ url }}'"></button>
{%- endif -%}
<h1 class="is-size-3 alternate-font">{{ title }}</h1>
</div>
</div>
{%- endmacro -%}
{%- macro get_table_class() -%}
table is-striped is-hoverable cell-border is-bordered
{%- endmacro -%}
{%- macro icon(icon_name) -%}
<i class="mdi mdi-{{icon_name}}"></i>
{%- endmacro -%}
{%- macro get_button(text, action) -%}
<button class="button is-primary is-small" onclick="{{ action }}">{{ text|safe }}</button>
{%- endmacro -%}
{%- macro get_button_tag(icon_name, method, visible=True, name="") -%}
<span class="{{ "tag is-link is-normal" if visible else "" }}" type="button" onclick="{{ method }}" {{ "" if visible else "hidden='true'" }} {{ "" if name=="" else "name='" + name +"'"}}>{{icon(icon_name)}}</span>
{%- endmacro -%}
{%- macro get_button_reset() -%}
<span class="tag is-link is-danger" type="button" onclick="setCheckBoxes(false)"><i class="mdi mdi-checkbox-multiple-blank"></i></span>
{%- endmacro -%}
{%- macro get_button_all() -%}
<span class="tag is-link is-success" type="button" onclick="setCheckBoxes(true)"><i class="mdi mdi-checkbox-multiple-marked"></i></span>
{%- endmacro -%}
{%- macro get_tag_class() -%}
tag is-info is-small
{%- endmacro -%}
{%- macro get_container_class() -%}
container is-fluid
{%- endmacro -%}
{%- macro selected() -%}
is-selected
{%- endmacro -%}
{%- macro get_level_class() -%}
level
{%- endmacro -%}
{%- macro get_align_right() -%}
has-text-right
{%- endmacro -%}
{%- macro get_align_center() -%}
has-text-center
{%- endmacro -%}
{%- macro get_left_position() -%}
float-left
{%- endmacro -%}
{%- macro get_right_position() -%}
float-right
{%- endmacro -%}
{%- macro get_row_head_class() -%}
is-selected
{%- endmacro -%}
{%- macro get_align_center() -%}
has-text-center
{%- endmacro -%}

View File

@@ -1,27 +0,0 @@
{% extends "base_" ~ framework ~ ".html" %}
{% block content %}
<table id="file-table" class="{{ table_class }}">
<thead>
<tr class="{{ head_class }}">
<th class="{{ text_center }}">Dataset</th>
<th class="{{ text_center }}">Samples</th>
<th class="{{ text_center }}">Features</th>
<th class="{{ text_center }}">Cont. Feat.</th>
<th class="{{ text_center }}">Classes</th>
<th class="{{ text_center }}">Balance</th>
</tr>
</thead>
<tbody>
{% for dataset in datasets %}
<tr>
<td>{{ dataset.dataset }}</td>
<td class="{{ text_right }}">{{ "{:,}".format(dataset.samples) }}</td>
<td class="{{ text_right }}">{{ "{:,}".format(dataset.features) }}</td>
<td class="{{ text_right }}">{{ dataset.cont_features }}</td>
<td class="{{ text_right }}">{{ dataset.classes }}</td>
<td>{{ dataset.balance }}</td>
</tr>
{% endfor %}
</tbody>
</table>
{% endblock %}

View File

@@ -1,14 +0,0 @@
{% for item in data.results %}
<tr>
<td>{{ item.dataset }}</td>
<td class="{{ right }}">{{ '{:,}'.format(item.samples) }}</td>
<td class="{{ right }}">{{"%d" % item.features}}</td>
<td class="{{ right }}">{{"%d" % item.classes}}</td>
<td class="{{ right }}">{{ '{:,.2f}'.format(item.nodes|float) }}</td>
<td class="{{ right }}">{{ '{:,.2f}'.format(item.leaves|float) }}</td>
<td class="{{ right }}">{{ '{:,.2f}'.format(item.depth|float) }}</td>
<td class="{{ right }}">{{"%.6f±%.4f" % (item.score, item.score_std)}} {{ item.symbol|safe }}</td>
<td class="{{ right }}">{{"%.6f±%.4f" % (item.time, item.time_std)}}</td>
<td class="{{ center }}">{{ item.hyperparameters }}</td>
</tr>
{% endfor %}

View File

@@ -1,102 +0,0 @@
<div id="app">
<section class="section">
<div class="container-fluid">
<div class="p-4 bg-primary text-white">
<button type="button"
class="btn-close"
aria-label="Close"
onclick="location.href = '{{ back }}'"></button>
<h1>{{ data.title }}</h1>
</div>
<div>
<table class="table table-bordered">
<thead>
<tr class="bg-info text-white">
<th class="text-center">Platform</th>
<th class="text-center">Model</th>
<th class="text-center">Date</th>
<th class="text-center">Time</th>
{% if data.duration > 7200 %}
{% set unit = "h" %}
{% set divider = 3600 %}
{% else %}
{% set unit = "min" %}
{% set divider = 60 %}
{% endif %}
<th class="text-center">Duration ({{ unit }})</th>
<th class="text-center">Stratified</th>
<th class="text-center">Discretized</th>
<th class="text-center"># Folds</th>
</tr>
<tr>
<th class="text-center">{{ data.platform }}</th>
<th class="text-center">{{ data.model }} {{ data.version }}</th>
<th class="text-center">{{ data.date }}</th>
<th class="text-center">{{ data.time }}</th>
<th class="text-center">{{ "%.2f" % (data.duration/divider) }}</th>
<th class="text-center">{{ data.stratified }}</th>
<th class="text-center">{{ data.discretized }}</th>
<th class="text-center">{{ data.folds }}</th>
</tr>
<tr>
<th class="text-center bg-info text-white">Language</th>
<th class="text-center" colspan=3>{{ data.language }} {{ data.language_version }}</th>
<th class="text-center bg-info text-white">Seeds</th>
<th class="text-center" colspan=6>{{ data.seeds }}</th>
</tr>
</thead>
</table>
<div>
<button class="{{ button }}" onclick="excelFile()">
<i class="mdi mdi-file-excel"></i> Excel
</button>
</div>
<table id="report-table"
class="table table-striped table-hover table-bordered">
<thead>
<tr class="bg-primary text-white">
<th class="text-center">Dataset</th>
<th class="text-center">Samples</th>
<th class="text-center">Features</th>
<th class="text-center">Classes</th>
<th class="text-center">{{ app_config.nodes }}</th>
<th class="text-center">{{ app_config.leaves }}</th>
<th class="text-center">{{ app_config.depth }}</th>
<th class="text-center">{{ data.score_name|capitalize }}</th>
<th class="text-center">Time</th>
<th class="text-center">hyperparameters</th>
</tr>
</thead>
<tbody>
{% include "partials/table_report.html" %}
</tbody>
</table>
{% if summary|length > 0 %}
<div class="col-4 col-lg-4">
<table class="table table-bordered">
<thead>
<tr>
<th class="text-center bg-primary text-white">Symbol</th>
<th class="text-center bg-primary text-white">Meaning</th>
<th class="text-center bg-primary text-white">Count</th>
</tr>
</thead>
{% include "partials/table_summary.html" %}
</table>
</div>
{% endif %}
<button type="button"
class="btn-close"
aria-label="Close"
onclick="location.href = '{{ back }}'"></button>
<h7>
<b>
Total score: {{ "%.6f" % (data.results | sum(attribute="score") ) }}
</b>
</h7>
<h7>
Number of files: {{ data.results | length }}
</h7>
</div>
</section>
</div>

View File

@@ -1,100 +0,0 @@
<div id="app">
<header>
<div class="container is-fluid">
<div class="hero is-info is-bold">
<div class="hero-body">
<button class="delete is-large" onclick="location.href = '{{ back }}'"></button>
<h1 class="is-size-3">{{ data.title }}</h1>
</div>
</div>
</div>
</header>
<section class="section">
<div class="container is-fluid">
<div>
<table class="table is-fullwidth is-striped is-bordered">
<thead>
<tr class="is-selected">
<th class="has-text-centered">Platform</th>
<th class="has-text-centered">Model</th>
<th class="has-text-centered">Date</th>
<th class="has-text-centered">Time</th>
{% if data.duration > 7200 %}
{% set unit = "h" %}
{% set divider = 3600 %}
{% else %}
{% set unit = "min" %}
{% set divider = 60 %}
{% endif %}
<th class="has-text-centered">Duration ({{ unit }})</th>
<th class="has-text-centered">Stratified</th>
<th class="has-text-centered">Discretized</th>
<th class="has-text-centered"># Folds</th>
</tr>
<tr>
<th class="has-text-centered">{{ data.platform }}</th>
<th class="has-text-centered">{{ data.model }} {{ data.version }}</th>
<th class="has-text-centered">{{ data.date }}</th>
<th class="has-text-centered">{{ data.time }}</th>
<th class="has-text-centered">{{ "%.2f" % (data.duration/divider) }}</th>
<th class="has-text-centered">{{ data.stratified }}</th>
<th class="has-text-centered">{{ data.discretized }}</th>
<th class="has-text-centered">{{ data.folds }}</th>
</tr>
<tr>
<th class="has-text-center is-selected">Language</th>
<th class="has-text-centered" colspan=3>{{ data.language }} {{ data.language_version }}</th>
<th class="has-text-centered is-selected">Seeds</th>
<th class="has-text-centered" colspan=6>{{ data.seeds }}</th>
</tr>
</thead>
</table>
<div>
<button class="{{ button }}" onclick="excelFile()">
<i class="mdi mdi-file-excel"></i> Excel
</button>
</div>
<table id="report-table"
class="table is-fullwidth is-striped is-hoverable is-bordered">
<thead>
<tr class="is-selected">
<th class="has-text-centered">Dataset</th>
<th class="has-text-centered">Samples</th>
<th class="has-text-centered">Features</th>
<th class="has-text-centered">Classes</th>
<th class="has-text-centered">{{ app_config.nodes }}</th>
<th class="has-text-centered">{{ app_config.leaves }}</th>
<th class="has-text-centered">{{ app_config.depth }}</th>
<th class="has-text-centered">{{ data.score_name|capitalize }}</th>
<th class="has-text-centered">Time</th>
<th class="has-text-centered">hyperparameters</th>
</tr>
</thead>
<tbody>
{% include "partials/table_report.html" %}
</tbody>
</table>
{% if summary|length > 0 %}
<div class="col-2 col-lg-2">
<table class="table is-bordered">
<thead>
<tr class="is-selected">
<th class="has-text-centered">Symbol</th>
<th class="has-text-centered">Meaning</th>
<th class="has-text-centered">Count</th>
</tr>
</thead>
{% include "partials/table_summary.html" %}
</table>
</div>
{% endif %}
<h2 class="has-text-white has-background-primary">
<b>
<button class="delete" onclick="location.href = '{{ back }}'"></button>
Total score: {{ "%.6f" % (data.results | sum(attribute="score") ) }}
</b>
</h2>
<h2>Number of files: {{ data.results | length }}</h2>
</div>
</section>
</div>

View File

@@ -1,41 +0,0 @@
<table id="file-table" class="{{ select.get_table_class() }}">
<thead>
<tr>
<th>Model</th>
<th>Metric</th>
<th>Platform</th>
<th>Date</th>
<th>Time</th>
<th>Stratified</th>
<th>Title</th>
<th>Score</th>
<th>{{ select.get_button_reset()|safe }} {{ select.get_button_all()|safe }}</th>
</tr>
</thead>
<tbody>
{% for file, data in files.items() %}
{% set parts = file.split('_') %}
{% set stratified = parts[6].split('.')[0] %}
<tr id="{{ file }}">
<td>{{ parts[2] }}</td>
<td>{{ parts[1] }}</td>
<td>{{ parts[3] }}</td>
<td>{{ parts[4] }}</td>
<td>{{ parts[5] }}</td>
<td>{{ 'True' if stratified =='1' else 'False' }}</td>
<td>{{ "%s" % data["title"] }}</td>
<td class="{{ select.get_align_right() }}">{{ "%.6f" % data["score"] }}</td>
<td>
{{ select.get_button_tag("table-eye", "showFile('" ~ file ~ "')") | safe }}
{% set file_best = "best_results_" ~ parts[1] ~ "_" ~ parts[2] ~ ".json" %}
{{ select.get_button_tag("star-circle-outline", "redirectDouble('best_results', '" ~ file_best ~ "')", visible=False, name="best_buttons") | safe }}
<input
type="checkbox"
name="selected_files"
value="{{ file }}"
/>
</td>
</tr>
{% endfor %}
</tbody>
</table>

View File

@@ -1,15 +0,0 @@
<div class="{{ select.get_container_class() }}">
{{ select.header("Benchmark Results") }}
<div class="{{ select.get_level_class() }}">
<div class="{{ select.get_left_position() }}">
{{ select.get_button("Use " ~ candidate, "redirectDouble('config', '" ~ candidate ~ "')")|safe }}
{{ select.get_button(select.icon("excel") ~ " Excel", "excel()")|safe }}
{{ select.get_button(select.icon("database-eye") ~ " Datasets", "redirectSimple('datasets')")|safe }}
</div>
<div class="{{ select.get_right_position() }}">
<input type="checkbox" id="compare" name="compare" {% if compare %} {{ "checked" }} {% endif %}>
<span class="{{ select.get_tag_class() }}">Comparing with best results</span>
</div>
</div>
{% include "partials/table_select.html" %}
</div>

View File

@@ -1,13 +0,0 @@
{% for key, value in summary.items() %}
<tr>
<td class="{{ center }}">
{{key}}
</td>
<td >
{{value[0]}}
</td>
<td class={{ right }}>
{{'{:,}'.format(value[1])}}
</td>
</tr>
{% endfor %}

View File

@@ -1,29 +0,0 @@
{% macro javascript(file) %}
<script src="{{ url_for('static', filename=file) }}"></script>
{% endmacro %}
{% set title = 'Report Viewer' %}
{% extends 'base_' ~ framework ~ '.html' %}
{% block content %}
{% if framework == 'bootstrap' %}
{% set center = 'text-center' %}
{% set right = 'text-end' %}
{% set button = 'btn btn-primary' %}
{% include 'partials/table_report_bootstrap.html' %}
{% else %}
{% set center = 'has-text-centered' %}
{% set right = 'has-text-right' %}
{% set button = 'button is-primary' %}
{% include 'partials/table_report_bulma.html' %}
{% endif %}
{% endblock %}
{% block jscript %}
{% if framework == 'bootstrap' %}
{% set selected = 'selected' %}
{% else %}
{% set selected = 'is-selected' %}
{% endif %}
<script>
{% include "js/report.js" %}
</script>
{{ javascript("js/excelFiles.js") }}
{% endblock %}

View File

@@ -1,47 +0,0 @@
{% set title = "Best Results" %}
{% extends "base_" ~ framework ~ ".html" %}
{% import "partials/cfg_select_" ~ framework ~ ".jinja" as select %}
{% block content %}
<div class="container">
{{ select.header(title, True, url_for("main.index", compare=compare)) }}
<table id="file-table" class="{{ select.get_table_class() }}">
<thead>
<tr class="{{ select.get_row_head_class() }}">
<th class="{{ select.get_align_center() }}">Dataset</th>
<th class="{{ select.get_align_center() }}">Score</th>
<th class="{{ select.get_align_center() }}">Hyperparameters</th>
<th class="{{ select.get_align_center() }}">File</th>
</tr>
</thead>
<tbody>
{% for dataset, info in data.items() %}
<tr>
<td>{{ dataset }}</td>
<td class="{{ select.get_align_right() }}">{{ '%9.7f' % info[0] }}</td>
<td class="{{ select.get_align_center() }}">{{ info[1] }}</td>
<td>
{% set url = url_for(request.endpoint, **request.view_args)|urlencode %}
<a href="{{ url_for('main.showfile', file_name = info[2], compare = compare) }}?url={{ url }}">{{ info[2] }}</a>
</td>
</tr>
{% endfor %}
</tbody>
</table>
</div>
{% endblock %}
{% block jscript %}
<script>
$(document).ready(function () {
// Check if row is selected
$('#file-table tbody').on('click', 'tr', function () {
if ($(this).hasClass('{{ select.selected() }}')) {
$(this).removeClass('{{ select.selected() }}');
} else {
$('#file-table tbody tr.{{ select.selected() }}').removeClass("{{ select.selected() }}")
$(this).addClass('{{ select.selected() }}');
}
});
});
</script>
{% endblock %}

View File

@@ -1,20 +0,0 @@
{% macro javascript(file) %}
<script src="{{ url_for('static', filename=file) }}"></script>
{% endmacro %}
{% set title = 'Benchmark Results' %}
{% extends 'base_' ~ framework ~ '.html' %}
{% import 'partials/cfg_select_' ~ framework ~ '.jinja' as select %}
{% block content %}
{% include 'partials/table_select_design.html' %}
{% endblock %}
{% block jscript %}
<script src="https://cdn.datatables.net/1.10.25/js/jquery.dataTables.min.js"></script>
{% if framework == 'bootstrap' %}
<script src="https://cdn.jsdelivr.net/npm/bootstrap@5.2.3/dist/js/bootstrap.min.js" integrity="sha384-cuYeSxntonz0PPNlHhBs68uyIAVpIIOZZ5JqeqvYYIcEL727kskC66kF92t6Xl2V" crossorigin="anonymous"></script>
{% endif %}
<script>
{% include'/js/select.js' %}
</script>
{{ javascript('js/excelFiles.js') }}
{% endblock %}

View File

@@ -1,18 +1,14 @@
 #!/usr/bin/env python
 import webbrowser

-from benchmark.Arguments import Arguments
-from benchmark.scripts.app.app import create_app, TEST, OUTPUT
+from benchmark.scripts.flask_app.app import create_app

 # Launch a flask server to serve the results


 def main(args_test=None):
-    arguments = Arguments(prog="be_flask")
-    arguments.xset("output")
-    args = arguments.parse(args_test)
     app = create_app()
-    app.config[TEST] = args_test is not None
-    app.config[OUTPUT] = args.output
-    print("Output is ", args.output)
-    if args.output == "local":
+    app.config["TEST"] = args_test is not None
+    output = app.config["OUTPUT"]
+    print("Output is ", output)
+    if output == "local":
         webbrowser.open_new("http://127.0.0.1:1234/")
     app.run(port=1234, host="0.0.0.0")
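Note: with the -o/--output CLI flag removed (see the Arguments.py hunk earlier in this diff), the launcher now takes OUTPUT from the Flask config, which the new Config class later in this diff fills from .env. A hedged sketch of the effective behaviour:

    # .env contains OUTPUT="docker"  ->  app.config["OUTPUT"] == "docker"
    #   (server starts, no browser window is opened)
    # .env has no OUTPUT key         ->  falls back to "local"
    #   (opens http://127.0.0.1:1234/ in the default browser)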

View File

@@ -14,7 +14,7 @@ def main(args_test=None):
arguments.xset("stratified").xset("score").xset("model", mandatory=True) arguments.xset("stratified").xset("score").xset("model", mandatory=True)
arguments.xset("n_folds").xset("platform").xset("quiet").xset("title") arguments.xset("n_folds").xset("platform").xset("quiet").xset("title")
arguments.xset("report").xset("ignore_nan").xset("discretize") arguments.xset("report").xset("ignore_nan").xset("discretize")
arguments.xset("fit_features").xset("iwss") arguments.xset("fit_features")
arguments.add_exclusive( arguments.add_exclusive(
["grid_paramfile", "best_paramfile", "hyperparameters"] ["grid_paramfile", "best_paramfile", "hyperparameters"]
) )
@@ -43,7 +43,6 @@ def main(args_test=None):
             folds=args.n_folds,
             fit_features=args.fit_features,
             discretize=args.discretize,
-            iwss=args.iwss,
         )
         job.do_experiment()
     except ValueError as e:

View File

@@ -0,0 +1,2 @@
OUTPUT="local"
FRAMEWORK="bulma"

Binary file not shown.

View File

@@ -0,0 +1,39 @@
#!/usr/bin/env python
from flask import Flask
from flask_bootstrap import Bootstrap5
from flask_login import LoginManager

from .config import Config
from .models import User, db
from .results.main import results
from .main import main

bootstrap = Bootstrap5()
login_manager = LoginManager()


@login_manager.user_loader
def load_user(user_id):
    return User.query.get(int(user_id))


def make_shell_context():
    return {"db": db, "User": User}


def create_app():
    app = Flask(__name__)
    bootstrap.init_app(app)
    # app.register_blueprint(results)
    app.config.from_object(Config)
    db.init_app(app)
    login_manager.init_app(app)
    login_manager.login_view = "main.login"
    app.jinja_env.auto_reload = True
    app.register_blueprint(results, url_prefix="/results")
    app.register_blueprint(main)
    app.shell_context_processor(make_shell_context)
    with app.app_context():
        db.create_all()
    return app
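Note: a minimal sketch of driving this factory, e.g. from a hypothetical wsgi.py (the module path matches the import in be_flask above):

    from benchmark.scripts.flask_app.app import create_app

    app = create_app()  # registers both blueprints, initialises Bootstrap5,
                        # SQLAlchemy and LoginManager, and runs db.create_all()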

View File

@@ -0,0 +1,17 @@
import os

from dotenv import load_dotenv

basedir = os.path.abspath(os.path.dirname(__file__))
load_dotenv(os.path.join(basedir, ".env"))


class Config(object):
    FRAMEWORKS = ["bootstrap", "bulma"]
    FRAMEWORK = os.environ.get("FRAMEWORK") or FRAMEWORKS[0]
    OUTPUT = os.environ.get("OUTPUT") or "local"  # local or docker
    TEMPLATES_AUTO_RELOAD = True
    SECRET_KEY = os.environ.get("SECRET_KEY") or "really-hard-to-guess-key"
    SQLALCHEMY_DATABASE_URI = os.environ.get(
        "DATABASE_URL"
    ) or "sqlite:///" + os.path.join(basedir, "app.db")
    SQLALCHEMY_TRACK_MODIFICATIONS = False
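Note: each setting resolves as os.environ.get(...) or <default> at import time, so values are frozen when config.py is first imported and an empty string in .env also falls through to the default. A sketch (module path assumed):

    from benchmark.scripts.flask_app.config import Config

    assert Config.OUTPUT in ("local", "docker")
    # SECRET_KEY falls back to "really-hard-to-guess-key"; override it in .env
    # before exposing the app anywhere beyond localhost.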

View File

@@ -0,0 +1,22 @@
from flask_wtf import FlaskForm
from wtforms import (
    StringField,
    PasswordField,
    BooleanField,
    SubmitField,
)
from wtforms.validators import (
    DataRequired,
    Length,
)


class LoginForm(FlaskForm):
    username = StringField(
        "Username", validators=[DataRequired(), Length(1, 20)]
    )
    password = PasswordField(
        "Password", validators=[DataRequired(), Length(4, 150)]
    )
    remember_me = BooleanField("Remember me")
    submit = SubmitField()

View File

@@ -0,0 +1,51 @@
from flask import (
    Blueprint,
    render_template,
    url_for,
    flash,
    redirect,
    request,
)
from flask_login import login_user, current_user, logout_user, login_required
from werkzeug.urls import url_parse
from .forms import LoginForm
from .models import User

main = Blueprint("main", __name__)


@main.route("/")
@main.route("/index")
def index():
    return render_template("index.html")


@main.route("/config")
@login_required
def config():
    return render_template("config.html")


@main.route("/login", methods=["GET", "POST"])
def login():
    if current_user.is_authenticated:
        return redirect(url_for("main.index"))
    form = LoginForm()
    if form.validate_on_submit():
        user = User.query.filter_by(username=form.username.data).first()
        if user is None or not user.check_password(form.password.data):
            flash("Invalid username or password")
            return redirect(url_for("main.login"))
        login_user(user, remember=form.remember_me.data)
        flash("Logged in successfully.")
        next_page = request.args.get("next")
        if not next_page or url_parse(next_page).netloc != "":
            next_page = url_for("main.index")
        return redirect(next_page)
    return render_template("login.html", title="Sign In", form=form)


@main.route("/logout")
def logout():
    logout_user()
    return redirect(url_for("main.index"))

View File

@@ -0,0 +1,29 @@
from hashlib import md5
from flask_sqlalchemy import SQLAlchemy
from sqlalchemy import Column, Integer, String
from flask_login import UserMixin
from werkzeug.security import generate_password_hash, check_password_hash

db = SQLAlchemy()


class User(UserMixin, db.Model):
    id = Column(Integer, primary_key=True)
    username = Column(String(64), index=True, unique=True)
    email = Column(String(120), index=True, unique=True)
    password_hash = Column(String(128))

    def __repr__(self):
        return "<User {} {}>".format(self.username, self.email)

    def set_password(self, password):
        self.password_hash = generate_password_hash(password)

    def check_password(self, password):
        return check_password_hash(self.password_hash, password)

    def avatar(self, size):
        digest = md5(self.email.lower().encode("utf-8")).hexdigest()
        return "https://www.gravatar.com/avatar/{}?d=identicon&s={}".format(
            digest, size
        )
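Nothing in this commit registers accounts, so presumably the first user is created by hand; a sketch inside "flask shell", where make_shell_context from __init__.py injects db and User (the credentials are placeholders, not values from the repository):

# Run inside "flask shell"; db and User come from make_shell_context().
u = User(username="admin", email="admin@example.com")  # placeholder values
u.set_password("change-me")
db.session.add(u)
db.session.commit()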

View File

@@ -0,0 +1,46 @@
import os
import json
import shutil
import xlsxwriter
from benchmark.Utils import Files, Folders
from benchmark.Arguments import EnvData
from benchmark.ResultsBase import StubReport
from benchmark.ResultsFiles import Excel, ReportDatasets
from benchmark.Datasets import Datasets
from flask import Blueprint, current_app, send_file
from flask import render_template, request, redirect, url_for
from flask_login import login_required

results = Blueprint("results", __name__, template_folder="templates")


@results.route("/select")
@login_required
def select(compare="False"):
    # Get a list of files in a directory
    files = {}
    names = Files.get_all_results(hidden=False)
    for name in names:
        report = StubReport(os.path.join(Folders.results, name))
        report.report()
        files[name] = {
            "duration": report.duration,
            "score": report.score,
            "title": report.title,
        }
    candidate = current_app.config["FRAMEWORKS"].copy()
    candidate.remove(current_app.config["FRAMEWORK"])
    return render_template(
        "select.html",
        files=files,
        candidate=candidate[0],
        framework=current_app.config["FRAMEWORK"],
        compare=compare.capitalize() == "True",
    )


@results.route("/datasets")
@login_required
def datasets(compare="False"):
    return render_template("test.html")

View File

@@ -1,89 +1,91 @@
 #!/usr/bin/env python
-import os
-import json
-import shutil
-import xlsxwriter
-from dotenv import dotenv_values
-from benchmark.Utils import Files, Folders
-from benchmark.Arguments import EnvData
-from benchmark.ResultsBase import StubReport
-from benchmark.ResultsFiles import Excel, ReportDatasets
-from benchmark.Datasets import Datasets
-from flask import Blueprint, current_app, send_file
-from flask import render_template, request, redirect, url_for
-
-main = Blueprint("main", __name__)
-FRAMEWORK = "framework"
-FRAMEWORKS = "frameworks"
-OUTPUT = "output"
-TEST = "test"
-
-
-class AjaxResponse:
-    def __init__(self, success, file_name, code=200):
-        self.success = success
-        self.file_name = file_name
-        self.code = code
-
-    def to_string(self):
-        return (
-            json.dumps(
-                {
-                    "success": self.success,
-                    "file": self.file_name,
-                    "output": current_app.config[OUTPUT],
-                }
-            ),
-            self.code,
-            {"ContentType": "application/json"},
-        )
-
-
-def process_data(file_name, compare, data):
-    report = StubReport(
-        os.path.join(Folders.results, file_name), compare=compare
-    )
-    new_list = []
-    for result in data["results"]:
-        symbol = report._compute_status(result["dataset"], result["score"])
-        result["symbol"] = symbol if symbol != " " else "&nbsp;"
-        new_list.append(result)
-    data["results"] = new_list
-    # Compute summary with explanation of symbols
-    summary = {}
-    for key, value in report._compare_totals.items():
-        summary[key] = (report._status_meaning(key), value)
-    return summary
-
-
-@main.route("/index/<compare>")
-@main.route("/")
-def index(compare="False"):
-    # Get a list of files in a directory
-    files = {}
-    names = Files.get_all_results(hidden=False)
-    for name in names:
-        report = StubReport(os.path.join(Folders.results, name))
-        report.report()
-        files[name] = {
-            "duration": report.duration,
-            "score": report.score,
-            "title": report.title,
-        }
-    candidate = current_app.config[FRAMEWORKS].copy()
-    candidate.remove(current_app.config[FRAMEWORK])
-    return render_template(
-        "select.html",
-        files=files,
-        candidate=candidate[0],
-        framework=current_app.config[FRAMEWORK],
-        compare=compare.capitalize() == "True",
-    )
-
-
-@main.route("/datasets/<compare>")
-def datasets(compare):
+# import os
+# import json
+# import shutil
+# import xlsxwriter
+# from benchmark.Utils import Files, Folders
+# from benchmark.Arguments import EnvData
+# from benchmark.ResultsBase import StubReport
+# from benchmark.ResultsFiles import Excel, ReportDatasets
+# from benchmark.Datasets import Datasets
+# from flask import Blueprint, current_app, send_file
+# from flask import render_template, request, redirect, url_for
+from flask import Blueprint, render_template
+
+results = Blueprint("results", __name__, template_folder="results")
+# FRAMEWORK = "framework"
+# FRAMEWORKS = "frameworks"
+# OUTPUT = "output"
+# TEST = "test"
+
+
+# class AjaxResponse:
+#     def __init__(self, success, file_name, code=200):
+#         self.success = success
+#         self.file_name = file_name
+#         self.code = code
+
+#     def to_string(self):
+#         return (
+#             json.dumps(
+#                 {
+#                     "success": self.success,
+#                     "file": self.file_name,
+#                     "output": current_app.config[OUTPUT],
+#                 }
+#             ),
+#             self.code,
+#             {"ContentType": "application/json"},
+#         )
+
+
+# def process_data(file_name, compare, data):
+#     report = StubReport(
+#         os.path.join(Folders.results, file_name), compare=compare
+#     )
+#     new_list = []
+#     for result in data["results"]:
+#         symbol = report._compute_status(result["dataset"], result["score"])
+#         result["symbol"] = symbol if symbol != " " else "&nbsp;"
+#         new_list.append(result)
+#     data["results"] = new_list
+#     # Compute summary with explanation of symbols
+#     summary = {}
+#     for key, value in report._compare_totals.items():
+#         summary[key] = (report._status_meaning(key), value)
+#     return summary
+
+
+@results.route("/results/<compare>")
+def results(compare="False"):
+    # # Get a list of files in a directory
+    # files = {}
+    # names = Files.get_all_results(hidden=False)
+    # for name in names:
+    #     report = StubReport(os.path.join(Folders.results, name))
+    #     report.report()
+    #     files[name] = {
+    #         "duration": report.duration,
+    #         "score": report.score,
+    #         "title": report.title,
+    #     }
+    # candidate = current_app.config[FRAMEWORKS].copy()
+    # candidate.remove(current_app.config[FRAMEWORK])
+    # return render_template(
+    #     "select.html",
+    #     files=files,
+    #     candidate=candidate[0],
+    #     framework=current_app.config[FRAMEWORK],
+    #     compare=compare.capitalize() == "True",
+    # )
+    return render_template("test.html")
+
+
+"""
+@results.route("/datasets/<compare>")
+@results.route("datasets")
+def datasets(compare=False):
     dt = Datasets()
     datos = []
     for dataset in dt:
@@ -96,11 +98,11 @@ def datasets(compare):
     )
 
 
-@main.route("/showfile/<file_name>/<compare>")
+@results.route("/showfile/<file_name>/<compare>")
 def showfile(file_name, compare, back=None):
     compare = compare.capitalize() == "True"
     back = request.args["url"] if back is None else back
-    app_config = dotenv_values(".env")
+    print(f"back [{back}]")
     with open(os.path.join(Folders.results, file_name)) as f:
         data = json.load(f)
     try:
@@ -114,11 +116,10 @@ def showfile(file_name, compare, back=None):
             summary=summary,
             framework=current_app.config[FRAMEWORK],
             back=back,
-            app_config=app_config,
         )
 
 
-@main.route("/show", methods=["post"])
+@results.route("/show", methods=["post"])
 def show():
     selected_file = request.form["selected-file"]
     compare = request.form["compare"]
@@ -131,7 +132,7 @@ def show():
     )
 
 
-@main.route("/excel", methods=["post"])
+@results.route("/excel", methods=["post"])
 def excel():
     selected_files = request.json["selectedFiles"]
     compare = request.json["compare"]
@@ -171,7 +172,7 @@ def excel():
     return AjaxResponse(True, Files.be_list_excel).to_string()
 
 
-@main.route("/download/<file_name>")
+@results.route("/download/<file_name>")
 def download(file_name):
     src = os.path.join(Folders.current, Folders.excel, file_name)
     dest = os.path.join(
@@ -181,7 +182,7 @@ def download(file_name):
     return send_file(dest, as_attachment=True)
 
 
-@main.route("/config/<framework>/<compare>")
+@results.route("/config/<framework>/<compare>")
 def config(framework, compare):
     if framework not in current_app.config[FRAMEWORKS]:
         message = f"framework {framework} not supported"
@@ -194,7 +195,7 @@ def config(framework, compare):
     return redirect(url_for("main.index", compare=compare))
 
 
-@main.route("/best_results/<file>/<compare>")
+@results.route("/best_results/<file>/<compare>")
 def best_results(file, compare):
     compare = compare.capitalize() == "True"
     try:
@@ -208,3 +209,4 @@ def best_results(file, compare):
         compare=compare,
         framework=current_app.config[FRAMEWORK],
     )
+"""

View File

@@ -0,0 +1,50 @@
{%- macro get_button_tag(icon_name, method, visible=True, name="") -%}
<button class="btn btn-primary btn-small" onclick="{{ method }}"
        {{ "" if visible else "hidden='true'" }}
        {{ "" if name == "" else "name='" + name + "'" }}>
    <i class="mdi mdi-{{ icon_name }}"></i>
</button>
{%- endmacro -%}
<table id="file-table"
class="table table-striped table-hover table-bordered">
<thead>
<tr>
<th>Model</th>
<th>Metric</th>
<th>Platform</th>
<th>Date</th>
<th>Time</th>
<th>Stratified</th>
<th>Title</th>
<th>Score</th>
<th>
<button class="btn btn-primary btn-small btn-danger"
onclick="setCheckBoxes(false)">
<i class="mdi mdi-checkbox-multiple-blank"></i>
</button>
<button class="btn btn-primary btn-small btn-success"
onclick="setCheckBoxes(true)">
<i class="mdi mdi-checkbox-multiple-marked"></i>
</button>
</th>
</tr>
</thead>
<tbody>
{% for file, data in files.items() %}
{% set parts = file.split('_') %}
{% set stratified = parts[6].split('.')[0] %}
<tr id="{{ file }}">
<td>{{ parts[2] }}</td>
<td>{{ parts[1] }}</td>
<td>{{ parts[3] }}</td>
<td>{{ parts[4] }}</td>
<td>{{ parts[5] }}</td>
<td>{{ 'True' if stratified == '1' else 'False' }}</td>
<td>{{ data["title"] }}</td>
<td class="text-end">{{ "%.6f" % data["score"] }}</td>
<td>
{{ get_button_tag("table-eye", "showFile('" ~ file ~ "') ") | safe }}
{% set file_best = "best_results_" ~ parts[1] ~ "_" ~ parts[2] ~ ".json" %}
{{ get_button_tag("star-circle-outline", "redirectDouble('best_results', '" ~ file_best ~ "') ", visible=False, name="best_buttons") | safe }}
<input type="checkbox" name="selected_files" value="{{ file }}" />
</td>
</tr>
{% endfor %}
</tbody>
</table>
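The parts indices in the table above follow the results file naming scheme; a short sketch with a file name taken verbatim from the test fixtures later in this diff:

name = "results_accuracy_STree_iMac27_2021-10-27_09:40:40_0.json"
parts = name.split("_")
# parts[1] -> "accuracy"    Metric column
# parts[2] -> "STree"       Model column
# parts[3] -> "iMac27"      Platform
# parts[4] -> "2021-10-27"  Date
# parts[5] -> "09:40:40"    Time
stratified = parts[6].split(".")[0]  # "0" -> rendered as False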

View File

@@ -0,0 +1,9 @@
{% extends "base.html" %}
{% block content %}
{% include "_table_select.html" %}
{% endblock %}
{% block jscript %}
{{ super() }}
<script src="https://cdn.datatables.net/1.10.25/js/jquery.dataTables.min.js"></script>
<script src="{{ url_for('static', filename="js/select.js") }}"></script>
{% endblock %}

View File

@@ -28,3 +28,24 @@ tbody {
     padding: 0.25rem 0.5rem;
     font-size: 0.75rem;
 }
+body {
+    padding-bottom: 20px;
+}
+.navbar {
+    margin-bottom: 20px;
+}
+pre {
+    background: #ddd;
+    padding: 10px;
+}
+h2 {
+    margin-top: 20px;
+}
+footer {
+    margin: 20px;
+}

View File

@@ -0,0 +1,97 @@
$(document).ready(function () {
    var table = $("#file-table").DataTable({
        paging: true,
        searching: true,
        ordering: true,
        info: true,
        // note: "select.items" only takes effect when the DataTables Select
        // extension is loaded; row highlighting below is done by hand anyway
        "select.items": "row",
        pageLength: 25,
        columnDefs: [
            {
                targets: 8,
                orderable: false,
            },
        ],
        //"language": {
        //    "lengthMenu": "_MENU_"
        //}
    });
    $("#file-table").on("draw.dt", function () {
        enable_disable_best_buttons();
    });
    // Check if row is selected
    $("#file-table tbody").on("click", "tr", function () {
        if ($(this).hasClass("selected")) {
            $(this).removeClass("selected");
        } else {
            table.$("tr.selected").removeClass("selected");
            $(this).addClass("selected");
        }
    });
    // Show file with doubleclick
    $("#file-table tbody").on("dblclick", "tr", function () {
        showFile($(this).attr("id"));
    });
    $(document).ajaxStart(function () {
        $("body").addClass("ajaxLoading");
    });
    $(document).ajaxStop(function () {
        $("body").removeClass("ajaxLoading");
    });
    $("#compare").change(function () {
        enable_disable_best_buttons();
    });
    enable_disable_best_buttons();
});

function enable_disable_best_buttons() {
    if ($("#compare").is(":checked")) {
        $("[name='best_buttons']").addClass("tag is-link is-normal");
        $("[name='best_buttons']").removeAttr("hidden");
    } else {
        $("[name='best_buttons']").removeClass("tag is-link is-normal");
        $("[name='best_buttons']").attr("hidden", true);
    }
}

function showFile(selectedFile) {
    var form = $(
        '<form action="/show" method="post">' +
            '<input type="hidden" name="selected-file" value="' +
            selectedFile +
            '" />' +
            '<input type="hidden" name="compare" value=' +
            $("#compare").is(":checked") +
            " />" +
            "</form>"
    );
    $("body").append(form);
    form.submit();
}

function excel() {
    var checkbox = document.getElementsByName("selected_files");
    var selectedFiles = [];
    for (var i = 0; i < checkbox.length; i++) {
        if (checkbox[i].checked) {
            selectedFiles.push(checkbox[i].value);
        }
    }
    if (selectedFiles.length == 0) {
        alert("Select at least one file");
        return;
    }
    var compare = $("#compare").is(":checked");
    excelFiles(selectedFiles, compare);
}

function setCheckBoxes(value) {
    var checkbox = document.getElementsByName("selected_files");
    for (var i = 0; i < checkbox.length; i++) {
        checkbox[i].checked = value;
    }
}

function redirectDouble(route, parameter) {
    location.href = "/" + route + "/" + parameter + "/" + $("#compare").is(":checked");
}

function redirectSimple(route) {
    location.href = "/" + route + "/" + $("#compare").is(":checked");
}
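For reference, the throwaway form that showFile() builds and submits amounts to a plain POST to the /show route; a Python sketch of the equivalent request (requests is used only to illustrate, it is not a project dependency, and the file name is one of the test fixtures):

import requests  # illustration only; not in requirements.txt

requests.post(
    "http://127.0.0.1:1234/show",
    data={
        "selected-file": "results_accuracy_STree_iMac27_2021-10-27_09:40:40_0.json",
        "compare": "false",  # mirrors $("#compare").is(":checked")
    },
)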

View File

@@ -0,0 +1,30 @@
{% from 'bootstrap5/nav.html' import render_nav_item %}
<nav class="navbar navbar-expand-sm navbar-light bg-light mb-4 justify-content-end">
<div class="container">
<button class="navbar-toggler"
type="button"
data-bs-toggle="collapse"
data-bs-target="#navbarSupportedContent"
aria-controls="navbarSupportedContent"
aria-expanded="false"
aria-label="Toggle navigation">
<span class="navbar-toggler-icon"></span>
</button>
<div class="collapse navbar-collapse" id="navbarSupportedContent">
<!-- Left side of navbar -->
<ul class="navbar-nav me-auto">
{{ render_nav_item('main.index', 'Home') }}
</ul>
<ul class="navbar-nav justify-content-end">
{{ render_nav_item('results.select', 'Results') }}
{{ render_nav_item('results.datasets', 'Datasets') }}
{{ render_nav_item('main.config', 'Config') }}
{% if current_user.is_authenticated %}
{{ render_nav_item('main.logout', 'Logout') }}
{% else %}
{{ render_nav_item('main.login', 'Login') }}
{% endif %}
</ul>
</div>
</div>
</nav>

View File

@@ -0,0 +1,27 @@
<!DOCTYPE html>
<html lang="en">
<head>
{% block head %}
<meta charset="utf-8">
<meta name="viewport"
content="width=device-width, initial-scale=1, shrink-to-fit=no">
{% block styles %}{{ bootstrap.load_css() }}{% endblock %}
<title>Benchmark</title>
{% endblock %}
</head>
<body>
{% include "_nav.html" %}
{% with messages = get_flashed_messages() %}
{% if messages %}
{% for message in messages %}<div class="alert alert-info" role="alert">{{ message }}</div>{% endfor %}
{% endif %}
{% endwith %}
<div class="container">
{% block content %}{% endblock %}
</div>
{% block jscript %}
<script src="https://code.jquery.com/jquery-3.6.0.min.js"></script>
{{ bootstrap.load_js() }}
{% endblock %}
</body>
</html>

View File

@@ -0,0 +1,5 @@
{% extends "base.html" %}
{% block content %}
<h1>Home</h1>
<p>Welcome to the home page!</p>
{% endblock content %}

View File

@@ -0,0 +1,5 @@
{% extends "base.html" %}
{% block content %}
<h1>My First Heading</h1>
<p>My first paragraph.</p>
{% endblock %}

View File

@@ -0,0 +1,6 @@
{% extends 'base.html' %}
{% from 'bootstrap5/form.html' import render_form %}
{% block content %}
<h2>Login</h2>
{{ render_form(form) }}
{% endblock content %}

View File

@@ -68,7 +68,7 @@ class ArgumentsTest(TestBase):
         test_args = ["-n", "3", "-k", "date"]
         with self.assertRaises(SystemExit):
             arguments.parse(test_args)
-        self.assertRegex(
+        self.assertRegexpMatches(
             stderr.getvalue(),
             r"error: the following arguments are required: -m/--model",
         )
@@ -79,7 +79,7 @@ class ArgumentsTest(TestBase):
         test_args = ["-n", "3", "-m", "SVC"]
         with self.assertRaises(SystemExit):
             arguments.parse(test_args)
-        self.assertRegex(
+        self.assertRegexpMatches(
             stderr.getvalue(),
             r"error: the following arguments are required: -k/--key",
         )
@@ -114,7 +114,7 @@ class ArgumentsTest(TestBase):
         test_args = None
         with self.assertRaises(SystemExit):
             arguments.parse(test_args)
-        self.assertRegex(
+        self.assertRegexpMatches(
             stderr.getvalue(),
             r"error: the following arguments are required: -m/--model, "
             "-k/--key, --title",

View File

@@ -102,7 +102,7 @@ class ModelTest(TestBase):
         test = {
             "STree": ((11, 6, 4), 1.0),
             "Wodt": ((303, 152, 50), 0.9382022471910112),
-            "ODTE": ((786, 443, 337), 1.0),
+            "ODTE": ((7.86, 4.43, 3.37), 1.0),
             "Cart": ((23, 12, 5), 1.0),
             "SVC": ((0, 0, 0), 0.7078651685393258),
             "RandomForest": ((21.3, 11, 5.26), 1.0),

View File

@@ -118,7 +118,7 @@ class UtilTest(TestBase):
     def test_Files_get_results(self):
         os.chdir(os.path.dirname(os.path.abspath(__file__)))
         self.assertCountEqual(
-            Files.get_all_results(hidden=False),
+            Files().get_all_results(hidden=False),
             [
                 "results_accuracy_STree_iMac27_2021-10-27_09:40:40_0.json",
                 "results_accuracy_STree_iMac27_2021-09-30_11:42:07_0.json",
@@ -130,7 +130,7 @@ class UtilTest(TestBase):
             ],
         )
         self.assertCountEqual(
-            Files.get_all_results(hidden=True),
+            Files().get_all_results(hidden=True),
             [
                 "results_accuracy_STree_iMac27_2021-11-01_23:55:16_0.json",
                 "results_accuracy_XGBoost_MacBookpro16_2022-05-04_11:00:35_"
@@ -143,7 +143,7 @@ class UtilTest(TestBase):
         # check with results
         os.rename(Folders.results, f"{Folders.results}.test")
         try:
-            Files.get_all_results(hidden=False)
+            Files().get_all_results(hidden=False)
         except ValueError:
             pass
         else:
@@ -153,7 +153,7 @@ class UtilTest(TestBase):
         # check with hidden_results
         os.rename(Folders.hidden_results, f"{Folders.hidden_results}.test")
         try:
-            Files.get_all_results(hidden=True)
+            Files().get_all_results(hidden=True)
         except ValueError:
             pass
         else:

View File

@@ -1,2 +1,2 @@
-iris;class;all
-wine;class;[0, 1]
+iris,class,all
+wine,class,[0, 1]

View File

@@ -6,7 +6,7 @@
             "kernel": "liblinear",
             "multiclass_strategy": "ovr"
         },
-        "v. 1.4.0, Computed on Test on 2022-02-22 at 12:00:00 took 1s"
+        "v. 1.3.1, Computed on Test on 2022-02-22 at 12:00:00 took 1s"
     ],
     "balloons": [
         0.625,
@@ -15,6 +15,6 @@
             "kernel": "linear",
             "multiclass_strategy": "ovr"
         },
-        "v. 1.4.0, Computed on Test on 2022-02-22 at 12:00:00 took 1s"
+        "v. 1.3.1, Computed on Test on 2022-02-22 at 12:00:00 took 1s"
     ]
 }

View File

@@ -120,7 +120,7 @@ class BeMainTest(TestBase):
             module.main(parameter)
         self.assertEqual(msg.exception.code, 2)
         self.assertEqual(stderr.getvalue(), "")
-        self.assertRegex(stdout.getvalue(), message)
+        self.assertRegexpMatches(stdout.getvalue(), message)
 
     def test_be_main_best_params_non_existent(self):
         model = "GBC"

View File

@@ -1,4 +1,4 @@
-1;1;"Datasets used in benchmark ver. 1.0.1"
+1;1;"Datasets used in benchmark ver. 0.5.0"
 2;1;" Default score accuracy"
 2;2;"Cross validation"
 2;6;"5 Folds"

View File

@@ -1,4 +1,4 @@
-Datasets used in benchmark ver. 1.0.1
+Datasets used in benchmark ver. 0.5.0
 Dataset                        Sampl. Feat. Cont Cls Balance
 ============================== ====== ===== ==== === ==========================================

View File

@@ -1,11 +1,18 @@
 pandas
 scikit-learn
 scipy
+python-dotenv
+flask
+bootstrap-flask
+flask-wtf
+flask-login
+flask-migrate
+flask_sqlalchemy
 odte
 cython
 fimdlp
 mufs
-#bayesclass @ git+ssh://git@github.com/doctorado-ml/bayesclass.git
+bayesclass @ git+ssh://git@github.com/doctorado-ml/bayesclass.git
 xlsxwriter
 openpyxl
 tqdm
@@ -13,4 +20,3 @@ xgboost
 graphviz
 Wodt @ git+ssh://git@github.com/doctorado-ml/Wodt.git#egg=Wodt
 unittest-xml-reporting
-flask

View File

@@ -39,7 +39,6 @@ def script_names():
         "report",
         "summary",
         "init_project",
-        "flask",
     ]
     result = []
     for script in scripts: