Mirror of https://github.com/Doctorado-ML/benchmark.git (synced 2025-08-17 16:35:54 +00:00)

Compare commits (45 commits)
Commits (SHA1):
0e724f2c6b
cf8fd3454e
162cdc2da1
765112073c
69e21584bd
419c899c94
2a2ed81a6c
4c5502611a
70f1da5fc7
14dba5edb8
a31d62263d
3f3a18e4fe
6844d13973
4b17cc2230
257cb8e95a
34b4cb6477
0b258595f9
ff25581e99
aeec3a65af
b7d26b82b1
f7ed11562b
a51fed6281
7d5f3058c3
54d141e861
04ea568c71
d8285eb2bb
5f7fb7d5ac
dd3cb91951
40af738ed9
10c352fdb5
1b362f2110
007c419979
2df055334c
395a64abb7
8fe4b888b8
60086b3925
c55a0b29ab
c2415576c9
663a0b0258
655c1db889
e17e7d4e00
be62e38e77
83cfc3e5f5
3928b9c583
219b626061
@@ -1,12 +1,9 @@
[](https://github.com/Doctorado-ML/benchmark/actions/workflows/main.yml)
[](https://codecov.io/gh/Doctorado-ML/benchmark)
[](https://sonar.rmontanana.es/dashboard?id=benchmark)
[](https://sonar.rmontanana.es/dashboard?id=benchmark)


# benchmark

Benchmarking models
Benchmarking Python models

## Experimentation
@@ -13,21 +13,27 @@ ALL_METRICS = (
class EnvData:
    @staticmethod
    def load():
        args = {}
    def __init__(self):
        self.args = {}

    def load(self):
        try:
            with open(Files.dot_env) as f:
                for line in f.read().splitlines():
                    if line == "" or line.startswith("#"):
                        continue
                    key, value = line.split("=")
                    args[key] = value
                    self.args[key] = value
        except FileNotFoundError:
            print(NO_ENV, file=sys.stderr)
            exit(1)
        else:
            return args
            return self.args

    def save(self):
        with open(Files.dot_env, "w") as f:
            for key, value in self.args.items():
                f.write(f"{key}={value}\n")


class EnvDefault(argparse.Action):
@@ -35,7 +41,7 @@ class EnvDefault(argparse.Action):
    def __init__(
        self, envvar, required=True, default=None, mandatory=False, **kwargs
    ):
        self._args = EnvData.load()
        self._args = EnvData().load()
        self._overrides = {}
        if required and not mandatory:
            default = self._args[envvar]
@@ -154,6 +160,15 @@ class Arguments(argparse.ArgumentParser):
                "help": "Ignore nan results",
            },
        ],
        "iwss": [
            ("--iwss",),
            {
                "default": False,
                "action": "store_true",
                "required": False,
                "help": "Do IWSS with training set and then apply to test set",
            },
        ],
        "key": [
            ("-k", "--key"),
            {
@@ -229,6 +244,19 @@ class Arguments(argparse.ArgumentParser):
                "help": "number of folds",
            },
        ],
        "output": [
            ("-o", "--output"),
            {
                "type": str,
                "default": "local",
                "choices": ["local", "docker"],
                "required": False,
                "help": (
                    "in be_flask tells if it is running in local or "
                    "in docker {local, docker}"
                ),
            },
        ],
        "platform": [
            ("-P", "--platform"),
            {
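The EnvData hunks above move from a static `load()` to an instance API with a new `save()`, which is why callers throughout the rest of this diff change from `EnvData.load()` to `EnvData().load()`. A minimal usage sketch, assuming a `.env` file of `key=value` lines as parsed above (the `platform` key is only an illustrative example):

from benchmark.Arguments import EnvData

env = EnvData()                 # the args dict now lives on the instance
args = env.load()               # reads Files.dot_env, skipping blanks and "#" comments
args["platform"] = "local"      # hypothetical key, shown only for illustration
env.save()                      # new in this diff: writes the dict back as key=value lines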
@@ -32,6 +32,8 @@ class DatasetsArff:
    def get_range_features(X, c_features):
        if c_features.strip() == "all":
            return list(range(X.shape[1]))
        if c_features.strip() == "none":
            return []
        return json.loads(c_features)

    def load(self, name, class_name):
@@ -109,7 +111,7 @@ class DatasetsSurcov:

class Datasets:
    def __init__(self, dataset_name=None, discretize=None):
        env_data = EnvData.load()
        env_data = EnvData().load()
        # DatasetsSurcov, DatasetsTanveer, DatasetsArff,...
        source_name = getattr(
            __import__(__name__),
@@ -129,29 +131,28 @@ class Datasets:

    def _init_names(self, dataset_name):
        file_name = os.path.join(self.dataset.folder(), Files.index)
        default_class = "class"
        self.continuous_features = {}
        with open(file_name) as f:
            sets = f.read().splitlines()
            sets = [x for x in sets if not x.startswith("#")]
        class_names = [default_class] * len(sets)
        if "," in sets[0]:
            result = []
            class_names = []
            for data in sets:
                name, class_name, features = data.split(",", 2)
                result.append(name)
                class_names.append(class_name)
                self.continuous_features[name] = features
            sets = result
        else:
            for name in sets:
                self.continuous_features[name] = None
        results = []
        class_names = []
        for set_name in sets:
            try:
                name, class_name, features = set_name.split(";")
            except ValueError:
                class_name = "class"
                features = "all"
                name = set_name
            results.append(name)
            class_names.append(class_name)
            features = features.strip()
            self.continuous_features[name] = features
        # Set as dataset list the dataset passed as argument
        if dataset_name is None:
            return class_names, sets
            return class_names, results
        try:
            class_name = class_names[sets.index(dataset_name)]
            class_name = class_names[results.index(dataset_name)]
        except ValueError:
            raise ValueError(f"Unknown dataset: {dataset_name}")
        return [class_name], [dataset_name]
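The rewritten `_init_names` above switches the index entries to semicolon-separated `name;class_name;features`, falling back to `class` and `all` when a line has no separators, and `get_range_features` turns the features field into column indices. A small sketch of that parsing, with invented dataset names and a simplified helper that takes a feature count instead of the data matrix:

import json

def range_features(n_features, c_features):
    # simplified stand-in for DatasetsArff.get_range_features above
    if c_features.strip() == "all":
        return list(range(n_features))
    if c_features.strip() == "none":
        return []
    return json.loads(c_features)

# index lines are now "name;class_name;features" (names invented for illustration)
for line in ["iris;class;all", "glass;Type;none", "wine;class;[0, 3, 7]"]:
    name, class_name, features = line.split(";")
    print(name, class_name, range_features(4, features))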
@@ -7,12 +7,17 @@ import time
from datetime import datetime
from tqdm import tqdm
import numpy as np
from mufs import MUFS
from sklearn.model_selection import (
    StratifiedKFold,
    KFold,
    GridSearchCV,
    cross_validate,
)
from sklearn.svm import LinearSVC
from sklearn.feature_selection import SelectFromModel
from sklearn.preprocessing import label_binarize
from sklearn.base import clone
from sklearn.metrics import check_scoring, roc_auc_score
from .Utils import Folders, Files, NO_RESULTS
from .Datasets import Datasets
from .Models import Models
@@ -22,7 +27,7 @@ from .Arguments import EnvData
class Randomized:
    @staticmethod
    def seeds():
        return json.loads(EnvData.load()["seeds"])
        return json.loads(EnvData().load()["seeds"])


class BestResults:
@@ -115,9 +120,10 @@ class Experiment:
        ignore_nan=True,
        fit_features=None,
        discretize=None,
        iwss=False,
        folds=5,
    ):
        env_data = EnvData.load()
        env_data = EnvData().load()
        today = datetime.now()
        self.time = today.strftime("%H:%M:%S")
        self.date = today.strftime("%Y-%m-%d")
@@ -176,6 +182,7 @@ class Experiment:
        self.random_seeds = Randomized.seeds()
        self.results = []
        self.duration = 0
        self.iwss = iwss
        self._init_experiment()

    def get_output_file(self):
@@ -212,52 +219,362 @@ class Experiment:
        res["state_names"] = states
        return res

# def _n_fold_crossval(self, name, X, y, hyperparameters):
# if self.scores != []:
# raise ValueError("Must init experiment before!")

# loop = tqdm(
# self.random_seeds,
# position=1,
# leave=False,
# disable=not self.progress_bar,
# )

# for random_state in loop:
# loop.set_description(f"Seed({random_state:4d})")
# random.seed(random_state)
# np.random.seed(random_state)

# kfold = self.stratified_class(
# shuffle=True, random_state=random_state, n_splits=self.folds
# )

# clf = self._build_classifier(random_state, hyperparameters)
# fit_params = self._build_fit_params(name)
# self.version = Models.get_version(self.model_name, clf)

# with warnings.catch_warnings():
# warnings.filterwarnings("ignore")

# if self.iwss:
# # Manual cross-validation with IWSS feature selection
# fold_scores = []
# fold_times = []
# fold_estimators = []

# for train_idx, test_idx in kfold.split(X, y):
# # Split data
# X_train, X_test = X[train_idx], X[test_idx]
# y_train, y_test = y[train_idx], y[test_idx]

# # Apply IWSS feature selection
# transformer = MUFS()
# transformer.iwss(X_train, y_train, 0.5)
# X_train_selected = X_train[
# :, transformer.get_results()
# ]
# X_test_selected = X_test[:, transformer.get_results()]
# # print("Selected features:", transformer.get_results())
# # print(
# # f"Number of selected features: {X_train_selected.shape[1]}"
# # )

# # Clone classifier to avoid data leakage between folds
# clf_fold = clone(clf)

# # Fit the classifier
# start_time = time.time()
# clf_fold.fit(X_train_selected, y_train)
# fit_time = time.time() - start_time

# # Score on test set
# score_func = get_scorer(
# self.score_name.replace("-", "_")
# )
# # Handle scoring based on the metric type
# if self.score_name in [
# "roc_auc",
# "log_loss",
# "roc_auc_ovr",
# "roc_auc_ovo",
# ]:
# # These metrics need probabilities
# if hasattr(clf_fold, "predict_proba"):
# y_score = clf_fold.predict_proba(
# X_test_selected
# )

# # Handle missing classes in the fold
# if len(unique_train_classes) < len(
# unique_all_classes
# ):
# # Create a full probability matrix with zeros for missing classes
# y_score_full = np.zeros(
# (len(y_test), len(unique_all_classes))
# )
# for i, class_label in enumerate(
# unique_train_classes
# ):
# class_idx = np.where(
# unique_all_classes == class_label
# )[0][0]
# y_score_full[:, class_idx] = y_score[
# :, i
# ]
# y_score = y_score_full
# else:
# # Fallback to decision_function for SVM-like models
# y_score = clf_fold.decision_function(
# X_test_selected
# )

# test_score = score_func._score_func(
# y_test, y_score
# )
# else:
# # For metrics that use predictions (accuracy, f1, etc.)
# test_score = score_func(
# clf_fold, X_test_selected, y_test
# )

# fold_scores.append(test_score)
# fold_times.append(fit_time)
# fold_estimators.append(clf_fold)

# # Package results to match cross_validate output format
# res = {
# "test_score": np.array(fold_scores),
# "fit_time": np.array(fold_times),
# "estimator": fold_estimators,
# }
# else:
# # Original cross_validate approach
# res = cross_validate(
# clf,
# X,
# y,
# cv=kfold,
# fit_params=fit_params,
# return_estimator=True,
# scoring=self.score_name.replace("-", "_"),
# )

# # Handle NaN values
# if np.isnan(res["test_score"]).any():
# if not self.ignore_nan:
# print(res["test_score"])
# raise ValueError("NaN in results")
# results = res["test_score"][~np.isnan(res["test_score"])]
# else:
# results = res["test_score"]

# # Store results
# self.scores.extend(results)
# self.times.extend(res["fit_time"])

# for result_item in res["estimator"]:
# nodes_item, leaves_item, depth_item = (
# Models.get_complexity(self.model_name, result_item)
# )
# self.nodes.append(nodes_item)
# self.leaves.append(leaves_item)
# self.depths.append(depth_item)

# from sklearn.base import clone
# import numpy as np
# import time
# import warnings
# from tqdm import tqdm

    def _n_fold_crossval(self, name, X, y, hyperparameters):
        if self.scores != []:
            raise ValueError("Must init experiment before!")

        # Get all unique classes and check data
        unique_all_classes = np.sort(np.unique(y))
        n_classes = len(unique_all_classes)

        # Check if we have enough samples per class for stratified k-fold
        min_samples_per_class = np.min(np.bincount(y))
        if min_samples_per_class < self.folds:
            warnings.warn(
                f"Class imbalance detected: minimum class has {min_samples_per_class} samples. "
                f"Consider using fewer folds or handling imbalanced data."
            )

        loop = tqdm(
            self.random_seeds,
            position=1,
            leave=False,
            disable=not self.progress_bar,
        )

        for random_state in loop:
            loop.set_description(f"Seed({random_state:4d})")
            random.seed(random_state)
            np.random.seed(random_state)

            kfold = self.stratified_class(
                shuffle=True, random_state=random_state, n_splits=self.folds
            )

            clf = self._build_classifier(random_state, hyperparameters)
            fit_params = self._build_fit_params(name)
            self.version = Models.get_version(self.model_name, clf)

            # Check if the classifier supports probability predictions
            scorer = check_scoring(clf, scoring="roc_auc_ovr")
            if not hasattr(clf, "predict_proba") and not hasattr(
                clf, "decision_function"
            ):
                raise ValueError(
                    f"Classifier {self.model_name} doesn't support probability predictions "
                    "required for ROC-AUC scoring"
                )

            with warnings.catch_warnings():
                warnings.filterwarnings("ignore")
                res = cross_validate(
                    clf,
                    X,
                    y,
                    cv=kfold,
                    fit_params=fit_params,
                    return_estimator=True,
                    scoring=self.score_name.replace("-", "_"),
                )
                if np.isnan(res["test_score"]).any():
                    if not self.ignore_nan:
                        print(res["test_score"])
                        raise ValueError("NaN in results")
                    results = res["test_score"][~np.isnan(res["test_score"])]
                else:
                    results = res["test_score"]
                self.scores.extend(results)
                self.times.extend(res["fit_time"])
                for result_item in res["estimator"]:
                    nodes_item, leaves_item, depth_item = Models.get_complexity(
                        self.model_name, result_item
                    )
                    self.nodes.append(nodes_item)
                    self.leaves.append(leaves_item)
                    self.depths.append(depth_item)

            fold_scores = []
            fold_times = []
            fold_estimators = []

            for fold_idx, (train_idx, test_idx) in enumerate(
                kfold.split(X, y)
            ):
                # Split data
                X_train, X_test = X[train_idx], X[test_idx]
                y_train, y_test = y[train_idx], y[test_idx]

                # Check classes in this fold
                unique_test_classes = np.unique(y_test)
                n_test_classes = len(unique_test_classes)

                # Skip fold if we don't have at least 2 classes in test set
                if n_test_classes < 2:
                    warnings.warn(
                        f"Fold {fold_idx}: Test set has only {n_test_classes} class(es). "
                        f"Skipping this fold for ROC-AUC calculation."
                    )
                    fold_scores.append(np.nan)
                    fold_times.append(np.nan)
                    fold_estimators.append(None)
                    continue

                # Apply IWSS feature selection if enabled
                if self.iwss:
                    # transformer = (
                    #     MUFS(discrete=False)
                    #     if "cli_rad" in name
                    #     else MUFS(discrete=True)
                    # )
                    # transformer.iwss(X_train, y_train, 0.5)
                    # selected_features = transformer.get_results()
                    # Apply L1-based feature selection
                    # Using LinearSVC with L1 penalty
                    lsvc = LinearSVC(
                        C=0.1,  # Regularization parameter - adjust this for more/fewer features
                        penalty="l1",
                        dual=False,
                        max_iter=2000,
                        random_state=random_state,
                    )
                    selector = SelectFromModel(lsvc, prefit=False)
                    selector.fit(X_train, y_train)

                    # Transform the data
                    X_train_selected = selector.transform(X_train)
                    X_test_selected = selector.transform(X_test)

                    # Get information about selected features
                    selected_features = selector.get_support(indices=True)
                    n_selected = len(selected_features)
                    if len(selected_features) == 0:
                        warnings.warn(
                            f"Fold {fold_idx}: No features selected by IWSS. Using all features."
                        )
                        X_train_selected = X_train
                        X_test_selected = X_test
                    else:
                        X_train_selected = X_train[:, selected_features]
                        X_test_selected = X_test[:, selected_features]
                else:
                    X_train_selected = X_train
                    X_test_selected = X_test

                # Clone and fit classifier
                clf_fold = clone(clf)

                start_time = time.time()
                clf_fold.fit(X_train_selected, y_train)
                fit_time = time.time() - start_time

                # Get probability predictions
                y_proba = clf_fold.predict_proba(X_test_selected)

                # Calculate ROC-AUC score
                # Handle case where test set doesn't have all classes
                if len(clf_fold.classes_) != len(unique_test_classes):
                    # Map probabilities to only test classes
                    test_class_indices = [
                        np.where(clf_fold.classes_ == c)[0][0]
                        for c in unique_test_classes
                        if c in clf_fold.classes_
                    ]
                    y_proba = y_proba[:, test_class_indices]

                # Binarize labels for multi-class ROC-AUC
                y_test_binarized = label_binarize(
                    y_test, classes=unique_test_classes
                )

                # Calculate ROC-AUC with OVR strategy
                if n_test_classes == 2:
                    # Binary classification
                    test_score = roc_auc_score(y_test, y_proba[:, 1])
                else:
                    # Multi-class with macro-average
                    test_score = roc_auc_score(
                        y_test_binarized,
                        y_proba,
                        multi_class="ovr",
                        average="macro",
                    )

                fold_scores.append(test_score)
                fold_times.append(fit_time)
                fold_estimators.append(clf_fold)

            # Filter out NaN scores if ignore_nan is True
            scores_array = np.array(fold_scores)
            times_array = np.array(fold_times)

            if np.isnan(scores_array).any():
                if not self.ignore_nan:
                    nan_folds = np.where(np.isnan(scores_array))[0]
                    raise ValueError(
                        f"NaN scores in folds {nan_folds}. "
                        f"Set ignore_nan=True to skip these folds."
                    )
                else:
                    # Filter out NaN values
                    valid_mask = ~np.isnan(scores_array)
                    scores_array = scores_array[valid_mask]
                    times_array = times_array[valid_mask]
                    fold_estimators = [
                        e
                        for e, valid in zip(fold_estimators, valid_mask)
                        if valid
                    ]

            if len(scores_array) == 0:
                warnings.warn(
                    f"All folds resulted in NaN for seed {random_state}. Skipping."
                )
                continue

            # Store results
            self.scores.extend(scores_array)
            self.times.extend(times_array)

            # Store complexity metrics
            for estimator in fold_estimators:
                if estimator is not None:
                    nodes_item, leaves_item, depth_item = (
                        Models.get_complexity(self.model_name, estimator)
                    )
                    self.nodes.append(nodes_item)
                    self.leaves.append(leaves_item)
                    self.depths.append(depth_item)

    def _add_results(self, name, hyperparameters, samples, features, classes):
        record = {}
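The heart of the new `_n_fold_crossval` is the per-fold ROC-AUC computation: binary folds score the positive-class probabilities directly, multi-class folds binarize the labels and average one-vs-rest. A self-contained sketch of just that scoring step, with synthetic data standing in for one train/test fold (the classifier is only an example):

import numpy as np
from sklearn.datasets import make_classification
from sklearn.ensemble import RandomForestClassifier
from sklearn.metrics import roc_auc_score
from sklearn.preprocessing import label_binarize

X, y = make_classification(
    n_samples=300, n_classes=3, n_informative=6, random_state=0
)
clf = RandomForestClassifier(random_state=0).fit(X[:200], y[:200])
X_test, y_test = X[200:], y[200:]

y_proba = clf.predict_proba(X_test)
classes = np.unique(y_test)
if len(classes) == 2:
    # binary fold: score the positive-class column
    score = roc_auc_score(y_test, y_proba[:, 1])
else:
    # multi-class fold: one-vs-rest with macro averaging, as in the hunk above
    y_bin = label_binarize(y_test, classes=classes)
    score = roc_auc_score(y_bin, y_proba, multi_class="ovr", average="macro")
print(f"fold ROC-AUC: {score:.4f}")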
@@ -8,7 +8,8 @@ from sklearn.ensemble import (
)
from sklearn.svm import SVC
from stree import Stree
from bayesclass.clfs import TAN, KDB, AODE, KDBNew, TANNew, AODENew

# from bayesclass.clfs import TAN, KDB, AODE, KDBNew, TANNew, AODENew, BoostAODE
from wodt import Wodt
from odte import Odte
from xgboost import XGBClassifier
@@ -39,12 +40,13 @@ class Models:
    def define_models(random_state):
        return {
            "STree": Stree(random_state=random_state),
            "TAN": TAN(random_state=random_state),
            "KDB": KDB(k=2),
            "TANNew": TANNew(random_state=random_state),
            "KDBNew": KDBNew(k=2),
            "AODENew": AODENew(random_state=random_state),
            "AODE": AODE(random_state=random_state),
            # "TAN": TAN(random_state=random_state),
            # "KDB": KDB(k=2),
            # "TANNew": TANNew(random_state=random_state),
            # "KDBNew": KDBNew(k=2),
            # "AODENew": AODENew(random_state=random_state),
            # "AODE": AODE(random_state=random_state),
            # "BoostAODE": BoostAODE(random_state=random_state),
            "Cart": DecisionTreeClassifier(random_state=random_state),
            "ExtraTree": ExtraTreeClassifier(random_state=random_state),
            "Wodt": Wodt(random_state=random_state),
@@ -69,6 +71,7 @@ class Models:
                algorithm="SAMME",
                random_state=random_state,
            ),
            "AdaBoost": AdaBoostClassifier(random_state=random_state),
            "GBC": GradientBoostingClassifier(random_state=random_state),
            "RandomForest": RandomForestClassifier(random_state=random_state),
            "Mock": MockModel(random_state=random_state),
@@ -97,13 +100,13 @@ class Models:
            nodes = 0
            leaves = result.get_n_leaves()
            depth = 0
        elif name.startswith("Bagging") or name.startswith("AdaBoost"):
        elif name.startswith("Bagging") or name == "AdaBoostStree":
            nodes, leaves = list(
                zip(*[x.nodes_leaves() for x in result.estimators_])
            )
            nodes, leaves = mean(nodes), mean(leaves)
            depth = mean([x.depth_ for x in result.estimators_])
        elif name == "RandomForest":
        elif name == "RandomForest" or name == "AdaBoost":
            leaves = mean([x.get_n_leaves() for x in result.estimators_])
            depth = mean([x.get_depth() for x in result.estimators_])
            nodes = mean([x.tree_.node_count for x in result.estimators_])
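`get_complexity` above now treats plain `AdaBoost` like `RandomForest`, averaging node, leaf and depth counts over the fitted sklearn trees, while `AdaBoostStree` keeps the `nodes_leaves()` path. A minimal sketch of that averaging on a fitted sklearn ensemble (dataset and defaults chosen only for illustration):

from statistics import mean
from sklearn.datasets import load_iris
from sklearn.ensemble import AdaBoostClassifier

X, y = load_iris(return_X_y=True)
clf = AdaBoostClassifier(random_state=0).fit(X, y)

# same aggregation the diff applies to "RandomForest" and "AdaBoost"
nodes = mean(x.tree_.node_count for x in clf.estimators_)
leaves = mean(x.get_n_leaves() for x in clf.estimators_)
depth = mean(x.get_depth() for x in clf.estimators_)
print(nodes, leaves, depth)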
@@ -71,7 +71,6 @@ class Report(BaseReport):
        self._load_best_results(
            self.data["score_name"], self.data["model"]
        )
        self._compare_totals = {}
        self.header_line("*")
        self.header_line(
            f" {self.data['model']} ver. {self.data['version']}"

@@ -52,10 +52,11 @@ class BaseReport(abc.ABC):
        self.score_name = self.data["score_name"]
        self.__load_env_data()
        self.__compute_best_results_ever()
        self._compare_totals = {}

    def __load_env_data(self):
        # Set the labels for nodes, leaves, depth
        env_data = EnvData.load()
        env_data = EnvData().load()
        self.nodes_label = env_data["nodes"]
        self.leaves_label = env_data["leaves"]
        self.depth_label = env_data["depth"]
@@ -107,9 +108,11 @@ class BaseReport(abc.ABC):
        status = (
            Symbols.cross
            if accuracy <= max_value
            else Symbols.upward_arrow
            if accuracy > max_value
            else " "
            else (
                Symbols.upward_arrow
                if accuracy > max_value
                else " "
            )
        )
        if status != " ":
            if status not in self._compare_totals:
@@ -148,8 +151,11 @@ class BaseReport(abc.ABC):


class StubReport(BaseReport):
    def __init__(self, file_name):
    def __init__(self, file_name, compare=False):
        self.compare = compare
        super().__init__(file_name=file_name, best_file=False)
        if self.compare:
            self._load_best_results(self.score_name, self.data["model"])

    def print_line(self, line) -> None:
        pass
@@ -157,6 +163,11 @@ class StubReport(BaseReport):
    def header(self) -> None:
        self.title = self.data["title"]
        self.duration = self.data["duration"]
        self.model = self.data["model"]
        self.date = self.data["date"]
        self.time = self.data["time"]
        self.metric = self.data["score_name"]
        self.platform = self.data["platform"]

    def footer(self, accuracy: float) -> None:
        self.accuracy = accuracy
@@ -165,7 +176,7 @@ class StubReport(BaseReport):

class Summary:
    def __init__(self, hidden=False, compare=False) -> None:
        self.results = Files().get_all_results(hidden=hidden)
        self.results = Files.get_all_results(hidden=hidden)
        self.data = []
        self.data_filtered = []
        self.datasets = {}
@@ -191,9 +202,11 @@ class Summary:
            self.models.add(model)
            report = StubReport(
                os.path.join(
                    Folders.hidden_results
                    if self.hidden
                    else Folders.results,
                    (
                        Folders.hidden_results
                        if self.hidden
                        else Folders.results
                    ),
                    result,
                )
            )
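The new `compare` flag on `StubReport` is what the Flask views further down rely on: when set, the report also loads the best known results so each score can be annotated. A short sketch mirroring `process_data` in the new `main.py` (the file name is a placeholder for an existing result file):

import os
from benchmark.ResultsBase import StubReport
from benchmark.Utils import Folders

# "some_result.json" is a placeholder; use a real file from Folders.results
report = StubReport(
    os.path.join(Folders.results, "some_result.json"), compare=True
)
report.report()                # fills duration, score, title, ... as used in main.py
print(report._compare_totals)  # symbol -> count, summarised by process_data()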
@@ -299,11 +299,11 @@ class ReportDatasets:
    color2 = "#FDE9D9"
    color3 = "#B1A0C7"

    def __init__(self, excel=False, book=None):
    def __init__(self, excel=False, book=None, output=True):
        self.excel = excel
        self.env = EnvData().load()
        self.close = False
        self.output = True
        self.output = output
        self.header_text = f"Datasets used in benchmark ver. {__version__}"
        if excel:
            self.max_length = 0
@@ -620,7 +620,7 @@ class Benchmark:
        self.__compute_best_results_ever()

    def __compute_best_results_ever(self):
        args = EnvData.load()
        args = EnvData().load()
        key = args["source_data"]
        best = BestResultsEver()
        _, self.best_score_value = best.get_name_value(key, self._score)
@@ -15,6 +15,7 @@ class Folders:
    img = "img"
    excel = "excel"
    sql = "sql"
    current = os.getcwd()

    @staticmethod
    def src():
@@ -10,7 +10,7 @@ from .Results import Report
from ._version import __version__

__author__ = "Ricardo Montañana Gómez"
__copyright__ = "Copyright 2020-2023, Ricardo Montañana Gómez"
__copyright__ = "Copyright 2020-2024, Ricardo Montañana Gómez"
__license__ = "MIT License"
__author_email__ = "ricardo.montanana@alu.uclm.es"
@@ -1 +1 @@
__version__ = "0.5.0"
__version__ = "1.0.1"
benchmark/scripts/app/app.py (new executable file, 20 lines)
@@ -0,0 +1,20 @@
#!/usr/bin/env python
from benchmark.Arguments import EnvData
from flask import Flask
from .main import main, OUTPUT

FRAMEWORK = "framework"
FRAMEWORKS = "frameworks"
TEST = "test"


def create_app(output="local"):
    app = Flask(__name__)
    config = EnvData().load()
    app.register_blueprint(main)
    app.config[FRAMEWORK] = config[FRAMEWORK]
    app.config[FRAMEWORKS] = ["bootstrap", "bulma"]
    app.config[OUTPUT] = output
    app.jinja_env.auto_reload = True
    app.config["TEMPLATES_AUTO_RELOAD"] = True
    return app
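The new `app.py` is a standard Flask application factory; `create_app` reads the `framework` key from the `.env` file via `EnvData` and stores the `output` mode added to `Arguments.py`. A hedged sketch of serving it locally, assuming the `benchmark.scripts.app` package is importable and a valid `.env` file is present (host and port are arbitrary):

from benchmark.scripts.app.app import create_app

app = create_app(output="local")   # "local" or "docker", matching the new --output option

if __name__ == "__main__":
    app.run(host="127.0.0.1", port=5000, debug=True)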
benchmark/scripts/flask_app/results/results.ant.py → benchmark/scripts/app/main.py (renamed, Normal file → Executable file, 166 lines)
@@ -1,91 +1,89 @@
#!/usr/bin/env python
# import os
# import json
# import shutil
# import xlsxwriter
# from benchmark.Utils import Files, Folders
# from benchmark.Arguments import EnvData
# from benchmark.ResultsBase import StubReport
# from benchmark.ResultsFiles import Excel, ReportDatasets
# from benchmark.Datasets import Datasets
# from flask import Blueprint, current_app, send_file
# from flask import render_template, request, redirect, url_for
from flask import Blueprint, render_template
import os
import json
import shutil
import xlsxwriter
from dotenv import dotenv_values
from benchmark.Utils import Files, Folders
from benchmark.Arguments import EnvData
from benchmark.ResultsBase import StubReport
from benchmark.ResultsFiles import Excel, ReportDatasets
from benchmark.Datasets import Datasets
from flask import Blueprint, current_app, send_file
from flask import render_template, request, redirect, url_for


results = Blueprint("results", __name__, template_folder="results")
# FRAMEWORK = "framework"
# FRAMEWORKS = "frameworks"
# OUTPUT = "output"
# TEST = "test"
main = Blueprint("main", __name__)
FRAMEWORK = "framework"
FRAMEWORKS = "frameworks"
OUTPUT = "output"
TEST = "test"


# class AjaxResponse:
#     def __init__(self, success, file_name, code=200):
#         self.success = success
#         self.file_name = file_name
#         self.code = code
class AjaxResponse:
    def __init__(self, success, file_name, code=200):
        self.success = success
        self.file_name = file_name
        self.code = code

#     def to_string(self):
#         return (
#             json.dumps(
#                 {
#                     "success": self.success,
#                     "file": self.file_name,
#                     "output": current_app.config[OUTPUT],
#                 }
#             ),
#             self.code,
#             {"ContentType": "application/json"},
#         )
    def to_string(self):
        return (
            json.dumps(
                {
                    "success": self.success,
                    "file": self.file_name,
                    "output": current_app.config[OUTPUT],
                }
            ),
            self.code,
            {"ContentType": "application/json"},
        )


# def process_data(file_name, compare, data):
#     report = StubReport(
#         os.path.join(Folders.results, file_name), compare=compare
#     )
#     new_list = []
#     for result in data["results"]:
#         symbol = report._compute_status(result["dataset"], result["score"])
#         result["symbol"] = symbol if symbol != " " else " "
#         new_list.append(result)
#     data["results"] = new_list
#     # Compute summary with explanation of symbols
#     summary = {}
#     for key, value in report._compare_totals.items():
#         summary[key] = (report._status_meaning(key), value)
#     return summary
def process_data(file_name, compare, data):
    report = StubReport(
        os.path.join(Folders.results, file_name), compare=compare
    )
    new_list = []
    for result in data["results"]:
        symbol = report._compute_status(result["dataset"], result["score"])
        result["symbol"] = symbol if symbol != " " else " "
        new_list.append(result)
    data["results"] = new_list
    # Compute summary with explanation of symbols
    summary = {}
    for key, value in report._compare_totals.items():
        summary[key] = (report._status_meaning(key), value)
    return summary


@results.route("/results/<compare>")
def results(compare="False"):
    # # Get a list of files in a directory
    # files = {}
    # names = Files.get_all_results(hidden=False)
    # for name in names:
    #     report = StubReport(os.path.join(Folders.results, name))
    #     report.report()
    #     files[name] = {
    #         "duration": report.duration,
    #         "score": report.score,
    #         "title": report.title,
    #     }
    # candidate = current_app.config[FRAMEWORKS].copy()
    # candidate.remove(current_app.config[FRAMEWORK])
    # return render_template(
    #     "select.html",
    #     files=files,
    #     candidate=candidate[0],
    #     framework=current_app.config[FRAMEWORK],
    #     compare=compare.capitalize() == "True",
    # )
    return render_template("test.html")
@main.route("/index/<compare>")
@main.route("/")
def index(compare="False"):
    # Get a list of files in a directory
    files = {}
    names = Files.get_all_results(hidden=False)
    for name in names:
        report = StubReport(os.path.join(Folders.results, name))
        report.report()
        files[name] = {
            "duration": report.duration,
            "score": report.score,
            "title": report.title,
        }
    candidate = current_app.config[FRAMEWORKS].copy()
    candidate.remove(current_app.config[FRAMEWORK])
    return render_template(
        "select.html",
        files=files,
        candidate=candidate[0],
        framework=current_app.config[FRAMEWORK],
        compare=compare.capitalize() == "True",
    )


"""
@results.route("/datasets/<compare>")
@results.route("datasets")
def datasets(compare=False):
@main.route("/datasets/<compare>")
def datasets(compare):
    dt = Datasets()
    datos = []
    for dataset in dt:
@@ -98,11 +96,11 @@ def datasets(compare=False):
    )


@results.route("/showfile/<file_name>/<compare>")
@main.route("/showfile/<file_name>/<compare>")
def showfile(file_name, compare, back=None):
    compare = compare.capitalize() == "True"
    back = request.args["url"] if back is None else back
    print(f"back [{back}]")
    app_config = dotenv_values(".env")
    with open(os.path.join(Folders.results, file_name)) as f:
        data = json.load(f)
    try:
@@ -116,10 +114,11 @@ def showfile(file_name, compare, back=None):
        summary=summary,
        framework=current_app.config[FRAMEWORK],
        back=back,
        app_config=app_config,
    )


@results.route("/show", methods=["post"])
@main.route("/show", methods=["post"])
def show():
    selected_file = request.form["selected-file"]
    compare = request.form["compare"]
@@ -132,7 +131,7 @@ def show():
    )


@results.route("/excel", methods=["post"])
@main.route("/excel", methods=["post"])
def excel():
    selected_files = request.json["selectedFiles"]
    compare = request.json["compare"]
@@ -172,7 +171,7 @@ def excel():
    return AjaxResponse(True, Files.be_list_excel).to_string()


@results.route("/download/<file_name>")
@main.route("/download/<file_name>")
def download(file_name):
    src = os.path.join(Folders.current, Folders.excel, file_name)
    dest = os.path.join(
@@ -182,7 +181,7 @@ def download(file_name):
    return send_file(dest, as_attachment=True)


@results.route("/config/<framework>/<compare>")
@main.route("/config/<framework>/<compare>")
def config(framework, compare):
    if framework not in current_app.config[FRAMEWORKS]:
        message = f"framework {framework} not supported"
@@ -195,7 +194,7 @@ def config(framework, compare):
    return redirect(url_for("main.index", compare=compare))


@results.route("/best_results/<file>/<compare>")
@main.route("/best_results/<file>/<compare>")
def best_results(file, compare):
    compare = compare.capitalize() == "True"
    try:
@@ -209,4 +208,3 @@ def best_results(file, compare):
        compare=compare,
        framework=current_app.config[FRAMEWORK],
    )
"""
@@ -27,25 +27,4 @@ tbody {
.btn-small {
    padding: 0.25rem 0.5rem;
    font-size: 0.75rem;
}

body {
    padding-bottom: 20px;
}

.navbar {
    margin-bottom: 20px;
}

pre {
    background: #ddd;
    padding: 10px;
}

h2 {
    margin-top: 20px;
}

footer {
    margin: 20px;
}
benchmark/scripts/app/static/excel/.gitignore (vendored, new file, 1 line)
@@ -0,0 +1 @@
*.xlsx
20
benchmark/scripts/app/templates/base_bootstrap.html
Normal file
20
benchmark/scripts/app/templates/base_bootstrap.html
Normal file
@@ -0,0 +1,20 @@
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<title>{{ title }}</title>
|
||||
<link href="https://cdn.jsdelivr.net/npm/bootstrap@5.3.0-alpha3/dist/css/bootstrap.min.css" rel="stylesheet" integrity="sha384-KK94CHFLLe+nY2dmCWGMq91rCGa5gtU4mk92HdvYe+M/SXH301p5ILy+dN9+nJOZ" crossorigin="anonymous" />
|
||||
<link href="https://fonts.googleapis.com/css?family=Montserrat:300,400,500,600" rel="stylesheet" />
|
||||
<link rel="stylesheet" href="https://cdn.datatables.net/1.10.25/css/jquery.dataTables.min.css" />
|
||||
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/MaterialDesign-Webfont/7.1.96/css/materialdesignicons.css" integrity="sha512-lD1LHcZ8tFHvMFNeo6qOLY/HjzSPCasPJOAoir22byDxlZI1R71S5lZel8zRL2TZ+Dut1wOHfYgSU2lHXuL00w==" crossorigin="anonymous" referrerpolicy="no-referrer" />
|
||||
<link rel="stylesheet" href="{{ url_for('static', filename='css/main.css') }}" />
|
||||
</head>
|
||||
<body>
|
||||
{% block content %}
|
||||
|
||||
{% endblock %}
|
||||
</body>
|
||||
<script src="https://code.jquery.com/jquery-3.6.0.min.js"></script>
|
||||
{% block jscript %}
|
||||
|
||||
{% endblock %}
|
||||
</html>
|
19
benchmark/scripts/app/templates/base_bulma.html
Normal file
19
benchmark/scripts/app/templates/base_bulma.html
Normal file
@@ -0,0 +1,19 @@
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<title>{{ title }}</title>
|
||||
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/bulma/0.9.3/css/bulma.min.css" />
|
||||
<link rel="stylesheet" href="https://cdn.datatables.net/1.10.25/css/jquery.dataTables.min.css" />
|
||||
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/MaterialDesign-Webfont/7.1.96/css/materialdesignicons.css" integrity="sha512-lD1LHcZ8tFHvMFNeo6qOLY/HjzSPCasPJOAoir22byDxlZI1R71S5lZel8zRL2TZ+Dut1wOHfYgSU2lHXuL00w==" crossorigin="anonymous" referrerpolicy="no-referrer" />
|
||||
<link rel="stylesheet" href="{{ url_for('static', filename='css/main.css') }}" />
|
||||
</head>
|
||||
<body>
|
||||
{% block content %}
|
||||
|
||||
{% endblock %}
|
||||
</body>
|
||||
<script src="https://code.jquery.com/jquery-3.6.0.min.js"></script>
|
||||
{% block jscript %}
|
||||
|
||||
{% endblock %}
|
||||
</html>
|
68
benchmark/scripts/app/templates/datasets.html
Normal file
68
benchmark/scripts/app/templates/datasets.html
Normal file
@@ -0,0 +1,68 @@
|
||||
{% extends 'base_' ~ framework ~ '.html' %}
|
||||
{% macro javascript(file) %}
|
||||
<script src="{{ url_for('static', filename=file) }}"></script>
|
||||
{% endmacro %}
|
||||
{% if framework == 'bootstrap' %}
|
||||
{% set button_class = 'btn btn-primary btn-small' %}
|
||||
{% set h1_class = 'text-center' %}
|
||||
{% set table_class = 'table table-striped table-hover table-bordered' %}
|
||||
{% set head_class = 'bg-primary text-white' %}
|
||||
{% set text_right = 'text-end' %}
|
||||
{% set container = 'container' %}
|
||||
{% set selected = 'selected' %}
|
||||
{%- macro header(title, close, url) -%}
|
||||
<div class="p-4 bg-primary text-white">
|
||||
{%- if close -%}
|
||||
<button type="button" class="btn-close" aria-label="Close" onclick="location.href = '{{ url }}'"></button>
|
||||
{%- endif -%}
|
||||
<h1 class="alternate-font">{{ title }}</h1>
|
||||
</div>
|
||||
{%- endmacro -%}
|
||||
{% else %}
|
||||
{% set button_class = 'button is-primary is-small' %}
|
||||
{% set h1_class = 'title is-1 has-text-centered' %}
|
||||
{% set table_class = 'table is-striped is-hoverable cell-border is-bordered' %}
|
||||
{% set head_class = 'is-selected' %}
|
||||
{% set text_right = 'has-text-right' %}
|
||||
{% set container = 'container' %}
|
||||
{% set selected = 'is-selected' %}
|
||||
{%- macro header(title, close, url) -%}
|
||||
<div class="hero is-info is-bold">
|
||||
<div class="hero-body">
|
||||
{%- if close -%}
|
||||
<button class="delete is-large" onclick="location.href = '{{ url }}'"></button>
|
||||
{%- endif -%}
|
||||
<h1 class="is-size-3 alternate-font">{{ title }}</h1>
|
||||
</div>
|
||||
</div>
|
||||
{%- endmacro -%}
|
||||
{% endif %}
|
||||
{% block content %}
|
||||
<div class="{{ container }}">
|
||||
{{ header('Benchmark Datasets Report', True, url_for('main.index', compare = compare)) }}
|
||||
<button class="{{ button_class }}" onclick="excelFiles(['datasets'], false)"><i class="mdi mdi-file-excel"></i> Excel</button>
|
||||
{% include 'partials/datasets_table.html' %}
|
||||
</div>
|
||||
{% endblock %}
|
||||
{% block jscript %}
|
||||
{{ javascript("js/excelFiles.js") }}
|
||||
<script>
|
||||
$(document).ready(function () {
|
||||
$(document).ajaxStart(function(){
|
||||
$("body").addClass('ajaxLoading');
|
||||
});
|
||||
$(document).ajaxStop(function(){
|
||||
$("body").removeClass('ajaxLoading');
|
||||
});
|
||||
});
|
||||
// Check if row is selected
|
||||
$('#file-table tbody').on('click', 'tr', function () {
|
||||
if ($(this).hasClass('{{ selected }}')) {
|
||||
$(this).removeClass('{{ selected }}');
|
||||
} else {
|
||||
$('#file-table tbody tr.{{ selected }}').removeClass("{{ selected }}")
|
||||
$(this).addClass('{{ selected }}');
|
||||
}
|
||||
});
|
||||
</script>
|
||||
{% endblock %}
|
20
benchmark/scripts/app/templates/error.html
Normal file
20
benchmark/scripts/app/templates/error.html
Normal file
@@ -0,0 +1,20 @@
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<title>Error</title>
|
||||
<link href="https://cdn.jsdelivr.net/npm/bootstrap@5.3.0-alpha3/dist/css/bootstrap.min.css" rel="stylesheet"
|
||||
integrity="sha384-KK94CHFLLe+nY2dmCWGMq91rCGa5gtU4mk92HdvYe+M/SXH301p5ILy+dN9+nJOZ" crossorigin="anonymous">
|
||||
</head>
|
||||
<body>
|
||||
<div class="container">
|
||||
<div class="alert alert-danger my-5" role="alert">
|
||||
|
||||
<h4 class="alert-heading"><button class="btn-close btn-sm" type="button"
|
||||
onclick="location.href='/index/{{ compare }}';"></button>Error</h4>
|
||||
<p>There was an error processing action, {{ message }}. Please try again later.</p>
|
||||
<hr>
|
||||
<p class="mb-0">If the problem persists, please contact support.</p>
|
||||
</div>
|
||||
</div>
|
||||
</body>
|
||||
</html>
|
22
benchmark/scripts/app/templates/js/report.js
Normal file
22
benchmark/scripts/app/templates/js/report.js
Normal file
@@ -0,0 +1,22 @@
|
||||
$(document).ready(function () {
|
||||
// Check if row is selected
|
||||
$('#report-table tbody').on('click', 'tr', function () {
|
||||
if ($(this).hasClass('{{ selected }}')) {
|
||||
$(this).removeClass('{{ selected }}');
|
||||
} else {
|
||||
$('#report-table tbody tr.{{ selected }}').removeClass("{{ selected }}")
|
||||
$(this).addClass('{{ selected }}');
|
||||
}
|
||||
});
|
||||
$(document).ajaxStart(function(){
|
||||
$("body").addClass('ajaxLoading');
|
||||
});
|
||||
$(document).ajaxStop(function(){
|
||||
$("body").removeClass('ajaxLoading');
|
||||
});
|
||||
});
|
||||
function excelFile() {
|
||||
var selectedFiles = ["{{ file }}"];
|
||||
var compare = "{{ compare }}" == "True";
|
||||
excelFiles(selectedFiles, compare)
|
||||
}
|
97
benchmark/scripts/app/templates/js/select.js
Normal file
97
benchmark/scripts/app/templates/js/select.js
Normal file
@@ -0,0 +1,97 @@
|
||||
$(document).ready(function () {
|
||||
var table = $("#file-table").DataTable({
|
||||
paging: true,
|
||||
searching: true,
|
||||
ordering: true,
|
||||
info: true,
|
||||
"select.items": "row",
|
||||
pageLength: 25,
|
||||
columnDefs: [
|
||||
{
|
||||
targets: 8,
|
||||
orderable: false,
|
||||
},
|
||||
],
|
||||
//"language": {
|
||||
// "lengthMenu": "_MENU_"
|
||||
//}
|
||||
});
|
||||
$('#file-table').on( 'draw.dt', function () {
|
||||
enable_disable_best_buttons();
|
||||
} );
|
||||
// Check if row is selected
|
||||
$("#file-table tbody").on("click", "tr", function () {
|
||||
if ($(this).hasClass("{{ select.selected() }}")) {
|
||||
$(this).removeClass("{{ select.selected() }}");
|
||||
} else {
|
||||
table
|
||||
.$("tr.{{ select.selected() }}")
|
||||
.removeClass("{{ select.selected() }}");
|
||||
$(this).addClass("{{ select.selected() }}");
|
||||
}
|
||||
});
|
||||
// Show file with doubleclick
|
||||
$("#file-table tbody").on("dblclick", "tr", function () {
|
||||
showFile($(this).attr("id"));
|
||||
});
|
||||
$(document).ajaxStart(function () {
|
||||
$("body").addClass("ajaxLoading");
|
||||
});
|
||||
$(document).ajaxStop(function () {
|
||||
$("body").removeClass("ajaxLoading");
|
||||
});
|
||||
$('#compare').change(function() {
|
||||
enable_disable_best_buttons();
|
||||
});
|
||||
enable_disable_best_buttons();
|
||||
});
|
||||
function enable_disable_best_buttons(){
|
||||
if ($('#compare').is(':checked')) {
|
||||
$("[name='best_buttons']").addClass("tag is-link is-normal");
|
||||
$("[name='best_buttons']").removeAttr("hidden");
|
||||
} else {
|
||||
$("[name='best_buttons']").removeClass("tag is-link is-normal");
|
||||
$("[name='best_buttons']").attr("hidden", true);
|
||||
}
|
||||
}
|
||||
function showFile(selectedFile) {
|
||||
var form = $(
|
||||
'<form action="/show" method="post">' +
|
||||
'<input type="hidden" name="selected-file" value="' +
|
||||
selectedFile +
|
||||
'" />' +
|
||||
'<input type="hidden" name="compare" value=' +
|
||||
$("#compare").is(":checked") +
|
||||
" />" +
|
||||
"</form>"
|
||||
);
|
||||
$("body").append(form);
|
||||
form.submit();
|
||||
}
|
||||
function excel() {
|
||||
var checkbox = document.getElementsByName("selected_files");
|
||||
var selectedFiles = [];
|
||||
for (var i = 0; i < checkbox.length; i++) {
|
||||
if (checkbox[i].checked) {
|
||||
selectedFiles.push(checkbox[i].value);
|
||||
}
|
||||
}
|
||||
if (selectedFiles.length == 0) {
|
||||
alert("Select at least one file");
|
||||
return;
|
||||
}
|
||||
var compare = $("#compare").is(":checked");
|
||||
excelFiles(selectedFiles, compare);
|
||||
}
|
||||
function setCheckBoxes(value) {
|
||||
var checkbox = document.getElementsByName("selected_files");
|
||||
for (i = 0; i < checkbox.length; i++) {
|
||||
checkbox[i].checked = value;
|
||||
}
|
||||
}
|
||||
function redirectDouble(route, parameter) {
|
||||
location.href = "/"+ route + "/" + parameter + "/" + $("#compare").is(":checked");
|
||||
}
|
||||
function redirectSimple(route) {
|
||||
location.href = "/" + route + "/" + $("#compare").is(":checked");
|
||||
}
|
@@ -0,0 +1,56 @@
|
||||
{%- macro header(title, close=False, url="") -%}
|
||||
<div class="p-4 bg-primary text-white">
|
||||
{%- if close -%}
|
||||
<button type="button" class="btn-close" aria-label="Close" onclick="location.href = '{{url}}'"></button>
|
||||
{%- endif -%}
|
||||
<h1 class="alternate-font">{{ title }}</h1>
|
||||
</div>
|
||||
{%- endmacro -%}
|
||||
{%- macro get_table_class() -%}
|
||||
table table-striped table-hover table-bordered
|
||||
{%- endmacro -%}
|
||||
{%- macro icon(icon_name) -%}
|
||||
<i class="mdi mdi-{{icon_name}}"></i>
|
||||
{%- endmacro -%}
|
||||
{%- macro get_button(text, action) -%}
|
||||
<button class="btn btn-primary btn-small" onclick="{{ action }}">{{ text|safe }}</button>
|
||||
{%- endmacro -%}
|
||||
{%- macro get_button_class() -%}
|
||||
button btn-primary btn-small
|
||||
{%- endmacro %}
|
||||
{%- macro get_button_tag(icon_name, method, visible=True, name="") -%}
|
||||
<button class="btn btn-primary btn-small" onclick="{{ method }}" {{ "" if visible else "hidden='true'" }} {{ "" if name=="" else "name='" + name +"'"}}><i class="mdi mdi-{{ icon_name }}"></i></button>
|
||||
{%- endmacro -%}
|
||||
{%- macro get_button_reset() -%}
|
||||
<button class="btn btn-primary btn-small btn-danger" onclick="setCheckBoxes(false)"><i class="mdi mdi-checkbox-multiple-blank"></i></button>
|
||||
{%- endmacro -%}
|
||||
{%- macro get_button_all() -%}
|
||||
<button class="btn btn-primary btn-small btn-success" onclick="setCheckBoxes(true)"><i class="mdi mdi-checkbox-multiple-marked"></i></button>
|
||||
{%- endmacro -%}
|
||||
{%- macro get_tag_class() -%}
|
||||
badge bg-info bg-small
|
||||
{%- endmacro -%}
|
||||
{%- macro get_container_class() -%}
|
||||
container-fluid
|
||||
{%- endmacro -%}
|
||||
{%- macro selected() -%}
|
||||
selected
|
||||
{%- endmacro -%}
|
||||
{%- macro get_level_class() -%}
|
||||
navbar
|
||||
{%- endmacro -%}
|
||||
{%- macro get_align_right() -%}
|
||||
text-end
|
||||
{%- endmacro -%}
|
||||
{%- macro get_left_position() -%}
|
||||
float-left
|
||||
{%- endmacro -%}
|
||||
{%- macro get_right_position() -%}
|
||||
float-right
|
||||
{%- endmacro -%}
|
||||
{%- macro get_row_head_class() -%}
|
||||
bg-primary text-white
|
||||
{%- endmacro -%}
|
||||
{%- macro get_align_center() -%}
|
||||
text-center
|
||||
{%- endmacro -%}
|
@@ -0,0 +1,58 @@
|
||||
{%- macro header(title, close=False, url="") -%}
|
||||
<div class="hero is-info is-bold">
|
||||
<div class="hero-body">
|
||||
{%- if close -%}
|
||||
<button class="delete is-large" onclick="location.href = '{{ url }}'"></button>
|
||||
{%- endif -%}
|
||||
<h1 class="is-size-3 alternate-font">{{ title }}</h1>
|
||||
</div>
|
||||
</div>
|
||||
{%- endmacro -%}
|
||||
{%- macro get_table_class() -%}
|
||||
table is-striped is-hoverable cell-border is-bordered
|
||||
{%- endmacro -%}
|
||||
{%- macro icon(icon_name) -%}
|
||||
<i class="mdi mdi-{{icon_name}}"></i>
|
||||
{%- endmacro -%}
|
||||
{%- macro get_button(text, action) -%}
|
||||
<button class="button is-primary is-small" onclick="{{ action }}">{{ text|safe }}</button>
|
||||
{%- endmacro -%}
|
||||
{%- macro get_button_tag(icon_name, method, visible=True, name="") -%}
|
||||
<span class="{{ "tag is-link is-normal" if visible else "" }}" type="button" onclick="{{ method }}" {{ "" if visible else "hidden='true'" }} {{ "" if name=="" else "name='" + name +"'"}}>{{icon(icon_name)}}</span>
|
||||
{%- endmacro -%}
|
||||
{%- macro get_button_reset() -%}
|
||||
<span class="tag is-link is-danger" type="button" onclick="setCheckBoxes(false)"><i class="mdi mdi-checkbox-multiple-blank"></i></span>
|
||||
{%- endmacro -%}
|
||||
{%- macro get_button_all() -%}
|
||||
<span class="tag is-link is-success" type="button" onclick="setCheckBoxes(true)"><i class="mdi mdi-checkbox-multiple-marked"></i></span>
|
||||
{%- endmacro -%}
|
||||
{%- macro get_tag_class() -%}
|
||||
tag is-info is-small
|
||||
{%- endmacro -%}
|
||||
{%- macro get_container_class() -%}
|
||||
container is-fluid
|
||||
{%- endmacro -%}
|
||||
{%- macro selected() -%}
|
||||
is-selected
|
||||
{%- endmacro -%}
|
||||
{%- macro get_level_class() -%}
|
||||
level
|
||||
{%- endmacro -%}
|
||||
{%- macro get_align_right() -%}
|
||||
has-text-right
|
||||
{%- endmacro -%}
|
||||
{%- macro get_align_center() -%}
|
||||
has-text-center
|
||||
{%- endmacro -%}
|
||||
{%- macro get_left_position() -%}
|
||||
float-left
|
||||
{%- endmacro -%}
|
||||
{%- macro get_right_position() -%}
|
||||
float-right
|
||||
{%- endmacro -%}
|
||||
{%- macro get_row_head_class() -%}
|
||||
is-selected
|
||||
{%- endmacro -%}
|
||||
{%- macro get_align_center() -%}
|
||||
has-text-center
|
||||
{%- endmacro -%}
|
27
benchmark/scripts/app/templates/partials/datasets_table.html
Normal file
27
benchmark/scripts/app/templates/partials/datasets_table.html
Normal file
@@ -0,0 +1,27 @@
|
||||
{% extends "base_" ~ framework ~ ".html" %}
|
||||
{% block content %}
|
||||
<table id="file-table" class="{{ table_class }}">
|
||||
<thead>
|
||||
<tr class="{{ head_class }}">
|
||||
<th class="{{ text_center }}">Dataset</th>
|
||||
<th class="{{ text_center }}">Samples</th>
|
||||
<th class="{{ text_center }}">Features</th>
|
||||
<th class="{{ text_center }}">Cont. Feat.</th>
|
||||
<th class="{{ text_center }}">Classes</th>
|
||||
<th class="{{ text_center }}">Balance</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
{% for dataset in datasets %}
|
||||
<tr>
|
||||
<td>{{ dataset.dataset }}</td>
|
||||
<td class="{{ text_right }}">{{ "{:,}".format(dataset.samples) }}</td>
|
||||
<td class="{{ text_right }}">{{ "{:,}".format(dataset.features) }}</td>
|
||||
<td class="{{ text_right }}">{{ dataset.cont_features }}</td>
|
||||
<td class="{{ text_right }}">{{ dataset.classes }}</td>
|
||||
<td>{{ dataset.balance }}</td>
|
||||
</tr>
|
||||
{% endfor %}
|
||||
</tbody>
|
||||
</table>
|
||||
{% endblock %}
|
14
benchmark/scripts/app/templates/partials/table_report.html
Normal file
14
benchmark/scripts/app/templates/partials/table_report.html
Normal file
@@ -0,0 +1,14 @@
|
||||
{% for item in data.results %}
|
||||
<tr>
|
||||
<td>{{ item.dataset }}</td>
|
||||
<td class="{{ right }}">{{ '{:,}'.format(item.samples) }}</td>
|
||||
<td class="{{ right }}">{{"%d" % item.features}}</td>
|
||||
<td class="{{ right }}">{{"%d" % item.classes}}</td>
|
||||
<td class="{{ right }}">{{ '{:,.2f}'.format(item.nodes|float) }}</td>
|
||||
<td class="{{ right }}">{{ '{:,.2f}'.format(item.leaves|float) }}</td>
|
||||
<td class="{{ right }}">{{ '{:,.2f}'.format(item.depth|float) }}</td>
|
||||
<td class="{{ right }}">{{"%.6f±%.4f" % (item.score, item.score_std)}} {{ item.symbol|safe }}</td>
|
||||
<td class="{{ right }}">{{"%.6f±%.4f" % (item.time, item.time_std)}}</td>
|
||||
<td class="{{ center }}">{{ item.hyperparameters }}</td>
|
||||
</tr>
|
||||
{% endfor %}
|
@@ -0,0 +1,102 @@
|
||||
<div id="app">
|
||||
<section class="section">
|
||||
<div class="container-fluid">
|
||||
<div class="p-4 bg-primary text-white">
|
||||
<button type="button"
|
||||
class="btn-close"
|
||||
aria-label="Close"
|
||||
onclick="location.href = '{{ back }}'"></button>
|
||||
<h1>{{ data.title }}</h1>
|
||||
</div>
|
||||
<div>
|
||||
<table class="table table-bordered">
|
||||
<thead>
|
||||
<tr class="bg-info text-white">
|
||||
<th class="text-center">Platform</th>
|
||||
<th class="text-center">Model</th>
|
||||
<th class="text-center">Date</th>
|
||||
<th class="text-center">Time</th>
|
||||
{% if data.duration > 7200 %}
|
||||
{% set unit = "h" %}
|
||||
{% set divider = 3600 %}
|
||||
{% else %}
|
||||
{% set unit = "min" %}
|
||||
{% set divider = 60 %}
|
||||
{% endif %}
|
||||
<th class="text-center">Duration ({{ unit }})</th>
|
||||
<th class="text-center">Stratified</th>
|
||||
<th class="text-center">Discretized</th>
|
||||
<th class="text-center"># Folds</th>
|
||||
</tr>
|
||||
<tr>
|
||||
<th class="text-center">{{ data.platform }}</th>
|
||||
<th class="text-center">{{ data.model }} {{ data.version }}</th>
|
||||
<th class="text-center">{{ data.date }}</th>
|
||||
<th class="text-center">{{ data.time }}</th>
|
||||
<th class="text-center">{{ "%.2f" % (data.duration/divider) }}</th>
|
||||
<th class="text-center">{{ data.stratified }}</th>
|
||||
<th class="text-center">{{ data.discretized }}</th>
|
||||
<th class="text-center">{{ data.folds }}</th>
|
||||
</tr>
|
||||
<tr>
|
||||
<th class="text-center bg-info text-white">Language</th>
|
||||
<th class="text-center" colspan=3>{{ data.language }} {{ data.language_version }}</th>
|
||||
<th class="text-center bg-info text-white">Seeds</th>
|
||||
<th class="text-center" colspan=6>{{ data.seeds }}</th>
|
||||
</tr>
|
||||
</thead>
|
||||
</table>
|
||||
<div>
|
||||
<button class="{{ button }}" onclick="excelFile()">
|
||||
<i class="mdi mdi-file-excel"></i> Excel
|
||||
</button>
|
||||
</div>
|
||||
<table id="report-table"
|
||||
class="table table-striped table-hover table-bordered">
|
||||
<thead>
|
||||
<tr class="bg-primary text-white">
|
||||
<th class="text-center">Dataset</th>
|
||||
<th class="text-center">Samples</th>
|
||||
<th class="text-center">Features</th>
|
||||
<th class="text-center">Classes</th>
|
||||
<th class="text-center">{{ app_config.nodes }}</th>
|
||||
<th class="text-center">{{ app_config.leaves }}</th>
|
||||
<th class="text-center">{{ app_config.depth }}</th>
|
||||
<th class="text-center">{{ data.score_name|capitalize }}</th>
|
||||
<th class="text-center">Time</th>
|
||||
<th class="text-center">hyperparameters</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
{% include "partials/table_report.html" %}
|
||||
</tbody>
|
||||
</table>
|
||||
{% if summary|length > 0 %}
|
||||
<div class="col-4 col-lg-4">
|
||||
<table class="table table-bordered">
|
||||
<thead>
|
||||
<tr>
|
||||
<th class="text-center bg-primary text-white">Symbol</th>
|
||||
<th class="text-center bg-primary text-white">Meaning</th>
|
||||
<th class="text-center bg-primary text-white">Count</th>
|
||||
</tr>
|
||||
</thead>
|
||||
{% include "partials/table_summary.html" %}
|
||||
</table>
|
||||
</div>
|
||||
{% endif %}
|
||||
<button type="button"
|
||||
class="btn-close"
|
||||
aria-label="Close"
|
||||
onclick="location.href = '{{ back }}'"></button>
|
||||
<h7>
|
||||
<b>
|
||||
Total score: {{ "%.6f" % (data.results | sum(attribute="score") ) }}
|
||||
</b>
|
||||
</h7>
|
||||
<h7>
|
||||
Number of files: {{ data.results | length }}
|
||||
</h7>
|
||||
</div>
|
||||
</section>
|
||||
</div>
|
100  benchmark/scripts/app/templates/partials/table_report_bulma.html  Normal file
@@ -0,0 +1,100 @@
|
||||
<div id="app">
|
||||
<header>
|
||||
<div class="container is-fluid">
|
||||
<div class="hero is-info is-bold">
|
||||
<div class="hero-body">
|
||||
<button class="delete is-large" onclick="location.href = '{{ back }}'"></button>
|
||||
<h1 class="is-size-3">{{ data.title }}</h1>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</header>
|
||||
<section class="section">
|
||||
<div class="container is-fluid">
|
||||
<div>
|
||||
<table class="table is-fullwidth is-striped is-bordered">
|
||||
<thead>
|
||||
<tr class="is-selected">
|
||||
<th class="has-text-centered">Platform</th>
|
||||
<th class="has-text-centered">Model</th>
|
||||
<th class="has-text-centered">Date</th>
|
||||
<th class="has-text-centered">Time</th>
|
||||
{% if data.duration > 7200 %}
|
||||
{% set unit = "h" %}
|
||||
{% set divider = 3600 %}
|
||||
{% else %}
|
||||
{% set unit = "min" %}
|
||||
{% set divider = 60 %}
|
||||
{% endif %}
|
||||
<th class="has-text-centered">Duration ({{ unit }})</th>
|
||||
<th class="has-text-centered">Stratified</th>
|
||||
<th class="has-text-centered">Discretized</th>
|
||||
<th class="has-text-centered"># Folds</th>
|
||||
</tr>
|
||||
<tr>
|
||||
<th class="has-text-centered">{{ data.platform }}</th>
|
||||
<th class="has-text-centered">{{ data.model }} {{ data.version }}</th>
|
||||
<th class="has-text-centered">{{ data.date }}</th>
|
||||
<th class="has-text-centered">{{ data.time }}</th>
|
||||
<th class="has-text-centered">{{ "%.2f" % (data.duration/divider) }}</th>
|
||||
<th class="has-text-centered">{{ data.stratified }}</th>
|
||||
<th class="has-text-centered">{{ data.discretized }}</th>
|
||||
<th class="has-text-centered">{{ data.folds }}</th>
|
||||
</tr>
|
||||
<tr>
|
||||
<th class="has-text-center is-selected">Language</th>
|
||||
<th class="has-text-centered" colspan=3>{{ data.language }} {{ data.language_version }}</th>
|
||||
<th class="has-text-centered is-selected">Seeds</th>
|
||||
<th class="has-text-centered" colspan=6>{{ data.seeds }}</th>
|
||||
</tr>
|
||||
</thead>
|
||||
</table>
|
||||
<div>
|
||||
<button class="{{ button }}" onclick="excelFile()">
|
||||
<i class="mdi mdi-file-excel"></i> Excel
|
||||
</button>
|
||||
</div>
|
||||
<table id="report-table"
|
||||
class="table is-fullwidth is-striped is-hoverable is-bordered">
|
||||
<thead>
|
||||
<tr class="is-selected">
|
||||
<th class="has-text-centered">Dataset</th>
|
||||
<th class="has-text-centered">Samples</th>
|
||||
<th class="has-text-centered">Features</th>
|
||||
<th class="has-text-centered">Classes</th>
|
||||
<th class="has-text-centered">{{ app_config.nodes }}</th>
|
||||
<th class="has-text-centered">{{ app_config.leaves }}</th>
|
||||
<th class="has-text-centered">{{ app_config.depth }}</th>
|
||||
<th class="has-text-centered">{{ data.score_name|capitalize }}</th>
|
||||
<th class="has-text-centered">Time</th>
|
||||
<th class="has-text-centered">hyperparameters</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
{% include "partials/table_report.html" %}
|
||||
</tbody>
|
||||
</table>
|
||||
{% if summary|length > 0 %}
|
||||
<div class="col-2 col-lg-2">
|
||||
<table class="table is-bordered">
|
||||
<thead>
|
||||
<tr class="is-selected">
|
||||
<th class="has-text-centered">Symbol</th>
|
||||
<th class="has-text-centered">Meaning</th>
|
||||
<th class="has-text-centered">Count</th>
|
||||
</tr>
|
||||
</thead>
|
||||
{% include "partials/table_summary.html" %}
|
||||
</table>
|
||||
</div>
|
||||
{% endif %}
|
||||
<h2 class="has-text-white has-background-primary">
|
||||
<b>
|
||||
<button class="delete" onclick="location.href = '{{ back }}'"></button>
|
||||
Total score: {{ "%.6f" % (data.results | sum(attribute="score") ) }}
|
||||
</b>
|
||||
</h2>
|
||||
<h2>Number of files: {{ data.results | length }}</h2>
|
||||
</div>
|
||||
</section>
|
||||
</div>
|
41  benchmark/scripts/app/templates/partials/table_select.html  Normal file
@@ -0,0 +1,41 @@
<table id="file-table" class="{{ select.get_table_class() }}">
  <thead>
    <tr>
      <th>Model</th>
      <th>Metric</th>
      <th>Platform</th>
      <th>Date</th>
      <th>Time</th>
      <th>Stratified</th>
      <th>Title</th>
      <th>Score</th>
      <th>{{ select.get_button_reset()|safe }} {{ select.get_button_all()|safe }}</th>
    </tr>
  </thead>
  <tbody>
    {% for file, data in files.items() %}
      {% set parts = file.split('_') %}
      {% set stratified = parts[6].split('.')[0] %}
      <tr id="{{ file }}">
        <td>{{ parts[2] }}</td>
        <td>{{ parts[1] }}</td>
        <td>{{ parts[3] }}</td>
        <td>{{ parts[4] }}</td>
        <td>{{ parts[5] }}</td>
        <td>{{ 'True' if stratified =='1' else 'False' }}</td>
        <td>{{ "%s" % data["title"] }}</td>
        <td class="{{ select.get_align_right() }}">{{ "%.6f" % data["score"] }}</td>
        <td>
          {{ select.get_button_tag("table-eye", "showFile('" ~ file ~ "')") | safe }}
          {% set file_best = "best_results_" ~ parts[1] ~ "_" ~ parts[2] ~ ".json" %}
          {{ select.get_button_tag("star-circle-outline", "redirectDouble('best_results', '" ~ file_best ~ "')", visible=False, name="best_buttons") | safe }}
          <input
            type="checkbox"
            name="selected_files"
            value="{{ file }}"
          />
        </td>
      </tr>
    {% endfor %}
  </tbody>
</table>
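The table above derives its columns by splitting each result file name on "_": parts[1] is the metric, parts[2] the model, parts[3] the platform, parts[4] and parts[5] the date and time, and parts[6] carries the stratified flag plus the ".json" extension. A minimal sketch of that naming convention follows; the helper name is ours (not part of the repository) and the sample file name is taken from the test fixtures.

# Hypothetical helper illustrating the result-file naming convention used by the template.
def parse_result_name(file_name: str) -> dict:
    parts = file_name.split("_")
    return {
        "metric": parts[1],
        "model": parts[2],
        "platform": parts[3],
        "date": parts[4],
        "time": parts[5],
        "stratified": parts[6].split(".")[0] == "1",
    }

# parse_result_name("results_accuracy_STree_iMac27_2021-10-27_09:40:40_0.json")
# -> {"metric": "accuracy", "model": "STree", "platform": "iMac27",
#     "date": "2021-10-27", "time": "09:40:40", "stratified": False}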
@@ -0,0 +1,15 @@
<div class="{{ select.get_container_class() }}">
  {{ select.header("Benchmark Results") }}
  <div class="{{ select.get_level_class() }}">
    <div class="{{ select.get_left_position() }}">
      {{ select.get_button("Use " ~ candidate, "redirectDouble('config', '" ~ candidate ~ "')")|safe }}
      {{ select.get_button(select.icon("excel") ~ " Excel", "excel()")|safe }}
      {{ select.get_button(select.icon("database-eye") ~ " Datasets", "redirectSimple('datasets')")|safe }}
    </div>
    <div class="{{ select.get_right_position() }}">
      <input type="checkbox" id="compare" name="compare" {% if compare %} {{ "checked" }} {% endif %}>
      <span class="{{ select.get_tag_class() }}">Comparing with best results</span>
    </div>
  </div>
  {% include "partials/table_select.html" %}
</div>
13  benchmark/scripts/app/templates/partials/table_summary.html  Normal file
@@ -0,0 +1,13 @@
{% for key, value in summary.items() %}
  <tr>
    <td class="{{ center }}">
      {{ key }}
    </td>
    <td>
      {{ value[0] }}
    </td>
    <td class="{{ right }}">
      {{ '{:,}'.format(value[1]) }}
    </td>
  </tr>
{% endfor %}
29  benchmark/scripts/app/templates/report.html  Normal file
@@ -0,0 +1,29 @@
{% macro javascript(file) %}
  <script src="{{ url_for('static', filename=file) }}"></script>
{% endmacro %}
{% set title = 'Report Viewer' %}
{% extends 'base_' ~ framework ~ '.html' %}
{% block content %}
  {% if framework == 'bootstrap' %}
    {% set center = 'text-center' %}
    {% set right = 'text-end' %}
    {% set button = 'btn btn-primary' %}
    {% include 'partials/table_report_bootstrap.html' %}
  {% else %}
    {% set center = 'has-text-centered' %}
    {% set right = 'has-text-right' %}
    {% set button = 'button is-primary' %}
    {% include 'partials/table_report_bulma.html' %}
  {% endif %}
{% endblock %}
{% block jscript %}
  {% if framework == 'bootstrap' %}
    {% set selected = 'selected' %}
  {% else %}
    {% set selected = 'is-selected' %}
  {% endif %}
  <script>
    {% include "js/report.js" %}
  </script>
  {{ javascript("js/excelFiles.js") }}
{% endblock %}
47  benchmark/scripts/app/templates/report_best.html  Normal file
@@ -0,0 +1,47 @@
|
||||
{% set title = "Best Results" %}
|
||||
{% extends "base_" ~ framework ~ ".html" %}
|
||||
{% import "partials/cfg_select_" ~ framework ~ ".jinja" as select %}
|
||||
{% block content %}
|
||||
<div class="container">
|
||||
{{ select.header(title, True, url_for("main.index", compare=compare)) }}
|
||||
<table id="file-table" class="{{ select.get_table_class() }}">
|
||||
<thead>
|
||||
<tr class="{{ select.get_row_head_class() }}">
|
||||
<th class="{{ select.get_align_center() }}">Dataset</th>
|
||||
<th class="{{ select.get_align_center() }}">Score</th>
|
||||
<th class="{{ select.get_align_center() }}">Hyperparameters</th>
|
||||
<th class="{{ select.get_align_center() }}">File</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
{% for dataset, info in data.items() %}
|
||||
<tr>
|
||||
<td>{{ dataset }}</td>
|
||||
<td class="{{ select.get_align_right() }}">{{ '%9.7f' % info[0] }}</td>
|
||||
<td class="{{ select.get_align_center() }}">{{ info[1] }}</td>
|
||||
<td>
|
||||
{% set url = url_for(request.endpoint, **request.view_args)|urlencode %}
|
||||
<a href="{{ url_for('main.showfile', file_name = info[2], compare = compare) }}?url={{ url }}">{{ info[2] }}</a>
|
||||
</td>
|
||||
</tr>
|
||||
{% endfor %}
|
||||
</tbody>
|
||||
</table>
|
||||
</div>
|
||||
{% endblock %}
|
||||
{% block jscript %}
|
||||
<script>
|
||||
$(document).ready(function () {
|
||||
// Check if row is selected
|
||||
$('#file-table tbody').on('click', 'tr', function () {
|
||||
if ($(this).hasClass('{{ select.selected() }}')) {
|
||||
$(this).removeClass('{{ select.selected() }}');
|
||||
} else {
|
||||
$('#file-table tbody tr.{{ select.selected() }}').removeClass("{{ select.selected() }}")
|
||||
$(this).addClass('{{ select.selected() }}');
|
||||
}
|
||||
});
|
||||
});
|
||||
</script>
|
||||
{% endblock %}
|
||||
|
20  benchmark/scripts/app/templates/select.html  Normal file
@@ -0,0 +1,20 @@
{% macro javascript(file) %}
  <script src="{{ url_for('static', filename=file) }}"></script>
{% endmacro %}
{% set title = 'Benchmark Results' %}
{% extends 'base_' ~ framework ~ '.html' %}
{% import 'partials/cfg_select_' ~ framework ~ '.jinja' as select %}
{% block content %}
  {% include 'partials/table_select_design.html' %}
{% endblock %}

{% block jscript %}
  <script src="https://cdn.datatables.net/1.10.25/js/jquery.dataTables.min.js"></script>
  {% if framework == 'bootstrap' %}
    <script src="https://cdn.jsdelivr.net/npm/bootstrap@5.2.3/dist/js/bootstrap.min.js" integrity="sha384-cuYeSxntonz0PPNlHhBs68uyIAVpIIOZZ5JqeqvYYIcEL727kskC66kF92t6Xl2V" crossorigin="anonymous"></script>
  {% endif %}
  <script>
    {% include '/js/select.js' %}
  </script>
  {{ javascript('js/excelFiles.js') }}
{% endblock %}
@@ -1,14 +1,18 @@
#!/usr/bin/env python
import webbrowser
from benchmark.scripts.flask_app.app import create_app

from benchmark.Arguments import Arguments
from benchmark.scripts.app.app import create_app, TEST, OUTPUT

# Launch a flask server to serve the results

def main(args_test=None):
    arguments = Arguments(prog="be_flask")
    arguments.xset("output")
    args = arguments.parse(args_test)
    app = create_app()
    app.config["TEST"] = args_test is not None
    output = app.config["OUTPUT"]
    print("Output is ", output)
    if output == "local":
    app.config[TEST] = args_test is not None
    app.config[OUTPUT] = args.output
    print("Output is ", args.output)
    if args.output == "local":
        webbrowser.open_new("http://127.0.0.1:1234/")
    app.run(port=1234, host="0.0.0.0")
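In short, the new "-o/--output" option decides whether be_flask opens a browser window before serving: "local" opens http://127.0.0.1:1234/ on the host, while "docker" only binds the app to all interfaces. A condensed sketch of that behaviour follows; the launch helper is illustrative only and not code from the repository.

# Illustrative only: condensed form of the launch logic shown in the diff above.
import webbrowser

def launch(app, output: str, port: int = 1234) -> None:
    if output == "local":
        # Running on the host: open the report viewer in the default browser.
        webbrowser.open_new(f"http://127.0.0.1:{port}/")
    # In docker (or local) the server binds to every interface.
    app.run(port=port, host="0.0.0.0")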
@@ -14,7 +14,7 @@ def main(args_test=None):
    arguments.xset("stratified").xset("score").xset("model", mandatory=True)
    arguments.xset("n_folds").xset("platform").xset("quiet").xset("title")
    arguments.xset("report").xset("ignore_nan").xset("discretize")
    arguments.xset("fit_features")
    arguments.xset("fit_features").xset("iwss")
    arguments.add_exclusive(
        ["grid_paramfile", "best_paramfile", "hyperparameters"]
    )
@@ -43,6 +43,7 @@ def main(args_test=None):
            folds=args.n_folds,
            fit_features=args.fit_features,
            discretize=args.discretize,
            iwss=args.iwss,
        )
        job.do_experiment()
    except ValueError as e:
@@ -1,2 +0,0 @@
OUTPUT="local"
FRAMEWORK="bulma"
Binary file not shown.
@@ -1,39 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
from flask import Flask
|
||||
from flask_bootstrap import Bootstrap5
|
||||
from flask_login import LoginManager
|
||||
from .config import Config
|
||||
from .models import User, db
|
||||
|
||||
from .results.main import results
|
||||
from .main import main
|
||||
|
||||
bootstrap = Bootstrap5()
|
||||
|
||||
login_manager = LoginManager()
|
||||
|
||||
|
||||
@login_manager.user_loader
|
||||
def load_user(user_id):
|
||||
return User.query.get(int(user_id))
|
||||
|
||||
|
||||
def make_shell_context():
|
||||
return {"db": db, "User": User}
|
||||
|
||||
|
||||
def create_app():
|
||||
app = Flask(__name__)
|
||||
bootstrap.init_app(app)
|
||||
# app.register_blueprint(results)
|
||||
app.config.from_object(Config)
|
||||
db.init_app(app)
|
||||
login_manager.init_app(app)
|
||||
login_manager.login_view = "main.login"
|
||||
app.jinja_env.auto_reload = True
|
||||
app.register_blueprint(results, url_prefix="/results")
|
||||
app.register_blueprint(main)
|
||||
app.shell_context_processor(make_shell_context)
|
||||
with app.app_context():
|
||||
db.create_all()
|
||||
return app
|
@@ -1,17 +0,0 @@
|
||||
import os
|
||||
from dotenv import load_dotenv
|
||||
|
||||
basedir = os.path.abspath(os.path.dirname(__file__))
|
||||
load_dotenv(os.path.join(basedir, ".env"))
|
||||
|
||||
|
||||
class Config(object):
|
||||
FRAMEWORKS = ["bootstrap", "bulma"]
|
||||
FRAMEWORK = os.environ.get("FRAMEWORK") or FRAMEWORKS[0]
|
||||
OUTPUT = os.environ.get("OUTPUT") or "local" # local or docker
|
||||
TEMPLATES_AUTO_RELOAD = True
|
||||
SECRET_KEY = os.environ.get("SECRET_KEY") or "really-hard-to-guess-key"
|
||||
SQLALCHEMY_DATABASE_URI = os.environ.get(
|
||||
"DATABASE_URL"
|
||||
) or "sqlite:///" + os.path.join(basedir, "app.db")
|
||||
SQLALCHEMY_TRACK_MODIFICATIONS = False
|
@@ -1,22 +0,0 @@
|
||||
from flask_wtf import FlaskForm
|
||||
from wtforms import (
|
||||
StringField,
|
||||
PasswordField,
|
||||
BooleanField,
|
||||
SubmitField,
|
||||
)
|
||||
from wtforms.validators import (
|
||||
DataRequired,
|
||||
Length,
|
||||
)
|
||||
|
||||
|
||||
class LoginForm(FlaskForm):
|
||||
username = StringField(
|
||||
"Username", validators=[DataRequired(), Length(1, 20)]
|
||||
)
|
||||
password = PasswordField(
|
||||
"Password", validators=[DataRequired(), Length(4, 150)]
|
||||
)
|
||||
remember_me = BooleanField("Remember me")
|
||||
submit = SubmitField()
|
@@ -1,51 +0,0 @@
|
||||
from flask import (
|
||||
Blueprint,
|
||||
render_template,
|
||||
url_for,
|
||||
flash,
|
||||
redirect,
|
||||
request,
|
||||
)
|
||||
from flask_login import login_user, current_user, logout_user, login_required
|
||||
from werkzeug.urls import url_parse
|
||||
from .forms import LoginForm
|
||||
from .models import User
|
||||
|
||||
main = Blueprint("main", __name__)
|
||||
|
||||
|
||||
@main.route("/")
|
||||
@main.route("/index")
|
||||
def index():
|
||||
return render_template("index.html")
|
||||
|
||||
|
||||
@main.route("/config")
|
||||
@login_required
|
||||
def config():
|
||||
return render_template("config.html")
|
||||
|
||||
|
||||
@main.route("/login", methods=["GET", "POST"])
|
||||
def login():
|
||||
if current_user.is_authenticated:
|
||||
return redirect(url_for("main.index"))
|
||||
form = LoginForm()
|
||||
if form.validate_on_submit():
|
||||
user = User.query.filter_by(username=form.username.data).first()
|
||||
if user is None or not user.check_password(form.password.data):
|
||||
flash("Invalid username or password")
|
||||
return redirect(url_for("main.login"))
|
||||
login_user(user, remember=form.remember_me.data)
|
||||
flash("Logged in successfully.")
|
||||
next_page = request.args.get("next")
|
||||
if not next_page or url_parse(next_page).netloc != "":
|
||||
next_page = url_for("main.index")
|
||||
return redirect(next_page)
|
||||
return render_template("login.html", title="Sign In", form=form)
|
||||
|
||||
|
||||
@main.route("/logout")
|
||||
def logout():
|
||||
logout_user()
|
||||
return redirect(url_for("main.index"))
|
@@ -1,29 +0,0 @@
|
||||
from hashlib import md5
|
||||
from flask_sqlalchemy import SQLAlchemy
|
||||
from sqlalchemy import Column, Integer, String
|
||||
from flask_login import UserMixin
|
||||
from werkzeug.security import generate_password_hash, check_password_hash
|
||||
|
||||
db = SQLAlchemy()
|
||||
|
||||
|
||||
class User(UserMixin, db.Model):
|
||||
id = Column(Integer, primary_key=True)
|
||||
username = Column(String(64), index=True, unique=True)
|
||||
email = Column(String(120), index=True, unique=True)
|
||||
password_hash = Column(String(128))
|
||||
|
||||
def __repr__(self):
|
||||
return "<User {} {}>".format(self.username, self.email)
|
||||
|
||||
def set_password(self, password):
|
||||
self.password_hash = generate_password_hash(password)
|
||||
|
||||
def check_password(self, password):
|
||||
return check_password_hash(self.password_hash, password)
|
||||
|
||||
def avatar(self, size):
|
||||
digest = md5(self.email.lower().encode("utf-8")).hexdigest()
|
||||
return "https://www.gravatar.com/avatar/{}?d=identicon&s={}".format(
|
||||
digest, size
|
||||
)
|
@@ -1,46 +0,0 @@
|
||||
import os
|
||||
import json
|
||||
import shutil
|
||||
import xlsxwriter
|
||||
from benchmark.Utils import Files, Folders
|
||||
from benchmark.Arguments import EnvData
|
||||
from benchmark.ResultsBase import StubReport
|
||||
from benchmark.ResultsFiles import Excel, ReportDatasets
|
||||
from benchmark.Datasets import Datasets
|
||||
from flask import Blueprint, current_app, send_file
|
||||
from flask import render_template, current_app, request, redirect, url_for
|
||||
from flask_login import login_required
|
||||
|
||||
results = Blueprint("results", __name__, template_folder="templates")
|
||||
|
||||
|
||||
@results.route("/select")
|
||||
@login_required
|
||||
def select(compare="False"):
|
||||
# Get a list of files in a directory
|
||||
files = {}
|
||||
names = Files.get_all_results(hidden=False)
|
||||
for name in names:
|
||||
report = StubReport(os.path.join(Folders.results, name))
|
||||
report.report()
|
||||
files[name] = {
|
||||
"duration": report.duration,
|
||||
"score": report.score,
|
||||
"title": report.title,
|
||||
}
|
||||
candidate = current_app.config["FRAMEWORKS"].copy()
|
||||
candidate.remove(current_app.config["FRAMEWORK"])
|
||||
return render_template(
|
||||
"select.html",
|
||||
files=files,
|
||||
candidate=candidate[0],
|
||||
framework=current_app.config["FRAMEWORK"],
|
||||
compare=compare.capitalize() == "True",
|
||||
)
|
||||
return render_template("test.html")
|
||||
|
||||
|
||||
@results.route("/datasets")
|
||||
@login_required
|
||||
def datasets(compare="False"):
|
||||
return render_template("test.html")
|
@@ -1,50 +0,0 @@
|
||||
{%- macro get_button_tag(icon_name, method, visible=True, name="") -%}
|
||||
<button class="btn btn-primary btn-small" onclick="{{ method }}" {{ "" if visible else "hidden='true'" }} {{ "" if name=="" else "name='" + name +"'"}}><i class="mdi mdi-{{ icon_name }}"></i>
|
||||
</button>
|
||||
{%- endmacro -%}
|
||||
<table id="file-table"
|
||||
class="table table-striped table-hover table-bordered">
|
||||
<thead>
|
||||
<tr>
|
||||
<th>Model</th>
|
||||
<th>Metric</th>
|
||||
<th>Platform</th>
|
||||
<th>Date</th>
|
||||
<th>Time</th>
|
||||
<th>Stratified</th>
|
||||
<th>Title</th>
|
||||
<th>Score</th>
|
||||
<th>
|
||||
<button class="btn btn-primary btn-small btn-danger"
|
||||
onclick="setCheckBoxes(false)">
|
||||
<i class="mdi mdi-checkbox-multiple-blank"></i>
|
||||
</button>
|
||||
<button class="btn btn-primary btn-small btn-success"
|
||||
onclick="setCheckBoxes(true)">
|
||||
<i class="mdi mdi-checkbox-multiple-marked"></i>
|
||||
</button>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
{% for file, data in files.items() %}
|
||||
{% set parts = file.split('_') %}
|
||||
{% set stratified = parts[6].split('.')[0] %}
|
||||
<tr id="{{ file }}">
|
||||
<td>{{ parts[2] }}</td>
|
||||
<td>{{ parts[1] }}</td>
|
||||
<td>{{ parts[3] }}</td>
|
||||
<td>{{ parts[4] }}</td>
|
||||
<td>{{ parts[5] }}</td>
|
||||
<td>{{ 'True' if stratified =='1' else 'False' }}</td>
|
||||
<td>{{ "%s" % data["title"] }}</td>
|
||||
<td class="text-end">{{ "%.6f" % data["score"] }}</td>
|
||||
<td>
|
||||
{{ get_button_tag("table-eye", "showFile('" ~ file ~ "') ") | safe }}
|
||||
{% set file_best = "best_results_" ~ parts[1] ~ "_" ~ parts[2] ~ ".json" %}
|
||||
{{ get_button_tag("star-circle-outline", "redirectDouble('best_results', '" ~ file_best ~ "') ", visible=False, name="best_buttons") | safe }}
|
||||
<input type="checkbox" name="selected_files" value="{{ file }}" />
|
||||
</td>
|
||||
</tr>
|
||||
{% endfor %}
|
||||
</tbody>
|
||||
</table>
|
@@ -1,9 +0,0 @@
|
||||
{% extends "base.html" %}
|
||||
{% block content %}
|
||||
{% include "_table_select.html" %}
|
||||
{% endblock %}
|
||||
{% block jscript %}
|
||||
{{ super() }}
|
||||
<script src="https://cdn.datatables.net/1.10.25/js/jquery.dataTables.min.js"></script>
|
||||
<script src="{{ url_for('static', filename="js/select.js") }}"></script>
|
||||
{% endblock %}
|
@@ -1,97 +0,0 @@
|
||||
$(document).ready(function () {
|
||||
var table = $("#file-table").DataTable({
|
||||
paging: true,
|
||||
searching: true,
|
||||
ordering: true,
|
||||
info: true,
|
||||
"select.items": "row",
|
||||
pageLength: 25,
|
||||
columnDefs: [
|
||||
{
|
||||
targets: 8,
|
||||
orderable: false,
|
||||
},
|
||||
],
|
||||
//"language": {
|
||||
// "lengthMenu": "_MENU_"
|
||||
//}
|
||||
});
|
||||
$('#file-table').on( 'draw.dt', function () {
|
||||
enable_disable_best_buttons();
|
||||
} );
|
||||
// Check if row is selected
|
||||
$("#file-table tbody").on("click", "tr", function () {
|
||||
if ($(this).hasClass("selected")) {
|
||||
$(this).removeClass("selected");
|
||||
} else {
|
||||
table
|
||||
.$("tr.selected")
|
||||
.removeClass("selected");
|
||||
$(this).addClass("selected");
|
||||
}
|
||||
});
|
||||
// Show file with doubleclick
|
||||
$("#file-table tbody").on("dblclick", "tr", function () {
|
||||
showFile($(this).attr("id"));
|
||||
});
|
||||
$(document).ajaxStart(function () {
|
||||
$("body").addClass("ajaxLoading");
|
||||
});
|
||||
$(document).ajaxStop(function () {
|
||||
$("body").removeClass("ajaxLoading");
|
||||
});
|
||||
$('#compare').change(function() {
|
||||
enable_disable_best_buttons();
|
||||
});
|
||||
enable_disable_best_buttons();
|
||||
});
|
||||
function enable_disable_best_buttons(){
|
||||
if ($('#compare').is(':checked')) {
|
||||
$("[name='best_buttons']").addClass("tag is-link is-normal");
|
||||
$("[name='best_buttons']").removeAttr("hidden");
|
||||
} else {
|
||||
$("[name='best_buttons']").removeClass("tag is-link is-normal");
|
||||
$("[name='best_buttons']").attr("hidden", true);
|
||||
}
|
||||
}
|
||||
function showFile(selectedFile) {
|
||||
var form = $(
|
||||
'<form action="/show" method="post">' +
|
||||
'<input type="hidden" name="selected-file" value="' +
|
||||
selectedFile +
|
||||
'" />' +
|
||||
'<input type="hidden" name="compare" value=' +
|
||||
$("#compare").is(":checked") +
|
||||
" />" +
|
||||
"</form>"
|
||||
);
|
||||
$("body").append(form);
|
||||
form.submit();
|
||||
}
|
||||
function excel() {
|
||||
var checkbox = document.getElementsByName("selected_files");
|
||||
var selectedFiles = [];
|
||||
for (var i = 0; i < checkbox.length; i++) {
|
||||
if (checkbox[i].checked) {
|
||||
selectedFiles.push(checkbox[i].value);
|
||||
}
|
||||
}
|
||||
if (selectedFiles.length == 0) {
|
||||
alert("Select at least one file");
|
||||
return;
|
||||
}
|
||||
var compare = $("#compare").is(":checked");
|
||||
excelFiles(selectedFiles, compare);
|
||||
}
|
||||
function setCheckBoxes(value) {
|
||||
var checkbox = document.getElementsByName("selected_files");
|
||||
for (i = 0; i < checkbox.length; i++) {
|
||||
checkbox[i].checked = value;
|
||||
}
|
||||
}
|
||||
function redirectDouble(route, parameter) {
|
||||
location.href = "/"+ route + "/" + parameter + "/" + $("#compare").is(":checked");
|
||||
}
|
||||
function redirectSimple(route) {
|
||||
location.href = "/" + route + "/" + $("#compare").is(":checked");
|
||||
}
|
@@ -1,30 +0,0 @@
|
||||
{% from 'bootstrap5/nav.html' import render_nav_item %}
|
||||
<nav class="navbar navbar-expand-sm navbar-light bg-light mb-4 justify-content-end">
|
||||
<div class="container">
|
||||
<button class="navbar-toggler"
|
||||
type="button"
|
||||
data-bs-toggle="collapse"
|
||||
data-bs-target="#navbarSupportedContent"
|
||||
aria-controls="navbarSupportedContent"
|
||||
aria-expanded="false"
|
||||
aria-label="Toggle navigation">
|
||||
<span class="navbar-toggler-icon"></span>
|
||||
</button>
|
||||
<div class="collapse navbar-collapse" id="navbarSupportedContent">
|
||||
<!-- Left side of navbar -->
|
||||
<ul class="navbar-nav me-auto">
|
||||
{{ render_nav_item('main.index', 'Home') }}
|
||||
</ul>
|
||||
<ul class="navbar-nav justify-content-end">
|
||||
{{ render_nav_item('results.select', 'Results') }}
|
||||
{{ render_nav_item('results.datasets', 'Datasets') }}
|
||||
{{ render_nav_item('main.config', 'Config') }}
|
||||
{% if current_user.is_authenticated %}
|
||||
{{ render_nav_item('main.logout', 'Logout') }}
|
||||
{% else %}
|
||||
{{ render_nav_item('main.login', 'Login') }}
|
||||
{% endif %}
|
||||
</ul>
|
||||
</div>
|
||||
</div>
|
||||
</nav>
|
@@ -1,27 +0,0 @@
|
||||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
{% block head %}
|
||||
<meta charset="utf-8">
|
||||
<meta name="viewport"
|
||||
content="width=device-width, initial-scale=1, shrink-to-fit=no">
|
||||
{% block styles %}{{ bootstrap.load_css() }}{% endblock %}
|
||||
<title>Benchmark</title>
|
||||
{% endblock %}
|
||||
</head>
|
||||
<body>
|
||||
{% include "_nav.html" %}
|
||||
{% with messages = get_flashed_messages() %}
|
||||
{% if messages %}
|
||||
{% for message in messages %}<div class="alert alert-info" role="alert">{{ message }}</div>{% endfor %}
|
||||
{% endif %}
|
||||
{% endwith %}
|
||||
<div class="container">
|
||||
{% block content %}{% endblock %}
|
||||
</div>
|
||||
</body>
|
||||
{% block jscript %}
|
||||
<script src="https://code.jquery.com/jquery-3.6.0.min.js"></script>
|
||||
{{ bootstrap.load_js() }}
|
||||
{% endblock %}
|
||||
</html>
|
@@ -1,5 +0,0 @@
|
||||
{% extends "base.html" %}
|
||||
{% block content %}
|
||||
<h1>Home</h1>
|
||||
<p>Welcome to the home page!</p>
|
||||
{% endblock content %}
|
@@ -1,5 +0,0 @@
|
||||
{% extends "base.html" %}
|
||||
{% block content %}
|
||||
<h1>My First Heading</h1>
|
||||
<p>My first paragraph.</p>
|
||||
{% endblock %}
|
@@ -1,6 +0,0 @@
|
||||
{% extends 'base.html' %}
|
||||
{% from 'bootstrap5/form.html' import render_form %}
|
||||
{% block content %}
|
||||
<h2>Login</h2>
|
||||
{{ render_form(form) }}
|
||||
{% endblock content %}
|
@@ -68,7 +68,7 @@ class ArgumentsTest(TestBase):
        test_args = ["-n", "3", "-k", "date"]
        with self.assertRaises(SystemExit):
            arguments.parse(test_args)
        self.assertRegexpMatches(
        self.assertRegex(
            stderr.getvalue(),
            r"error: the following arguments are required: -m/--model",
        )
@@ -79,7 +79,7 @@ class ArgumentsTest(TestBase):
        test_args = ["-n", "3", "-m", "SVC"]
        with self.assertRaises(SystemExit):
            arguments.parse(test_args)
        self.assertRegexpMatches(
        self.assertRegex(
            stderr.getvalue(),
            r"error: the following arguments are required: -k/--key",
        )
@@ -114,7 +114,7 @@ class ArgumentsTest(TestBase):
        test_args = None
        with self.assertRaises(SystemExit):
            arguments.parse(test_args)
        self.assertRegexpMatches(
        self.assertRegex(
            stderr.getvalue(),
            r"error: the following arguments are required: -m/--model, "
            "-k/--key, --title",
@@ -102,7 +102,7 @@ class ModelTest(TestBase):
        test = {
            "STree": ((11, 6, 4), 1.0),
            "Wodt": ((303, 152, 50), 0.9382022471910112),
            "ODTE": ((7.86, 4.43, 3.37), 1.0),
            "ODTE": ((786, 443, 337), 1.0),
            "Cart": ((23, 12, 5), 1.0),
            "SVC": ((0, 0, 0), 0.7078651685393258),
            "RandomForest": ((21.3, 11, 5.26), 1.0),
@@ -118,7 +118,7 @@ class UtilTest(TestBase):
    def test_Files_get_results(self):
        os.chdir(os.path.dirname(os.path.abspath(__file__)))
        self.assertCountEqual(
            Files().get_all_results(hidden=False),
            Files.get_all_results(hidden=False),
            [
                "results_accuracy_STree_iMac27_2021-10-27_09:40:40_0.json",
                "results_accuracy_STree_iMac27_2021-09-30_11:42:07_0.json",
@@ -130,7 +130,7 @@ class UtilTest(TestBase):
            ],
        )
        self.assertCountEqual(
            Files().get_all_results(hidden=True),
            Files.get_all_results(hidden=True),
            [
                "results_accuracy_STree_iMac27_2021-11-01_23:55:16_0.json",
                "results_accuracy_XGBoost_MacBookpro16_2022-05-04_11:00:35_"
@@ -143,7 +143,7 @@ class UtilTest(TestBase):
        # check with results
        os.rename(Folders.results, f"{Folders.results}.test")
        try:
            Files().get_all_results(hidden=False)
            Files.get_all_results(hidden=False)
        except ValueError:
            pass
        else:
@@ -153,7 +153,7 @@ class UtilTest(TestBase):
        # check with hidden_results
        os.rename(Folders.hidden_results, f"{Folders.hidden_results}.test")
        try:
            Files().get_all_results(hidden=True)
            Files.get_all_results(hidden=True)
        except ValueError:
            pass
        else:
@@ -1,2 +1,2 @@
iris,class,all
wine,class,[0, 1]
iris;class;all
wine;class;[0, 1]
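The grid input fixture above switches from comma- to semicolon-separated fields (dataset name, class attribute, continuous-feature spec), which keeps a list such as "[0, 1]" inside a single field. A minimal reading sketch under that assumption; the file name below is hypothetical.

# Sketch: reading the ";"-separated grid input fixture shown above.
import csv

with open("grid_input.csv") as f:  # hypothetical file name
    for name, label, features in csv.reader(f, delimiter=";"):
        print(name, label, features)  # e.g. wine class [0, 1]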
@@ -6,7 +6,7 @@
            "kernel": "liblinear",
            "multiclass_strategy": "ovr"
        },
        "v. 1.3.1, Computed on Test on 2022-02-22 at 12:00:00 took 1s"
        "v. 1.4.0, Computed on Test on 2022-02-22 at 12:00:00 took 1s"
    ],
    "balloons": [
        0.625,
@@ -15,6 +15,6 @@
            "kernel": "linear",
            "multiclass_strategy": "ovr"
        },
        "v. 1.3.1, Computed on Test on 2022-02-22 at 12:00:00 took 1s"
        "v. 1.4.0, Computed on Test on 2022-02-22 at 12:00:00 took 1s"
    ]
}
@@ -120,7 +120,7 @@ class BeMainTest(TestBase):
            module.main(parameter)
        self.assertEqual(msg.exception.code, 2)
        self.assertEqual(stderr.getvalue(), "")
        self.assertRegexpMatches(stdout.getvalue(), message)
        self.assertRegex(stdout.getvalue(), message)

    def test_be_main_best_params_non_existent(self):
        model = "GBC"
@@ -1,4 +1,4 @@
1;1;"Datasets used in benchmark ver. 0.5.0"
1;1;"Datasets used in benchmark ver. 1.0.1"
2;1;" Default score accuracy"
2;2;"Cross validation"
2;6;"5 Folds"
@@ -1,4 +1,4 @@
[94mDatasets used in benchmark ver. 0.5.0
[94mDatasets used in benchmark ver. 1.0.1

Dataset Sampl. Feat. Cont Cls Balance
============================== ====== ===== ==== === ==========================================
@@ -1,18 +1,11 @@
pandas
scikit-learn
scipy
python-dotenv
flask
bootstrap-flask
flask-wtf
flask-login
flask-migrate
flask_sqlalchemy
odte
cython
fimdlp
mufs
bayesclass @ git+ssh://git@github.com/doctorado-ml/bayesclass.git
#bayesclass @ git+ssh://git@github.com/doctorado-ml/bayesclass.git
xlsxwriter
openpyxl
tqdm
@@ -20,3 +13,4 @@ xgboost
graphviz
Wodt @ git+ssh://git@github.com/doctorado-ml/Wodt.git#egg=Wodt
unittest-xml-reporting
flask