Complete schema validation
@@ -55,10 +55,10 @@ dependency: ## Create a dependency graph diagram of the project (build/dependenc
	cd $(f_debug) && cmake .. --graphviz=dependency.dot && dot -Tpng dependency.dot -o dependency.png

buildd: ## Build the debug targets
	cmake --build $(f_debug) -t $(app_targets) PlatformSample --parallel
	@cmake --build $(f_debug) -t $(app_targets) PlatformSample --parallel

buildr: ## Build the release targets
	cmake --build $(f_release) -t $(app_targets) --parallel
	@cmake --build $(f_release) -t $(app_targets) --parallel

clean: ## Clean the tests info
	@echo ">>> Cleaning Debug Platform tests...";
@@ -40,7 +40,7 @@ export MPI_HOME="/usr/lib64/openmpi"
On Mac OS X, install mpich with brew; if cmake doesn't find it, edit the mpicxx wrapper to remove the ",-commons,use_dylibs" part from final_ldflags

```bash
vi /opt/homebrew/bin/mpicx
vi /opt/homebrew/bin/mpicxx
```

### boost library
@@ -2,11 +2,12 @@
#include <filesystem>
#include <fstream>
#include <vector>
#include <regex>
#include <argparse/argparse.hpp>
#include <nlohmann/json.hpp>
#include "common/Paths.h"
#include "results/JsonValidator.h"
#include "results/SchemaV1_0.h"
#include "config_platform.h"

using json = nlohmann::json;
namespace fs = std::filesystem;
@@ -18,23 +19,51 @@ void header(const std::string& message, int length, const std::string& symbol)
}
int main(int argc, char* argv[])
{
    argparse::ArgumentParser program("b_results", { platform_project_version.begin(), platform_project_version.end() });
    program.add_description("Check the results files and optionally fixes them.");
    program.add_argument("--fix").help("Fix any errors in results").default_value(false).implicit_value(true);
    program.add_argument("--file").help("check only this results file").default_value("");
    std::string nameSuffix = "results_";
    std::string schemaVersion = "1.0";
    bool fix_it = false;

    std::string selected_file;
    try {
        program.parse_args(argc, argv);
        fix_it = program.get<bool>("fix");
        selected_file = program.get<std::string>("file");
    }
    catch (const std::exception& err) {
        std::cerr << err.what() << std::endl;
        std::cerr << program;
        exit(1);
    }
    //
    // Determine the files to process
    //
    std::vector<std::string> result_files;
    int max_length = 0;
    // Load the result files and find the longest file name
    for (const auto& entry : fs::directory_iterator(platform::Paths::results())) {
        if (entry.is_regular_file() && entry.path().filename().string().starts_with(nameSuffix) && entry.path().filename().string().ends_with(".json")) {
            std::string fileName = entry.path().string();
            if (fileName.length() > max_length) {
                max_length = fileName.length();
    if (selected_file != "") {
        if (!selected_file.starts_with(platform::Paths::results())) {
            selected_file = platform::Paths::results() + selected_file;
        }
        // Only check the selected file
        result_files.push_back(selected_file);
        max_length = selected_file.length();
    } else {
        // Load the result files and find the longest file name
        for (const auto& entry : fs::directory_iterator(platform::Paths::results())) {
            if (entry.is_regular_file() && entry.path().filename().string().starts_with(nameSuffix) && entry.path().filename().string().ends_with(".json")) {
                std::string fileName = entry.path().string();
                if (fileName.length() > max_length) {
                    max_length = fileName.length();
                }
                result_files.push_back(fileName);
            }
            result_files.push_back(fileName);
        }
    }
    // Process the result files
    //
    // Process the results files
    //
    if (result_files.empty()) {
        std::cerr << "Error: No result files found." << std::endl;
        return 1;
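The hunk above introduces the `--file` option: when a results file is given (optionally as a bare name, which is resolved against the results directory), it replaces the scan for `results_*.json`. As a rough, self-contained sketch of that selection logic, with a hypothetical `results_dir` string and `collect_result_files` helper standing in for `platform::Paths::results()` and the inline code of `main`:

```cpp
// Sketch only: results_dir and collect_result_files are hypothetical names,
// not the repository's API. Requires C++20 for starts_with/ends_with.
#include <filesystem>
#include <string>
#include <vector>

namespace fs = std::filesystem;

std::vector<std::string> collect_result_files(const std::string& results_dir, const std::string& selected_file)
{
    std::vector<std::string> files;
    if (!selected_file.empty()) {
        // A bare file name is resolved against the results directory
        // (assumes results_dir ends with a path separator, as in the hunk above)
        std::string name = selected_file;
        if (!name.starts_with(results_dir)) {
            name = results_dir + name;
        }
        files.push_back(name);
    } else {
        // Otherwise scan the directory for results_*.json
        for (const auto& entry : fs::directory_iterator(results_dir)) {
            const std::string fileName = entry.path().filename().string();
            if (entry.is_regular_file() && fileName.starts_with("results_") && fileName.ends_with(".json")) {
                files.push_back(entry.path().string());
            }
        }
    }
    return files;
}
```

Called with an empty `selected_file` it reproduces the original directory scan; with a bare file name it prepends the results directory, mirroring the branch added above.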
@@ -43,6 +72,7 @@ int main(int argc, char* argv[])
    header(header_message, max_length, "*");
    platform::JsonValidator validator(platform::SchemaV1_0::schema);
    int n_errors = 0;
    std::vector<std::string> files_with_errors;
    for (const auto& file_name : result_files) {
        std::vector<std::string> errors = validator.validate(file_name);
        if (!errors.empty()) {
@@ -53,8 +83,9 @@ int main(int argc, char* argv[])
            }
            if (fix_it) {
                validator.fix_it(file_name);
                std::cout << " -> File fixed." << std::endl;
                std::cout << " -> File fixed." << std::endl;
            }
            files_with_errors.push_back(file_name);
        }
    }
    if (n_errors == 0) {
@@ -63,6 +94,9 @@ int main(int argc, char* argv[])
        std::string $verb = (fix_it) ? "had" : "have";
        std::string msg = std::to_string(n_errors) + " files " + $verb + " errors.";
        header(msg, max_length, "*");
        for (const auto& file_name : files_with_errors) {
            std::cout << "- " << file_name << std::endl;
        }
    }
    return 0;
}
@@ -28,10 +28,8 @@ namespace platform {

            std::vector<std::string> errors;

            // Validate all fields defined in the schema
            for (const auto& [key, value] : schema["properties"].items()) {
                validateField(key, value, data, errors);
            }
            // Validate the top-level object
            validateObject("", schema, data, errors);

            return errors;
        }
@@ -68,13 +66,11 @@ namespace platform {
                        data[key] = nullptr;
                    }
                }

                // Fix const fields to match the schema value
                if (value.contains("const")) {
                    data[key] = value["const"];
                }
            }

            // Save fixed JSON
            std::ofstream outFile(fileName);
            if (!outFile.is_open()) {
@@ -85,45 +81,72 @@ namespace platform {
            outFile << data.dump(4);
            outFile.close();
        }

    private:
        json schema;

        void validateField(const std::string& field, const json& value, const json& data, std::vector<std::string>& errors)
        void validateObject(const std::string& path, const json& schema, const json& data, std::vector<std::string>& errors)
        {
            // Check if the field is present
            if (!data.contains(field)) {
                errors.push_back("Missing required field: " + field);
                return;
            }

            // Check for type constraints
            if (value.contains("type")) {
                const std::string type = value["type"];
                if (type == "string" && !data[field].is_string()) {
                    errors.push_back("Field '" + field + "' should be a string.");
                } else if (type == "number" && !data[field].is_number()) {
                    errors.push_back("Field '" + field + "' should be a number.");
                } else if (type == "integer" && !data[field].is_number_integer()) {
                    errors.push_back("Field '" + field + "' should be an integer.");
                } else if (type == "boolean" && !data[field].is_boolean()) {
                    errors.push_back("Field '" + field + "' should be a boolean.");
                } else if (type == "array" && !data[field].is_array()) {
                    errors.push_back("Field '" + field + "' should be an array.");
                } else if (type == "object" && !data[field].is_object()) {
                    errors.push_back("Field '" + field + "' should be an object.");
            if (schema.contains("required")) {
                for (const auto& requiredField : schema["required"]) {
                    if (!data.contains(requiredField)) {
                        std::string fullPath = path.empty() ? requiredField.get<std::string>() : path + "." + requiredField.get<std::string>();
                        errors.push_back("Missing required field: " + fullPath);
                    }
                }
            }

            // Check for const constraints
            if (value.contains("const")) {
                const auto& expectedValue = value["const"];
                if (data[field] != expectedValue) {
                    errors.push_back("Field '" + field + "' has an invalid value. Expected: " +
                        expectedValue.dump() + ", Found: " + data[field].dump());
            if (schema.contains("properties")) {
                for (const auto& [key, value] : schema["properties"].items()) {
                    if (data.contains(key)) {
                        std::string fullPath = path.empty() ? key : path + "." + key;
                        validateField(fullPath, value, data[key], errors); // Pass data[key] for nested validation
                    } else if (value.contains("required")) {
                        errors.push_back("Missing required field: " + (path.empty() ? key : path + "." + key));
                    }
                }
            }
        }

        void validateField(const std::string& field, const json& value, const json& data, std::vector<std::string>& errors)
        {
            if (value.contains("type")) {
                const std::string& type = value["type"];
                if (type == "array") {
                    if (!data.is_array()) {
                        errors.push_back("Field '" + field + "' should be an array.");
                        return;
                    }

                    if (value.contains("items")) {
                        for (size_t i = 0; i < data.size(); ++i) {
                            validateObject(field + "[" + std::to_string(i) + "]", value["items"], data[i], errors);
                        }
                    }
                } else if (type == "object") {
                    if (!data.is_object()) {
                        errors.push_back("Field '" + field + "' should be an object.");
                        return;
                    }

                    validateObject(field, value, data, errors);
                } else if (type == "string" && !data.is_string()) {
                    errors.push_back("Field '" + field + "' should be a string.");
                } else if (type == "number" && !data.is_number()) {
                    errors.push_back("Field '" + field + "' should be a number.");
                } else if (type == "integer" && !data.is_number_integer()) {
                    errors.push_back("Field '" + field + "' should be an integer.");
                } else if (type == "boolean" && !data.is_boolean()) {
                    errors.push_back("Field '" + field + "' should be a boolean.");
                }
            }
            if (value.contains("const")) {
                const auto& expectedValue = value["const"];
                if (data != expectedValue) {
                    errors.push_back("Field '" + field + "' has an invalid value. Expected: " +
                        expectedValue.dump() + ", Found: " + data.dump());
                }
            }
        }
    };
}
#endif
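The rewrite above splits the work between validateObject (the "required" and "properties" keys at one nesting level) and validateField (type, items, const, and recursion into nested objects and arrays). As a minimal, self-contained sketch of that recursive pattern with nlohmann::json; the schema, record, and names such as validate_node and tiny_schema are hypothetical illustrations, not the repository's code:

```cpp
#include <iostream>
#include <string>
#include <vector>
#include <nlohmann/json.hpp>

using json = nlohmann::json;

// Walk one schema level: report missing "required" keys, then recurse into
// every declared property the data actually carries.
void validate_node(const std::string& path, const json& schema, const json& data, std::vector<std::string>& errors)
{
    if (schema.contains("required")) {
        for (const auto& req : schema["required"]) {
            std::string name = req.get<std::string>();
            if (!data.contains(name)) {
                errors.push_back("Missing required field: " + (path.empty() ? name : path + "." + name));
            }
        }
    }
    if (schema.contains("properties")) {
        for (const auto& [key, prop] : schema["properties"].items()) {
            if (!data.contains(key)) continue;           // absence is handled by "required"
            std::string full = path.empty() ? key : path + "." + key;
            const json& value = data.at(key);
            const std::string type = prop.value("type", "");
            if (type == "object") {
                if (!value.is_object()) errors.push_back("Field '" + full + "' should be an object.");
                else validate_node(full, prop, value, errors);
            } else if (type == "array") {
                if (!value.is_array()) errors.push_back("Field '" + full + "' should be an array.");
                else if (prop.contains("items"))
                    for (size_t i = 0; i < value.size(); ++i)
                        validate_node(full + "[" + std::to_string(i) + "]", prop["items"], value[i], errors);
            } else if (type == "string" && !value.is_string()) {
                errors.push_back("Field '" + full + "' should be a string.");
            } else if (type == "number" && !value.is_number()) {
                errors.push_back("Field '" + full + "' should be a number.");
            }
            if (prop.contains("const") && value != prop["const"]) {
                errors.push_back("Field '" + full + "' should equal " + prop["const"].dump() + ".");
            }
        }
    }
}

int main()
{
    // Hypothetical schema fragment and record, just to exercise the recursion
    json tiny_schema = {
        {"type", "object"},
        {"required", {"schema_version", "results"}},
        {"properties", {
            {"schema_version", {{"type", "string"}, {"const", "1.0"}}},
            {"results", {{"type", "array"}, {"items", {
                {"type", "object"},
                {"required", {"dataset", "score"}},
                {"properties", {
                    {"dataset", {{"type", "string"}}},
                    {"score", {{"type", "number"}}}
                }}
            }}}}
        }}
    };
    json record = {
        {"schema_version", "1.0"},
        {"results", {{{"dataset", "iris"}}}}   // "score" is missing on purpose
    };
    std::vector<std::string> errors;
    validate_node("", tiny_schema, record, errors);
    for (const auto& e : errors) std::cout << e << std::endl;   // -> Missing required field: results[0].score
}
```

Running it reports the one problem in the sample record (results[0].score is required but absent), the same kind of message JsonValidator produces for the results files.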
@@ -85,10 +85,10 @@ namespace platform {
            }},
            {"required", {
                "scores_train", "scores_test", "times_train", "times_test",
                "notes", "train_time", "train_time_std", "test_time", "test_time_std",
                "train_time", "train_time_std", "test_time", "test_time_std",
                "samples", "features", "classes", "hyperparameters", "score", "score_train",
                "score_std", "score_train_std", "time", "time_std", "nodes", "leaves",
                "depth", "dataset", "confusion_matrices"
                "depth", "dataset"
            }}
        }}
    }}
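The list edited above is a "required" array inside one of the schema's nested objects. One quick way to keep such lists honest is to verify that every required name is also declared under "properties" at the same nesting level. The helper below is a hypothetical sketch (it is not part of SchemaV1_0.h), but it can be pointed at platform::SchemaV1_0::schema, the object JsonValidator is constructed with above:

```cpp
// Hypothetical consistency check: every name in a "required" list should
// also be declared under "properties" at the same nesting level.
#include <string>
#include <vector>
#include <nlohmann/json.hpp>

using json = nlohmann::json;

void find_undeclared_required(const json& node, const std::string& path, std::vector<std::string>& missing)
{
    if (node.is_object() && node.contains("required") && node.contains("properties")) {
        for (const auto& name : node["required"]) {
            if (!node["properties"].contains(name.get<std::string>())) {
                missing.push_back(path + "/" + name.get<std::string>());
            }
        }
    }
    // Walk every nested container so deeper schema objects are checked too
    if (node.is_structured()) {
        for (const auto& [key, child] : node.items()) {
            find_undeclared_required(child, path + "/" + key, missing);
        }
    }
}
```

Run against the schema (for example, find_undeclared_required(platform::SchemaV1_0::schema, "", bad)), any entry left in bad points at a required name that has no matching property definition.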