Remove using namespace from Library

This commit is contained in:
2023-11-08 18:45:35 +01:00
parent 92820555da
commit f9258e43b9
96 changed files with 1316 additions and 1340 deletions

View File

@@ -5,20 +5,20 @@ namespace platform {
Dataset::Dataset(const Dataset& dataset) : path(dataset.path), name(dataset.name), className(dataset.className), n_samples(dataset.n_samples), n_features(dataset.n_features), features(dataset.features), states(dataset.states), loaded(dataset.loaded), discretize(dataset.discretize), X(dataset.X), y(dataset.y), Xv(dataset.Xv), Xd(dataset.Xd), yv(dataset.yv), fileType(dataset.fileType)
{
}
string Dataset::getName() const
std::string Dataset::getName() const
{
return name;
}
string Dataset::getClassName() const
std::string Dataset::getClassName() const
{
return className;
}
vector<string> Dataset::getFeatures() const
std::vector<std::string> Dataset::getFeatures() const
{
if (loaded) {
return features;
} else {
throw invalid_argument("Dataset not loaded.");
throw std::invalid_argument("Dataset not loaded.");
}
}
int Dataset::getNFeatures() const
@@ -26,7 +26,7 @@ namespace platform {
if (loaded) {
return n_features;
} else {
throw invalid_argument("Dataset not loaded.");
throw std::invalid_argument("Dataset not loaded.");
}
}
int Dataset::getNSamples() const
@@ -34,31 +34,31 @@ namespace platform {
if (loaded) {
return n_samples;
} else {
throw invalid_argument("Dataset not loaded.");
throw std::invalid_argument("Dataset not loaded.");
}
}
map<string, vector<int>> Dataset::getStates() const
std::map<std::string, std::vector<int>> Dataset::getStates() const
{
if (loaded) {
return states;
} else {
throw invalid_argument("Dataset not loaded.");
throw std::invalid_argument("Dataset not loaded.");
}
}
pair<vector<vector<float>>&, vector<int>&> Dataset::getVectors()
std::pair<std::vector<std::vector<float>>&, std::vector<int>&> Dataset::getVectors()
{
if (loaded) {
return { Xv, yv };
} else {
throw invalid_argument("Dataset not loaded.");
throw std::invalid_argument("Dataset not loaded.");
}
}
pair<vector<vector<int>>&, vector<int>&> Dataset::getVectorsDiscretized()
std::pair<std::vector<std::vector<int>>&, std::vector<int>&> Dataset::getVectorsDiscretized()
{
if (loaded) {
return { Xd, yv };
} else {
throw invalid_argument("Dataset not loaded.");
throw std::invalid_argument("Dataset not loaded.");
}
}
pair<torch::Tensor&, torch::Tensor&> Dataset::getTensors()
@@ -67,22 +67,22 @@ namespace platform {
buildTensors();
return { X, y };
} else {
throw invalid_argument("Dataset not loaded.");
throw std::invalid_argument("Dataset not loaded.");
}
}
void Dataset::load_csv()
{
ifstream file(path + "/" + name + ".csv");
if (file.is_open()) {
string line;
std::string line;
getline(file, line);
vector<string> tokens = split(line, ',');
features = vector<string>(tokens.begin(), tokens.end() - 1);
std::vector<std::string> tokens = split(line, ',');
features = std::vector<std::string>(tokens.begin(), tokens.end() - 1);
if (className == "-1") {
className = tokens.back();
}
for (auto i = 0; i < features.size(); ++i) {
Xv.push_back(vector<float>());
Xv.push_back(std::vector<float>());
}
while (getline(file, line)) {
tokens = split(line, ',');
@@ -93,17 +93,17 @@ namespace platform {
}
file.close();
} else {
throw invalid_argument("Unable to open dataset file.");
throw std::invalid_argument("Unable to open dataset file.");
}
}
void Dataset::computeStates()
{
for (int i = 0; i < features.size(); ++i) {
states[features[i]] = vector<int>(*max_element(Xd[i].begin(), Xd[i].end()) + 1);
states[features[i]] = std::vector<int>(*std::max_element(Xd[i].begin(), Xd[i].end()) + 1);
auto item = states.at(features[i]);
iota(begin(item), end(item), 0);
}
states[className] = vector<int>(*max_element(yv.begin(), yv.end()) + 1);
states[className] = std::vector<int>(*std::max_element(yv.begin(), yv.end()) + 1);
iota(begin(states.at(className)), end(states.at(className)), 0);
}
void Dataset::load_arff()
@@ -118,12 +118,12 @@ namespace platform {
auto attributes = arff.getAttributes();
transform(attributes.begin(), attributes.end(), back_inserter(features), [](const auto& attribute) { return attribute.first; });
}
vector<string> tokenize(string line)
std::vector<std::string> tokenize(std::string line)
{
vector<string> tokens;
std::vector<std::string> tokens;
for (auto i = 0; i < line.size(); ++i) {
if (line[i] == ' ' || line[i] == '\t' || line[i] == '\n') {
string token = line.substr(0, i);
std::string token = line.substr(0, i);
tokens.push_back(token);
line.erase(line.begin(), line.begin() + i + 1);
i = 0;
@@ -140,16 +140,16 @@ namespace platform {
{
ifstream file(path + "/" + name + "_R.dat");
if (file.is_open()) {
string line;
std::string line;
getline(file, line);
line = ArffFiles::trim(line);
vector<string> tokens = tokenize(line);
std::vector<std::string> tokens = tokenize(line);
transform(tokens.begin(), tokens.end() - 1, back_inserter(features), [](const auto& attribute) { return ArffFiles::trim(attribute); });
if (className == "-1") {
className = ArffFiles::trim(tokens.back());
}
for (auto i = 0; i < features.size(); ++i) {
Xv.push_back(vector<float>());
Xv.push_back(std::vector<float>());
}
while (getline(file, line)) {
tokens = tokenize(line);
@@ -162,7 +162,7 @@ namespace platform {
}
file.close();
} else {
throw invalid_argument("Unable to open dataset file.");
throw std::invalid_argument("Unable to open dataset file.");
}
}
void Dataset::load()
@@ -201,9 +201,9 @@ namespace platform {
}
y = torch::tensor(yv, torch::kInt32);
}
vector<mdlp::labels_t> Dataset::discretizeDataset(vector<mdlp::samples_t>& X, mdlp::labels_t& y)
std::vector<mdlp::labels_t> Dataset::discretizeDataset(std::vector<mdlp::samples_t>& X, mdlp::labels_t& y)
{
vector<mdlp::labels_t> Xd;
std::vector<mdlp::labels_t> Xd;
auto fimdlp = mdlp::CPPFImdlp();
for (int i = 0; i < X.size(); i++) {
fimdlp.fit(X[i], y);