Complete proposal with only discretizing numeric features
@@ -37,6 +37,7 @@ namespace bayesnet {
className = className_;
states = iterativeLocalDiscretization(y, static_cast<KDB*>(this), dataset, features, className, states_, smoothing);
KDB::fit(dataset, features, className, states, smoothing);
fitted = true;
return *this;
}
torch::Tensor KDBLd::predict(torch::Tensor& X)
@@ -118,17 +118,20 @@ namespace bayesnet {
}
return states;
}
map<std::string, std::vector<int>> Proposal::fit_local_discretization(const torch::Tensor& y, map<std::string, std::vector<int>> states_)
map<std::string, std::vector<int>> Proposal::fit_local_discretization(const torch::Tensor& y, map<std::string, std::vector<int>> states)
{
// Discretize the continuous input data and build pDataset (Classifier::dataset)
// We expect to have in states for numeric features an empty vector and for discretized features a vector of states
int m = Xf.size(1);
int n = Xf.size(0);
map<std::string, std::vector<int>> states;
pDataset = torch::zeros({ n + 1, m }, torch::kInt32);
auto yv = std::vector<int>(y.data_ptr<int>(), y.data_ptr<int>() + y.size(0));
// discretize input data by feature(row)
std::unique_ptr<mdlp::Discretizer> discretizer;
wasNumeric.resize(pFeatures.size());
for (auto i = 0; i < pFeatures.size(); ++i) {
auto Xt_ptr = Xf.index({ i }).data_ptr<float>();
auto Xt = std::vector<float>(Xt_ptr, Xt_ptr + Xf.size(1));
if (discretizationType == discretization_t::BINQ) {
discretizer = std::make_unique<mdlp::BinDisc>(ld_params.proposed_cuts, mdlp::strategy_t::QUANTILE);
} else if (discretizationType == discretization_t::BINU) {
@@ -136,13 +139,19 @@ namespace bayesnet {
} else { // Default is MDLP
discretizer = std::make_unique<mdlp::CPPFImdlp>(ld_params.min_length, ld_params.max_depth, ld_params.proposed_cuts);
}
auto Xt_ptr = Xf.index({ i }).data_ptr<float>();
auto Xt = std::vector<float>(Xt_ptr, Xt_ptr + Xf.size(1));
discretizer->fit(Xt, yv);
pDataset.index_put_({ i, "..." }, torch::tensor(discretizer->transform(Xt)));
auto xStates = std::vector<int>(discretizer->getCutPoints().size() + 1);
if (states[pFeatures[i]].empty()) {
// If the feature is numeric, we discretize it
pDataset.index_put_({ i, "..." }, torch::tensor(discretizer->fit_transform(Xt, yv)));
int n_states = discretizer->getCutPoints().size() + 1;
auto xStates = std::vector<int>(n_states);
iota(xStates.begin(), xStates.end(), 0);
states[pFeatures[i]] = xStates;
wasNumeric[i] = true;
} else {
wasNumeric[i] = false;
// If the feature is categorical, we just copy it
pDataset.index_put_({ i, "..." }, Xf[i].to(torch::kInt32));
}
discretizers[pFeatures[i]] = std::move(discretizer);
}
int n_classes = torch::max(y).item<int>() + 1;
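The hunk above hinges on the convention spelled out in the comment: a feature whose entry in states is an empty vector is numeric and gets discretized (its states then become 0..n_states-1), while a feature that arrives with a non-empty vector is taken as already discretized and is only cast to integers. The standalone sketch below shows that convention in isolation; the toy equal-width binning and the feature names are placeholders standing in for the library's mdlp discretizers and real data, not the actual implementation.

#include <algorithm>
#include <iostream>
#include <map>
#include <numeric>
#include <string>
#include <vector>

// Toy equal-width binning; a placeholder for the mdlp discretizers used in the commit.
std::vector<int> toy_discretize(const std::vector<float>& x, int n_bins)
{
    auto [lo, hi] = std::minmax_element(x.begin(), x.end());
    float width = (*hi - *lo) / n_bins;
    std::vector<int> out(x.size());
    for (size_t i = 0; i < x.size(); ++i)
        out[i] = width > 0 ? std::min(n_bins - 1, static_cast<int>((x[i] - *lo) / width)) : 0;
    return out;
}

int main()
{
    // Hypothetical two-feature dataset: "age" is numeric, "sex" is already discrete.
    std::map<std::string, std::vector<float>> X = {
        { "age", { 63.f, 37.f, 41.f, 56.f } },
        { "sex", { 1.f, 0.f, 0.f, 1.f } }
    };
    // The convention: empty vector -> numeric feature, non-empty -> categorical states.
    std::map<std::string, std::vector<int>> states = { { "age", {} }, { "sex", { 0, 1 } } };
    for (const auto& [name, column] : X) {
        std::vector<int> xd;
        if (states[name].empty()) {               // numeric: discretize and fill its states
            xd = toy_discretize(column, 3);
            std::vector<int> xStates(3);
            std::iota(xStates.begin(), xStates.end(), 0);
            states[name] = xStates;
        } else {                                  // categorical: just cast the values
            for (float v : column) xd.push_back(static_cast<int>(v));
        }
        std::cout << name << " -> " << states[name].size() << " states" << std::endl;
    }
    return 0;
}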
@@ -157,8 +166,13 @@ namespace bayesnet {
auto Xtd = torch::zeros_like(X, torch::kInt32);
for (int i = 0; i < X.size(0); ++i) {
auto Xt = std::vector<float>(X[i].data_ptr<float>(), X[i].data_ptr<float>() + X.size(1));
std::vector<int> Xd;
if (wasNumeric[i]) {
auto Xd = discretizers[pFeatures[i]]->transform(Xt);
Xtd.index_put_({ i }, torch::tensor(Xd, torch::kInt32));
} else {
Xtd.index_put_({ i }, Xf[i].to(torch::kInt32));
}
}
return Xtd;
}
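At prediction time the same decision is replayed per feature, which is what the new wasNumeric flags recorded during fit are for: numeric columns go through the stored discretizer's transform, already-discrete columns are only cast. Below is a minimal sketch of that dispatch, with a stub discretizer standing in for mdlp::Discretizer; the names and the binning rule are illustrative assumptions, not the library's API.

#include <iostream>
#include <vector>

// Stub standing in for mdlp::Discretizer: bins a value by counting fitted cut points below it.
struct StubDiscretizer {
    std::vector<float> cuts;                          // cut points learned at fit time
    std::vector<int> transform(const std::vector<float>& x) const
    {
        std::vector<int> out;
        for (float v : x) {
            int bin = 0;
            for (float c : cuts) if (v > c) ++bin;
            out.push_back(bin);
        }
        return out;
    }
};

// Replays the fit-time decision: discretize numeric columns, cast categorical ones.
std::vector<int> transform_feature(const std::vector<float>& column, bool wasNumeric, const StubDiscretizer& disc)
{
    if (wasNumeric) return disc.transform(column);    // numeric: apply the fitted cuts
    std::vector<int> out;                             // categorical: plain cast
    for (float v : column) out.push_back(static_cast<int>(v));
    return out;
}

int main()
{
    StubDiscretizer disc{ { 45.f, 60.f } };
    for (int v : transform_feature({ 63.f, 37.f, 41.f, 56.f }, true, disc)) std::cout << v << ' ';
    std::cout << std::endl;
    for (int v : transform_feature({ 1.f, 0.f, 0.f, 1.f }, false, disc)) std::cout << v << ' ';
    std::cout << std::endl;
    return 0;
}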
@@ -61,6 +61,7 @@ namespace bayesnet {
std::vector<std::string>& notes; // Notes during fit from BaseClassifier
torch::Tensor& pDataset; // (n+1)xm tensor
std::vector<std::string>& pFeatures;
std::vector<bool> wasNumeric;
std::string& pClassName;
enum class discretization_t {
MDLP,
@@ -36,6 +36,7 @@ namespace bayesnet {
className = className_;
states = iterativeLocalDiscretization(y, static_cast<SPODE*>(this), dataset, features, className, states_, smoothing);
SPODE::fit(dataset, features, className, states, smoothing);
fitted = true;
return *this;
}
torch::Tensor SPODELd::predict(torch::Tensor& X)
@@ -35,6 +35,7 @@ namespace bayesnet {
className = className_;
states = iterativeLocalDiscretization(y, static_cast<TAN*>(this), dataset, features, className, states_, smoothing);
TAN::fit(dataset, features, className, states, smoothing);
fitted = true;
return *this;
}
torch::Tensor TANLd::predict(torch::Tensor& X)
@@ -24,6 +24,7 @@ namespace bayesnet {
// 1st we need to fit the model to build the normal AODE structure, Ensemble::fit
// calls buildModel to initialize the base models
Ensemble::fit(dataset, features, className, states, smoothing);
fitted = true;
return *this;

}
@@ -520,34 +520,35 @@ TEST_CASE("Test Dataset Loading", "[Datasets]")
std::cout << "| " << dataset.yt[sample].item<int>() << std::endl;
}
// Test loading a dataset
dataset = RawDatasets("adult", true);
REQUIRE(dataset.Xt.size(0) == 14);
REQUIRE(dataset.Xt.size(1) == 45222);
REQUIRE(dataset.yt.size(0) == 45222);
std::cout << "Dataset adult discretized " << std::endl;
dataset = RawDatasets("heart-statlog", true);
REQUIRE(dataset.Xt.size(0) == 13);
REQUIRE(dataset.Xt.size(1) == 270);
REQUIRE(dataset.yt.size(0) == 270);
std::cout << "Dataset heart-statlog discretized " << std::endl;
for (int sample = 0; sample < max_sample; sample++) {
for (int feature = 0; feature < 14; feature++) {
for (int feature = 0; feature < 13; feature++) {
std::cout << dataset.Xt[feature][sample].item<int>() << " ";
}
std::cout << "| " << dataset.yt[sample].item<int>() << std::endl;
}
auto features = dataset.features;
std::cout << "States:" << std::endl;
for (int i = 0; i < 14; i++) {
for (int i = 0; i < 13; i++) {
std::cout << i << " has " << dataset.states.at(features[i]).size() << " states." << std::endl;
}
dataset = RawDatasets("adult", false);
std::cout << "Dataset adult raw " << std::endl;
dataset = RawDatasets("heart-statlog", false);
std::cout << "Dataset heart-statlog raw " << std::endl;
for (int sample = 0; sample < max_sample; sample++) {
for (int feature = 0; feature < 14; feature++) {
for (int feature = 0; feature < 13; feature++) {
std::cout << dataset.Xt[feature][sample].item<float>() << " ";
}
std::cout << "| " << dataset.yt[sample].item<int>() << std::endl;
}
std::cout << "States:" << std::endl;
for (int i = 0; i < 14; i++) {
for (int i = 0; i < 13; i++) {
std::cout << i << " has " << dataset.states.at(features[i]).size() << " states." << std::endl;
}
auto clf = bayesnet::TANLd();
clf.fit(dataset.Xt, dataset.yt, dataset.features, dataset.className, dataset.states, dataset.smoothing);
std::cout << "Score: " << clf.score(dataset.Xt, dataset.yt) << std::endl;
}
48861 tests/data/adult.arff (file diff suppressed because it is too large)
338 tests/data/heart-statlog.arff (executable file)
@@ -0,0 +1,338 @@
% This database contains 13 attributes (which have been extracted from
% a larger set of 75)
%
%
%
% Attribute Information:
% ------------------------
% 0 -- 1. age
% 1 -- 2. sex
% 2 -- 3. chest pain type (4 values)
% 3 -- 4. resting blood pressure
% 4 -- 5. serum cholestoral in mg/dl
% 5 -- 6. fasting blood sugar > 120 mg/dl
% 6 -- 7. resting electrocardiographic results (values 0,1,2)
% 7 -- 8. maximum heart rate achieved
% 8 -- 9. exercise induced angina
% 9 -- 10. oldpeak = ST depression induced by exercise relative to rest
% 10 -- 11. the slope of the peak exercise ST segment
% 11 -- 12. number of major vessels (0-3) colored by flourosopy
% 12 -- 13. thal: 3 = normal; 6 = fixed defect; 7 = reversable defect
%
% Attributes types
% -----------------
%
% Real: 1,4,5,8,10,12
% Ordered:11,
% Binary: 2,6,9
% Nominal:7,3,13
%
% Variable to be predicted
% ------------------------
% Absence (1) or presence (2) of heart disease
%
% Cost Matrix
%
% abse pres
% absence 0 1
% presence 5 0
%
% where the rows represent the true values and the columns the predicted.
%
% No missing values.
%
% 270 observations
%
%
%
%
% Relabeled values in attribute class
% From: 1 To: absent
% From: 2 To: present
%
@relation heart-statlog
@attribute age real
@attribute sex real
@attribute chest real
@attribute resting_blood_pressure real
@attribute serum_cholestoral real
@attribute fasting_blood_sugar real
@attribute resting_electrocardiographic_results real
@attribute maximum_heart_rate_achieved real
@attribute exercise_induced_angina real
@attribute oldpeak real
@attribute slope real
@attribute number_of_major_vessels real
@attribute thal real
@attribute class { absent, present}
@data
70,1,4,130,322,0,2,109,0,2.4,2,3,3,present
67,0,3,115,564,0,2,160,0,1.6,2,0,7,absent
57,1,2,124,261,0,0,141,0,0.3,1,0,7,present
64,1,4,128,263,0,0,105,1,0.2,2,1,7,absent
74,0,2,120,269,0,2,121,1,0.2,1,1,3,absent
65,1,4,120,177,0,0,140,0,0.4,1,0,7,absent
56,1,3,130,256,1,2,142,1,0.6,2,1,6,present
59,1,4,110,239,0,2,142,1,1.2,2,1,7,present
60,1,4,140,293,0,2,170,0,1.2,2,2,7,present
63,0,4,150,407,0,2,154,0,4,2,3,7,present
59,1,4,135,234,0,0,161,0,0.5,2,0,7,absent
53,1,4,142,226,0,2,111,1,0,1,0,7,absent
44,1,3,140,235,0,2,180,0,0,1,0,3,absent
61,1,1,134,234,0,0,145,0,2.6,2,2,3,present
57,0,4,128,303,0,2,159,0,0,1,1,3,absent
71,0,4,112,149,0,0,125,0,1.6,2,0,3,absent
46,1,4,140,311,0,0,120,1,1.8,2,2,7,present
53,1,4,140,203,1,2,155,1,3.1,3,0,7,present
64,1,1,110,211,0,2,144,1,1.8,2,0,3,absent
40,1,1,140,199,0,0,178,1,1.4,1,0,7,absent
67,1,4,120,229,0,2,129,1,2.6,2,2,7,present
48,1,2,130,245,0,2,180,0,0.2,2,0,3,absent
43,1,4,115,303,0,0,181,0,1.2,2,0,3,absent
47,1,4,112,204,0,0,143,0,0.1,1,0,3,absent
54,0,2,132,288,1,2,159,1,0,1,1,3,absent
48,0,3,130,275,0,0,139,0,0.2,1,0,3,absent
46,0,4,138,243,0,2,152,1,0,2,0,3,absent
51,0,3,120,295,0,2,157,0,0.6,1,0,3,absent
58,1,3,112,230,0,2,165,0,2.5,2,1,7,present
71,0,3,110,265,1,2,130,0,0,1,1,3,absent
57,1,3,128,229,0,2,150,0,0.4,2,1,7,present
66,1,4,160,228,0,2,138,0,2.3,1,0,6,absent
37,0,3,120,215,0,0,170,0,0,1,0,3,absent
59,1,4,170,326,0,2,140,1,3.4,3,0,7,present
50,1,4,144,200,0,2,126,1,0.9,2,0,7,present
48,1,4,130,256,1,2,150,1,0,1,2,7,present
61,1,4,140,207,0,2,138,1,1.9,1,1,7,present
59,1,1,160,273,0,2,125,0,0,1,0,3,present
42,1,3,130,180,0,0,150,0,0,1,0,3,absent
48,1,4,122,222,0,2,186,0,0,1,0,3,absent
40,1,4,152,223,0,0,181,0,0,1,0,7,present
62,0,4,124,209,0,0,163,0,0,1,0,3,absent
44,1,3,130,233,0,0,179,1,0.4,1,0,3,absent
46,1,2,101,197,1,0,156,0,0,1,0,7,absent
59,1,3,126,218,1,0,134,0,2.2,2,1,6,present
58,1,3,140,211,1,2,165,0,0,1,0,3,absent
49,1,3,118,149,0,2,126,0,0.8,1,3,3,present
44,1,4,110,197,0,2,177,0,0,1,1,3,present
66,1,2,160,246,0,0,120,1,0,2,3,6,present
65,0,4,150,225,0,2,114,0,1,2,3,7,present
42,1,4,136,315,0,0,125,1,1.8,2,0,6,present
52,1,2,128,205,1,0,184,0,0,1,0,3,absent
65,0,3,140,417,1,2,157,0,0.8,1,1,3,absent
63,0,2,140,195,0,0,179,0,0,1,2,3,absent
45,0,2,130,234,0,2,175,0,0.6,2,0,3,absent
41,0,2,105,198,0,0,168,0,0,1,1,3,absent
61,1,4,138,166,0,2,125,1,3.6,2,1,3,present
60,0,3,120,178,1,0,96,0,0,1,0,3,absent
59,0,4,174,249,0,0,143,1,0,2,0,3,present
62,1,2,120,281,0,2,103,0,1.4,2,1,7,present
57,1,3,150,126,1,0,173,0,0.2,1,1,7,absent
51,0,4,130,305,0,0,142,1,1.2,2,0,7,present
44,1,3,120,226,0,0,169,0,0,1,0,3,absent
60,0,1,150,240,0,0,171,0,0.9,1,0,3,absent
63,1,1,145,233,1,2,150,0,2.3,3,0,6,absent
57,1,4,150,276,0,2,112,1,0.6,2,1,6,present
51,1,4,140,261,0,2,186,1,0,1,0,3,absent
58,0,2,136,319,1,2,152,0,0,1,2,3,present
44,0,3,118,242,0,0,149,0,0.3,2,1,3,absent
47,1,3,108,243,0,0,152,0,0,1,0,3,present
61,1,4,120,260,0,0,140,1,3.6,2,1,7,present
57,0,4,120,354,0,0,163,1,0.6,1,0,3,absent
70,1,2,156,245,0,2,143,0,0,1,0,3,absent
76,0,3,140,197,0,1,116,0,1.1,2,0,3,absent
67,0,4,106,223,0,0,142,0,0.3,1,2,3,absent
45,1,4,142,309,0,2,147,1,0,2,3,7,present
45,1,4,104,208,0,2,148,1,3,2,0,3,absent
39,0,3,94,199,0,0,179,0,0,1,0,3,absent
42,0,3,120,209,0,0,173,0,0,2,0,3,absent
56,1,2,120,236,0,0,178,0,0.8,1,0,3,absent
58,1,4,146,218,0,0,105,0,2,2,1,7,present
35,1,4,120,198,0,0,130,1,1.6,2,0,7,present
58,1,4,150,270,0,2,111,1,0.8,1,0,7,present
41,1,3,130,214,0,2,168,0,2,2,0,3,absent
57,1,4,110,201,0,0,126,1,1.5,2,0,6,absent
42,1,1,148,244,0,2,178,0,0.8,1,2,3,absent
62,1,2,128,208,1,2,140,0,0,1,0,3,absent
59,1,1,178,270,0,2,145,0,4.2,3,0,7,absent
41,0,2,126,306,0,0,163,0,0,1,0,3,absent
50,1,4,150,243,0,2,128,0,2.6,2,0,7,present
59,1,2,140,221,0,0,164,1,0,1,0,3,absent
61,0,4,130,330,0,2,169,0,0,1,0,3,present
54,1,4,124,266,0,2,109,1,2.2,2,1,7,present
54,1,4,110,206,0,2,108,1,0,2,1,3,present
52,1,4,125,212,0,0,168,0,1,1,2,7,present
47,1,4,110,275,0,2,118,1,1,2,1,3,present
66,1,4,120,302,0,2,151,0,0.4,2,0,3,absent
58,1,4,100,234,0,0,156,0,0.1,1,1,7,present
64,0,3,140,313,0,0,133,0,0.2,1,0,7,absent
50,0,2,120,244,0,0,162,0,1.1,1,0,3,absent
44,0,3,108,141,0,0,175,0,0.6,2,0,3,absent
67,1,4,120,237,0,0,71,0,1,2,0,3,present
49,0,4,130,269,0,0,163,0,0,1,0,3,absent
57,1,4,165,289,1,2,124,0,1,2,3,7,present
63,1,4,130,254,0,2,147,0,1.4,2,1,7,present
48,1,4,124,274,0,2,166,0,0.5,2,0,7,present
51,1,3,100,222,0,0,143,1,1.2,2,0,3,absent
60,0,4,150,258,0,2,157,0,2.6,2,2,7,present
59,1,4,140,177,0,0,162,1,0,1,1,7,present
45,0,2,112,160,0,0,138,0,0,2,0,3,absent
55,0,4,180,327,0,1,117,1,3.4,2,0,3,present
41,1,2,110,235,0,0,153,0,0,1,0,3,absent
60,0,4,158,305,0,2,161,0,0,1,0,3,present
54,0,3,135,304,1,0,170,0,0,1,0,3,absent
42,1,2,120,295,0,0,162,0,0,1,0,3,absent
49,0,2,134,271,0,0,162,0,0,2,0,3,absent
46,1,4,120,249,0,2,144,0,0.8,1,0,7,present
56,0,4,200,288,1,2,133,1,4,3,2,7,present
66,0,1,150,226,0,0,114,0,2.6,3,0,3,absent
56,1,4,130,283,1,2,103,1,1.6,3,0,7,present
49,1,3,120,188,0,0,139,0,2,2,3,7,present
54,1,4,122,286,0,2,116,1,3.2,2,2,3,present
57,1,4,152,274,0,0,88,1,1.2,2,1,7,present
65,0,3,160,360,0,2,151,0,0.8,1,0,3,absent
54,1,3,125,273,0,2,152,0,0.5,3,1,3,absent
54,0,3,160,201,0,0,163,0,0,1,1,3,absent
62,1,4,120,267,0,0,99,1,1.8,2,2,7,present
52,0,3,136,196,0,2,169,0,0.1,2,0,3,absent
52,1,2,134,201,0,0,158,0,0.8,1,1,3,absent
60,1,4,117,230,1,0,160,1,1.4,1,2,7,present
63,0,4,108,269,0,0,169,1,1.8,2,2,3,present
66,1,4,112,212,0,2,132,1,0.1,1,1,3,present
42,1,4,140,226,0,0,178,0,0,1,0,3,absent
64,1,4,120,246,0,2,96,1,2.2,3,1,3,present
54,1,3,150,232,0,2,165,0,1.6,1,0,7,absent
46,0,3,142,177,0,2,160,1,1.4,3,0,3,absent
67,0,3,152,277,0,0,172,0,0,1,1,3,absent
56,1,4,125,249,1,2,144,1,1.2,2,1,3,present
34,0,2,118,210,0,0,192,0,0.7,1,0,3,absent
57,1,4,132,207,0,0,168,1,0,1,0,7,absent
64,1,4,145,212,0,2,132,0,2,2,2,6,present
59,1,4,138,271,0,2,182,0,0,1,0,3,absent
50,1,3,140,233,0,0,163,0,0.6,2,1,7,present
51,1,1,125,213,0,2,125,1,1.4,1,1,3,absent
54,1,2,192,283,0,2,195,0,0,1,1,7,present
53,1,4,123,282,0,0,95,1,2,2,2,7,present
52,1,4,112,230,0,0,160,0,0,1,1,3,present
40,1,4,110,167,0,2,114,1,2,2,0,7,present
58,1,3,132,224,0,2,173,0,3.2,1,2,7,present
41,0,3,112,268,0,2,172,1,0,1,0,3,absent
41,1,3,112,250,0,0,179,0,0,1,0,3,absent
50,0,3,120,219,0,0,158,0,1.6,2,0,3,absent
54,0,3,108,267,0,2,167,0,0,1,0,3,absent
64,0,4,130,303,0,0,122,0,2,2,2,3,absent
51,0,3,130,256,0,2,149,0,0.5,1,0,3,absent
46,0,2,105,204,0,0,172,0,0,1,0,3,absent
55,1,4,140,217,0,0,111,1,5.6,3,0,7,present
45,1,2,128,308,0,2,170,0,0,1,0,3,absent
56,1,1,120,193,0,2,162,0,1.9,2,0,7,absent
66,0,4,178,228,1,0,165,1,1,2,2,7,present
38,1,1,120,231,0,0,182,1,3.8,2,0,7,present
62,0,4,150,244,0,0,154,1,1.4,2,0,3,present
55,1,2,130,262,0,0,155,0,0,1,0,3,absent
58,1,4,128,259,0,2,130,1,3,2,2,7,present
43,1,4,110,211,0,0,161,0,0,1,0,7,absent
64,0,4,180,325,0,0,154,1,0,1,0,3,absent
50,0,4,110,254,0,2,159,0,0,1,0,3,absent
53,1,3,130,197,1,2,152,0,1.2,3,0,3,absent
45,0,4,138,236,0,2,152,1,0.2,2,0,3,absent
65,1,1,138,282,1,2,174,0,1.4,2,1,3,present
69,1,1,160,234,1,2,131,0,0.1,2,1,3,absent
69,1,3,140,254,0,2,146,0,2,2,3,7,present
67,1,4,100,299,0,2,125,1,0.9,2,2,3,present
68,0,3,120,211,0,2,115,0,1.5,2,0,3,absent
34,1,1,118,182,0,2,174,0,0,1,0,3,absent
62,0,4,138,294,1,0,106,0,1.9,2,3,3,present
51,1,4,140,298,0,0,122,1,4.2,2,3,7,present
46,1,3,150,231,0,0,147,0,3.6,2,0,3,present
67,1,4,125,254,1,0,163,0,0.2,2,2,7,present
50,1,3,129,196,0,0,163,0,0,1,0,3,absent
42,1,3,120,240,1,0,194,0,0.8,3,0,7,absent
56,0,4,134,409,0,2,150,1,1.9,2,2,7,present
41,1,4,110,172,0,2,158,0,0,1,0,7,present
42,0,4,102,265,0,2,122,0,0.6,2,0,3,absent
53,1,3,130,246,1,2,173,0,0,1,3,3,absent
43,1,3,130,315,0,0,162,0,1.9,1,1,3,absent
56,1,4,132,184,0,2,105,1,2.1,2,1,6,present
52,1,4,108,233,1,0,147,0,0.1,1,3,7,absent
62,0,4,140,394,0,2,157,0,1.2,2,0,3,absent
70,1,3,160,269,0,0,112,1,2.9,2,1,7,present
54,1,4,140,239,0,0,160,0,1.2,1,0,3,absent
70,1,4,145,174,0,0,125,1,2.6,3,0,7,present
54,1,2,108,309,0,0,156,0,0,1,0,7,absent
35,1,4,126,282,0,2,156,1,0,1,0,7,present
48,1,3,124,255,1,0,175,0,0,1,2,3,absent
55,0,2,135,250,0,2,161,0,1.4,2,0,3,absent
58,0,4,100,248,0,2,122,0,1,2,0,3,absent
54,0,3,110,214,0,0,158,0,1.6,2,0,3,absent
69,0,1,140,239,0,0,151,0,1.8,1,2,3,absent
77,1,4,125,304,0,2,162,1,0,1,3,3,present
68,1,3,118,277,0,0,151,0,1,1,1,7,absent
58,1,4,125,300,0,2,171,0,0,1,2,7,present
60,1,4,125,258,0,2,141,1,2.8,2,1,7,present
51,1,4,140,299,0,0,173,1,1.6,1,0,7,present
55,1,4,160,289,0,2,145,1,0.8,2,1,7,present
52,1,1,152,298,1,0,178,0,1.2,2,0,7,absent
60,0,3,102,318,0,0,160,0,0,1,1,3,absent
58,1,3,105,240,0,2,154,1,0.6,2,0,7,absent
64,1,3,125,309,0,0,131,1,1.8,2,0,7,present
37,1,3,130,250,0,0,187,0,3.5,3,0,3,absent
59,1,1,170,288,0,2,159,0,0.2,2,0,7,present
51,1,3,125,245,1,2,166,0,2.4,2,0,3,absent
43,0,3,122,213,0,0,165,0,0.2,2,0,3,absent
58,1,4,128,216,0,2,131,1,2.2,2,3,7,present
29,1,2,130,204,0,2,202,0,0,1,0,3,absent
41,0,2,130,204,0,2,172,0,1.4,1,0,3,absent
63,0,3,135,252,0,2,172,0,0,1,0,3,absent
51,1,3,94,227,0,0,154,1,0,1,1,7,absent
54,1,3,120,258,0,2,147,0,0.4,2,0,7,absent
44,1,2,120,220,0,0,170,0,0,1,0,3,absent
54,1,4,110,239,0,0,126,1,2.8,2,1,7,present
65,1,4,135,254,0,2,127,0,2.8,2,1,7,present
57,1,3,150,168,0,0,174,0,1.6,1,0,3,absent
63,1,4,130,330,1,2,132,1,1.8,1,3,7,present
35,0,4,138,183,0,0,182,0,1.4,1,0,3,absent
41,1,2,135,203,0,0,132,0,0,2,0,6,absent
62,0,3,130,263,0,0,97,0,1.2,2,1,7,present
43,0,4,132,341,1,2,136,1,3,2,0,7,present
58,0,1,150,283,1,2,162,0,1,1,0,3,absent
52,1,1,118,186,0,2,190,0,0,2,0,6,absent
61,0,4,145,307,0,2,146,1,1,2,0,7,present
39,1,4,118,219,0,0,140,0,1.2,2,0,7,present
45,1,4,115,260,0,2,185,0,0,1,0,3,absent
52,1,4,128,255,0,0,161,1,0,1,1,7,present
62,1,3,130,231,0,0,146,0,1.8,2,3,7,absent
62,0,4,160,164,0,2,145,0,6.2,3,3,7,present
53,0,4,138,234,0,2,160,0,0,1,0,3,absent
43,1,4,120,177,0,2,120,1,2.5,2,0,7,present
47,1,3,138,257,0,2,156,0,0,1,0,3,absent
52,1,2,120,325,0,0,172,0,0.2,1,0,3,absent
68,1,3,180,274,1,2,150,1,1.6,2,0,7,present
39,1,3,140,321,0,2,182,0,0,1,0,3,absent
53,0,4,130,264,0,2,143,0,0.4,2,0,3,absent
62,0,4,140,268,0,2,160,0,3.6,3,2,3,present
51,0,3,140,308,0,2,142,0,1.5,1,1,3,absent
60,1,4,130,253,0,0,144,1,1.4,1,1,7,present
65,1,4,110,248,0,2,158,0,0.6,1,2,6,present
65,0,3,155,269,0,0,148,0,0.8,1,0,3,absent
60,1,3,140,185,0,2,155,0,3,2,0,3,present
60,1,4,145,282,0,2,142,1,2.8,2,2,7,present
54,1,4,120,188,0,0,113,0,1.4,2,1,7,present
44,1,2,130,219,0,2,188,0,0,1,0,3,absent
44,1,4,112,290,0,2,153,0,0,1,1,3,present
51,1,3,110,175,0,0,123,0,0.6,1,0,3,absent
59,1,3,150,212,1,0,157,0,1.6,1,0,3,absent
71,0,2,160,302,0,0,162,0,0.4,1,2,3,absent
61,1,3,150,243,1,0,137,1,1,2,0,3,absent
55,1,4,132,353,0,0,132,1,1.2,2,1,7,present
64,1,3,140,335,0,0,158,0,0,1,0,3,present
43,1,4,150,247,0,0,171,0,1.5,1,0,3,absent
58,0,3,120,340,0,0,172,0,0,1,0,3,absent
60,1,4,130,206,0,2,132,1,2.4,2,2,7,present
58,1,2,120,284,0,2,160,0,1.8,2,0,3,present
49,1,2,130,266,0,0,171,0,0.6,1,0,3,absent
48,1,2,110,229,0,0,168,0,1,3,0,7,present
52,1,3,172,199,1,0,162,0,0.5,1,0,7,absent
44,1,2,120,263,0,0,173,0,0,1,0,7,absent
56,0,2,140,294,0,2,153,0,1.3,2,0,3,absent
57,1,4,140,192,0,0,148,0,0.4,2,0,6,absent
67,1,4,160,286,0,2,108,1,1.5,2,3,3,present
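Tying the new data file back to the states convention introduced above: the comment block marks attributes 1, 4, 5, 8, 10 and 12 as real (the @attribute lines declare everything as real), so under the proposal only those columns would arrive with an empty states vector and be discretized, while the binary, ordered and nominal ones could be passed in with their states already listed. Below is a hypothetical way a caller might build such a map for heart-statlog; RawDatasets may populate it differently, this is only an illustration of the convention.

#include <map>
#include <string>
#include <vector>

// Hypothetical states map for heart-statlog under the empty-vector-means-numeric convention.
std::map<std::string, std::vector<int>> make_heart_statlog_states()
{
    std::map<std::string, std::vector<int>> states;
    // Real-valued attributes (1, 4, 5, 8, 10, 12 in the header): leave empty so they get discretized.
    for (const auto& name : { "age", "resting_blood_pressure", "serum_cholestoral",
                              "maximum_heart_rate_achieved", "oldpeak", "number_of_major_vessels" })
        states[name] = {};
    // Binary attributes (2, 6, 9): two states.
    for (const auto& name : { "sex", "fasting_blood_sugar", "exercise_induced_angina" })
        states[name] = { 0, 1 };
    states["chest"] = { 1, 2, 3, 4 };                        // nominal, 4 values
    states["resting_electrocardiographic_results"] = { 0, 1, 2 };
    states["slope"] = { 1, 2, 3 };                           // ordered
    states["thal"] = { 3, 6, 7 };                            // nominal: normal / fixed / reversable
    return states;
}

int main()
{
    auto states = make_heart_statlog_states();
    return states.size() == 13 ? 0 : 1;                     // 13 feature entries expected
}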