Implement predict and predict_proba

Add samples and add parameters to main
Ricardo Montañana Gómez 2023-07-01 14:45:44 +02:00
parent 79e7912ab3
commit 23f0b0f55c
Signed by: rmontanana
GPG Key ID: 46064262FD9A7ADE
13 changed files with 34538 additions and 43 deletions
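
The commit message names two new classifier methods, predict and predict_proba. A minimal, self-contained sketch of the usual relationship between them (predict as the argmax of the per-class probabilities from predict_proba) follows; the DummyClassifier type and its hard-coded probabilities are illustrative assumptions, not the library's actual API.

// Minimal, self-contained sketch of the predict / predict_proba pair named in
// the commit message: predict() picks the argmax of the per-class probabilities
// returned by predict_proba(). DummyClassifier and its fixed probabilities are
// illustrative stand-ins, not the library's actual API.
#include <algorithm>
#include <cstddef>
#include <iostream>
#include <vector>

struct DummyClassifier {
    // One probability vector per sample; a real model would compute these from
    // the learned network, here they are fixed for illustration (3 classes).
    std::vector<std::vector<double>> predict_proba(const std::vector<std::vector<double>>& X) const {
        std::vector<std::vector<double>> proba;
        for (std::size_t i = 0; i < X.size(); ++i) {
            proba.push_back({ 0.2, 0.5, 0.3 });
        }
        return proba;
    }
    // predict() returns, for each sample, the index of the most probable class.
    std::vector<int> predict(const std::vector<std::vector<double>>& X) const {
        std::vector<int> labels;
        for (const auto& p : predict_proba(X)) {
            labels.push_back(static_cast<int>(std::max_element(p.begin(), p.end()) - p.begin()));
        }
        return labels;
    }
};

int main() {
    DummyClassifier clf;
    std::vector<std::vector<double>> X = { { 5.1, 3.5, 1.4, 0.2 } };  // one iris-like sample
    std::cout << "predicted class index: " << clf.predict(X)[0] << std::endl;
    return 0;
}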

.vscode/launch.json vendored (5 lines changed)

@@ -6,7 +6,10 @@
"request": "launch",
"name": "bayesnet",
"program": "${workspaceFolder}/build/sample/main",
"args": [],
"args": [
"-f",
"iris"
],
"cwd": "${workspaceFolder}",
"preLaunchTask": "CMake: build"
},
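
The new launch arguments pass a dataset name to build/sample/main via a -f flag. Below is a minimal sketch of how such a flag could be consumed, assuming plain argv scanning and an ".arff" file layout; the real sample may handle arguments differently.

// Hedged sketch: consuming the "-f <dataset>" argument that the updated
// launch.json passes to build/sample/main. Plain argv scanning and the
// "data/<name>.arff" path are assumptions, not the sample's actual behavior.
#include <iostream>
#include <string>

int main(int argc, char* argv[]) {
    std::string fileName = "iris";  // default dataset name
    for (int i = 1; i < argc - 1; ++i) {
        if (std::string(argv[i]) == "-f") {
            fileName = argv[i + 1];
        }
    }
    std::cout << "Loading dataset: data/" << fileName << ".arff" << std::endl;
    return 0;
}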

data/diabetes.arff Executable file (863 lines changed)

@@ -0,0 +1,863 @@
% 1. Title: Pima Indians Diabetes Database
%
% 2. Sources:
% (a) Original owners: National Institute of Diabetes and Digestive and
% Kidney Diseases
% (b) Donor of database: Vincent Sigillito (vgs@aplcen.apl.jhu.edu)
% Research Center, RMI Group Leader
% Applied Physics Laboratory
% The Johns Hopkins University
% Johns Hopkins Road
% Laurel, MD 20707
% (301) 953-6231
% (c) Date received: 9 May 1990
%
% 3. Past Usage:
% 1. Smith,~J.~W., Everhart,~J.~E., Dickson,~W.~C., Knowler,~W.~C., \&
% Johannes,~R.~S. (1988). Using the ADAP learning algorithm to forecast
% the onset of diabetes mellitus. In {\it Proceedings of the Symposium
% on Computer Applications and Medical Care} (pp. 261--265). IEEE
% Computer Society Press.
%
% The diagnostic, binary-valued variable investigated is whether the
% patient shows signs of diabetes according to World Health Organization
% criteria (i.e., if the 2 hour post-load plasma glucose was at least
% 200 mg/dl at any survey examination or if found during routine medical
% care). The population lives near Phoenix, Arizona, USA.
%
% Results: Their ADAP algorithm makes a real-valued prediction between
% 0 and 1. This was transformed into a binary decision using a cutoff of
% 0.448. Using 576 training instances, the sensitivity and specificity
%    of their algorithm were 76% on the remaining 192 instances.
%
% 4. Relevant Information:
% Several constraints were placed on the selection of these instances from
% a larger database. In particular, all patients here are females at
% least 21 years old of Pima Indian heritage. ADAP is an adaptive learning
% routine that generates and executes digital analogs of perceptron-like
% devices. It is a unique algorithm; see the paper for details.
%
% 5. Number of Instances: 768
%
% 6. Number of Attributes: 8 plus class
%
% 7. For Each Attribute: (all numeric-valued)
% 1. Number of times pregnant
%    2. Plasma glucose concentration at 2 hours in an oral glucose tolerance test
% 3. Diastolic blood pressure (mm Hg)
% 4. Triceps skin fold thickness (mm)
% 5. 2-Hour serum insulin (mu U/ml)
% 6. Body mass index (weight in kg/(height in m)^2)
% 7. Diabetes pedigree function
% 8. Age (years)
% 9. Class variable (0 or 1)
%
% 8. Missing Attribute Values: None
%
% 9. Class Distribution: (class value 1 is interpreted as "tested positive for
% diabetes")
%
% Class Value Number of instances
% 0 500
% 1 268
%
% 10. Brief statistical analysis:
%
% Attribute number: Mean: Standard Deviation:
% 1. 3.8 3.4
% 2. 120.9 32.0
% 3. 69.1 19.4
% 4. 20.5 16.0
% 5. 79.8 115.2
% 6. 32.0 7.9
% 7. 0.5 0.3
% 8. 33.2 11.8
%
%
%
%
%
%
% Relabeled values in attribute 'class'
% From: 0 To: tested_negative
% From: 1 To: tested_positive
%
@relation pima_diabetes
@attribute 'preg' real
@attribute 'plas' real
@attribute 'pres' real
@attribute 'skin' real
@attribute 'insu' real
@attribute 'mass' real
@attribute 'pedi' real
@attribute 'age' real
@attribute 'class' { tested_negative, tested_positive}
@data
6,148,72,35,0,33.6,0.627,50,tested_positive
1,85,66,29,0,26.6,0.351,31,tested_negative
8,183,64,0,0,23.3,0.672,32,tested_positive
1,89,66,23,94,28.1,0.167,21,tested_negative
0,137,40,35,168,43.1,2.288,33,tested_positive
5,116,74,0,0,25.6,0.201,30,tested_negative
3,78,50,32,88,31,0.248,26,tested_positive
10,115,0,0,0,35.3,0.134,29,tested_negative
2,197,70,45,543,30.5,0.158,53,tested_positive
8,125,96,0,0,0,0.232,54,tested_positive
4,110,92,0,0,37.6,0.191,30,tested_negative
10,168,74,0,0,38,0.537,34,tested_positive
10,139,80,0,0,27.1,1.441,57,tested_negative
1,189,60,23,846,30.1,0.398,59,tested_positive
5,166,72,19,175,25.8,0.587,51,tested_positive
7,100,0,0,0,30,0.484,32,tested_positive
0,118,84,47,230,45.8,0.551,31,tested_positive
7,107,74,0,0,29.6,0.254,31,tested_positive
1,103,30,38,83,43.3,0.183,33,tested_negative
1,115,70,30,96,34.6,0.529,32,tested_positive
3,126,88,41,235,39.3,0.704,27,tested_negative
8,99,84,0,0,35.4,0.388,50,tested_negative
7,196,90,0,0,39.8,0.451,41,tested_positive
9,119,80,35,0,29,0.263,29,tested_positive
11,143,94,33,146,36.6,0.254,51,tested_positive
10,125,70,26,115,31.1,0.205,41,tested_positive
7,147,76,0,0,39.4,0.257,43,tested_positive
1,97,66,15,140,23.2,0.487,22,tested_negative
13,145,82,19,110,22.2,0.245,57,tested_negative
5,117,92,0,0,34.1,0.337,38,tested_negative
5,109,75,26,0,36,0.546,60,tested_negative
3,158,76,36,245,31.6,0.851,28,tested_positive
3,88,58,11,54,24.8,0.267,22,tested_negative
6,92,92,0,0,19.9,0.188,28,tested_negative
10,122,78,31,0,27.6,0.512,45,tested_negative
4,103,60,33,192,24,0.966,33,tested_negative
11,138,76,0,0,33.2,0.42,35,tested_negative
9,102,76,37,0,32.9,0.665,46,tested_positive
2,90,68,42,0,38.2,0.503,27,tested_positive
4,111,72,47,207,37.1,1.39,56,tested_positive
3,180,64,25,70,34,0.271,26,tested_negative
7,133,84,0,0,40.2,0.696,37,tested_negative
7,106,92,18,0,22.7,0.235,48,tested_negative
9,171,110,24,240,45.4,0.721,54,tested_positive
7,159,64,0,0,27.4,0.294,40,tested_negative
0,180,66,39,0,42,1.893,25,tested_positive
1,146,56,0,0,29.7,0.564,29,tested_negative
2,71,70,27,0,28,0.586,22,tested_negative
7,103,66,32,0,39.1,0.344,31,tested_positive
7,105,0,0,0,0,0.305,24,tested_negative
1,103,80,11,82,19.4,0.491,22,tested_negative
1,101,50,15,36,24.2,0.526,26,tested_negative
5,88,66,21,23,24.4,0.342,30,tested_negative
8,176,90,34,300,33.7,0.467,58,tested_positive
7,150,66,42,342,34.7,0.718,42,tested_negative
1,73,50,10,0,23,0.248,21,tested_negative
7,187,68,39,304,37.7,0.254,41,tested_positive
0,100,88,60,110,46.8,0.962,31,tested_negative
0,146,82,0,0,40.5,1.781,44,tested_negative
0,105,64,41,142,41.5,0.173,22,tested_negative
2,84,0,0,0,0,0.304,21,tested_negative
8,133,72,0,0,32.9,0.27,39,tested_positive
5,44,62,0,0,25,0.587,36,tested_negative
2,141,58,34,128,25.4,0.699,24,tested_negative
7,114,66,0,0,32.8,0.258,42,tested_positive
5,99,74,27,0,29,0.203,32,tested_negative
0,109,88,30,0,32.5,0.855,38,tested_positive
2,109,92,0,0,42.7,0.845,54,tested_negative
1,95,66,13,38,19.6,0.334,25,tested_negative
4,146,85,27,100,28.9,0.189,27,tested_negative
2,100,66,20,90,32.9,0.867,28,tested_positive
5,139,64,35,140,28.6,0.411,26,tested_negative
13,126,90,0,0,43.4,0.583,42,tested_positive
4,129,86,20,270,35.1,0.231,23,tested_negative
1,79,75,30,0,32,0.396,22,tested_negative
1,0,48,20,0,24.7,0.14,22,tested_negative
7,62,78,0,0,32.6,0.391,41,tested_negative
5,95,72,33,0,37.7,0.37,27,tested_negative
0,131,0,0,0,43.2,0.27,26,tested_positive
2,112,66,22,0,25,0.307,24,tested_negative
3,113,44,13,0,22.4,0.14,22,tested_negative
2,74,0,0,0,0,0.102,22,tested_negative
7,83,78,26,71,29.3,0.767,36,tested_negative
0,101,65,28,0,24.6,0.237,22,tested_negative
5,137,108,0,0,48.8,0.227,37,tested_positive
2,110,74,29,125,32.4,0.698,27,tested_negative
13,106,72,54,0,36.6,0.178,45,tested_negative
2,100,68,25,71,38.5,0.324,26,tested_negative
15,136,70,32,110,37.1,0.153,43,tested_positive
1,107,68,19,0,26.5,0.165,24,tested_negative
1,80,55,0,0,19.1,0.258,21,tested_negative
4,123,80,15,176,32,0.443,34,tested_negative
7,81,78,40,48,46.7,0.261,42,tested_negative
4,134,72,0,0,23.8,0.277,60,tested_positive
2,142,82,18,64,24.7,0.761,21,tested_negative
6,144,72,27,228,33.9,0.255,40,tested_negative
2,92,62,28,0,31.6,0.13,24,tested_negative
1,71,48,18,76,20.4,0.323,22,tested_negative
6,93,50,30,64,28.7,0.356,23,tested_negative
1,122,90,51,220,49.7,0.325,31,tested_positive
1,163,72,0,0,39,1.222,33,tested_positive
1,151,60,0,0,26.1,0.179,22,tested_negative
0,125,96,0,0,22.5,0.262,21,tested_negative
1,81,72,18,40,26.6,0.283,24,tested_negative
2,85,65,0,0,39.6,0.93,27,tested_negative
1,126,56,29,152,28.7,0.801,21,tested_negative
1,96,122,0,0,22.4,0.207,27,tested_negative
4,144,58,28,140,29.5,0.287,37,tested_negative
3,83,58,31,18,34.3,0.336,25,tested_negative
0,95,85,25,36,37.4,0.247,24,tested_positive
3,171,72,33,135,33.3,0.199,24,tested_positive
8,155,62,26,495,34,0.543,46,tested_positive
1,89,76,34,37,31.2,0.192,23,tested_negative
4,76,62,0,0,34,0.391,25,tested_negative
7,160,54,32,175,30.5,0.588,39,tested_positive
4,146,92,0,0,31.2,0.539,61,tested_positive
5,124,74,0,0,34,0.22,38,tested_positive
5,78,48,0,0,33.7,0.654,25,tested_negative
4,97,60,23,0,28.2,0.443,22,tested_negative
4,99,76,15,51,23.2,0.223,21,tested_negative
0,162,76,56,100,53.2,0.759,25,tested_positive
6,111,64,39,0,34.2,0.26,24,tested_negative
2,107,74,30,100,33.6,0.404,23,tested_negative
5,132,80,0,0,26.8,0.186,69,tested_negative
0,113,76,0,0,33.3,0.278,23,tested_positive
1,88,30,42,99,55,0.496,26,tested_positive
3,120,70,30,135,42.9,0.452,30,tested_negative
1,118,58,36,94,33.3,0.261,23,tested_negative
1,117,88,24,145,34.5,0.403,40,tested_positive
0,105,84,0,0,27.9,0.741,62,tested_positive
4,173,70,14,168,29.7,0.361,33,tested_positive
9,122,56,0,0,33.3,1.114,33,tested_positive
3,170,64,37,225,34.5,0.356,30,tested_positive
8,84,74,31,0,38.3,0.457,39,tested_negative
2,96,68,13,49,21.1,0.647,26,tested_negative
2,125,60,20,140,33.8,0.088,31,tested_negative
0,100,70,26,50,30.8,0.597,21,tested_negative
0,93,60,25,92,28.7,0.532,22,tested_negative
0,129,80,0,0,31.2,0.703,29,tested_negative
5,105,72,29,325,36.9,0.159,28,tested_negative
3,128,78,0,0,21.1,0.268,55,tested_negative
5,106,82,30,0,39.5,0.286,38,tested_negative
2,108,52,26,63,32.5,0.318,22,tested_negative
10,108,66,0,0,32.4,0.272,42,tested_positive
4,154,62,31,284,32.8,0.237,23,tested_negative
0,102,75,23,0,0,0.572,21,tested_negative
9,57,80,37,0,32.8,0.096,41,tested_negative
2,106,64,35,119,30.5,1.4,34,tested_negative
5,147,78,0,0,33.7,0.218,65,tested_negative
2,90,70,17,0,27.3,0.085,22,tested_negative
1,136,74,50,204,37.4,0.399,24,tested_negative
4,114,65,0,0,21.9,0.432,37,tested_negative
9,156,86,28,155,34.3,1.189,42,tested_positive
1,153,82,42,485,40.6,0.687,23,tested_negative
8,188,78,0,0,47.9,0.137,43,tested_positive
7,152,88,44,0,50,0.337,36,tested_positive
2,99,52,15,94,24.6,0.637,21,tested_negative
1,109,56,21,135,25.2,0.833,23,tested_negative
2,88,74,19,53,29,0.229,22,tested_negative
17,163,72,41,114,40.9,0.817,47,tested_positive
4,151,90,38,0,29.7,0.294,36,tested_negative
7,102,74,40,105,37.2,0.204,45,tested_negative
0,114,80,34,285,44.2,0.167,27,tested_negative
2,100,64,23,0,29.7,0.368,21,tested_negative
0,131,88,0,0,31.6,0.743,32,tested_positive
6,104,74,18,156,29.9,0.722,41,tested_positive
3,148,66,25,0,32.5,0.256,22,tested_negative
4,120,68,0,0,29.6,0.709,34,tested_negative
4,110,66,0,0,31.9,0.471,29,tested_negative
3,111,90,12,78,28.4,0.495,29,tested_negative
6,102,82,0,0,30.8,0.18,36,tested_positive
6,134,70,23,130,35.4,0.542,29,tested_positive
2,87,0,23,0,28.9,0.773,25,tested_negative
1,79,60,42,48,43.5,0.678,23,tested_negative
2,75,64,24,55,29.7,0.37,33,tested_negative
8,179,72,42,130,32.7,0.719,36,tested_positive
6,85,78,0,0,31.2,0.382,42,tested_negative
0,129,110,46,130,67.1,0.319,26,tested_positive
5,143,78,0,0,45,0.19,47,tested_negative
5,130,82,0,0,39.1,0.956,37,tested_positive
6,87,80,0,0,23.2,0.084,32,tested_negative
0,119,64,18,92,34.9,0.725,23,tested_negative
1,0,74,20,23,27.7,0.299,21,tested_negative
5,73,60,0,0,26.8,0.268,27,tested_negative
4,141,74,0,0,27.6,0.244,40,tested_negative
7,194,68,28,0,35.9,0.745,41,tested_positive
8,181,68,36,495,30.1,0.615,60,tested_positive
1,128,98,41,58,32,1.321,33,tested_positive
8,109,76,39,114,27.9,0.64,31,tested_positive
5,139,80,35,160,31.6,0.361,25,tested_positive
3,111,62,0,0,22.6,0.142,21,tested_negative
9,123,70,44,94,33.1,0.374,40,tested_negative
7,159,66,0,0,30.4,0.383,36,tested_positive
11,135,0,0,0,52.3,0.578,40,tested_positive
8,85,55,20,0,24.4,0.136,42,tested_negative
5,158,84,41,210,39.4,0.395,29,tested_positive
1,105,58,0,0,24.3,0.187,21,tested_negative
3,107,62,13,48,22.9,0.678,23,tested_positive
4,109,64,44,99,34.8,0.905,26,tested_positive
4,148,60,27,318,30.9,0.15,29,tested_positive
0,113,80,16,0,31,0.874,21,tested_negative
1,138,82,0,0,40.1,0.236,28,tested_negative
0,108,68,20,0,27.3,0.787,32,tested_negative
2,99,70,16,44,20.4,0.235,27,tested_negative
6,103,72,32,190,37.7,0.324,55,tested_negative
5,111,72,28,0,23.9,0.407,27,tested_negative
8,196,76,29,280,37.5,0.605,57,tested_positive
5,162,104,0,0,37.7,0.151,52,tested_positive
1,96,64,27,87,33.2,0.289,21,tested_negative
7,184,84,33,0,35.5,0.355,41,tested_positive
2,81,60,22,0,27.7,0.29,25,tested_negative
0,147,85,54,0,42.8,0.375,24,tested_negative
7,179,95,31,0,34.2,0.164,60,tested_negative
0,140,65,26,130,42.6,0.431,24,tested_positive
9,112,82,32,175,34.2,0.26,36,tested_positive
12,151,70,40,271,41.8,0.742,38,tested_positive
5,109,62,41,129,35.8,0.514,25,tested_positive
6,125,68,30,120,30,0.464,32,tested_negative
5,85,74,22,0,29,1.224,32,tested_positive
5,112,66,0,0,37.8,0.261,41,tested_positive
0,177,60,29,478,34.6,1.072,21,tested_positive
2,158,90,0,0,31.6,0.805,66,tested_positive
7,119,0,0,0,25.2,0.209,37,tested_negative
7,142,60,33,190,28.8,0.687,61,tested_negative
1,100,66,15,56,23.6,0.666,26,tested_negative
1,87,78,27,32,34.6,0.101,22,tested_negative
0,101,76,0,0,35.7,0.198,26,tested_negative
3,162,52,38,0,37.2,0.652,24,tested_positive
4,197,70,39,744,36.7,2.329,31,tested_negative
0,117,80,31,53,45.2,0.089,24,tested_negative
4,142,86,0,0,44,0.645,22,tested_positive
6,134,80,37,370,46.2,0.238,46,tested_positive
1,79,80,25,37,25.4,0.583,22,tested_negative
4,122,68,0,0,35,0.394,29,tested_negative
3,74,68,28,45,29.7,0.293,23,tested_negative
4,171,72,0,0,43.6,0.479,26,tested_positive
7,181,84,21,192,35.9,0.586,51,tested_positive
0,179,90,27,0,44.1,0.686,23,tested_positive
9,164,84,21,0,30.8,0.831,32,tested_positive
0,104,76,0,0,18.4,0.582,27,tested_negative
1,91,64,24,0,29.2,0.192,21,tested_negative
4,91,70,32,88,33.1,0.446,22,tested_negative
3,139,54,0,0,25.6,0.402,22,tested_positive
6,119,50,22,176,27.1,1.318,33,tested_positive
2,146,76,35,194,38.2,0.329,29,tested_negative
9,184,85,15,0,30,1.213,49,tested_positive
10,122,68,0,0,31.2,0.258,41,tested_negative
0,165,90,33,680,52.3,0.427,23,tested_negative
9,124,70,33,402,35.4,0.282,34,tested_negative
1,111,86,19,0,30.1,0.143,23,tested_negative
9,106,52,0,0,31.2,0.38,42,tested_negative
2,129,84,0,0,28,0.284,27,tested_negative
2,90,80,14,55,24.4,0.249,24,tested_negative
0,86,68,32,0,35.8,0.238,25,tested_negative
12,92,62,7,258,27.6,0.926,44,tested_positive
1,113,64,35,0,33.6,0.543,21,tested_positive
3,111,56,39,0,30.1,0.557,30,tested_negative
2,114,68,22,0,28.7,0.092,25,tested_negative
1,193,50,16,375,25.9,0.655,24,tested_negative
11,155,76,28,150,33.3,1.353,51,tested_positive
3,191,68,15,130,30.9,0.299,34,tested_negative
3,141,0,0,0,30,0.761,27,tested_positive
4,95,70,32,0,32.1,0.612,24,tested_negative
3,142,80,15,0,32.4,0.2,63,tested_negative
4,123,62,0,0,32,0.226,35,tested_positive
5,96,74,18,67,33.6,0.997,43,tested_negative
0,138,0,0,0,36.3,0.933,25,tested_positive
2,128,64,42,0,40,1.101,24,tested_negative
0,102,52,0,0,25.1,0.078,21,tested_negative
2,146,0,0,0,27.5,0.24,28,tested_positive
10,101,86,37,0,45.6,1.136,38,tested_positive
2,108,62,32,56,25.2,0.128,21,tested_negative
3,122,78,0,0,23,0.254,40,tested_negative
1,71,78,50,45,33.2,0.422,21,tested_negative
13,106,70,0,0,34.2,0.251,52,tested_negative
2,100,70,52,57,40.5,0.677,25,tested_negative
7,106,60,24,0,26.5,0.296,29,tested_positive
0,104,64,23,116,27.8,0.454,23,tested_negative
5,114,74,0,0,24.9,0.744,57,tested_negative
2,108,62,10,278,25.3,0.881,22,tested_negative
0,146,70,0,0,37.9,0.334,28,tested_positive
10,129,76,28,122,35.9,0.28,39,tested_negative
7,133,88,15,155,32.4,0.262,37,tested_negative
7,161,86,0,0,30.4,0.165,47,tested_positive
2,108,80,0,0,27,0.259,52,tested_positive
7,136,74,26,135,26,0.647,51,tested_negative
5,155,84,44,545,38.7,0.619,34,tested_negative
1,119,86,39,220,45.6,0.808,29,tested_positive
4,96,56,17,49,20.8,0.34,26,tested_negative
5,108,72,43,75,36.1,0.263,33,tested_negative
0,78,88,29,40,36.9,0.434,21,tested_negative
0,107,62,30,74,36.6,0.757,25,tested_positive
2,128,78,37,182,43.3,1.224,31,tested_positive
1,128,48,45,194,40.5,0.613,24,tested_positive
0,161,50,0,0,21.9,0.254,65,tested_negative
6,151,62,31,120,35.5,0.692,28,tested_negative
2,146,70,38,360,28,0.337,29,tested_positive
0,126,84,29,215,30.7,0.52,24,tested_negative
14,100,78,25,184,36.6,0.412,46,tested_positive
8,112,72,0,0,23.6,0.84,58,tested_negative
0,167,0,0,0,32.3,0.839,30,tested_positive
2,144,58,33,135,31.6,0.422,25,tested_positive
5,77,82,41,42,35.8,0.156,35,tested_negative
5,115,98,0,0,52.9,0.209,28,tested_positive
3,150,76,0,0,21,0.207,37,tested_negative
2,120,76,37,105,39.7,0.215,29,tested_negative
10,161,68,23,132,25.5,0.326,47,tested_positive
0,137,68,14,148,24.8,0.143,21,tested_negative
0,128,68,19,180,30.5,1.391,25,tested_positive
2,124,68,28,205,32.9,0.875,30,tested_positive
6,80,66,30,0,26.2,0.313,41,tested_negative
0,106,70,37,148,39.4,0.605,22,tested_negative
2,155,74,17,96,26.6,0.433,27,tested_positive
3,113,50,10,85,29.5,0.626,25,tested_negative
7,109,80,31,0,35.9,1.127,43,tested_positive
2,112,68,22,94,34.1,0.315,26,tested_negative
3,99,80,11,64,19.3,0.284,30,tested_negative
3,182,74,0,0,30.5,0.345,29,tested_positive
3,115,66,39,140,38.1,0.15,28,tested_negative
6,194,78,0,0,23.5,0.129,59,tested_positive
4,129,60,12,231,27.5,0.527,31,tested_negative
3,112,74,30,0,31.6,0.197,25,tested_positive
0,124,70,20,0,27.4,0.254,36,tested_positive
13,152,90,33,29,26.8,0.731,43,tested_positive
2,112,75,32,0,35.7,0.148,21,tested_negative
1,157,72,21,168,25.6,0.123,24,tested_negative
1,122,64,32,156,35.1,0.692,30,tested_positive
10,179,70,0,0,35.1,0.2,37,tested_negative
2,102,86,36,120,45.5,0.127,23,tested_positive
6,105,70,32,68,30.8,0.122,37,tested_negative
8,118,72,19,0,23.1,1.476,46,tested_negative
2,87,58,16,52,32.7,0.166,25,tested_negative
1,180,0,0,0,43.3,0.282,41,tested_positive
12,106,80,0,0,23.6,0.137,44,tested_negative
1,95,60,18,58,23.9,0.26,22,tested_negative
0,165,76,43,255,47.9,0.259,26,tested_negative
0,117,0,0,0,33.8,0.932,44,tested_negative
5,115,76,0,0,31.2,0.343,44,tested_positive
9,152,78,34,171,34.2,0.893,33,tested_positive
7,178,84,0,0,39.9,0.331,41,tested_positive
1,130,70,13,105,25.9,0.472,22,tested_negative
1,95,74,21,73,25.9,0.673,36,tested_negative
1,0,68,35,0,32,0.389,22,tested_negative
5,122,86,0,0,34.7,0.29,33,tested_negative
8,95,72,0,0,36.8,0.485,57,tested_negative
8,126,88,36,108,38.5,0.349,49,tested_negative
1,139,46,19,83,28.7,0.654,22,tested_negative
3,116,0,0,0,23.5,0.187,23,tested_negative
3,99,62,19,74,21.8,0.279,26,tested_negative
5,0,80,32,0,41,0.346,37,tested_positive
4,92,80,0,0,42.2,0.237,29,tested_negative
4,137,84,0,0,31.2,0.252,30,tested_negative
3,61,82,28,0,34.4,0.243,46,tested_negative
1,90,62,12,43,27.2,0.58,24,tested_negative
3,90,78,0,0,42.7,0.559,21,tested_negative
9,165,88,0,0,30.4,0.302,49,tested_positive
1,125,50,40,167,33.3,0.962,28,tested_positive
13,129,0,30,0,39.9,0.569,44,tested_positive
12,88,74,40,54,35.3,0.378,48,tested_negative
1,196,76,36,249,36.5,0.875,29,tested_positive
5,189,64,33,325,31.2,0.583,29,tested_positive
5,158,70,0,0,29.8,0.207,63,tested_negative
5,103,108,37,0,39.2,0.305,65,tested_negative
4,146,78,0,0,38.5,0.52,67,tested_positive
4,147,74,25,293,34.9,0.385,30,tested_negative
5,99,54,28,83,34,0.499,30,tested_negative
6,124,72,0,0,27.6,0.368,29,tested_positive
0,101,64,17,0,21,0.252,21,tested_negative
3,81,86,16,66,27.5,0.306,22,tested_negative
1,133,102,28,140,32.8,0.234,45,tested_positive
3,173,82,48,465,38.4,2.137,25,tested_positive
0,118,64,23,89,0,1.731,21,tested_negative
0,84,64,22,66,35.8,0.545,21,tested_negative
2,105,58,40,94,34.9,0.225,25,tested_negative
2,122,52,43,158,36.2,0.816,28,tested_negative
12,140,82,43,325,39.2,0.528,58,tested_positive
0,98,82,15,84,25.2,0.299,22,tested_negative
1,87,60,37,75,37.2,0.509,22,tested_negative
4,156,75,0,0,48.3,0.238,32,tested_positive
0,93,100,39,72,43.4,1.021,35,tested_negative
1,107,72,30,82,30.8,0.821,24,tested_negative
0,105,68,22,0,20,0.236,22,tested_negative
1,109,60,8,182,25.4,0.947,21,tested_negative
1,90,62,18,59,25.1,1.268,25,tested_negative
1,125,70,24,110,24.3,0.221,25,tested_negative
1,119,54,13,50,22.3,0.205,24,tested_negative
5,116,74,29,0,32.3,0.66,35,tested_positive
8,105,100,36,0,43.3,0.239,45,tested_positive
5,144,82,26,285,32,0.452,58,tested_positive
3,100,68,23,81,31.6,0.949,28,tested_negative
1,100,66,29,196,32,0.444,42,tested_negative
5,166,76,0,0,45.7,0.34,27,tested_positive
1,131,64,14,415,23.7,0.389,21,tested_negative
4,116,72,12,87,22.1,0.463,37,tested_negative
4,158,78,0,0,32.9,0.803,31,tested_positive
2,127,58,24,275,27.7,1.6,25,tested_negative
3,96,56,34,115,24.7,0.944,39,tested_negative
0,131,66,40,0,34.3,0.196,22,tested_positive
3,82,70,0,0,21.1,0.389,25,tested_negative
3,193,70,31,0,34.9,0.241,25,tested_positive
4,95,64,0,0,32,0.161,31,tested_positive
6,137,61,0,0,24.2,0.151,55,tested_negative
5,136,84,41,88,35,0.286,35,tested_positive
9,72,78,25,0,31.6,0.28,38,tested_negative
5,168,64,0,0,32.9,0.135,41,tested_positive
2,123,48,32,165,42.1,0.52,26,tested_negative
4,115,72,0,0,28.9,0.376,46,tested_positive
0,101,62,0,0,21.9,0.336,25,tested_negative
8,197,74,0,0,25.9,1.191,39,tested_positive
1,172,68,49,579,42.4,0.702,28,tested_positive
6,102,90,39,0,35.7,0.674,28,tested_negative
1,112,72,30,176,34.4,0.528,25,tested_negative
1,143,84,23,310,42.4,1.076,22,tested_negative
1,143,74,22,61,26.2,0.256,21,tested_negative
0,138,60,35,167,34.6,0.534,21,tested_positive
3,173,84,33,474,35.7,0.258,22,tested_positive
1,97,68,21,0,27.2,1.095,22,tested_negative
4,144,82,32,0,38.5,0.554,37,tested_positive
1,83,68,0,0,18.2,0.624,27,tested_negative
3,129,64,29,115,26.4,0.219,28,tested_positive
1,119,88,41,170,45.3,0.507,26,tested_negative
2,94,68,18,76,26,0.561,21,tested_negative
0,102,64,46,78,40.6,0.496,21,tested_negative
2,115,64,22,0,30.8,0.421,21,tested_negative
8,151,78,32,210,42.9,0.516,36,tested_positive
4,184,78,39,277,37,0.264,31,tested_positive
0,94,0,0,0,0,0.256,25,tested_negative
1,181,64,30,180,34.1,0.328,38,tested_positive
0,135,94,46,145,40.6,0.284,26,tested_negative
1,95,82,25,180,35,0.233,43,tested_positive
2,99,0,0,0,22.2,0.108,23,tested_negative
3,89,74,16,85,30.4,0.551,38,tested_negative
1,80,74,11,60,30,0.527,22,tested_negative
2,139,75,0,0,25.6,0.167,29,tested_negative
1,90,68,8,0,24.5,1.138,36,tested_negative
0,141,0,0,0,42.4,0.205,29,tested_positive
12,140,85,33,0,37.4,0.244,41,tested_negative
5,147,75,0,0,29.9,0.434,28,tested_negative
1,97,70,15,0,18.2,0.147,21,tested_negative
6,107,88,0,0,36.8,0.727,31,tested_negative
0,189,104,25,0,34.3,0.435,41,tested_positive
2,83,66,23,50,32.2,0.497,22,tested_negative
4,117,64,27,120,33.2,0.23,24,tested_negative
8,108,70,0,0,30.5,0.955,33,tested_positive
4,117,62,12,0,29.7,0.38,30,tested_positive
0,180,78,63,14,59.4,2.42,25,tested_positive
1,100,72,12,70,25.3,0.658,28,tested_negative
0,95,80,45,92,36.5,0.33,26,tested_negative
0,104,64,37,64,33.6,0.51,22,tested_positive
0,120,74,18,63,30.5,0.285,26,tested_negative
1,82,64,13,95,21.2,0.415,23,tested_negative
2,134,70,0,0,28.9,0.542,23,tested_positive
0,91,68,32,210,39.9,0.381,25,tested_negative
2,119,0,0,0,19.6,0.832,72,tested_negative
2,100,54,28,105,37.8,0.498,24,tested_negative
14,175,62,30,0,33.6,0.212,38,tested_positive
1,135,54,0,0,26.7,0.687,62,tested_negative
5,86,68,28,71,30.2,0.364,24,tested_negative
10,148,84,48,237,37.6,1.001,51,tested_positive
9,134,74,33,60,25.9,0.46,81,tested_negative
9,120,72,22,56,20.8,0.733,48,tested_negative
1,71,62,0,0,21.8,0.416,26,tested_negative
8,74,70,40,49,35.3,0.705,39,tested_negative
5,88,78,30,0,27.6,0.258,37,tested_negative
10,115,98,0,0,24,1.022,34,tested_negative
0,124,56,13,105,21.8,0.452,21,tested_negative
0,74,52,10,36,27.8,0.269,22,tested_negative
0,97,64,36,100,36.8,0.6,25,tested_negative
8,120,0,0,0,30,0.183,38,tested_positive
6,154,78,41,140,46.1,0.571,27,tested_negative
1,144,82,40,0,41.3,0.607,28,tested_negative
0,137,70,38,0,33.2,0.17,22,tested_negative
0,119,66,27,0,38.8,0.259,22,tested_negative
7,136,90,0,0,29.9,0.21,50,tested_negative
4,114,64,0,0,28.9,0.126,24,tested_negative
0,137,84,27,0,27.3,0.231,59,tested_negative
2,105,80,45,191,33.7,0.711,29,tested_positive
7,114,76,17,110,23.8,0.466,31,tested_negative
8,126,74,38,75,25.9,0.162,39,tested_negative
4,132,86,31,0,28,0.419,63,tested_negative
3,158,70,30,328,35.5,0.344,35,tested_positive
0,123,88,37,0,35.2,0.197,29,tested_negative
4,85,58,22,49,27.8,0.306,28,tested_negative
0,84,82,31,125,38.2,0.233,23,tested_negative
0,145,0,0,0,44.2,0.63,31,tested_positive
0,135,68,42,250,42.3,0.365,24,tested_positive
1,139,62,41,480,40.7,0.536,21,tested_negative
0,173,78,32,265,46.5,1.159,58,tested_negative
4,99,72,17,0,25.6,0.294,28,tested_negative
8,194,80,0,0,26.1,0.551,67,tested_negative
2,83,65,28,66,36.8,0.629,24,tested_negative
2,89,90,30,0,33.5,0.292,42,tested_negative
4,99,68,38,0,32.8,0.145,33,tested_negative
4,125,70,18,122,28.9,1.144,45,tested_positive
3,80,0,0,0,0,0.174,22,tested_negative
6,166,74,0,0,26.6,0.304,66,tested_negative
5,110,68,0,0,26,0.292,30,tested_negative
2,81,72,15,76,30.1,0.547,25,tested_negative
7,195,70,33,145,25.1,0.163,55,tested_positive
6,154,74,32,193,29.3,0.839,39,tested_negative
2,117,90,19,71,25.2,0.313,21,tested_negative
3,84,72,32,0,37.2,0.267,28,tested_negative
6,0,68,41,0,39,0.727,41,tested_positive
7,94,64,25,79,33.3,0.738,41,tested_negative
3,96,78,39,0,37.3,0.238,40,tested_negative
10,75,82,0,0,33.3,0.263,38,tested_negative
0,180,90,26,90,36.5,0.314,35,tested_positive
1,130,60,23,170,28.6,0.692,21,tested_negative
2,84,50,23,76,30.4,0.968,21,tested_negative
8,120,78,0,0,25,0.409,64,tested_negative
12,84,72,31,0,29.7,0.297,46,tested_positive
0,139,62,17,210,22.1,0.207,21,tested_negative
9,91,68,0,0,24.2,0.2,58,tested_negative
2,91,62,0,0,27.3,0.525,22,tested_negative
3,99,54,19,86,25.6,0.154,24,tested_negative
3,163,70,18,105,31.6,0.268,28,tested_positive
9,145,88,34,165,30.3,0.771,53,tested_positive
7,125,86,0,0,37.6,0.304,51,tested_negative
13,76,60,0,0,32.8,0.18,41,tested_negative
6,129,90,7,326,19.6,0.582,60,tested_negative
2,68,70,32,66,25,0.187,25,tested_negative
3,124,80,33,130,33.2,0.305,26,tested_negative
6,114,0,0,0,0,0.189,26,tested_negative
9,130,70,0,0,34.2,0.652,45,tested_positive
3,125,58,0,0,31.6,0.151,24,tested_negative
3,87,60,18,0,21.8,0.444,21,tested_negative
1,97,64,19,82,18.2,0.299,21,tested_negative
3,116,74,15,105,26.3,0.107,24,tested_negative
0,117,66,31,188,30.8,0.493,22,tested_negative
0,111,65,0,0,24.6,0.66,31,tested_negative
2,122,60,18,106,29.8,0.717,22,tested_negative
0,107,76,0,0,45.3,0.686,24,tested_negative
1,86,66,52,65,41.3,0.917,29,tested_negative
6,91,0,0,0,29.8,0.501,31,tested_negative
1,77,56,30,56,33.3,1.251,24,tested_negative
4,132,0,0,0,32.9,0.302,23,tested_positive
0,105,90,0,0,29.6,0.197,46,tested_negative
0,57,60,0,0,21.7,0.735,67,tested_negative
0,127,80,37,210,36.3,0.804,23,tested_negative
3,129,92,49,155,36.4,0.968,32,tested_positive
8,100,74,40,215,39.4,0.661,43,tested_positive
3,128,72,25,190,32.4,0.549,27,tested_positive
10,90,85,32,0,34.9,0.825,56,tested_positive
4,84,90,23,56,39.5,0.159,25,tested_negative
1,88,78,29,76,32,0.365,29,tested_negative
8,186,90,35,225,34.5,0.423,37,tested_positive
5,187,76,27,207,43.6,1.034,53,tested_positive
4,131,68,21,166,33.1,0.16,28,tested_negative
1,164,82,43,67,32.8,0.341,50,tested_negative
4,189,110,31,0,28.5,0.68,37,tested_negative
1,116,70,28,0,27.4,0.204,21,tested_negative
3,84,68,30,106,31.9,0.591,25,tested_negative
6,114,88,0,0,27.8,0.247,66,tested_negative
1,88,62,24,44,29.9,0.422,23,tested_negative
1,84,64,23,115,36.9,0.471,28,tested_negative
7,124,70,33,215,25.5,0.161,37,tested_negative
1,97,70,40,0,38.1,0.218,30,tested_negative
8,110,76,0,0,27.8,0.237,58,tested_negative
11,103,68,40,0,46.2,0.126,42,tested_negative
11,85,74,0,0,30.1,0.3,35,tested_negative
6,125,76,0,0,33.8,0.121,54,tested_positive
0,198,66,32,274,41.3,0.502,28,tested_positive
1,87,68,34,77,37.6,0.401,24,tested_negative
6,99,60,19,54,26.9,0.497,32,tested_negative
0,91,80,0,0,32.4,0.601,27,tested_negative
2,95,54,14,88,26.1,0.748,22,tested_negative
1,99,72,30,18,38.6,0.412,21,tested_negative
6,92,62,32,126,32,0.085,46,tested_negative
4,154,72,29,126,31.3,0.338,37,tested_negative
0,121,66,30,165,34.3,0.203,33,tested_positive
3,78,70,0,0,32.5,0.27,39,tested_negative
2,130,96,0,0,22.6,0.268,21,tested_negative
3,111,58,31,44,29.5,0.43,22,tested_negative
2,98,60,17,120,34.7,0.198,22,tested_negative
1,143,86,30,330,30.1,0.892,23,tested_negative
1,119,44,47,63,35.5,0.28,25,tested_negative
6,108,44,20,130,24,0.813,35,tested_negative
2,118,80,0,0,42.9,0.693,21,tested_positive
10,133,68,0,0,27,0.245,36,tested_negative
2,197,70,99,0,34.7,0.575,62,tested_positive
0,151,90,46,0,42.1,0.371,21,tested_positive
6,109,60,27,0,25,0.206,27,tested_negative
12,121,78,17,0,26.5,0.259,62,tested_negative
8,100,76,0,0,38.7,0.19,42,tested_negative
8,124,76,24,600,28.7,0.687,52,tested_positive
1,93,56,11,0,22.5,0.417,22,tested_negative
8,143,66,0,0,34.9,0.129,41,tested_positive
6,103,66,0,0,24.3,0.249,29,tested_negative
3,176,86,27,156,33.3,1.154,52,tested_positive
0,73,0,0,0,21.1,0.342,25,tested_negative
11,111,84,40,0,46.8,0.925,45,tested_positive
2,112,78,50,140,39.4,0.175,24,tested_negative
3,132,80,0,0,34.4,0.402,44,tested_positive
2,82,52,22,115,28.5,1.699,25,tested_negative
6,123,72,45,230,33.6,0.733,34,tested_negative
0,188,82,14,185,32,0.682,22,tested_positive
0,67,76,0,0,45.3,0.194,46,tested_negative
1,89,24,19,25,27.8,0.559,21,tested_negative
1,173,74,0,0,36.8,0.088,38,tested_positive
1,109,38,18,120,23.1,0.407,26,tested_negative
1,108,88,19,0,27.1,0.4,24,tested_negative
6,96,0,0,0,23.7,0.19,28,tested_negative
1,124,74,36,0,27.8,0.1,30,tested_negative
7,150,78,29,126,35.2,0.692,54,tested_positive
4,183,0,0,0,28.4,0.212,36,tested_positive
1,124,60,32,0,35.8,0.514,21,tested_negative
1,181,78,42,293,40,1.258,22,tested_positive
1,92,62,25,41,19.5,0.482,25,tested_negative
0,152,82,39,272,41.5,0.27,27,tested_negative
1,111,62,13,182,24,0.138,23,tested_negative
3,106,54,21,158,30.9,0.292,24,tested_negative
3,174,58,22,194,32.9,0.593,36,tested_positive
7,168,88,42,321,38.2,0.787,40,tested_positive
6,105,80,28,0,32.5,0.878,26,tested_negative
11,138,74,26,144,36.1,0.557,50,tested_positive
3,106,72,0,0,25.8,0.207,27,tested_negative
6,117,96,0,0,28.7,0.157,30,tested_negative
2,68,62,13,15,20.1,0.257,23,tested_negative
9,112,82,24,0,28.2,1.282,50,tested_positive
0,119,0,0,0,32.4,0.141,24,tested_positive
2,112,86,42,160,38.4,0.246,28,tested_negative
2,92,76,20,0,24.2,1.698,28,tested_negative
6,183,94,0,0,40.8,1.461,45,tested_negative
0,94,70,27,115,43.5,0.347,21,tested_negative
2,108,64,0,0,30.8,0.158,21,tested_negative
4,90,88,47,54,37.7,0.362,29,tested_negative
0,125,68,0,0,24.7,0.206,21,tested_negative
0,132,78,0,0,32.4,0.393,21,tested_negative
5,128,80,0,0,34.6,0.144,45,tested_negative
4,94,65,22,0,24.7,0.148,21,tested_negative
7,114,64,0,0,27.4,0.732,34,tested_positive
0,102,78,40,90,34.5,0.238,24,tested_negative
2,111,60,0,0,26.2,0.343,23,tested_negative
1,128,82,17,183,27.5,0.115,22,tested_negative
10,92,62,0,0,25.9,0.167,31,tested_negative
13,104,72,0,0,31.2,0.465,38,tested_positive
5,104,74,0,0,28.8,0.153,48,tested_negative
2,94,76,18,66,31.6,0.649,23,tested_negative
7,97,76,32,91,40.9,0.871,32,tested_positive
1,100,74,12,46,19.5,0.149,28,tested_negative
0,102,86,17,105,29.3,0.695,27,tested_negative
4,128,70,0,0,34.3,0.303,24,tested_negative
6,147,80,0,0,29.5,0.178,50,tested_positive
4,90,0,0,0,28,0.61,31,tested_negative
3,103,72,30,152,27.6,0.73,27,tested_negative
2,157,74,35,440,39.4,0.134,30,tested_negative
1,167,74,17,144,23.4,0.447,33,tested_positive
0,179,50,36,159,37.8,0.455,22,tested_positive
11,136,84,35,130,28.3,0.26,42,tested_positive
0,107,60,25,0,26.4,0.133,23,tested_negative
1,91,54,25,100,25.2,0.234,23,tested_negative
1,117,60,23,106,33.8,0.466,27,tested_negative
5,123,74,40,77,34.1,0.269,28,tested_negative
2,120,54,0,0,26.8,0.455,27,tested_negative
1,106,70,28,135,34.2,0.142,22,tested_negative
2,155,52,27,540,38.7,0.24,25,tested_positive
2,101,58,35,90,21.8,0.155,22,tested_negative
1,120,80,48,200,38.9,1.162,41,tested_negative
11,127,106,0,0,39,0.19,51,tested_negative
3,80,82,31,70,34.2,1.292,27,tested_positive
10,162,84,0,0,27.7,0.182,54,tested_negative
1,199,76,43,0,42.9,1.394,22,tested_positive
8,167,106,46,231,37.6,0.165,43,tested_positive
9,145,80,46,130,37.9,0.637,40,tested_positive
6,115,60,39,0,33.7,0.245,40,tested_positive
1,112,80,45,132,34.8,0.217,24,tested_negative
4,145,82,18,0,32.5,0.235,70,tested_positive
10,111,70,27,0,27.5,0.141,40,tested_positive
6,98,58,33,190,34,0.43,43,tested_negative
9,154,78,30,100,30.9,0.164,45,tested_negative
6,165,68,26,168,33.6,0.631,49,tested_negative
1,99,58,10,0,25.4,0.551,21,tested_negative
10,68,106,23,49,35.5,0.285,47,tested_negative
3,123,100,35,240,57.3,0.88,22,tested_negative
8,91,82,0,0,35.6,0.587,68,tested_negative
6,195,70,0,0,30.9,0.328,31,tested_positive
9,156,86,0,0,24.8,0.23,53,tested_positive
0,93,60,0,0,35.3,0.263,25,tested_negative
3,121,52,0,0,36,0.127,25,tested_positive
2,101,58,17,265,24.2,0.614,23,tested_negative
2,56,56,28,45,24.2,0.332,22,tested_negative
0,162,76,36,0,49.6,0.364,26,tested_positive
0,95,64,39,105,44.6,0.366,22,tested_negative
4,125,80,0,0,32.3,0.536,27,tested_positive
5,136,82,0,0,0,0.64,69,tested_negative
2,129,74,26,205,33.2,0.591,25,tested_negative
3,130,64,0,0,23.1,0.314,22,tested_negative
1,107,50,19,0,28.3,0.181,29,tested_negative
1,140,74,26,180,24.1,0.828,23,tested_negative
1,144,82,46,180,46.1,0.335,46,tested_positive
8,107,80,0,0,24.6,0.856,34,tested_negative
13,158,114,0,0,42.3,0.257,44,tested_positive
2,121,70,32,95,39.1,0.886,23,tested_negative
7,129,68,49,125,38.5,0.439,43,tested_positive
2,90,60,0,0,23.5,0.191,25,tested_negative
7,142,90,24,480,30.4,0.128,43,tested_positive
3,169,74,19,125,29.9,0.268,31,tested_positive
0,99,0,0,0,25,0.253,22,tested_negative
4,127,88,11,155,34.5,0.598,28,tested_negative
4,118,70,0,0,44.5,0.904,26,tested_negative
2,122,76,27,200,35.9,0.483,26,tested_negative
6,125,78,31,0,27.6,0.565,49,tested_positive
1,168,88,29,0,35,0.905,52,tested_positive
2,129,0,0,0,38.5,0.304,41,tested_negative
4,110,76,20,100,28.4,0.118,27,tested_negative
6,80,80,36,0,39.8,0.177,28,tested_negative
10,115,0,0,0,0,0.261,30,tested_positive
2,127,46,21,335,34.4,0.176,22,tested_negative
9,164,78,0,0,32.8,0.148,45,tested_positive
2,93,64,32,160,38,0.674,23,tested_positive
3,158,64,13,387,31.2,0.295,24,tested_negative
5,126,78,27,22,29.6,0.439,40,tested_negative
10,129,62,36,0,41.2,0.441,38,tested_positive
0,134,58,20,291,26.4,0.352,21,tested_negative
3,102,74,0,0,29.5,0.121,32,tested_negative
7,187,50,33,392,33.9,0.826,34,tested_positive
3,173,78,39,185,33.8,0.97,31,tested_positive
10,94,72,18,0,23.1,0.595,56,tested_negative
1,108,60,46,178,35.5,0.415,24,tested_negative
5,97,76,27,0,35.6,0.378,52,tested_positive
4,83,86,19,0,29.3,0.317,34,tested_negative
1,114,66,36,200,38.1,0.289,21,tested_negative
1,149,68,29,127,29.3,0.349,42,tested_positive
5,117,86,30,105,39.1,0.251,42,tested_negative
1,111,94,0,0,32.8,0.265,45,tested_negative
4,112,78,40,0,39.4,0.236,38,tested_negative
1,116,78,29,180,36.1,0.496,25,tested_negative
0,141,84,26,0,32.4,0.433,22,tested_negative
2,175,88,0,0,22.9,0.326,22,tested_negative
2,92,52,0,0,30.1,0.141,22,tested_negative
3,130,78,23,79,28.4,0.323,34,tested_positive
8,120,86,0,0,28.4,0.259,22,tested_positive
2,174,88,37,120,44.5,0.646,24,tested_positive
2,106,56,27,165,29,0.426,22,tested_negative
2,105,75,0,0,23.3,0.56,53,tested_negative
4,95,60,32,0,35.4,0.284,28,tested_negative
0,126,86,27,120,27.4,0.515,21,tested_negative
8,65,72,23,0,32,0.6,42,tested_negative
2,99,60,17,160,36.6,0.453,21,tested_negative
1,102,74,0,0,39.5,0.293,42,tested_positive
11,120,80,37,150,42.3,0.785,48,tested_positive
3,102,44,20,94,30.8,0.4,26,tested_negative
1,109,58,18,116,28.5,0.219,22,tested_negative
9,140,94,0,0,32.7,0.734,45,tested_positive
13,153,88,37,140,40.6,1.174,39,tested_negative
12,100,84,33,105,30,0.488,46,tested_negative
1,147,94,41,0,49.3,0.358,27,tested_positive
1,81,74,41,57,46.3,1.096,32,tested_negative
3,187,70,22,200,36.4,0.408,36,tested_positive
6,162,62,0,0,24.3,0.178,50,tested_positive
4,136,70,0,0,31.2,1.182,22,tested_positive
1,121,78,39,74,39,0.261,28,tested_negative
3,108,62,24,0,26,0.223,25,tested_negative
0,181,88,44,510,43.3,0.222,26,tested_positive
8,154,78,32,0,32.4,0.443,45,tested_positive
1,128,88,39,110,36.5,1.057,37,tested_positive
7,137,90,41,0,32,0.391,39,tested_negative
0,123,72,0,0,36.3,0.258,52,tested_positive
1,106,76,0,0,37.5,0.197,26,tested_negative
6,190,92,0,0,35.5,0.278,66,tested_positive
2,88,58,26,16,28.4,0.766,22,tested_negative
9,170,74,31,0,44,0.403,43,tested_positive
9,89,62,0,0,22.5,0.142,33,tested_negative
10,101,76,48,180,32.9,0.171,63,tested_negative
2,122,70,27,0,36.8,0.34,27,tested_negative
5,121,72,23,112,26.2,0.245,30,tested_negative
1,126,60,0,0,30.1,0.349,47,tested_positive
1,93,70,31,0,30.4,0.315,23,tested_negative
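
The dataset header above notes that ADAP's real-valued output was converted to a binary decision using a 0.448 cutoff. The same thresholding idea turns a probability-like score into a tested_negative / tested_positive label; a small illustrative sketch follows, where only the 0.448 cutoff comes from the header and the scores are made-up example values.

// Illustrative only: thresholding a real-valued score into the binary
// tested_negative / tested_positive decision. The 0.448 cutoff is quoted from
// the dataset header above; the scores are made-up example values.
#include <iostream>
#include <string>
#include <vector>

int main() {
    const double cutoff = 0.448;
    std::vector<double> scores = { 0.12, 0.63, 0.448 };
    for (double s : scores) {
        std::string label = s >= cutoff ? "tested_positive" : "tested_negative";
        std::cout << s << " -> " << label << std::endl;
    }
    return 0;
}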

data/glass.arff Executable file (332 lines changed)

@@ -0,0 +1,332 @@
% 1. Title: Glass Identification Database
%
% 2. Sources:
% (a) Creator: B. German
% -- Central Research Establishment
% Home Office Forensic Science Service
% Aldermaston, Reading, Berkshire RG7 4PN
% (b) Donor: Vina Spiehler, Ph.D., DABFT
% Diagnostic Products Corporation
% (213) 776-0180 (ext 3014)
% (c) Date: September, 1987
%
% 3. Past Usage:
% -- Rule Induction in Forensic Science
% -- Ian W. Evett and Ernest J. Spiehler
% -- Central Research Establishment
% Home Office Forensic Science Service
% Aldermaston, Reading, Berkshire RG7 4PN
% -- Unknown technical note number (sorry, not listed here)
% -- General Results: nearest neighbor held its own with respect to the
% rule-based system
%
% 4. Relevant Information:
% Vina conducted a comparison test of her rule-based system, BEAGLE, the
% nearest-neighbor algorithm, and discriminant analysis. BEAGLE is
% a product available through VRS Consulting, Inc.; 4676 Admiralty Way,
% Suite 206; Marina Del Ray, CA 90292 (213) 827-7890 and FAX: -3189.
% In determining whether the glass was a type of "float" glass or not,
% the following results were obtained (# incorrect answers):
%
% Type of Sample Beagle NN DA
% Windows that were float processed (87) 10 12 21
% Windows that were not: (76) 19 16 22
%
% The study of classification of types of glass was motivated by
% criminological investigation. At the scene of the crime, the glass left
% can be used as evidence...if it is correctly identified!
%
% 5. Number of Instances: 214
%
% 6. Number of Attributes: 10 (including an Id#) plus the class attribute
% -- all attributes are continuously valued
%
% 7. Attribute Information:
% 1. Id number: 1 to 214
% 2. RI: refractive index
% 3. Na: Sodium (unit measurement: weight percent in corresponding oxide, as
% are attributes 4-10)
% 4. Mg: Magnesium
% 5. Al: Aluminum
% 6. Si: Silicon
% 7. K: Potassium
% 8. Ca: Calcium
% 9. Ba: Barium
% 10. Fe: Iron
% 11. Type of glass: (class attribute)
% -- 1 building_windows_float_processed
% -- 2 building_windows_non_float_processed
% -- 3 vehicle_windows_float_processed
% -- 4 vehicle_windows_non_float_processed (none in this database)
% -- 5 containers
% -- 6 tableware
% -- 7 headlamps
%
% 8. Missing Attribute Values: None
%
% Summary Statistics:
% Attribute: Min Max Mean SD Correlation with class
% 2. RI: 1.5112 1.5339 1.5184 0.0030 -0.1642
% 3. Na: 10.73 17.38 13.4079 0.8166 0.5030
% 4. Mg: 0 4.49 2.6845 1.4424 -0.7447
% 5. Al: 0.29 3.5 1.4449 0.4993 0.5988
% 6. Si: 69.81 75.41 72.6509 0.7745 0.1515
% 7. K: 0 6.21 0.4971 0.6522 -0.0100
% 8. Ca: 5.43 16.19 8.9570 1.4232 0.0007
% 9. Ba: 0 3.15 0.1750 0.4972 0.5751
% 10. Fe: 0 0.51 0.0570 0.0974 -0.1879
%
% 9. Class Distribution: (out of 214 total instances)
% -- 163 Window glass (building windows and vehicle windows)
% -- 87 float processed
% -- 70 building windows
% -- 17 vehicle windows
% -- 76 non-float processed
% -- 76 building windows
% -- 0 vehicle windows
% -- 51 Non-window glass
% -- 13 containers
% -- 9 tableware
% -- 29 headlamps
%
%
%
%
%
%
%
% Relabeled values in attribute 'Type'
% From: '1' To: 'build wind float'
% From: '2' To: 'build wind non-float'
% From: '3' To: 'vehic wind float'
% From: '4' To: 'vehic wind non-float'
% From: '5' To: containers
% From: '6' To: tableware
% From: '7' To: headlamps
%
@relation Glass
@attribute 'RI' real
@attribute 'Na' real
@attribute 'Mg' real
@attribute 'Al' real
@attribute 'Si' real
@attribute 'K' real
@attribute 'Ca' real
@attribute 'Ba' real
@attribute 'Fe' real
@attribute 'Type' {'build wind float', 'build wind non-float', 'vehic wind float', 'vehic wind non-float', containers, tableware, headlamps}
@data
1.51793,12.79,3.5,1.12,73.03,0.64,8.77,0,0,'build wind float'
1.51643,12.16,3.52,1.35,72.89,0.57,8.53,0,0,'vehic wind float'
1.51793,13.21,3.48,1.41,72.64,0.59,8.43,0,0,'build wind float'
1.51299,14.4,1.74,1.54,74.55,0,7.59,0,0,tableware
1.53393,12.3,0,1,70.16,0.12,16.19,0,0.24,'build wind non-float'
1.51655,12.75,2.85,1.44,73.27,0.57,8.79,0.11,0.22,'build wind non-float'
1.51779,13.64,3.65,0.65,73,0.06,8.93,0,0,'vehic wind float'
1.51837,13.14,2.84,1.28,72.85,0.55,9.07,0,0,'build wind float'
1.51545,14.14,0,2.68,73.39,0.08,9.07,0.61,0.05,headlamps
1.51789,13.19,3.9,1.3,72.33,0.55,8.44,0,0.28,'build wind non-float'
1.51625,13.36,3.58,1.49,72.72,0.45,8.21,0,0,'build wind non-float'
1.51743,12.2,3.25,1.16,73.55,0.62,8.9,0,0.24,'build wind non-float'
1.52223,13.21,3.77,0.79,71.99,0.13,10.02,0,0,'build wind float'
1.52121,14.03,3.76,0.58,71.79,0.11,9.65,0,0,'vehic wind float'
1.51665,13.14,3.45,1.76,72.48,0.6,8.38,0,0.17,'vehic wind float'
1.51707,13.48,3.48,1.71,72.52,0.62,7.99,0,0,'build wind non-float'
1.51719,14.75,0,2,73.02,0,8.53,1.59,0.08,headlamps
1.51629,12.71,3.33,1.49,73.28,0.67,8.24,0,0,'build wind non-float'
1.51994,13.27,0,1.76,73.03,0.47,11.32,0,0,containers
1.51811,12.96,2.96,1.43,72.92,0.6,8.79,0.14,0,'build wind non-float'
1.52152,13.05,3.65,0.87,72.22,0.19,9.85,0,0.17,'build wind float'
1.52475,11.45,0,1.88,72.19,0.81,13.24,0,0.34,'build wind non-float'
1.51841,12.93,3.74,1.11,72.28,0.64,8.96,0,0.22,'build wind non-float'
1.51754,13.39,3.66,1.19,72.79,0.57,8.27,0,0.11,'build wind float'
1.52058,12.85,1.61,2.17,72.18,0.76,9.7,0.24,0.51,containers
1.51569,13.24,3.49,1.47,73.25,0.38,8.03,0,0,'build wind non-float'
1.5159,12.82,3.52,1.9,72.86,0.69,7.97,0,0,'build wind non-float'
1.51683,14.56,0,1.98,73.29,0,8.52,1.57,0.07,headlamps
1.51687,13.23,3.54,1.48,72.84,0.56,8.1,0,0,'build wind non-float'
1.5161,13.33,3.53,1.34,72.67,0.56,8.33,0,0,'vehic wind float'
1.51674,12.87,3.56,1.64,73.14,0.65,7.99,0,0,'build wind non-float'
1.51832,13.33,3.34,1.54,72.14,0.56,8.99,0,0,'vehic wind float'
1.51115,17.38,0,0.34,75.41,0,6.65,0,0,tableware
1.51645,13.44,3.61,1.54,72.39,0.66,8.03,0,0,'build wind non-float'
1.51755,13,3.6,1.36,72.99,0.57,8.4,0,0.11,'build wind float'
1.51571,12.72,3.46,1.56,73.2,0.67,8.09,0,0.24,'build wind float'
1.51596,12.79,3.61,1.62,72.97,0.64,8.07,0,0.26,'build wind float'
1.5173,12.35,2.72,1.63,72.87,0.7,9.23,0,0,'build wind non-float'
1.51662,12.85,3.51,1.44,73.01,0.68,8.23,0.06,0.25,'build wind non-float'
1.51409,14.25,3.09,2.08,72.28,1.1,7.08,0,0,'build wind non-float'
1.51797,12.74,3.48,1.35,72.96,0.64,8.68,0,0,'build wind float'
1.51806,13,3.8,1.08,73.07,0.56,8.38,0,0.12,'build wind non-float'
1.51627,13,3.58,1.54,72.83,0.61,8.04,0,0,'build wind non-float'
1.5159,13.24,3.34,1.47,73.1,0.39,8.22,0,0,'build wind non-float'
1.51934,13.64,3.54,0.75,72.65,0.16,8.89,0.15,0.24,'vehic wind float'
1.51755,12.71,3.42,1.2,73.2,0.59,8.64,0,0,'build wind float'
1.51514,14.01,2.68,3.5,69.89,1.68,5.87,2.2,0,containers
1.51766,13.21,3.69,1.29,72.61,0.57,8.22,0,0,'build wind float'
1.51784,13.08,3.49,1.28,72.86,0.6,8.49,0,0,'build wind float'
1.52177,13.2,3.68,1.15,72.75,0.54,8.52,0,0,'build wind non-float'
1.51753,12.57,3.47,1.38,73.39,0.6,8.55,0,0.06,'build wind float'
1.51851,13.2,3.63,1.07,72.83,0.57,8.41,0.09,0.17,'build wind non-float'
1.51743,13.3,3.6,1.14,73.09,0.58,8.17,0,0,'build wind float'
1.51593,13.09,3.59,1.52,73.1,0.67,7.83,0,0,'build wind non-float'
1.5164,14.37,0,2.74,72.85,0,9.45,0.54,0,headlamps
1.51735,13.02,3.54,1.69,72.73,0.54,8.44,0,0.07,'build wind float'
1.52247,14.86,2.2,2.06,70.26,0.76,9.76,0,0,headlamps
1.52099,13.69,3.59,1.12,71.96,0.09,9.4,0,0,'build wind float'
1.51769,13.65,3.66,1.11,72.77,0.11,8.6,0,0,'vehic wind float'
1.51846,13.41,3.89,1.33,72.38,0.51,8.28,0,0,'build wind non-float'
1.51848,13.64,3.87,1.27,71.96,0.54,8.32,0,0.32,'build wind non-float'
1.51905,13.6,3.62,1.11,72.64,0.14,8.76,0,0,'build wind float'
1.51567,13.29,3.45,1.21,72.74,0.56,8.57,0,0,'build wind float'
1.52213,14.21,3.82,0.47,71.77,0.11,9.57,0,0,'build wind float'
1.5232,13.72,3.72,0.51,71.75,0.09,10.06,0,0.16,'build wind float'
1.51556,13.87,0,2.54,73.23,0.14,9.41,0.81,0.01,headlamps
1.51926,13.2,3.33,1.28,72.36,0.6,9.14,0,0.11,'build wind float'
1.52211,14.19,3.78,0.91,71.36,0.23,9.14,0,0.37,'vehic wind float'
1.53125,10.73,0,2.1,69.81,0.58,13.3,3.15,0.28,'build wind non-float'
1.52152,13.05,3.65,0.87,72.32,0.19,9.85,0,0.17,'build wind float'
1.51829,14.46,2.24,1.62,72.38,0,9.26,0,0,tableware
1.51892,13.46,3.83,1.26,72.55,0.57,8.21,0,0.14,'build wind non-float'
1.51888,14.99,0.78,1.74,72.5,0,9.95,0,0,tableware
1.51829,13.24,3.9,1.41,72.33,0.55,8.31,0,0.1,'build wind non-float'
1.523,13.31,3.58,0.82,71.99,0.12,10.17,0,0.03,'build wind float'
1.51652,13.56,3.57,1.47,72.45,0.64,7.96,0,0,'build wind non-float'
1.51768,12.56,3.52,1.43,73.15,0.57,8.54,0,0,'build wind float'
1.51215,12.99,3.47,1.12,72.98,0.62,8.35,0,0.31,'build wind float'
1.51646,13.04,3.4,1.26,73.01,0.52,8.58,0,0,'vehic wind float'
1.51721,12.87,3.48,1.33,73.04,0.56,8.43,0,0,'build wind float'
1.51763,12.8,3.66,1.27,73.01,0.6,8.56,0,0,'build wind float'
1.51742,13.27,3.62,1.24,73.08,0.55,8.07,0,0,'build wind float'
1.52127,14.32,3.9,0.83,71.5,0,9.49,0,0,'vehic wind float'
1.51779,13.21,3.39,1.33,72.76,0.59,8.59,0,0,'build wind float'
1.52171,11.56,1.88,1.56,72.86,0.47,11.41,0,0,containers
1.518,13.71,3.93,1.54,71.81,0.54,8.21,0,0.15,'build wind non-float'
1.52777,12.64,0,0.67,72.02,0.06,14.4,0,0,'build wind non-float'
1.5175,12.82,3.55,1.49,72.75,0.54,8.52,0,0.19,'build wind float'
1.51764,12.98,3.54,1.21,73,0.65,8.53,0,0,'build wind float'
1.52177,13.75,1.01,1.36,72.19,0.33,11.14,0,0,'build wind non-float'
1.51645,14.94,0,1.87,73.11,0,8.67,1.38,0,headlamps
1.51786,12.73,3.43,1.19,72.95,0.62,8.76,0,0.3,'build wind float'
1.52152,13.12,3.58,0.9,72.2,0.23,9.82,0,0.16,'build wind float'
1.51937,13.79,2.41,1.19,72.76,0,9.77,0,0,tableware
1.51514,14.85,0,2.42,73.72,0,8.39,0.56,0,headlamps
1.52172,13.48,3.74,0.9,72.01,0.18,9.61,0,0.07,'build wind float'
1.51732,14.95,0,1.8,72.99,0,8.61,1.55,0,headlamps
1.5202,13.98,1.35,1.63,71.76,0.39,10.56,0,0.18,'build wind non-float'
1.51605,12.9,3.44,1.45,73.06,0.44,8.27,0,0,'build wind non-float'
1.51847,13.1,3.97,1.19,72.44,0.6,8.43,0,0,'build wind non-float'
1.51761,13.89,3.6,1.36,72.73,0.48,7.83,0,0,'build wind float'
1.51673,13.3,3.64,1.53,72.53,0.65,8.03,0,0.29,'build wind non-float'
1.52365,15.79,1.83,1.31,70.43,0.31,8.61,1.68,0,headlamps
1.51685,14.92,0,1.99,73.06,0,8.4,1.59,0,headlamps
1.51658,14.8,0,1.99,73.11,0,8.28,1.71,0,headlamps
1.51316,13.02,0,3.04,70.48,6.21,6.96,0,0,containers
1.51709,13,3.47,1.79,72.72,0.66,8.18,0,0,'build wind non-float'
1.51727,14.7,0,2.34,73.28,0,8.95,0.66,0,headlamps
1.51898,13.58,3.35,1.23,72.08,0.59,8.91,0,0,'build wind float'
1.51969,12.64,0,1.65,73.75,0.38,11.53,0,0,containers
1.5182,12.62,2.76,0.83,73.81,0.35,9.42,0,0.2,'build wind non-float'
1.51617,14.95,0,2.27,73.3,0,8.71,0.67,0,headlamps
1.51911,13.9,3.73,1.18,72.12,0.06,8.89,0,0,'build wind float'
1.51651,14.38,0,1.94,73.61,0,8.48,1.57,0,headlamps
1.51694,12.86,3.58,1.31,72.61,0.61,8.79,0,0,'vehic wind float'
1.52315,13.44,3.34,1.23,72.38,0.6,8.83,0,0,headlamps
1.52068,13.55,2.09,1.67,72.18,0.53,9.57,0.27,0.17,'build wind non-float'
1.51838,14.32,3.26,2.22,71.25,1.46,5.79,1.63,0,headlamps
1.51818,13.72,0,0.56,74.45,0,10.99,0,0,'build wind non-float'
1.51769,12.45,2.71,1.29,73.7,0.56,9.06,0,0.24,'build wind float'
1.5166,12.99,3.18,1.23,72.97,0.58,8.81,0,0.24,'build wind non-float'
1.51589,12.88,3.43,1.4,73.28,0.69,8.05,0,0.24,'build wind float'
1.5241,13.83,2.9,1.17,71.15,0.08,10.79,0,0,'build wind non-float'
1.52725,13.8,3.15,0.66,70.57,0.08,11.64,0,0,'build wind non-float'
1.52119,12.97,0.33,1.51,73.39,0.13,11.27,0,0.28,containers
1.51748,12.86,3.56,1.27,73.21,0.54,8.38,0,0.17,'build wind float'
1.51653,11.95,0,1.19,75.18,2.7,8.93,0,0,headlamps
1.51623,14.14,0,2.88,72.61,0.08,9.18,1.06,0,headlamps
1.52101,13.64,4.49,1.1,71.78,0.06,8.75,0,0,'build wind float'
1.51763,12.61,3.59,1.31,73.29,0.58,8.5,0,0,'build wind float'
1.51596,13.02,3.56,1.54,73.11,0.72,7.9,0,0,'build wind non-float'
1.51674,12.79,3.52,1.54,73.36,0.66,7.9,0,0,'build wind non-float'
1.52065,14.36,0,2.02,73.42,0,8.44,1.64,0,headlamps
1.51768,12.65,3.56,1.3,73.08,0.61,8.69,0,0.14,'build wind float'
1.52369,13.44,0,1.58,72.22,0.32,12.24,0,0,containers
1.51756,13.15,3.61,1.05,73.24,0.57,8.24,0,0,'build wind float'
1.51754,13.48,3.74,1.17,72.99,0.59,8.03,0,0,'build wind float'
1.51711,12.89,3.62,1.57,72.96,0.61,8.11,0,0,'build wind non-float'
1.5221,13.73,3.84,0.72,71.76,0.17,9.74,0,0,'build wind float'
1.51594,13.09,3.52,1.55,72.87,0.68,8.05,0,0.09,'build wind non-float'
1.51784,12.68,3.67,1.16,73.11,0.61,8.7,0,0,'build wind float'
1.51909,13.89,3.53,1.32,71.81,0.51,8.78,0.11,0,'build wind float'
1.51977,13.81,3.58,1.32,71.72,0.12,8.67,0.69,0,'build wind float'
1.51666,12.86,0,1.83,73.88,0.97,10.17,0,0,containers
1.51631,13.34,3.57,1.57,72.87,0.61,7.89,0,0,'build wind non-float'
1.51872,12.93,3.66,1.56,72.51,0.58,8.55,0,0.12,'build wind non-float'
1.51708,13.72,3.68,1.81,72.06,0.64,7.88,0,0,'build wind non-float'
1.52081,13.78,2.28,1.43,71.99,0.49,9.85,0,0.17,'build wind non-float'
1.51574,14.86,3.67,1.74,71.87,0.16,7.36,0,0.12,'build wind non-float'
1.51813,13.43,3.98,1.18,72.49,0.58,8.15,0,0,'build wind non-float'
1.51131,13.69,3.2,1.81,72.81,1.76,5.43,1.19,0,headlamps
1.52227,14.17,3.81,0.78,71.35,0,9.69,0,0,'build wind float'
1.52614,13.7,0,1.36,71.24,0.19,13.44,0,0.1,'build wind non-float'
1.51811,13.33,3.85,1.25,72.78,0.52,8.12,0,0,'build wind non-float'
1.51655,13.41,3.39,1.28,72.64,0.52,8.65,0,0,'vehic wind float'
1.51751,12.81,3.57,1.35,73.02,0.62,8.59,0,0,'build wind float'
1.51508,15.15,0,2.25,73.5,0,8.34,0.63,0,headlamps
1.51915,12.73,1.85,1.86,72.69,0.6,10.09,0,0,containers
1.51966,14.77,3.75,0.29,72.02,0.03,9,0,0,'build wind float'
1.51844,13.25,3.76,1.32,72.4,0.58,8.42,0,0,'build wind non-float'
1.52664,11.23,0,0.77,73.21,0,14.68,0,0,'build wind non-float'
1.52172,13.51,3.86,0.88,71.79,0.23,9.54,0,0.11,'build wind float'
1.51602,14.85,0,2.38,73.28,0,8.76,0.64,0.09,headlamps
1.51321,13,0,3.02,70.7,6.21,6.93,0,0,containers
1.52739,11.02,0,0.75,73.08,0,14.96,0,0,'build wind non-float'
1.52213,14.21,3.82,0.47,71.77,0.11,9.57,0,0,'build wind float'
1.51747,12.84,3.5,1.14,73.27,0.56,8.55,0,0,'build wind float'
1.51839,12.85,3.67,1.24,72.57,0.62,8.68,0,0.35,'build wind non-float'
1.51646,13.41,3.55,1.25,72.81,0.68,8.1,0,0,'build wind non-float'
1.51609,15.01,0,2.51,73.05,0.05,8.83,0.53,0,headlamps
1.51667,12.94,3.61,1.26,72.75,0.56,8.6,0,0,'build wind non-float'
1.51588,13.12,3.41,1.58,73.26,0.07,8.39,0,0.19,'build wind non-float'
1.52667,13.99,3.7,0.71,71.57,0.02,9.82,0,0.1,'build wind float'
1.51831,14.39,0,1.82,72.86,1.41,6.47,2.88,0,headlamps
1.51918,14.04,3.58,1.37,72.08,0.56,8.3,0,0,'build wind float'
1.51613,13.88,1.78,1.79,73.1,0,8.67,0.76,0,headlamps
1.52196,14.36,3.85,0.89,71.36,0.15,9.15,0,0,'build wind float'
1.51824,12.87,3.48,1.29,72.95,0.6,8.43,0,0,'build wind float'
1.52151,11.03,1.71,1.56,73.44,0.58,11.62,0,0,containers
1.51969,14.56,0,0.56,73.48,0,11.22,0,0,tableware
1.51618,13.01,3.5,1.48,72.89,0.6,8.12,0,0,'build wind non-float'
1.51645,13.4,3.49,1.52,72.65,0.67,8.08,0,0.1,'build wind non-float'
1.51796,13.5,3.36,1.63,71.94,0.57,8.81,0,0.09,'vehic wind float'
1.52222,14.43,0,1,72.67,0.1,11.52,0,0.08,'build wind non-float'
1.51783,12.69,3.54,1.34,72.95,0.57,8.75,0,0,'build wind float'
1.51711,14.23,0,2.08,73.36,0,8.62,1.67,0,headlamps
1.51736,12.78,3.62,1.29,72.79,0.59,8.7,0,0,'build wind float'
1.51808,13.43,2.87,1.19,72.84,0.55,9.03,0,0,'build wind float'
1.5167,13.24,3.57,1.38,72.7,0.56,8.44,0,0.1,'vehic wind float'
1.52043,13.38,0,1.4,72.25,0.33,12.5,0,0,containers
1.519,13.49,3.48,1.35,71.95,0.55,9,0,0,'build wind float'
1.51778,13.21,2.81,1.29,72.98,0.51,9.02,0,0.09,'build wind float'
1.51905,14,2.39,1.56,72.37,0,9.57,0,0,tableware
1.51531,14.38,0,2.66,73.1,0.04,9.08,0.64,0,headlamps
1.51916,14.15,0,2.09,72.74,0,10.88,0,0,tableware
1.51841,13.02,3.62,1.06,72.34,0.64,9.13,0,0.15,'build wind non-float'
1.5159,13.02,3.58,1.51,73.12,0.69,7.96,0,0,'build wind non-float'
1.51593,13.25,3.45,1.43,73.17,0.61,7.86,0,0,'build wind non-float'
1.5164,12.55,3.48,1.87,73.23,0.63,8.08,0,0.09,'build wind non-float'
1.51663,12.93,3.54,1.62,72.96,0.64,8.03,0,0.21,'build wind non-float'
1.5169,13.33,3.54,1.61,72.54,0.68,8.11,0,0,'build wind non-float'
1.51869,13.19,3.37,1.18,72.72,0.57,8.83,0,0.16,'build wind float'
1.51776,13.53,3.41,1.52,72.04,0.58,8.79,0,0,'vehic wind float'
1.51775,12.85,3.48,1.23,72.97,0.61,8.56,0.09,0.22,'build wind float'
1.5186,13.36,3.43,1.43,72.26,0.51,8.6,0,0,'build wind non-float'
1.5172,13.38,3.5,1.15,72.85,0.5,8.43,0,0,'build wind float'
1.51623,14.2,0,2.79,73.46,0.04,9.04,0.4,0.09,headlamps
1.51618,13.53,3.55,1.54,72.99,0.39,7.78,0,0,'build wind float'
1.51761,12.81,3.54,1.23,73.24,0.58,8.39,0,0,'build wind float'
1.5161,13.42,3.4,1.22,72.69,0.59,8.32,0,0,'vehic wind float'
1.51592,12.86,3.52,2.12,72.66,0.69,7.97,0,0,'build wind non-float'
1.51613,13.92,3.52,1.25,72.88,0.37,7.94,0,0.14,'build wind non-float'
1.51689,12.67,2.88,1.71,73.21,0.73,8.54,0,0,'build wind non-float'
1.51852,14.09,2.19,1.66,72.67,0,9.32,0,0,tableware
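
The header's Summary Statistics table lists a per-attribute mean and standard deviation, which can be recomputed from the @data rows. Below is a minimal sketch using the first four RI values from the rows above; the population (divide-by-n) standard deviation is an assumption, since the header does not state which variant it reports.

// Sketch of recomputing a mean and standard deviation like the ones in the
// header's Summary Statistics table, using the first four RI values from the
// @data rows above. Population SD is an assumption.
#include <cmath>
#include <iostream>
#include <vector>

int main() {
    std::vector<double> ri = { 1.51793, 1.51643, 1.51793, 1.51299 };
    double mean = 0.0;
    for (double v : ri) mean += v;
    mean /= static_cast<double>(ri.size());
    double var = 0.0;
    for (double v : ri) var += (v - mean) * (v - mean);
    double sd = std::sqrt(var / static_cast<double>(ri.size()));
    std::cout << "mean=" << mean << " sd=" << sd << std::endl;
    return 0;
}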

data/iris.net Normal file (5 lines changed)

@@ -0,0 +1,5 @@
class sepallength
class sepalwidth
class petallength
class petalwidth
petalwidth petallength
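
data/iris.net appears to list one edge per line over the iris attributes, with the class as the source of most arcs plus one feature-to-feature arc. A minimal parsing sketch follows, assuming each line is a whitespace-separated source/target pair; that interpretation is inferred from the file contents, not documented in the commit.

// Hedged sketch: reading a ".net" file like data/iris.net into an edge list,
// assuming each line holds a whitespace-separated source/target pair (this
// interpretation is an assumption inferred from the file contents).
#include <fstream>
#include <iostream>
#include <string>
#include <utility>
#include <vector>

int main() {
    std::ifstream in("data/iris.net");
    std::vector<std::pair<std::string, std::string>> edges;
    std::string source, target;
    while (in >> source >> target) {
        edges.emplace_back(source, target);
    }
    for (const auto& e : edges) {
        std::cout << e.first << " -> " << e.second << std::endl;
    }
    return 0;
}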

data/kdd_JapaneseVowels.arff Executable file (10177 lines changed)

File diff suppressed because it is too large.

data/letter.arff Executable file (20191 lines changed)

File diff suppressed because it is too large.

data/liver-disorders.arff Executable file (399 lines changed)

@@ -0,0 +1,399 @@
% 1. Title: BUPA liver disorders
%
% 2. Source information:
% -- Creators: BUPA Medical Research Ltd.
% -- Donor: Richard S. Forsyth
% 8 Grosvenor Avenue
% Mapperley Park
% Nottingham NG3 5DX
% 0602-621676
% -- Date: 5/15/1990
%
% 3. Past usage:
% -- None known other than what is shown in the PC/BEAGLE User's Guide
% (written by Richard S. Forsyth).
%
% 4. Relevant information:
% -- The first 5 variables are all blood tests which are thought
% to be sensitive to liver disorders that might arise from
% excessive alcohol consumption. Each line in the bupa.data file
% constitutes the record of a single male individual.
% -- It appears that drinks>5 is some sort of a selector on this database.
% See the PC/BEAGLE User's Guide for more information.
%
% 5. Number of instances: 345
%
% 6. Number of attributes: 7 overall
%
% 7. Attribute information:
% 1. mcv mean corpuscular volume
% 2. alkphos alkaline phosphatase
% 3. sgpt alanine aminotransferase
% 4. sgot aspartate aminotransferase
% 5. gammagt gamma-glutamyl transpeptidase
% 6. drinks number of half-pint equivalents of alcoholic beverages
% drunk per day
% 7. selector field used to split data into two sets
%
% 8. Missing values: none
%
% Information about the dataset
% CLASSTYPE: nominal
% CLASSINDEX: last
%
@relation liver-disorders
@attribute mcv INTEGER
@attribute alkphos INTEGER
@attribute sgpt INTEGER
@attribute sgot INTEGER
@attribute gammagt INTEGER
@attribute drinks REAL
@attribute selector {1,2}
@data
85,92,45,27,31,0.0,1
85,64,59,32,23,0.0,2
86,54,33,16,54,0.0,2
91,78,34,24,36,0.0,2
87,70,12,28,10,0.0,2
98,55,13,17,17,0.0,2
88,62,20,17,9,0.5,1
88,67,21,11,11,0.5,1
92,54,22,20,7,0.5,1
90,60,25,19,5,0.5,1
89,52,13,24,15,0.5,1
82,62,17,17,15,0.5,1
90,64,61,32,13,0.5,1
86,77,25,19,18,0.5,1
96,67,29,20,11,0.5,1
91,78,20,31,18,0.5,1
89,67,23,16,10,0.5,1
89,79,17,17,16,0.5,1
91,107,20,20,56,0.5,1
94,116,11,33,11,0.5,1
92,59,35,13,19,0.5,1
93,23,35,20,20,0.5,1
90,60,23,27,5,0.5,1
96,68,18,19,19,0.5,1
84,80,47,33,97,0.5,1
92,70,24,13,26,0.5,1
90,47,28,15,18,0.5,1
88,66,20,21,10,0.5,1
91,102,17,13,19,0.5,1
87,41,31,19,16,0.5,1
86,79,28,16,17,0.5,1
91,57,31,23,42,0.5,1
93,77,32,18,29,0.5,1
88,96,28,21,40,0.5,1
94,65,22,18,11,0.5,1
91,72,155,68,82,0.5,2
85,54,47,33,22,0.5,2
79,39,14,19,9,0.5,2
85,85,25,26,30,0.5,2
89,63,24,20,38,0.5,2
84,92,68,37,44,0.5,2
89,68,26,39,42,0.5,2
89,101,18,25,13,0.5,2
86,84,18,14,16,0.5,2
85,65,25,14,18,0.5,2
88,61,19,21,13,0.5,2
92,56,14,16,10,0.5,2
95,50,29,25,50,0.5,2
91,75,24,22,11,0.5,2
83,40,29,25,38,0.5,2
89,74,19,23,16,0.5,2
85,64,24,22,11,0.5,2
92,57,64,36,90,0.5,2
94,48,11,23,43,0.5,2
87,52,21,19,30,0.5,2
85,65,23,29,15,0.5,2
84,82,21,21,19,0.5,2
88,49,20,22,19,0.5,2
96,67,26,26,36,0.5,2
90,63,24,24,24,0.5,2
90,45,33,34,27,0.5,2
90,72,14,15,18,0.5,2
91,55,4,8,13,0.5,2
91,52,15,22,11,0.5,2
87,71,32,19,27,1.0,1
89,77,26,20,19,1.0,1
89,67,5,17,14,1.0,2
85,51,26,24,23,1.0,2
103,75,19,30,13,1.0,2
90,63,16,21,14,1.0,2
90,63,29,23,57,2.0,1
90,67,35,19,35,2.0,1
87,66,27,22,9,2.0,1
90,73,34,21,22,2.0,1
86,54,20,21,16,2.0,1
90,80,19,14,42,2.0,1
87,90,43,28,156,2.0,2
96,72,28,19,30,2.0,2
91,55,9,25,16,2.0,2
95,78,27,25,30,2.0,2
92,101,34,30,64,2.0,2
89,51,41,22,48,2.0,2
91,99,42,33,16,2.0,2
94,58,21,18,26,2.0,2
92,60,30,27,297,2.0,2
94,58,21,18,26,2.0,2
88,47,33,26,29,2.0,2
92,65,17,25,9,2.0,2
92,79,22,20,11,3.0,1
84,83,20,25,7,3.0,1
88,68,27,21,26,3.0,1
86,48,20,20,6,3.0,1
99,69,45,32,30,3.0,1
88,66,23,12,15,3.0,1
89,62,42,30,20,3.0,1
90,51,23,17,27,3.0,1
81,61,32,37,53,3.0,2
89,89,23,18,104,3.0,2
89,65,26,18,36,3.0,2
92,75,26,26,24,3.0,2
85,59,25,20,25,3.0,2
92,61,18,13,81,3.0,2
89,63,22,27,10,4.0,1
90,84,18,23,13,4.0,1
88,95,25,19,14,4.0,1
89,35,27,29,17,4.0,1
91,80,37,23,27,4.0,1
91,109,33,15,18,4.0,1
91,65,17,5,7,4.0,1
88,107,29,20,50,4.0,2
87,76,22,55,9,4.0,2
87,86,28,23,21,4.0,2
87,42,26,23,17,4.0,2
88,80,24,25,17,4.0,2
90,96,34,49,169,4.0,2
86,67,11,15,8,4.0,2
92,40,19,20,21,4.0,2
85,60,17,21,14,4.0,2
89,90,15,17,25,4.0,2
91,57,15,16,16,4.0,2
96,55,48,39,42,4.0,2
79,101,17,27,23,4.0,2
90,134,14,20,14,4.0,2
89,76,14,21,24,4.0,2
88,93,29,27,31,4.0,2
90,67,10,16,16,4.0,2
92,73,24,21,48,4.0,2
91,55,28,28,82,4.0,2
83,45,19,21,13,4.0,2
90,74,19,14,22,4.0,2
92,66,21,16,33,5.0,1
93,63,26,18,18,5.0,1
86,78,47,39,107,5.0,2
97,44,113,45,150,5.0,2
87,59,15,19,12,5.0,2
86,44,21,11,15,5.0,2
87,64,16,20,24,5.0,2
92,57,21,23,22,5.0,2
90,70,25,23,112,5.0,2
99,59,17,19,11,5.0,2
92,80,10,26,20,6.0,1
95,60,26,22,28,6.0,1
91,63,25,26,15,6.0,1
92,62,37,21,36,6.0,1
95,50,13,14,15,6.0,1
90,76,37,19,50,6.0,1
96,70,70,26,36,6.0,1
95,62,64,42,76,6.0,1
92,62,20,23,20,6.0,1
91,63,25,26,15,6.0,1
82,56,67,38,92,6.0,2
92,82,27,24,37,6.0,2
90,63,12,26,21,6.0,2
88,37,9,15,16,6.0,2
100,60,29,23,76,6.0,2
98,43,35,23,69,6.0,2
91,74,87,50,67,6.0,2
92,87,57,25,44,6.0,2
93,99,36,34,48,6.0,2
90,72,17,19,19,6.0,2
97,93,21,20,68,6.0,2
93,50,18,25,17,6.0,2
90,57,20,26,33,6.0,2
92,76,31,28,41,6.0,2
88,55,19,17,14,6.0,2
89,63,24,29,29,6.0,2
92,79,70,32,84,7.0,1
92,93,58,35,120,7.0,1
93,84,58,47,62,7.0,2
97,71,29,22,52,8.0,1
84,99,33,19,26,8.0,1
96,44,42,23,73,8.0,1
90,62,22,21,21,8.0,1
92,94,18,17,6,8.0,1
90,67,77,39,114,8.0,1
97,71,29,22,52,8.0,1
91,69,25,25,66,8.0,2
93,59,17,20,14,8.0,2
92,95,85,48,200,8.0,2
90,50,26,22,53,8.0,2
91,62,59,47,60,8.0,2
92,93,22,28,123,9.0,1
92,77,86,41,31,10.0,1
86,66,22,24,26,10.0,2
98,57,31,34,73,10.0,2
95,80,50,64,55,10.0,2
92,108,53,33,94,12.0,2
97,92,22,28,49,12.0,2
93,77,39,37,108,16.0,1
94,83,81,34,201,20.0,1
87,75,25,21,14,0.0,1
88,56,23,18,12,0.0,1
84,97,41,20,32,0.0,2
94,91,27,20,15,0.5,1
97,62,17,13,5,0.5,1
92,85,25,20,12,0.5,1
82,48,27,15,12,0.5,1
88,74,31,25,15,0.5,1
95,77,30,14,21,0.5,1
88,94,26,18,8,0.5,1
91,70,19,19,22,0.5,1
83,54,27,15,12,0.5,1
91,105,40,26,56,0.5,1
86,79,37,28,14,0.5,1
91,96,35,22,135,0.5,1
89,82,23,14,35,0.5,1
90,73,24,23,11,0.5,1
90,87,19,25,19,0.5,1
89,82,33,32,18,0.5,1
85,79,17,8,9,0.5,1
85,119,30,26,17,0.5,1
78,69,24,18,31,0.5,1
88,107,34,21,27,0.5,1
89,115,17,27,7,0.5,1
92,67,23,15,12,0.5,1
89,101,27,34,14,0.5,1
91,84,11,12,10,0.5,1
94,101,41,20,53,0.5,2
88,46,29,22,18,0.5,2
88,122,35,29,42,0.5,2
84,88,28,25,35,0.5,2
90,79,18,15,24,0.5,2
87,69,22,26,11,0.5,2
65,63,19,20,14,0.5,2
90,64,12,17,14,0.5,2
85,58,18,24,16,0.5,2
88,81,41,27,36,0.5,2
86,78,52,29,62,0.5,2
82,74,38,28,48,0.5,2
86,58,36,27,59,0.5,2
94,56,30,18,27,0.5,2
87,57,30,30,22,0.5,2
98,74,148,75,159,0.5,2
94,75,20,25,38,0.5,2
83,68,17,20,71,0.5,2
93,56,25,21,33,0.5,2
101,65,18,21,22,0.5,2
92,65,25,20,31,0.5,2
92,58,14,16,13,0.5,2
86,58,16,23,23,0.5,2
85,62,15,13,22,0.5,2
86,57,13,20,13,0.5,2
86,54,26,30,13,0.5,2
81,41,33,27,34,1.0,1
91,67,32,26,13,1.0,1
91,80,21,19,14,1.0,1
92,60,23,15,19,1.0,1
91,60,32,14,8,1.0,1
93,65,28,22,10,1.0,1
90,63,45,24,85,1.0,2
87,92,21,22,37,1.0,2
83,78,31,19,115,1.0,2
95,62,24,23,14,1.0,2
93,59,41,30,48,1.0,2
84,82,43,32,38,2.0,1
87,71,33,20,22,2.0,1
86,44,24,15,18,2.0,1
86,66,28,24,21,2.0,1
88,58,31,17,17,2.0,1
90,61,28,29,31,2.0,1
88,69,70,24,64,2.0,1
93,87,18,17,26,2.0,1
98,58,33,21,28,2.0,1
91,44,18,18,23,2.0,2
87,75,37,19,70,2.0,2
94,91,30,26,25,2.0,2
88,85,14,15,10,2.0,2
89,109,26,25,27,2.0,2
87,59,37,27,34,2.0,2
93,58,20,23,18,2.0,2
88,57,9,15,16,2.0,2
94,65,38,27,17,3.0,1
91,71,12,22,11,3.0,1
90,55,20,20,16,3.0,1
91,64,21,17,26,3.0,2
88,47,35,26,33,3.0,2
82,72,31,20,84,3.0,2
85,58,83,49,51,3.0,2
91,54,25,22,35,4.0,1
98,50,27,25,53,4.0,2
86,62,29,21,26,4.0,2
89,48,32,22,14,4.0,2
82,68,20,22,9,4.0,2
83,70,17,19,23,4.0,2
96,70,21,26,21,4.0,2
94,117,77,56,52,4.0,2
93,45,11,14,21,4.0,2
93,49,27,21,29,4.0,2
84,73,46,32,39,4.0,2
91,63,17,17,46,4.0,2
90,57,31,18,37,4.0,2
87,45,19,13,16,4.0,2
91,68,14,20,19,4.0,2
86,55,29,35,108,4.0,2
91,86,52,47,52,4.0,2
88,46,15,33,55,4.0,2
85,52,22,23,34,4.0,2
89,72,33,27,55,4.0,2
95,59,23,18,19,4.0,2
94,43,154,82,121,4.0,2
96,56,38,26,23,5.0,2
90,52,10,17,12,5.0,2
94,45,20,16,12,5.0,2
99,42,14,21,49,5.0,2
93,102,47,23,37,5.0,2
94,71,25,26,31,5.0,2
92,73,33,34,115,5.0,2
87,54,41,29,23,6.0,1
92,67,15,14,14,6.0,1
98,101,31,26,32,6.0,1
92,53,51,33,92,6.0,1
97,94,43,43,82,6.0,1
93,43,11,16,54,6.0,1
93,68,24,18,19,6.0,1
95,36,38,19,15,6.0,1
99,86,58,42,203,6.0,1
98,66,103,57,114,6.0,1
92,80,10,26,20,6.0,1
96,74,27,25,43,6.0,2
95,93,21,27,47,6.0,2
86,109,16,22,28,6.0,2
91,46,30,24,39,7.0,2
102,82,34,78,203,7.0,2
85,50,12,18,14,7.0,2
91,57,33,23,12,8.0,1
91,52,76,32,24,8.0,1
93,70,46,30,33,8.0,1
87,55,36,19,25,8.0,1
98,123,28,24,31,8.0,1
82,55,18,23,44,8.0,2
95,73,20,25,225,8.0,2
97,80,17,20,53,8.0,2
100,83,25,24,28,8.0,2
88,91,56,35,126,9.0,2
91,138,45,21,48,10.0,1
92,41,37,22,37,10.0,1
86,123,20,25,23,10.0,2
91,93,35,34,37,10.0,2
87,87,15,23,11,10.0,2
87,56,52,43,55,10.0,2
99,75,26,24,41,12.0,1
96,69,53,43,203,12.0,2
98,77,55,35,89,15.0,1
91,68,27,26,14,16.0,1
98,99,57,45,65,20.0,1

2306
data/mfeat-factors.arff Executable file

File diff suppressed because it is too large

View File

@ -1,6 +1,7 @@
#include <iostream>
#include <string>
#include <torch/torch.h>
#include <getopt.h>
#include "ArffFiles.h"
#include "Network.h"
#include "CPPFImdlp.h"
@ -8,41 +9,89 @@
using namespace std;
vector<mdlp::labels_t> discretize(vector<mdlp::samples_t>& X, mdlp::labels_t& y)
const string PATH = "data/";
/* print a description of all supported options */
void usage(const char* path)
{
/* take only the last portion of the path */
const char* basename = strrchr(path, '/');
basename = basename ? basename + 1 : path;
cout << "usage: " << basename << "[OPTION]" << endl;
cout << " -h, --help\t\t Print this help and exit." << endl;
cout
<< " -f, --file[=FILENAME]\t {diabetes, glass, iris, kdd_JapaneseVowels, letter, liver-disorders, mfeat-factors}."
<< endl;
cout << " -p, --path[=FILENAME]\t folder where the data files are located, default " << PATH << endl;
cout << " -n, --net=[FILENAME]\t default=file parameter value" << endl;
}
tuple<string, string, string> parse_arguments(int argc, char** argv)
{
string file_name;
string network_name;
string path = PATH;
const vector<struct option> long_options = {
{"help", no_argument, nullptr, 'h'},
{"file", required_argument, nullptr, 'f'},
{"path", required_argument, nullptr, 'p'},
{"net", required_argument, nullptr, 'n'},
{nullptr, no_argument, nullptr, 0}
};
while (true) {
const auto c = getopt_long(argc, argv, "hf:p:n:", long_options.data(), nullptr);
if (c == -1)
break;
switch (c) {
case 'h':
usage(argv[0]);
exit(0);
case 'f':
file_name = string(optarg);
break;
case 'n':
network_name = string(optarg);
break;
case 'p':
path = optarg;
if (path.back() != '/')
path += '/';
break;
case '?':
usage(argv[0]);
exit(1);
default:
abort();
}
}
if (file_name.empty()) {
usage(argv[0]);
exit(1);
}
if (network_name.empty()) {
network_name = file_name;
}
return make_tuple(file_name, path, network_name);
}
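With these options, and assuming the compiled sample binary is simply called main, a typical invocation might look like:

    main -f iris -p data/

which would load data/iris.arff and, since --net is omitted, fall back to the network structure in data/iris.net (the suffixes are appended in get_options below).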
pair<vector<mdlp::labels_t>, map<string, int>> discretize(vector<mdlp::samples_t>& X, mdlp::labels_t& y, vector<string> features)
{
vector<mdlp::labels_t>Xd;
map<string, int> maxes;
auto fimdlp = mdlp::CPPFImdlp();
for (int i = 0; i < X.size(); i++) {
fimdlp.fit(X[i], y);
mdlp::labels_t& xd = fimdlp.transform(X[i]);
cout << "X[" << i << "]: ";
auto mm = minmax_element(xd.begin(), xd.end());
cout << *mm.first << " " << *mm.second << endl;
maxes[features[i]] = *max_element(xd.begin(), xd.end()) + 1;
Xd.push_back(xd);
}
return Xd;
return { Xd, maxes };
}
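The maxes map returned above records, for each feature, how many discrete states the discretizer produced (largest label plus one); main below uses it to size the network nodes. A minimal usage sketch, reusing the names from this diff:

    // Hedged sketch: discretize and read back the state count of one feature.
    vector<mdlp::labels_t> Xd;
    map<string, int> maxes;
    tie(Xd, maxes) = discretize(X, y, features);
    int petalStates = maxes["petalwidth"]; // number of discrete bins for petalwidth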
int main()
void showNodesInfo(bayesnet::Network& network, string className)
{
auto handler = ArffFiles();
handler.load("data/iris.arff");
// Get Dataset X, y
vector<mdlp::samples_t>& X = handler.getX();
mdlp::labels_t& y = handler.getY();
// Get className & Features
auto className = handler.getClassName();
vector<string> features;
for (auto feature : handler.getAttributes()) {
features.push_back(feature.first);
}
// Discretize Dataset
vector<mdlp::labels_t> Xd = discretize(X, y);
// Build Network
auto network = bayesnet::Network();
network.fit(Xd, y, features, className);
cout << "Hello, Bayesian Networks!" << endl;
cout << "Nodes:" << endl;
for (auto [name, item] : network.getNodes()) {
cout << "*" << item->getName() << " -> " << item->getNumStates() << endl;
@ -58,9 +107,11 @@ int main()
cout << "Root: " << network.getRoot()->getName() << endl;
network.setRoot(className);
cout << "Now Root should be class: " << network.getRoot()->getName() << endl;
}
void showCPDS(bayesnet::Network& network)
{
cout << "CPDs:" << endl;
auto nodes = network.getNodes();
auto classNode = nodes[className];
for (auto it = nodes.begin(); it != nodes.end(); it++) {
cout << "* Name: " << it->first << " " << it->second->getName() << " -> " << it->second->getNumStates() << endl;
cout << "Parents: ";
@ -71,6 +122,100 @@ int main()
auto cpd = it->second->getCPT();
cout << cpd << endl;
}
}
bool file_exists(const std::string& name)
{
if (FILE* file = fopen(name.c_str(), "r")) {
fclose(file);
return true;
} else {
return false;
}
}
pair<string, string> get_options(int argc, char** argv)
{
map<string, bool> datasets = {
{"diabetes", true},
{"glass", true},
{"iris", true},
{"kdd_JapaneseVowels", false},
{"letter", true},
{"liver-disorders", true},
{"mfeat-factors", true},
};
string file_name;
string path;
string network_name;
tie(file_name, path, network_name) = parse_arguments(argc, argv);
if (datasets.find(file_name) == datasets.end() && file_name != "all") {
cout << "Invalid file name: " << file_name << endl;
usage(argv[0]);
exit(1);
}
file_name = path + file_name + ".arff";
if (!file_exists(file_name)) {
cout << "Data File " << file_name << " does not exist" << endl;
usage(argv[0]);
exit(1);
}
network_name = path + network_name + ".net";
if (!file_exists(network_name)) {
cout << "Network File " << network_name << " does not exist" << endl;
usage(argv[0]);
exit(1);
}
return { file_name, network_name };
}
void build_network(bayesnet::Network& network, string network_name, map<string, int> maxes)
{
ifstream file(network_name);
string line;
while (getline(file, line)) {
istringstream iss(line);
string parent, child;
if (!(iss >> parent >> child)) {
break;
}
network.addNode(parent, maxes[parent]);
network.addNode(child, maxes[child]);
network.addEdge(parent, child);
}
file.close();
}
int main(int argc, char** argv)
{
string file_name, network_name;
tie(file_name, network_name) = get_options(argc, argv);
auto handler = ArffFiles();
handler.load(file_name);
// Get Dataset X, y
vector<mdlp::samples_t>& X = handler.getX();
mdlp::labels_t& y = handler.getY();
// Get className & Features
auto className = handler.getClassName();
vector<string> features;
for (auto feature : handler.getAttributes()) {
features.push_back(feature.first);
}
// Discretize Dataset
vector<mdlp::labels_t> Xd;
map<string, int> maxes;
tie(Xd, maxes) = discretize(X, y, features);
maxes[className] = *max_element(y.begin(), y.end()) + 1;
// Build Network
auto network = bayesnet::Network();
build_network(network, network_name, maxes);
network.fit(Xd, y, features, className);
cout << "Hello, Bayesian Networks!" << endl;
showNodesInfo(network, className);
// showCPDS(network);
cout << "Score: " << network.score(Xd, y) << endl;
cout << "PyTorch version: " << TORCH_VERSION << endl;
return 0;
}

View File

@ -11,7 +11,9 @@ namespace bayesnet {
void Network::addNode(string name, int numStates)
{
if (nodes.find(name) != nodes.end()) {
throw invalid_argument("Node " + name + " already exists");
// if node exists update its number of states
nodes[name]->setNumStates(numStates);
return;
}
nodes[name] = new Node(name, numStates);
if (root == nullptr) {
@ -63,6 +65,7 @@ namespace bayesnet {
{
// remove problematic edge
nodes[parent]->removeChild(nodes[child]);
nodes[child]->removeParent(nodes[parent]);
throw invalid_argument("Adding this edge forms a cycle in the graph.");
}
@ -72,20 +75,6 @@ namespace bayesnet {
{
return nodes;
}
void Network::buildNetwork()
{
// Add features as nodes to the network
for (int i = 0; i < features.size(); ++i) {
addNode(features[i], *max_element(dataset[features[i]].begin(), dataset[features[i]].end()) + 1);
}
// Add class as node to the network
addNode(className, *max_element(dataset[className].begin(), dataset[className].end()) + 1);
// Add edges from class to features => naive Bayes
for (auto feature : features) {
addEdge(className, feature);
}
addEdge("petalwidth", "petallength");
}
void Network::fit(const vector<vector<int>>& dataset, const vector<int>& labels, const vector<string>& featureNames, const string& className)
{
features = featureNames;
@ -95,7 +84,6 @@ namespace bayesnet {
this->dataset[featureNames[i]] = dataset[i];
}
this->dataset[className] = labels;
buildNetwork();
estimateParameters();
}
@ -128,4 +116,82 @@ namespace bayesnet {
node->setCPT(cpt);
}
}
pair<int, double> Network::predict_sample(const vector<int>& sample)
{
// Ensure the sample size is equal to the number of features
if (sample.size() != features.size()) {
throw std::invalid_argument("Sample size (" + to_string(sample.size()) +
") does not match the number of features (" + to_string(features.size()) + ")");
}
// Map the feature values to their corresponding nodes
map<string, int> featureValues;
for (int i = 0; i < features.size(); ++i) {
featureValues[features[i]] = sample[i];
}
// For each possible class, calculate the posterior probability
Node* classNode = nodes[className];
int numClassStates = classNode->getNumStates();
std::vector<double> classProbabilities(numClassStates, 0.0);
for (int classState = 0; classState < numClassStates; ++classState) {
// Start with the prior probability of the class
classProbabilities[classState] = classNode->getCPT()[classState].item<double>();
// Multiply by the likelihood of each feature given the class
for (auto& pair : nodes) {
if (pair.first != className) {
Node* node = pair.second;
int featureValue = featureValues[pair.first];
// We use the class as the parent state to index into the CPT
classProbabilities[classState] *= node->getCPT()[classState][featureValue].item<double>();
}
}
}
// Find the class with the maximum posterior probability
auto maxElem = std::max_element(classProbabilities.begin(), classProbabilities.end());
int predictedClass = std::distance(classProbabilities.begin(), maxElem);
double maxProbability = *maxElem;
return std::make_pair(predictedClass, maxProbability);
}
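The double returned alongside the predicted class is the unnormalized joint score (class prior times the product of the per-feature likelihoods), not a posterior that sums to 1 over the classes. If a normalized probability were wanted, the scores could be rescaled before taking the maximum; a minimal sketch, not part of this commit, reusing classProbabilities from above:

    // Hedged sketch: turn the raw scores into a posterior distribution.
    // Requires <numeric> for std::accumulate.
    double total = std::accumulate(classProbabilities.begin(), classProbabilities.end(), 0.0);
    if (total > 0.0) {
        for (double& p : classProbabilities) {
            p /= total;
        }
    }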
vector<int> Network::predict(const vector<vector<int>>& samples)
{
vector<int> predictions;
vector<int> sample;
for (int row = 0; row < samples[0].size(); ++row) {
sample.clear();
for (int col = 0; col < samples.size(); ++col) {
sample.push_back(samples[col][row]);
}
predictions.push_back(predict_sample(sample).first);
}
return predictions;
}
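Note that predict (and predict_proba below) expect the samples in the same column-major layout produced by discretize, i.e. samples[feature][instance]. A short usage sketch, assuming Xd, y and network come from the pipeline in the sample program:

    // Hedged usage sketch: Xd is indexed [feature][instance], y is [instance].
    vector<int> y_pred = network.predict(Xd);
    bool firstHit = (y_pred[0] == y[0]); // was instance 0 assigned its true class?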
vector<pair<int, double>> Network::predict_proba(const vector<vector<int>>& samples)
{
vector<pair<int, double>> predictions;
vector<int> sample;
for (int row = 0; row < samples[0].size(); ++row) {
sample.clear();
for (int col = 0; col < samples.size(); ++col) {
sample.push_back(samples[col][row]);
}
predictions.push_back(predict_sample(sample));
}
return predictions;
}
double Network::score(const vector<vector<int>>& samples, const vector<int>& labels)
{
vector<int> y_pred = predict(samples);
int correct = 0;
for (int i = 0; i < y_pred.size(); ++i) {
if (y_pred[i] == labels[i]) {
correct++;
}
}
return (double)correct / y_pred.size();
}
}

View File

@ -15,6 +15,7 @@ namespace bayesnet {
string className;
int laplaceSmoothing;
bool isCyclic(const std::string&, std::unordered_set<std::string>&, std::unordered_set<std::string>&);
pair<int, double> predict_sample(const vector<int>&);
public:
Network();
Network(int);
@ -24,9 +25,11 @@ namespace bayesnet {
map<string, Node*>& getNodes();
void fit(const vector<vector<int>>&, const vector<int>&, const vector<string>&, const string&);
void estimateParameters();
void buildNetwork();
void setRoot(string);
Node* getRoot();
vector<int> predict(const vector<vector<int>>&);
vector<pair<int, double>> predict_proba(const vector<vector<int>>&);
double score(const vector<vector<int>>&, const vector<int>&);
};
}
#endif

View File

@ -41,6 +41,10 @@ namespace bayesnet {
{
return numStates;
}
void Node::setNumStates(int numStates)
{
this->numStates = numStates;
}
torch::Tensor& Node::getCPT()
{
return cpt;

View File

@ -27,6 +27,7 @@ namespace bayesnet {
torch::Tensor& getCPT();
void setCPT(const torch::Tensor&);
int getNumStates() const;
void setNumStates(int);
int getId() const { return id; }
};
}