First commit
This commit is contained in:
45
libsvm-3.36/.github/workflows/wheel.yml
vendored
Normal file
45
libsvm-3.36/.github/workflows/wheel.yml
vendored
Normal file
@@ -0,0 +1,45 @@
|
||||
name: Build wheels
|
||||
|
||||
on:
|
||||
# on new tag
|
||||
push:
|
||||
tags:
|
||||
- "*"
|
||||
|
||||
# manually trigger
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
build_wheels:
|
||||
name: Build wheels on ${{ matrix.os }}
|
||||
runs-on: ${{ matrix.os }}
|
||||
strategy:
|
||||
matrix:
|
||||
os: [windows-2022, macos-13]
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
|
||||
- name: Set MacOS compiler
|
||||
if: runner.os == 'macOS'
|
||||
run: |
|
||||
brew install gcc@13;
|
||||
echo "CXX=gcc-13" >> $GITHUB_ENV
|
||||
|
||||
- name: Build wheels
|
||||
uses: pypa/cibuildwheel@v2.10.2
|
||||
env:
|
||||
# don't build for PyPython and windows 32-bit
|
||||
CIBW_SKIP: pp* *win32*
|
||||
# force compiler on macOS
|
||||
CXX: ${{ env.CXX }}
|
||||
CC: ${{ env.CXX }}
|
||||
with:
|
||||
package-dir: ./python
|
||||
output-dir: ./python/wheelhouse
|
||||
|
||||
- name: Upload a Build Artifact
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: wheels-${{ matrix.os }}
|
||||
path: ./python/wheelhouse
|
31
libsvm-3.36/COPYRIGHT
Normal file
31
libsvm-3.36/COPYRIGHT
Normal file
@@ -0,0 +1,31 @@
|
||||
|
||||
Copyright (c) 2000-2023 Chih-Chung Chang and Chih-Jen Lin
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions
|
||||
are met:
|
||||
|
||||
1. Redistributions of source code must retain the above copyright
|
||||
notice, this list of conditions and the following disclaimer.
|
||||
|
||||
2. Redistributions in binary form must reproduce the above copyright
|
||||
notice, this list of conditions and the following disclaimer in the
|
||||
documentation and/or other materials provided with the distribution.
|
||||
|
||||
3. Neither name of copyright holders nor the names of its contributors
|
||||
may be used to endorse or promote products derived from this software
|
||||
without specific prior written permission.
|
||||
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR
|
||||
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
|
||||
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
|
||||
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
|
||||
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
|
||||
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
|
||||
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
2256
libsvm-3.36/FAQ.html
Normal file
2256
libsvm-3.36/FAQ.html
Normal file
File diff suppressed because it is too large
Load Diff
28
libsvm-3.36/Makefile
Normal file
28
libsvm-3.36/Makefile
Normal file
@@ -0,0 +1,28 @@
|
||||
CXX ?= g++
|
||||
CFLAGS = -Wall -Wconversion -O3 -fPIC
|
||||
SHVER = 4
|
||||
OS = $(shell uname)
|
||||
ifeq ($(OS),Darwin)
|
||||
SHARED_LIB_FLAG = -dynamiclib -Wl,-install_name,libsvm.so.$(SHVER)
|
||||
else
|
||||
SHARED_LIB_FLAG = -shared -Wl,-soname,libsvm.so.$(SHVER)
|
||||
endif
|
||||
|
||||
# Uncomment the following lines to enable parallelization with OpenMP
|
||||
# CFLAGS += -fopenmp
|
||||
# SHARED_LIB_FLAG += -fopenmp
|
||||
|
||||
all: svm-train svm-predict svm-scale
|
||||
|
||||
lib: svm.o
|
||||
$(CXX) $(SHARED_LIB_FLAG) svm.o -o libsvm.so.$(SHVER)
|
||||
svm-predict: svm-predict.c svm.o
|
||||
$(CXX) $(CFLAGS) svm-predict.c svm.o -o svm-predict -lm
|
||||
svm-train: svm-train.c svm.o
|
||||
$(CXX) $(CFLAGS) svm-train.c svm.o -o svm-train -lm
|
||||
svm-scale: svm-scale.c
|
||||
$(CXX) $(CFLAGS) svm-scale.c -o svm-scale
|
||||
svm.o: svm.cpp svm.h
|
||||
$(CXX) $(CFLAGS) -c svm.cpp
|
||||
clean:
|
||||
rm -f *~ svm.o svm-train svm-predict svm-scale libsvm.so.$(SHVER)
|
36
libsvm-3.36/Makefile.win
Normal file
36
libsvm-3.36/Makefile.win
Normal file
@@ -0,0 +1,36 @@
|
||||
#You must ensure nmake.exe, cl.exe, link.exe are in system path.
|
||||
#VCVARS64.bat
|
||||
#Under dosbox prompt
|
||||
#nmake -f Makefile.win
|
||||
|
||||
##########################################
|
||||
CXX = cl.exe
|
||||
CFLAGS = /nologo /O2 /EHsc /I. /D _WIN64 /D _CRT_SECURE_NO_DEPRECATE
|
||||
TARGET = windows
|
||||
|
||||
# Uncomment the following lines to enable parallelization with OpenMP
|
||||
# CFLAGS = /nologo /O2 /EHsc /I. /D _WIN64 /D _CRT_SECURE_NO_DEPRECATE /openmp
|
||||
|
||||
all: $(TARGET)\svm-train.exe $(TARGET)\svm-predict.exe $(TARGET)\svm-scale.exe $(TARGET)\svm-toy.exe lib
|
||||
|
||||
$(TARGET)\svm-predict.exe: svm.h svm-predict.c svm.obj
|
||||
$(CXX) $(CFLAGS) svm-predict.c svm.obj -Fe$(TARGET)\svm-predict.exe
|
||||
|
||||
$(TARGET)\svm-train.exe: svm.h svm-train.c svm.obj
|
||||
$(CXX) $(CFLAGS) svm-train.c svm.obj -Fe$(TARGET)\svm-train.exe
|
||||
|
||||
$(TARGET)\svm-scale.exe: svm.h svm-scale.c
|
||||
$(CXX) $(CFLAGS) svm-scale.c -Fe$(TARGET)\svm-scale.exe
|
||||
|
||||
$(TARGET)\svm-toy.exe: svm.h svm.obj svm-toy\windows\svm-toy.cpp
|
||||
$(CXX) $(CFLAGS) svm-toy\windows\svm-toy.cpp svm.obj user32.lib gdi32.lib comdlg32.lib -Fe$(TARGET)\svm-toy.exe
|
||||
|
||||
svm.obj: svm.cpp svm.h
|
||||
$(CXX) $(CFLAGS) -c svm.cpp
|
||||
|
||||
lib: svm.cpp svm.h svm.def
|
||||
$(CXX) $(CFLAGS) -LD svm.cpp -Fe$(TARGET)\libsvm -link -DEF:svm.def
|
||||
|
||||
clean:
|
||||
-erase /Q *.obj $(TARGET)\*.exe $(TARGET)\*.dll $(TARGET)\*.exp $(TARGET)\*.lib
|
||||
|
812
libsvm-3.36/README
Normal file
812
libsvm-3.36/README
Normal file
@@ -0,0 +1,812 @@
|
||||
Libsvm is a simple, easy-to-use, and efficient software for SVM
|
||||
classification and regression. It solves C-SVM classification, nu-SVM
|
||||
classification, one-class-SVM, epsilon-SVM regression, and nu-SVM
|
||||
regression. It also provides an automatic model selection tool for
|
||||
C-SVM classification. This document explains the use of libsvm.
|
||||
|
||||
Libsvm is available at
|
||||
http://www.csie.ntu.edu.tw/~cjlin/libsvm
|
||||
Please read the COPYRIGHT file before using libsvm.
|
||||
|
||||
Table of Contents
|
||||
=================
|
||||
|
||||
- Quick Start
|
||||
- Installation and Data Format
|
||||
- `svm-train' Usage
|
||||
- `svm-predict' Usage
|
||||
- `svm-scale' Usage
|
||||
- Tips on Practical Use
|
||||
- Examples
|
||||
- Precomputed Kernels
|
||||
- Library Usage
|
||||
- Java Version
|
||||
- Building Windows Binaries
|
||||
- Additional Tools: Sub-sampling, Parameter Selection, Format checking, etc.
|
||||
- MATLAB/OCTAVE Interface
|
||||
- Python Interface
|
||||
- Additional Information
|
||||
|
||||
Quick Start
|
||||
===========
|
||||
|
||||
If you are new to SVM and if the data is not large, please go to
|
||||
`tools' directory and use easy.py after installation. It does
|
||||
everything automatic -- from data scaling to parameter selection.
|
||||
|
||||
Usage: easy.py training_file [testing_file]
|
||||
|
||||
More information about parameter selection can be found in
|
||||
`tools/README.'
|
||||
|
||||
Installation and Data Format
|
||||
============================
|
||||
|
||||
On Unix systems, type `make' to build the `svm-train', `svm-predict',
|
||||
and `svm-scale' programs. Run them without arguments to show the
|
||||
usages of them.
|
||||
|
||||
On other systems, consult `Makefile' to build them (e.g., see
|
||||
'Building Windows binaries' in this file) or use the pre-built
|
||||
binaries (Windows binaries are in the directory `windows').
|
||||
|
||||
The format of training and testing data files is:
|
||||
|
||||
<label> <index1>:<value1> <index2>:<value2> ...
|
||||
.
|
||||
.
|
||||
.
|
||||
|
||||
Each line contains an instance and is ended by a '\n' character.
|
||||
While there can be no feature values for a sample (i.e., a row of all zeros),
|
||||
the <label> column must not be empty. For <label> in the training set,
|
||||
we have the following cases.
|
||||
|
||||
* classification: <label> is an integer indicating the class label
|
||||
(multi-class is supported).
|
||||
|
||||
* For regression, <label> is the target value which can be any real
|
||||
number.
|
||||
|
||||
* For one-class SVM, <label> has no effect and can be any number.
|
||||
|
||||
In the test set, <label> is used only to calculate accuracy or
|
||||
errors. If it's unknown, any number is fine. For one-class SVM, if
|
||||
non-outliers/outliers are known, their labels in the test file must be
|
||||
+1/-1 for evaluation. The <label> column is read using strtod() provided by
|
||||
the C standard library. Therefore, <label> values that are numerically
|
||||
equivalent will be treated the same (e.g., +01e0 and 1 count as the same class).
|
||||
|
||||
The pair <index>:<value> gives a feature (attribute) value: <index> is
|
||||
an integer starting from 1 and <value> is a real number. The only
|
||||
exception is the precomputed kernel, where <index> starts from 0; see
|
||||
the section of precomputed kernels. Indices must be in ASCENDING
|
||||
order.
|
||||
|
||||
A sample classification data included in this package is
|
||||
`heart_scale'. To check if your data is in a correct form, use
|
||||
`tools/checkdata.py' (details in `tools/README').
|
||||
|
||||
Type `svm-train heart_scale', and the program will read the training
|
||||
data and output the model file `heart_scale.model'. If you have a test
|
||||
set called heart_scale.t, then type `svm-predict heart_scale.t
|
||||
heart_scale.model output' to see the prediction accuracy. The `output'
|
||||
file contains the predicted class labels.
|
||||
|
||||
For classification, if training data are in only one class (i.e., all
|
||||
labels are the same), then `svm-train' issues a warning message:
|
||||
`Warning: training data in only one class. See README for details,'
|
||||
which means the training data is very unbalanced. The label in the
|
||||
training data is directly returned when testing.
|
||||
|
||||
There are some other useful programs in this package.
|
||||
|
||||
svm-scale:
|
||||
|
||||
This is a tool for scaling input data file.
|
||||
|
||||
svm-toy:
|
||||
|
||||
This is a simple graphical interface which shows how SVM
|
||||
separate data in a plane. You can click in the window to
|
||||
draw data points. Use "change" button to choose class
|
||||
1, 2 or 3 (i.e., up to three classes are supported), "load"
|
||||
button to load data from a file, "save" button to save data to
|
||||
a file, "run" button to obtain an SVM model, and "clear"
|
||||
button to clear the window.
|
||||
|
||||
You can enter options in the bottom of the window, the syntax of
|
||||
options is the same as `svm-train'.
|
||||
|
||||
Note that "load" and "save" consider dense data format both in
|
||||
classification and the regression cases. For classification,
|
||||
each data point has one label (the color) that must be 1, 2,
|
||||
or 3 and two attributes (x-axis and y-axis values) in
|
||||
[0,1). For regression, each data point has one target value
|
||||
(y-axis) and one attribute (x-axis values) in [0, 1).
|
||||
|
||||
Type `make' in respective directories to build them.
|
||||
|
||||
You need Qt library to build the Qt version.
|
||||
(available from http://www.trolltech.com)
|
||||
|
||||
You need GTK+ library to build the GTK version.
|
||||
(available from http://www.gtk.org)
|
||||
|
||||
The pre-built Windows binaries are in the `windows'
|
||||
directory. We use Visual C++ on a 64-bit machine.
|
||||
|
||||
`svm-train' Usage
|
||||
=================
|
||||
|
||||
Usage: svm-train [options] training_set_file [model_file]
|
||||
options:
|
||||
-s svm_type : set type of SVM (default 0)
|
||||
0 -- C-SVC (multi-class classification)
|
||||
1 -- nu-SVC (multi-class classification)
|
||||
2 -- one-class SVM
|
||||
3 -- epsilon-SVR (regression)
|
||||
4 -- nu-SVR (regression)
|
||||
-t kernel_type : set type of kernel function (default 2)
|
||||
0 -- linear: u'*v
|
||||
1 -- polynomial: (gamma*u'*v + coef0)^degree
|
||||
2 -- radial basis function: exp(-gamma*|u-v|^2)
|
||||
3 -- sigmoid: tanh(gamma*u'*v + coef0)
|
||||
4 -- precomputed kernel (kernel values in training_set_file)
|
||||
-d degree : set degree in kernel function (default 3)
|
||||
-g gamma : set gamma in kernel function (default 1/num_features)
|
||||
-r coef0 : set coef0 in kernel function (default 0)
|
||||
-c cost : set the parameter C of C-SVC, epsilon-SVR, and nu-SVR (default 1)
|
||||
-n nu : set the parameter nu of nu-SVC, one-class SVM, and nu-SVR (default 0.5)
|
||||
-p epsilon : set the epsilon in loss function of epsilon-SVR (default 0.1)
|
||||
-m cachesize : set cache memory size in MB (default 100)
|
||||
-e epsilon : set tolerance of termination criterion (default 0.001)
|
||||
-h shrinking : whether to use the shrinking heuristics, 0 or 1 (default 1)
|
||||
-b probability_estimates : whether to train a model for probability estimates, 0 or 1 (default 0)
|
||||
-wi weight : set the parameter C of class i to weight*C, for C-SVC (default 1)
|
||||
-v n: n-fold cross validation mode
|
||||
-q : quiet mode (no outputs)
|
||||
|
||||
|
||||
option -v randomly splits the data into n parts and calculates cross
|
||||
validation accuracy/mean squared error on them.
|
||||
|
||||
See libsvm FAQ for the meaning of outputs.
|
||||
|
||||
`svm-predict' Usage
|
||||
===================
|
||||
|
||||
Usage: svm-predict [options] test_file model_file output_file
|
||||
options:
|
||||
-b probability_estimates: whether to predict probability estimates, 0 or 1 (default 0).
|
||||
|
||||
model_file is the model file generated by svm-train.
|
||||
test_file is the test data you want to predict.
|
||||
svm-predict will produce output in the output_file.
|
||||
|
||||
`svm-scale' Usage
|
||||
=================
|
||||
|
||||
Usage: svm-scale [options] data_filename
|
||||
options:
|
||||
-l lower : x scaling lower limit (default -1)
|
||||
-u upper : x scaling upper limit (default +1)
|
||||
-y y_lower y_upper : y scaling limits (default: no y scaling)
|
||||
-s save_filename : save scaling parameters to save_filename
|
||||
-r restore_filename : restore scaling parameters from restore_filename
|
||||
|
||||
See 'Examples' in this file for examples.
|
||||
|
||||
Tips on Practical Use
|
||||
=====================
|
||||
|
||||
* Scale your data. For example, scale each attribute to [0,1] or [-1,+1].
|
||||
* For C-SVC, consider using the model selection tool in the tools directory.
|
||||
* nu in nu-SVC/one-class-SVM/nu-SVR approximates the fraction of training
|
||||
errors and support vectors.
|
||||
* If data for classification are unbalanced (e.g. many positive and
|
||||
few negative), try different penalty parameters C by -wi (see
|
||||
examples below).
|
||||
* Specify larger cache size (i.e., larger -m) for huge problems.
|
||||
|
||||
Examples
|
||||
========
|
||||
|
||||
> svm-scale -l -1 -u 1 -s range train > train.scale
|
||||
> svm-scale -r range test > test.scale
|
||||
|
||||
Scale each feature of the training data to be in [-1,1]. Scaling
|
||||
factors are stored in the file range and then used for scaling the
|
||||
test data.
|
||||
|
||||
> svm-train -s 0 -c 5 -t 2 -g 0.5 -e 0.1 data_file
|
||||
|
||||
Train a classifier with RBF kernel exp(-0.5|u-v|^2), C=5, and
|
||||
stopping tolerance 0.1.
|
||||
|
||||
> svm-train -s 3 -p 0.1 -t 0 data_file
|
||||
|
||||
Solve SVM regression with linear kernel u'v and epsilon=0.1
|
||||
in the loss function.
|
||||
|
||||
> svm-train -c 10 -w1 1 -w-2 5 -w4 2 data_file
|
||||
|
||||
Train a classifier with penalty 10 = 1 * 10 for class 1, penalty 50 =
|
||||
5 * 10 for class -2, and penalty 20 = 2 * 10 for class 4.
|
||||
|
||||
> svm-train -s 0 -c 100 -g 0.1 -v 5 data_file
|
||||
|
||||
Do five-fold cross validation for the classifier using
|
||||
the parameters C = 100 and gamma = 0.1
|
||||
|
||||
> svm-train -s 0 -b 1 data_file
|
||||
> svm-predict -b 1 test_file data_file.model output_file
|
||||
|
||||
Obtain a model with probability information and predict test data with
|
||||
probability estimates
|
||||
|
||||
Precomputed Kernels
|
||||
===================
|
||||
|
||||
Users may precompute kernel values and input them as training and
|
||||
testing files. Then libsvm does not need the original
|
||||
training/testing sets.
|
||||
|
||||
Assume there are L training instances x1, ..., xL and.
|
||||
Let K(x, y) be the kernel
|
||||
value of two instances x and y. The input formats
|
||||
are:
|
||||
|
||||
New training instance for xi:
|
||||
|
||||
<label> 0:i 1:K(xi,x1) ... L:K(xi,xL)
|
||||
|
||||
New testing instance for any x:
|
||||
|
||||
<label> 0:? 1:K(x,x1) ... L:K(x,xL)
|
||||
|
||||
That is, in the training file the first column must be the "ID" of
|
||||
xi. In testing, ? can be any value.
|
||||
|
||||
All kernel values including ZEROs must be explicitly provided. Any
|
||||
permutation or random subsets of the training/testing files are also
|
||||
valid (see examples below).
|
||||
|
||||
Note: the format is slightly different from the precomputed kernel
|
||||
package released in libsvmtools earlier.
|
||||
|
||||
Examples:
|
||||
|
||||
Assume the original training data has three four-feature
|
||||
instances and testing data has one instance:
|
||||
|
||||
15 1:1 2:1 3:1 4:1
|
||||
45 2:3 4:3
|
||||
25 3:1
|
||||
|
||||
15 1:1 3:1
|
||||
|
||||
If the linear kernel is used, we have the following new
|
||||
training/testing sets:
|
||||
|
||||
15 0:1 1:4 2:6 3:1
|
||||
45 0:2 1:6 2:18 3:0
|
||||
25 0:3 1:1 2:0 3:1
|
||||
|
||||
15 0:? 1:2 2:0 3:1
|
||||
|
||||
? can be any value.
|
||||
|
||||
Any subset of the above training file is also valid. For example,
|
||||
|
||||
25 0:3 1:1 2:0 3:1
|
||||
45 0:2 1:6 2:18 3:0
|
||||
|
||||
implies that the kernel matrix is
|
||||
|
||||
[K(2,2) K(2,3)] = [18 0]
|
||||
[K(3,2) K(3,3)] = [0 1]
|
||||
|
||||
Library Usage
|
||||
=============
|
||||
|
||||
These functions and structures are declared in the header file
|
||||
`svm.h'. You need to #include "svm.h" in your C/C++ source files and
|
||||
link your program with `svm.cpp'. You can see `svm-train.c' and
|
||||
`svm-predict.c' for examples showing how to use them. We define
|
||||
LIBSVM_VERSION and declare `extern int libsvm_version;' in svm.h, so
|
||||
you can check the version number.
|
||||
|
||||
Before you classify test data, you need to construct an SVM model
|
||||
(`svm_model') using training data. A model can also be saved in
|
||||
a file for later use. Once an SVM model is available, you can use it
|
||||
to classify new data.
|
||||
|
||||
- Function: struct svm_model *svm_train(const struct svm_problem *prob,
|
||||
const struct svm_parameter *param);
|
||||
|
||||
This function constructs and returns an SVM model according to
|
||||
the given training data and parameters.
|
||||
|
||||
struct svm_problem describes the problem:
|
||||
|
||||
struct svm_problem
|
||||
{
|
||||
int l;
|
||||
double *y;
|
||||
struct svm_node **x;
|
||||
};
|
||||
|
||||
where `l' is the number of training data, and `y' is an array containing
|
||||
their target values. (integers in classification, real numbers in
|
||||
regression) `x' is an array of pointers, each of which points to a sparse
|
||||
representation (array of svm_node) of one training vector.
|
||||
|
||||
For example, if we have the following training data:
|
||||
|
||||
LABEL ATTR1 ATTR2 ATTR3 ATTR4 ATTR5
|
||||
----- ----- ----- ----- ----- -----
|
||||
1 0 0.1 0.2 0 0
|
||||
2 0 0.1 0.3 -1.2 0
|
||||
1 0.4 0 0 0 0
|
||||
2 0 0.1 0 1.4 0.5
|
||||
3 -0.1 -0.2 0.1 1.1 0.1
|
||||
|
||||
then the components of svm_problem are:
|
||||
|
||||
l = 5
|
||||
|
||||
y -> 1 2 1 2 3
|
||||
|
||||
x -> [ ] -> (2,0.1) (3,0.2) (-1,?)
|
||||
[ ] -> (2,0.1) (3,0.3) (4,-1.2) (-1,?)
|
||||
[ ] -> (1,0.4) (-1,?)
|
||||
[ ] -> (2,0.1) (4,1.4) (5,0.5) (-1,?)
|
||||
[ ] -> (1,-0.1) (2,-0.2) (3,0.1) (4,1.1) (5,0.1) (-1,?)
|
||||
|
||||
where (index,value) is stored in the structure `svm_node':
|
||||
|
||||
struct svm_node
|
||||
{
|
||||
int index;
|
||||
double value;
|
||||
};
|
||||
|
||||
index = -1 indicates the end of one vector. Note that indices must
|
||||
be in ASCENDING order.
|
||||
|
||||
struct svm_parameter describes the parameters of an SVM model:
|
||||
|
||||
struct svm_parameter
|
||||
{
|
||||
int svm_type;
|
||||
int kernel_type;
|
||||
int degree; /* for poly */
|
||||
double gamma; /* for poly/rbf/sigmoid */
|
||||
double coef0; /* for poly/sigmoid */
|
||||
|
||||
/* these are for training only */
|
||||
double cache_size; /* in MB */
|
||||
double eps; /* stopping criteria */
|
||||
double C; /* for C_SVC, EPSILON_SVR, and NU_SVR */
|
||||
int nr_weight; /* for C_SVC */
|
||||
int *weight_label; /* for C_SVC */
|
||||
double* weight; /* for C_SVC */
|
||||
double nu; /* for NU_SVC, ONE_CLASS, and NU_SVR */
|
||||
double p; /* for EPSILON_SVR */
|
||||
int shrinking; /* use the shrinking heuristics */
|
||||
int probability; /* do probability estimates */
|
||||
};
|
||||
|
||||
svm_type can be one of C_SVC, NU_SVC, ONE_CLASS, EPSILON_SVR, NU_SVR.
|
||||
|
||||
C_SVC: C-SVM classification
|
||||
NU_SVC: nu-SVM classification
|
||||
ONE_CLASS: one-class-SVM
|
||||
EPSILON_SVR: epsilon-SVM regression
|
||||
NU_SVR: nu-SVM regression
|
||||
|
||||
kernel_type can be one of LINEAR, POLY, RBF, SIGMOID.
|
||||
|
||||
LINEAR: u'*v
|
||||
POLY: (gamma*u'*v + coef0)^degree
|
||||
RBF: exp(-gamma*|u-v|^2)
|
||||
SIGMOID: tanh(gamma*u'*v + coef0)
|
||||
PRECOMPUTED: kernel values in training_set_file
|
||||
|
||||
cache_size is the size of the kernel cache, specified in megabytes.
|
||||
C is the cost of constraints violation.
|
||||
eps is the stopping criterion. (we usually use 0.00001 in nu-SVC,
|
||||
0.001 in others). nu is the parameter in nu-SVM, nu-SVR, and
|
||||
one-class-SVM. p is the epsilon in epsilon-insensitive loss function
|
||||
of epsilon-SVM regression. shrinking = 1 means shrinking is conducted;
|
||||
= 0 otherwise. probability = 1 means model with probability
|
||||
information is obtained; = 0 otherwise.
|
||||
|
||||
nr_weight, weight_label, and weight are used to change the penalty
|
||||
for some classes (If the weight for a class is not changed, it is
|
||||
set to 1). This is useful for training classifier using unbalanced
|
||||
input data or with asymmetric misclassification cost.
|
||||
|
||||
nr_weight is the number of elements in the array weight_label and
|
||||
weight. Each weight[i] corresponds to weight_label[i], meaning that
|
||||
the penalty of class weight_label[i] is scaled by a factor of weight[i].
|
||||
|
||||
If you do not want to change penalty for any of the classes,
|
||||
just set nr_weight to 0.
|
||||
|
||||
*NOTE* Because svm_model contains pointers to svm_problem, you can
|
||||
not free the memory used by svm_problem if you are still using the
|
||||
svm_model produced by svm_train().
|
||||
|
||||
*NOTE* To avoid wrong parameters, svm_check_parameter() should be
|
||||
called before svm_train().
|
||||
|
||||
struct svm_model stores the model obtained from the training procedure.
|
||||
It is not recommended to directly access entries in this structure.
|
||||
Programmers should use the interface functions to get the values.
|
||||
|
||||
struct svm_model
|
||||
{
|
||||
struct svm_parameter param; /* parameter */
|
||||
int nr_class; /* number of classes, = 2 in regression/one class svm */
|
||||
int l; /* total #SV */
|
||||
struct svm_node **SV; /* SVs (SV[l]) */
|
||||
double **sv_coef; /* coefficients for SVs in decision functions (sv_coef[k-1][l]) */
|
||||
double *rho; /* constants in decision functions (rho[k*(k-1)/2]) */
|
||||
double *probA; /* pairwise probability information */
|
||||
double *probB;
|
||||
double *prob_density_marks; /*probability information for ONE_CLASS*/
|
||||
int *sv_indices; /* sv_indices[0,...,nSV-1] are values in [1,...,num_traning_data] to indicate SVs in the training set */
|
||||
|
||||
/* for classification only */
|
||||
|
||||
int *label; /* label of each class (label[k]) */
|
||||
int *nSV; /* number of SVs for each class (nSV[k]) */
|
||||
/* nSV[0] + nSV[1] + ... + nSV[k-1] = l */
|
||||
/* XXX */
|
||||
int free_sv; /* 1 if svm_model is created by svm_load_model*/
|
||||
/* 0 if svm_model is created by svm_train */
|
||||
};
|
||||
|
||||
param describes the parameters used to obtain the model.
|
||||
|
||||
nr_class is the number of classes for classification. It is a
|
||||
non-negative integer with special cases of 0 (no training data at
|
||||
all) and 1 (all training data in one class). For regression and
|
||||
one-class SVM, nr_class = 2.
|
||||
|
||||
l is the number of support vectors. SV and sv_coef are support
|
||||
vectors and the corresponding coefficients, respectively. Assume there are
|
||||
k classes. For data in class j, the corresponding sv_coef includes (k-1) y*alpha vectors,
|
||||
where alpha's are solutions of the following two class problems:
|
||||
1 vs j, 2 vs j, ..., j-1 vs j, j vs j+1, j vs j+2, ..., j vs k
|
||||
and y=1 for the first j-1 vectors, while y=-1 for the remaining k-j
|
||||
vectors. For example, if there are 4 classes, sv_coef and SV are like:
|
||||
|
||||
+-+-+-+--------------------+
|
||||
|1|1|1| |
|
||||
|v|v|v| SVs from class 1 |
|
||||
|2|3|4| |
|
||||
+-+-+-+--------------------+
|
||||
|1|2|2| |
|
||||
|v|v|v| SVs from class 2 |
|
||||
|2|3|4| |
|
||||
+-+-+-+--------------------+
|
||||
|1|2|3| |
|
||||
|v|v|v| SVs from class 3 |
|
||||
|3|3|4| |
|
||||
+-+-+-+--------------------+
|
||||
|1|2|3| |
|
||||
|v|v|v| SVs from class 4 |
|
||||
|4|4|4| |
|
||||
+-+-+-+--------------------+
|
||||
|
||||
See svm_train() for an example of assigning values to sv_coef.
|
||||
|
||||
rho is the bias term (-b). probA and probB are parameters used in
|
||||
probability outputs. If there are k classes, there are k*(k-1)/2
|
||||
binary problems as well as rho, probA, and probB values. They are
|
||||
aligned in the order of binary problems:
|
||||
1 vs 2, 1 vs 3, ..., 1 vs k, 2 vs 3, ..., 2 vs k, ..., k-1 vs k.
|
||||
|
||||
sv_indices[0,...,nSV-1] are values in [1,...,num_traning_data] to
|
||||
indicate support vectors in the training set.
|
||||
|
||||
label contains labels in the training data.
|
||||
|
||||
nSV is the number of support vectors in each class.
|
||||
|
||||
free_sv is a flag used to determine whether the space of SV should
|
||||
be released in free_model_content(struct svm_model*) and
|
||||
free_and_destroy_model(struct svm_model**). If the model is
|
||||
generated by svm_train(), then SV points to data in svm_problem
|
||||
and should not be removed. For example, free_sv is 0 if svm_model
|
||||
is created by svm_train, but is 1 if created by svm_load_model.
|
||||
|
||||
- Function: double svm_predict(const struct svm_model *model,
|
||||
const struct svm_node *x);
|
||||
|
||||
This function does classification or regression on a test vector x
|
||||
given a model.
|
||||
|
||||
For a classification model, the predicted class for x is returned.
|
||||
For a regression model, the function value of x calculated using
|
||||
the model is returned. For an one-class model, +1 or -1 is
|
||||
returned.
|
||||
|
||||
- Function: void svm_cross_validation(const struct svm_problem *prob,
|
||||
const struct svm_parameter *param, int nr_fold, double *target);
|
||||
|
||||
This function conducts cross validation. Data are separated to
|
||||
nr_fold folds. Under given parameters, sequentially each fold is
|
||||
validated using the model from training the remaining. Predicted
|
||||
labels (of all prob's instances) in the validation process are
|
||||
stored in the array called target.
|
||||
|
||||
The format of svm_prob is same as that for svm_train().
|
||||
|
||||
- Function: int svm_get_svm_type(const struct svm_model *model);
|
||||
|
||||
This function gives svm_type of the model. Possible values of
|
||||
svm_type are defined in svm.h.
|
||||
|
||||
- Function: int svm_get_nr_class(const svm_model *model);
|
||||
|
||||
For a classification model, this function gives the number of
|
||||
classes. For a regression or an one-class model, 2 is returned.
|
||||
|
||||
- Function: void svm_get_labels(const svm_model *model, int* label)
|
||||
|
||||
For a classification model, this function outputs the name of
|
||||
labels into an array called label. For regression and one-class
|
||||
models, label is unchanged.
|
||||
|
||||
- Function: void svm_get_sv_indices(const struct svm_model *model, int *sv_indices)
|
||||
|
||||
This function outputs indices of support vectors into an array called sv_indices.
|
||||
The size of sv_indices is the number of support vectors and can be obtained by calling svm_get_nr_sv.
|
||||
Each sv_indices[i] is in the range of [1, ..., num_traning_data].
|
||||
|
||||
- Function: int svm_get_nr_sv(const struct svm_model *model)
|
||||
|
||||
This function gives the number of total support vector.
|
||||
|
||||
- Function: double svm_get_svr_probability(const struct svm_model *model);
|
||||
|
||||
For a regression model with probability information, this function
|
||||
outputs a value sigma > 0. For test data, we consider the
|
||||
probability model: target value = predicted value + z, z: Laplace
|
||||
distribution e^(-|z|/sigma)/(2sigma)
|
||||
|
||||
If the model is not for svr or does not contain required
|
||||
information, 0 is returned.
|
||||
|
||||
- Function: double svm_predict_values(const svm_model *model,
|
||||
const svm_node *x, double* dec_values)
|
||||
|
||||
This function gives decision values on a test vector x given a
|
||||
model, and return the predicted label (classification) or
|
||||
the function value (regression).
|
||||
|
||||
For a classification model with nr_class classes, this function
|
||||
gives nr_class*(nr_class-1)/2 decision values in the array
|
||||
dec_values, where nr_class can be obtained from the function
|
||||
svm_get_nr_class. The order is label[0] vs. label[1], ...,
|
||||
label[0] vs. label[nr_class-1], label[1] vs. label[2], ...,
|
||||
label[nr_class-2] vs. label[nr_class-1], where label can be
|
||||
obtained from the function svm_get_labels. The returned value is
|
||||
the predicted class for x. Note that when nr_class = 1, this
|
||||
function does not give any decision value.
|
||||
|
||||
For a regression model, dec_values[0] and the returned value are
|
||||
both the function value of x calculated using the model. For a
|
||||
one-class model, dec_values[0] is the decision value of x, while
|
||||
the returned value is +1/-1.
|
||||
|
||||
- Function: double svm_predict_probability(const struct svm_model *model,
|
||||
const struct svm_node *x, double* prob_estimates);
|
||||
|
||||
This function does classification or regression on a test vector x
|
||||
given a model with probability information.
|
||||
|
||||
For a classification model with probability information, this
|
||||
function gives nr_class probability estimates in the array
|
||||
prob_estimates. nr_class can be obtained from the function
|
||||
svm_get_nr_class. The class with the highest probability is
|
||||
returned. For one-class SVM, the array prob_estimates contains
|
||||
two elements for probabilities of normal instance/outlier,
|
||||
while for regression, the array is unchanged. For both one-class
|
||||
SVM and regression, the returned value is the same as that of
|
||||
svm_predict.
|
||||
|
||||
- Function: const char *svm_check_parameter(const struct svm_problem *prob,
|
||||
const struct svm_parameter *param);
|
||||
|
||||
This function checks whether the parameters are within the feasible
|
||||
range of the problem. This function should be called before calling
|
||||
svm_train() and svm_cross_validation(). It returns NULL if the
|
||||
parameters are feasible, otherwise an error message is returned.
|
||||
|
||||
- Function: int svm_check_probability_model(const struct svm_model *model);
|
||||
|
||||
This function checks whether the model contains required
|
||||
information to do probability estimates. If so, it returns
|
||||
+1. Otherwise, 0 is returned. This function should be called
|
||||
before calling svm_get_svr_probability and
|
||||
svm_predict_probability.
|
||||
|
||||
- Function: int svm_save_model(const char *model_file_name,
|
||||
const struct svm_model *model);
|
||||
|
||||
This function saves a model to a file; returns 0 on success, or -1
|
||||
if an error occurs.
|
||||
|
||||
- Function: struct svm_model *svm_load_model(const char *model_file_name);
|
||||
|
||||
This function returns a pointer to the model read from the file,
|
||||
or a null pointer if the model could not be loaded.
|
||||
|
||||
- Function: void svm_free_model_content(struct svm_model *model_ptr);
|
||||
|
||||
This function frees the memory used by the entries in a model structure.
|
||||
|
||||
- Function: void svm_free_and_destroy_model(struct svm_model **model_ptr_ptr);
|
||||
|
||||
This function frees the memory used by a model and destroys the model
|
||||
structure. It is equivalent to svm_destroy_model, which
|
||||
is deprecated after version 3.0.
|
||||
|
||||
- Function: void svm_destroy_param(struct svm_parameter *param);
|
||||
|
||||
This function frees the memory used by a parameter set.
|
||||
|
||||
- Function: void svm_set_print_string_function(void (*print_func)(const char *));
|
||||
|
||||
Users can specify their output format by a function. Use
|
||||
svm_set_print_string_function(NULL);
|
||||
for default printing to stdout.
|
||||
|
||||
Please note that this function is not thread-safe. When multiple threads load or
|
||||
use the same dynamic library (for example, libsvm.so.4), they actually share the
|
||||
same memory space of the dynamic library, which results in all threads modifying
|
||||
the same static function pointer, svm_print_string, in svm.cpp when they call this
|
||||
function.
|
||||
|
||||
For example, suppose we have threads A and B. They call this function sequentially
|
||||
and pass their own thread-local print_func into it. After that, they both call (*svm_print_string)(str)
|
||||
once. When the last thread finishes setting it (say B), svm_print_string is set to
|
||||
B.print_func. Now, if thread A wants to access svm_print_string, it is actually
|
||||
accessing B.print_func rather than A.print_func, which is incorrect since we expect
|
||||
to use the functionality of A.print_func.
|
||||
|
||||
Even if A.print_func and B.print_func have identical functionality, it is still risky.
|
||||
Suppose svm_print_string is now set to B.print_func, and B deletes B.print_func after
|
||||
finishing its work. Later, thread A calls svm_print_string, but the address it points to,
which is B.print_func, has already been deleted. This invalid memory access will crash
|
||||
the program. To mitigate this issue, in this example, you should ensure that A.print_func
|
||||
and B.print_func remain valid after threads finish their work. For example, in Python,
|
||||
you can assign them as global variables.
|
||||
|
||||
Java Version
|
||||
============
|
||||
|
||||
The pre-compiled java class archive `libsvm.jar' and its source files are
|
||||
in the java directory. To run the programs, use
|
||||
|
||||
java -classpath libsvm.jar svm_train <arguments>
|
||||
java -classpath libsvm.jar svm_predict <arguments>
|
||||
java -classpath libsvm.jar svm_toy
|
||||
java -classpath libsvm.jar svm_scale <arguments>
|
||||
|
||||
Note that you need Java 1.5 (5.0) or above to run it.
|
||||
|
||||
You may need to add Java runtime library (like classes.zip) to the classpath.
|
||||
You may need to increase maximum Java heap size.
|
||||
|
||||
Library usages are similar to the C version. These functions are available:
|
||||
|
||||
public class svm {
|
||||
public static final int LIBSVM_VERSION=336;
|
||||
public static svm_model svm_train(svm_problem prob, svm_parameter param);
|
||||
public static void svm_cross_validation(svm_problem prob, svm_parameter param, int nr_fold, double[] target);
|
||||
public static int svm_get_svm_type(svm_model model);
|
||||
public static int svm_get_nr_class(svm_model model);
|
||||
public static void svm_get_labels(svm_model model, int[] label);
|
||||
public static void svm_get_sv_indices(svm_model model, int[] indices);
|
||||
public static int svm_get_nr_sv(svm_model model);
|
||||
public static double svm_get_svr_probability(svm_model model);
|
||||
public static double svm_predict_values(svm_model model, svm_node[] x, double[] dec_values);
|
||||
public static double svm_predict(svm_model model, svm_node[] x);
|
||||
public static double svm_predict_probability(svm_model model, svm_node[] x, double[] prob_estimates);
|
||||
public static void svm_save_model(String model_file_name, svm_model model) throws IOException
|
||||
public static svm_model svm_load_model(String model_file_name) throws IOException
|
||||
public static String svm_check_parameter(svm_problem prob, svm_parameter param);
|
||||
public static int svm_check_probability_model(svm_model model);
|
||||
public static void svm_set_print_string_function(svm_print_interface print_func);
|
||||
}
|
||||
|
||||
The library is in the "libsvm" package.
|
||||
Note that in Java version, svm_node[] is not ended with a node whose index = -1.
|
||||
|
||||
Users can specify their output format by
|
||||
|
||||
your_print_func = new svm_print_interface()
|
||||
{
|
||||
public void print(String s)
|
||||
{
|
||||
// your own format
|
||||
}
|
||||
};
|
||||
svm.svm_set_print_string_function(your_print_func);
|
||||
|
||||
However, similar to the C version, it is not thread-safe. Please check the
|
||||
usage of C version svm_set_print_string_function() for details.
|
||||
|
||||
Building Windows Binaries
|
||||
=========================
|
||||
|
||||
Windows binaries are available in the directory `windows'. To re-build
|
||||
them via Visual C++, use the following steps:
|
||||
|
||||
1. Open a DOS command box (or Visual Studio Command Prompt) and change
|
||||
to libsvm directory. If environment variables of VC++ have not been
|
||||
set, type
|
||||
|
||||
"C:\Program Files (x86)\Microsoft Visual Studio\2019\Community\VC\Auxiliary\Build\vcvars64.bat"
|
||||
|
||||
You may have to modify the above command according to which version of
VC++ or where it is installed.
|
||||
|
||||
2. Type
|
||||
|
||||
nmake -f Makefile.win clean all
|
||||
|
||||
3. (optional) To build shared library libsvm.dll, type
|
||||
|
||||
nmake -f Makefile.win lib
|
||||
|
||||
4. (optional) To build 32-bit windows binaries, you must
|
||||
(1) Setup "C:\Program Files (x86)\Microsoft Visual Studio\2019\Community\VC\Auxiliary\Build\vcvars32.bat" instead of vcvars64.bat
|
||||
(2) Change CFLAGS in Makefile.win: /D _WIN64 to /D _WIN32
|
||||
|
||||
Another way is to build them from Visual C++ environment. See details
|
||||
in libsvm FAQ.
|
||||
|
||||
- Additional Tools: Sub-sampling, Parameter Selection, Format checking, etc.
|
||||
============================================================================
|
||||
|
||||
See the README file in the tools directory.
|
||||
|
||||
MATLAB/OCTAVE Interface
|
||||
=======================
|
||||
|
||||
Please check the file README in the directory `matlab'.
|
||||
|
||||
Python Interface
|
||||
================
|
||||
|
||||
See the README file in python directory.
|
||||
|
||||
Additional Information
|
||||
======================
|
||||
|
||||
If you find LIBSVM helpful, please cite it as
|
||||
|
||||
Chih-Chung Chang and Chih-Jen Lin, LIBSVM : a library for support
|
||||
vector machines. ACM Transactions on Intelligent Systems and
|
||||
Technology, 2:27:1--27:27, 2011. Software available at
|
||||
http://www.csie.ntu.edu.tw/~cjlin/libsvm
|
||||
|
||||
LIBSVM implementation document is available at
|
||||
http://www.csie.ntu.edu.tw/~cjlin/papers/libsvm.pdf
|
||||
|
||||
For any questions and comments, please email cjlin@csie.ntu.edu.tw
|
||||
|
||||
Acknowledgments:
|
||||
This work was supported in part by the National Science
|
||||
Council of Taiwan via the grant NSC 89-2213-E-002-013.
|
||||
The authors thank their group members and users
|
||||
for many helpful discussions and comments. They are listed in
|
||||
http://www.csie.ntu.edu.tw/~cjlin/libsvm/acknowledgements
|
||||
|
270
libsvm-3.36/heart_scale
Normal file
270
libsvm-3.36/heart_scale
Normal file
@@ -0,0 +1,270 @@
|
||||
+1 1:0.708333 2:1 3:1 4:-0.320755 5:-0.105023 6:-1 7:1 8:-0.419847 9:-1 10:-0.225806 12:1 13:-1
|
||||
-1 1:0.583333 2:-1 3:0.333333 4:-0.603774 5:1 6:-1 7:1 8:0.358779 9:-1 10:-0.483871 12:-1 13:1
|
||||
+1 1:0.166667 2:1 3:-0.333333 4:-0.433962 5:-0.383562 6:-1 7:-1 8:0.0687023 9:-1 10:-0.903226 11:-1 12:-1 13:1
|
||||
-1 1:0.458333 2:1 3:1 4:-0.358491 5:-0.374429 6:-1 7:-1 8:-0.480916 9:1 10:-0.935484 12:-0.333333 13:1
|
||||
-1 1:0.875 2:-1 3:-0.333333 4:-0.509434 5:-0.347032 6:-1 7:1 8:-0.236641 9:1 10:-0.935484 11:-1 12:-0.333333 13:-1
|
||||
-1 1:0.5 2:1 3:1 4:-0.509434 5:-0.767123 6:-1 7:-1 8:0.0534351 9:-1 10:-0.870968 11:-1 12:-1 13:1
|
||||
+1 1:0.125 2:1 3:0.333333 4:-0.320755 5:-0.406393 6:1 7:1 8:0.0839695 9:1 10:-0.806452 12:-0.333333 13:0.5
|
||||
+1 1:0.25 2:1 3:1 4:-0.698113 5:-0.484018 6:-1 7:1 8:0.0839695 9:1 10:-0.612903 12:-0.333333 13:1
|
||||
+1 1:0.291667 2:1 3:1 4:-0.132075 5:-0.237443 6:-1 7:1 8:0.51145 9:-1 10:-0.612903 12:0.333333 13:1
|
||||
+1 1:0.416667 2:-1 3:1 4:0.0566038 5:0.283105 6:-1 7:1 8:0.267176 9:-1 10:0.290323 12:1 13:1
|
||||
-1 1:0.25 2:1 3:1 4:-0.226415 5:-0.506849 6:-1 7:-1 8:0.374046 9:-1 10:-0.83871 12:-1 13:1
|
||||
-1 2:1 3:1 4:-0.0943396 5:-0.543379 6:-1 7:1 8:-0.389313 9:1 10:-1 11:-1 12:-1 13:1
|
||||
-1 1:-0.375 2:1 3:0.333333 4:-0.132075 5:-0.502283 6:-1 7:1 8:0.664122 9:-1 10:-1 11:-1 12:-1 13:-1
|
||||
+1 1:0.333333 2:1 3:-1 4:-0.245283 5:-0.506849 6:-1 7:-1 8:0.129771 9:-1 10:-0.16129 12:0.333333 13:-1
|
||||
-1 1:0.166667 2:-1 3:1 4:-0.358491 5:-0.191781 6:-1 7:1 8:0.343511 9:-1 10:-1 11:-1 12:-0.333333 13:-1
|
||||
-1 1:0.75 2:-1 3:1 4:-0.660377 5:-0.894977 6:-1 7:-1 8:-0.175573 9:-1 10:-0.483871 12:-1 13:-1
|
||||
+1 1:-0.291667 2:1 3:1 4:-0.132075 5:-0.155251 6:-1 7:-1 8:-0.251908 9:1 10:-0.419355 12:0.333333 13:1
|
||||
+1 2:1 3:1 4:-0.132075 5:-0.648402 6:1 7:1 8:0.282443 9:1 11:1 12:-1 13:1
|
||||
-1 1:0.458333 2:1 3:-1 4:-0.698113 5:-0.611872 6:-1 7:1 8:0.114504 9:1 10:-0.419355 12:-1 13:-1
|
||||
-1 1:-0.541667 2:1 3:-1 4:-0.132075 5:-0.666667 6:-1 7:-1 8:0.633588 9:1 10:-0.548387 11:-1 12:-1 13:1
|
||||
+1 1:0.583333 2:1 3:1 4:-0.509434 5:-0.52968 6:-1 7:1 8:-0.114504 9:1 10:-0.16129 12:0.333333 13:1
|
||||
-1 1:-0.208333 2:1 3:-0.333333 4:-0.320755 5:-0.456621 6:-1 7:1 8:0.664122 9:-1 10:-0.935484 12:-1 13:-1
|
||||
-1 1:-0.416667 2:1 3:1 4:-0.603774 5:-0.191781 6:-1 7:-1 8:0.679389 9:-1 10:-0.612903 12:-1 13:-1
|
||||
-1 1:-0.25 2:1 3:1 4:-0.660377 5:-0.643836 6:-1 7:-1 8:0.0992366 9:-1 10:-0.967742 11:-1 12:-1 13:-1
|
||||
-1 1:0.0416667 2:-1 3:-0.333333 4:-0.283019 5:-0.260274 6:1 7:1 8:0.343511 9:1 10:-1 11:-1 12:-0.333333 13:-1
|
||||
-1 1:-0.208333 2:-1 3:0.333333 4:-0.320755 5:-0.319635 6:-1 7:-1 8:0.0381679 9:-1 10:-0.935484 11:-1 12:-1 13:-1
|
||||
-1 1:-0.291667 2:-1 3:1 4:-0.169811 5:-0.465753 6:-1 7:1 8:0.236641 9:1 10:-1 12:-1 13:-1
|
||||
-1 1:-0.0833333 2:-1 3:0.333333 4:-0.509434 5:-0.228311 6:-1 7:1 8:0.312977 9:-1 10:-0.806452 11:-1 12:-1 13:-1
|
||||
+1 1:0.208333 2:1 3:0.333333 4:-0.660377 5:-0.525114 6:-1 7:1 8:0.435115 9:-1 10:-0.193548 12:-0.333333 13:1
|
||||
-1 1:0.75 2:-1 3:0.333333 4:-0.698113 5:-0.365297 6:1 7:1 8:-0.0992366 9:-1 10:-1 11:-1 12:-0.333333 13:-1
|
||||
+1 1:0.166667 2:1 3:0.333333 4:-0.358491 5:-0.52968 6:-1 7:1 8:0.206107 9:-1 10:-0.870968 12:-0.333333 13:1
|
||||
-1 1:0.541667 2:1 3:1 4:0.245283 5:-0.534247 6:-1 7:1 8:0.0229008 9:-1 10:-0.258065 11:-1 12:-1 13:0.5
|
||||
-1 1:-0.666667 2:-1 3:0.333333 4:-0.509434 5:-0.593607 6:-1 7:-1 8:0.51145 9:-1 10:-1 11:-1 12:-1 13:-1
|
||||
+1 1:0.25 2:1 3:1 4:0.433962 5:-0.086758 6:-1 7:1 8:0.0534351 9:1 10:0.0967742 11:1 12:-1 13:1
|
||||
+1 1:-0.125 2:1 3:1 4:-0.0566038 5:-0.6621 6:-1 7:1 8:-0.160305 9:1 10:-0.709677 12:-1 13:1
|
||||
+1 1:-0.208333 2:1 3:1 4:-0.320755 5:-0.406393 6:1 7:1 8:0.206107 9:1 10:-1 11:-1 12:0.333333 13:1
|
||||
+1 1:0.333333 2:1 3:1 4:-0.132075 5:-0.630137 6:-1 7:1 8:0.0229008 9:1 10:-0.387097 11:-1 12:-0.333333 13:1
|
||||
+1 1:0.25 2:1 3:-1 4:0.245283 5:-0.328767 6:-1 7:1 8:-0.175573 9:-1 10:-1 11:-1 12:-1 13:-1
|
||||
-1 1:-0.458333 2:1 3:0.333333 4:-0.320755 5:-0.753425 6:-1 7:-1 8:0.206107 9:-1 10:-1 11:-1 12:-1 13:-1
|
||||
-1 1:-0.208333 2:1 3:1 4:-0.471698 5:-0.561644 6:-1 7:1 8:0.755725 9:-1 10:-1 11:-1 12:-1 13:-1
|
||||
+1 1:-0.541667 2:1 3:1 4:0.0943396 5:-0.557078 6:-1 7:-1 8:0.679389 9:-1 10:-1 11:-1 12:-1 13:1
|
||||
-1 1:0.375 2:-1 3:1 4:-0.433962 5:-0.621005 6:-1 7:-1 8:0.40458 9:-1 10:-1 11:-1 12:-1 13:-1
|
||||
-1 1:-0.375 2:1 3:0.333333 4:-0.320755 5:-0.511416 6:-1 7:-1 8:0.648855 9:1 10:-0.870968 11:-1 12:-1 13:-1
|
||||
-1 1:-0.291667 2:1 3:-0.333333 4:-0.867925 5:-0.675799 6:1 7:-1 8:0.29771 9:-1 10:-1 11:-1 12:-1 13:1
|
||||
+1 1:0.25 2:1 3:0.333333 4:-0.396226 5:-0.579909 6:1 7:-1 8:-0.0381679 9:-1 10:-0.290323 12:-0.333333 13:0.5
|
||||
-1 1:0.208333 2:1 3:0.333333 4:-0.132075 5:-0.611872 6:1 7:1 8:0.435115 9:-1 10:-1 11:-1 12:-1 13:-1
|
||||
+1 1:-0.166667 2:1 3:0.333333 4:-0.54717 5:-0.894977 6:-1 7:1 8:-0.160305 9:-1 10:-0.741935 11:-1 12:1 13:-1
|
||||
+1 1:-0.375 2:1 3:1 4:-0.698113 5:-0.675799 6:-1 7:1 8:0.618321 9:-1 10:-1 11:-1 12:-0.333333 13:-1
|
||||
+1 1:0.541667 2:1 3:-0.333333 4:0.245283 5:-0.452055 6:-1 7:-1 8:-0.251908 9:1 10:-1 12:1 13:0.5
|
||||
+1 1:0.5 2:-1 3:1 4:0.0566038 5:-0.547945 6:-1 7:1 8:-0.343511 9:-1 10:-0.677419 12:1 13:1
|
||||
+1 1:-0.458333 2:1 3:1 4:-0.207547 5:-0.136986 6:-1 7:-1 8:-0.175573 9:1 10:-0.419355 12:-1 13:0.5
|
||||
-1 1:-0.0416667 2:1 3:-0.333333 4:-0.358491 5:-0.639269 6:1 7:-1 8:0.725191 9:-1 10:-1 11:-1 12:-1 13:-1
|
||||
-1 1:0.5 2:-1 3:0.333333 4:-0.132075 5:0.328767 6:1 7:1 8:0.312977 9:-1 10:-0.741935 11:-1 12:-0.333333 13:-1
|
||||
-1 1:0.416667 2:-1 3:-0.333333 4:-0.132075 5:-0.684932 6:-1 7:-1 8:0.648855 9:-1 10:-1 11:-1 12:0.333333 13:-1
|
||||
-1 1:-0.333333 2:-1 3:-0.333333 4:-0.320755 5:-0.506849 6:-1 7:1 8:0.587786 9:-1 10:-0.806452 12:-1 13:-1
|
||||
-1 1:-0.5 2:-1 3:-0.333333 4:-0.792453 5:-0.671233 6:-1 7:-1 8:0.480916 9:-1 10:-1 11:-1 12:-0.333333 13:-1
|
||||
+1 1:0.333333 2:1 3:1 4:-0.169811 5:-0.817352 6:-1 7:1 8:-0.175573 9:1 10:0.16129 12:-0.333333 13:-1
|
||||
-1 1:0.291667 2:-1 3:0.333333 4:-0.509434 5:-0.762557 6:1 7:-1 8:-0.618321 9:-1 10:-1 11:-1 12:-1 13:-1
|
||||
+1 1:0.25 2:-1 3:1 4:0.509434 5:-0.438356 6:-1 7:-1 8:0.0992366 9:1 10:-1 12:-1 13:-1
|
||||
+1 1:0.375 2:1 3:-0.333333 4:-0.509434 5:-0.292237 6:-1 7:1 8:-0.51145 9:-1 10:-0.548387 12:-0.333333 13:1
|
||||
-1 1:0.166667 2:1 3:0.333333 4:0.0566038 5:-1 6:1 7:-1 8:0.557252 9:-1 10:-0.935484 11:-1 12:-0.333333 13:1
|
||||
+1 1:-0.0833333 2:-1 3:1 4:-0.320755 5:-0.182648 6:-1 7:-1 8:0.0839695 9:1 10:-0.612903 12:-1 13:1
|
||||
-1 1:-0.375 2:1 3:0.333333 4:-0.509434 5:-0.543379 6:-1 7:-1 8:0.496183 9:-1 10:-1 11:-1 12:-1 13:-1
|
||||
-1 1:0.291667 2:-1 3:-1 4:0.0566038 5:-0.479452 6:-1 7:-1 8:0.526718 9:-1 10:-0.709677 11:-1 12:-1 13:-1
|
||||
-1 1:0.416667 2:1 3:-1 4:-0.0377358 5:-0.511416 6:1 7:1 8:0.206107 9:-1 10:-0.258065 11:1 12:-1 13:0.5
|
||||
+1 1:0.166667 2:1 3:1 4:0.0566038 5:-0.315068 6:-1 7:1 8:-0.374046 9:1 10:-0.806452 12:-0.333333 13:0.5
|
||||
-1 1:-0.0833333 2:1 3:1 4:-0.132075 5:-0.383562 6:-1 7:1 8:0.755725 9:1 10:-1 11:-1 12:-1 13:-1
|
||||
+1 1:0.208333 2:-1 3:-0.333333 4:-0.207547 5:-0.118721 6:1 7:1 8:0.236641 9:-1 10:-1 11:-1 12:0.333333 13:-1
|
||||
-1 1:-0.375 2:-1 3:0.333333 4:-0.54717 5:-0.47032 6:-1 7:-1 8:0.19084 9:-1 10:-0.903226 12:-0.333333 13:-1
|
||||
+1 1:-0.25 2:1 3:0.333333 4:-0.735849 5:-0.465753 6:-1 7:-1 8:0.236641 9:-1 10:-1 11:-1 12:-1 13:-1
|
||||
+1 1:0.333333 2:1 3:1 4:-0.509434 5:-0.388128 6:-1 7:-1 8:0.0534351 9:1 10:0.16129 12:-0.333333 13:1
|
||||
-1 1:0.166667 2:-1 3:1 4:-0.509434 5:0.0410959 6:-1 7:-1 8:0.40458 9:1 10:-0.806452 11:-1 12:-1 13:-1
|
||||
-1 1:0.708333 2:1 3:-0.333333 4:0.169811 5:-0.456621 6:-1 7:1 8:0.0992366 9:-1 10:-1 11:-1 12:-1 13:-1
|
||||
-1 1:0.958333 2:-1 3:0.333333 4:-0.132075 5:-0.675799 6:-1 8:-0.312977 9:-1 10:-0.645161 12:-1 13:-1
|
||||
-1 1:0.583333 2:-1 3:1 4:-0.773585 5:-0.557078 6:-1 7:-1 8:0.0839695 9:-1 10:-0.903226 11:-1 12:0.333333 13:-1
|
||||
+1 1:-0.333333 2:1 3:1 4:-0.0943396 5:-0.164384 6:-1 7:1 8:0.160305 9:1 10:-1 12:1 13:1
|
||||
-1 1:-0.333333 2:1 3:1 4:-0.811321 5:-0.625571 6:-1 7:1 8:0.175573 9:1 10:-0.0322581 12:-1 13:-1
|
||||
-1 1:-0.583333 2:-1 3:0.333333 4:-1 5:-0.666667 6:-1 7:-1 8:0.648855 9:-1 10:-1 11:-1 12:-1 13:-1
|
||||
-1 1:-0.458333 2:-1 3:0.333333 4:-0.509434 5:-0.621005 6:-1 7:-1 8:0.557252 9:-1 10:-1 12:-1 13:-1
|
||||
-1 1:0.125 2:1 3:-0.333333 4:-0.509434 5:-0.497717 6:-1 7:-1 8:0.633588 9:-1 10:-0.741935 11:-1 12:-1 13:-1
|
||||
+1 1:0.208333 2:1 3:1 4:-0.0188679 5:-0.579909 6:-1 7:-1 8:-0.480916 9:-1 10:-0.354839 12:-0.333333 13:1
|
||||
+1 1:-0.75 2:1 3:1 4:-0.509434 5:-0.671233 6:-1 7:-1 8:-0.0992366 9:1 10:-0.483871 12:-1 13:1
|
||||
+1 1:0.208333 2:1 3:1 4:0.0566038 5:-0.342466 6:-1 7:1 8:-0.389313 9:1 10:-0.741935 11:-1 12:-1 13:1
|
||||
-1 1:-0.5 2:1 3:0.333333 4:-0.320755 5:-0.598174 6:-1 7:1 8:0.480916 9:-1 10:-0.354839 12:-1 13:-1
|
||||
-1 1:0.166667 2:1 3:1 4:-0.698113 5:-0.657534 6:-1 7:-1 8:-0.160305 9:1 10:-0.516129 12:-1 13:0.5
|
||||
-1 1:-0.458333 2:1 3:-1 4:0.0188679 5:-0.461187 6:-1 7:1 8:0.633588 9:-1 10:-0.741935 11:-1 12:0.333333 13:-1
|
||||
-1 1:0.375 2:1 3:-0.333333 4:-0.358491 5:-0.625571 6:1 7:1 8:0.0534351 9:-1 10:-1 11:-1 12:-1 13:-1
|
||||
-1 1:0.25 2:1 3:-1 4:0.584906 5:-0.342466 6:-1 7:1 8:0.129771 9:-1 10:0.354839 11:1 12:-1 13:1
|
||||
-1 1:-0.5 2:-1 3:-0.333333 4:-0.396226 5:-0.178082 6:-1 7:-1 8:0.40458 9:-1 10:-1 11:-1 12:-1 13:-1
|
||||
+1 1:-0.125 2:1 3:1 4:0.0566038 5:-0.465753 6:-1 7:1 8:-0.129771 9:-1 10:-0.16129 12:-1 13:1
|
||||
-1 1:0.25 2:1 3:-0.333333 4:-0.132075 5:-0.56621 6:-1 7:-1 8:0.419847 9:1 10:-1 11:-1 12:-1 13:-1
|
||||
+1 1:0.333333 2:-1 3:1 4:-0.320755 5:-0.0684932 6:-1 7:1 8:0.496183 9:-1 10:-1 11:-1 12:-1 13:-1
|
||||
+1 1:0.0416667 2:1 3:1 4:-0.433962 5:-0.360731 6:-1 7:1 8:-0.419847 9:1 10:-0.290323 12:-0.333333 13:1
|
||||
+1 1:0.0416667 2:1 3:1 4:-0.698113 5:-0.634703 6:-1 7:1 8:-0.435115 9:1 10:-1 12:-0.333333 13:-1
|
||||
+1 1:-0.0416667 2:1 3:1 4:-0.415094 5:-0.607306 6:-1 7:-1 8:0.480916 9:-1 10:-0.677419 11:-1 12:0.333333 13:1
|
||||
+1 1:-0.25 2:1 3:1 4:-0.698113 5:-0.319635 6:-1 7:1 8:-0.282443 9:1 10:-0.677419 12:-0.333333 13:-1
|
||||
-1 1:0.541667 2:1 3:1 4:-0.509434 5:-0.196347 6:-1 7:1 8:0.221374 9:-1 10:-0.870968 12:-1 13:-1
|
||||
+1 1:0.208333 2:1 3:1 4:-0.886792 5:-0.506849 6:-1 7:-1 8:0.29771 9:-1 10:-0.967742 11:-1 12:-0.333333 13:1
|
||||
-1 1:0.458333 2:-1 3:0.333333 4:-0.132075 5:-0.146119 6:-1 7:-1 8:-0.0534351 9:-1 10:-0.935484 11:-1 12:-1 13:1
|
||||
-1 1:-0.125 2:-1 3:-0.333333 4:-0.509434 5:-0.461187 6:-1 7:-1 8:0.389313 9:-1 10:-0.645161 11:-1 12:-1 13:-1
|
||||
-1 1:-0.375 2:-1 3:0.333333 4:-0.735849 5:-0.931507 6:-1 7:-1 8:0.587786 9:-1 10:-0.806452 12:-1 13:-1
|
||||
+1 1:0.583333 2:1 3:1 4:-0.509434 5:-0.493151 6:-1 7:-1 8:-1 9:-1 10:-0.677419 12:-1 13:-1
|
||||
-1 1:-0.166667 2:-1 3:1 4:-0.320755 5:-0.347032 6:-1 7:-1 8:0.40458 9:-1 10:-1 11:-1 12:-1 13:-1
|
||||
+1 1:0.166667 2:1 3:1 4:0.339623 5:-0.255708 6:1 7:1 8:-0.19084 9:-1 10:-0.677419 12:1 13:1
|
||||
+1 1:0.416667 2:1 3:1 4:-0.320755 5:-0.415525 6:-1 7:1 8:0.160305 9:-1 10:-0.548387 12:-0.333333 13:1
|
||||
+1 1:-0.208333 2:1 3:1 4:-0.433962 5:-0.324201 6:-1 7:1 8:0.450382 9:-1 10:-0.83871 12:-1 13:1
|
||||
-1 1:-0.0833333 2:1 3:0.333333 4:-0.886792 5:-0.561644 6:-1 7:-1 8:0.0992366 9:1 10:-0.612903 12:-1 13:-1
|
||||
+1 1:0.291667 2:-1 3:1 4:0.0566038 5:-0.39726 6:-1 7:1 8:0.312977 9:-1 10:-0.16129 12:0.333333 13:1
|
||||
+1 1:0.25 2:1 3:1 4:-0.132075 5:-0.767123 6:-1 7:-1 8:0.389313 9:1 10:-1 11:-1 12:-0.333333 13:1
|
||||
-1 1:-0.333333 2:-1 3:-0.333333 4:-0.660377 5:-0.844749 6:-1 7:-1 8:0.0229008 9:-1 10:-1 12:-1 13:-1
|
||||
+1 1:0.0833333 2:-1 3:1 4:0.622642 5:-0.0821918 6:-1 8:-0.29771 9:1 10:0.0967742 12:-1 13:-1
|
||||
-1 1:-0.5 2:1 3:-0.333333 4:-0.698113 5:-0.502283 6:-1 7:-1 8:0.251908 9:-1 10:-1 11:-1 12:-1 13:-1
|
||||
+1 1:0.291667 2:-1 3:1 4:0.207547 5:-0.182648 6:-1 7:1 8:0.374046 9:-1 10:-1 11:-1 12:-1 13:-1
|
||||
-1 1:0.0416667 2:-1 3:0.333333 4:-0.226415 5:-0.187215 6:1 7:-1 8:0.51145 9:-1 10:-1 11:-1 12:-1 13:-1
|
||||
-1 1:-0.458333 2:1 3:-0.333333 4:-0.509434 5:-0.228311 6:-1 7:-1 8:0.389313 9:-1 10:-1 11:-1 12:-1 13:-1
|
||||
-1 1:-0.166667 2:-1 3:-0.333333 4:-0.245283 5:-0.3379 6:-1 7:-1 8:0.389313 9:-1 10:-1 12:-1 13:-1
|
||||
+1 1:-0.291667 2:1 3:1 4:-0.509434 5:-0.438356 6:-1 7:1 8:0.114504 9:-1 10:-0.741935 11:-1 12:-1 13:1
|
||||
+1 1:0.125 2:-1 3:1 4:1 5:-0.260274 6:1 7:1 8:-0.0534351 9:1 10:0.290323 11:1 12:0.333333 13:1
|
||||
-1 1:0.541667 2:-1 3:-1 4:0.0566038 5:-0.543379 6:-1 7:-1 8:-0.343511 9:-1 10:-0.16129 11:1 12:-1 13:-1
|
||||
+1 1:0.125 2:1 3:1 4:-0.320755 5:-0.283105 6:1 7:1 8:-0.51145 9:1 10:-0.483871 11:1 12:-1 13:1
|
||||
+1 1:-0.166667 2:1 3:0.333333 4:-0.509434 5:-0.716895 6:-1 7:-1 8:0.0381679 9:-1 10:-0.354839 12:1 13:1
|
||||
+1 1:0.0416667 2:1 3:1 4:-0.471698 5:-0.269406 6:-1 7:1 8:-0.312977 9:1 10:0.0322581 12:0.333333 13:-1
|
||||
+1 1:0.166667 2:1 3:1 4:0.0943396 5:-0.324201 6:-1 7:-1 8:-0.740458 9:1 10:-0.612903 12:-0.333333 13:1
|
||||
-1 1:0.5 2:-1 3:0.333333 4:0.245283 5:0.0684932 6:-1 7:1 8:0.221374 9:-1 10:-0.741935 11:-1 12:-1 13:-1
|
||||
-1 1:0.0416667 2:1 3:0.333333 4:-0.415094 5:-0.328767 6:-1 7:1 8:0.236641 9:-1 10:-0.83871 11:1 12:-0.333333 13:-1
|
||||
-1 1:0.0416667 2:-1 3:0.333333 4:0.245283 5:-0.657534 6:-1 7:-1 8:0.40458 9:-1 10:-1 11:-1 12:-0.333333 13:-1
|
||||
+1 1:0.375 2:1 3:1 4:-0.509434 5:-0.356164 6:-1 7:-1 8:-0.572519 9:1 10:-0.419355 12:0.333333 13:1
|
||||
-1 1:-0.0416667 2:-1 3:0.333333 4:-0.207547 5:-0.680365 6:-1 7:1 8:0.496183 9:-1 10:-0.967742 12:-1 13:-1
|
||||
-1 1:-0.0416667 2:1 3:-0.333333 4:-0.245283 5:-0.657534 6:-1 7:-1 8:0.328244 9:-1 10:-0.741935 11:-1 12:-0.333333 13:-1
|
||||
+1 1:0.291667 2:1 3:1 4:-0.566038 5:-0.525114 6:1 7:-1 8:0.358779 9:1 10:-0.548387 11:-1 12:0.333333 13:1
|
||||
+1 1:0.416667 2:-1 3:1 4:-0.735849 5:-0.347032 6:-1 7:-1 8:0.496183 9:1 10:-0.419355 12:0.333333 13:-1
|
||||
+1 1:0.541667 2:1 3:1 4:-0.660377 5:-0.607306 6:-1 7:1 8:-0.0687023 9:1 10:-0.967742 11:-1 12:-0.333333 13:-1
|
||||
-1 1:-0.458333 2:1 3:1 4:-0.132075 5:-0.543379 6:-1 7:-1 8:0.633588 9:-1 10:-1 11:-1 12:-1 13:-1
|
||||
+1 1:0.458333 2:1 3:1 4:-0.509434 5:-0.452055 6:-1 7:1 8:-0.618321 9:1 10:-0.290323 11:1 12:-0.333333 13:-1
|
||||
-1 1:0.0416667 2:1 3:0.333333 4:0.0566038 5:-0.515982 6:-1 7:1 8:0.435115 9:-1 10:-0.483871 11:-1 12:-1 13:1
|
||||
-1 1:-0.291667 2:-1 3:0.333333 4:-0.0943396 5:-0.767123 6:-1 7:1 8:0.358779 9:1 10:-0.548387 11:1 12:-1 13:-1
|
||||
-1 1:0.583333 2:-1 3:0.333333 4:0.0943396 5:-0.310502 6:-1 7:-1 8:0.541985 9:-1 10:-1 11:-1 12:-0.333333 13:-1
|
||||
+1 1:0.125 2:1 3:1 4:-0.415094 5:-0.438356 6:1 7:1 8:0.114504 9:1 10:-0.612903 12:-0.333333 13:-1
|
||||
-1 1:-0.791667 2:-1 3:-0.333333 4:-0.54717 5:-0.616438 6:-1 7:-1 8:0.847328 9:-1 10:-0.774194 11:-1 12:-1 13:-1
|
||||
-1 1:0.166667 2:1 3:1 4:-0.283019 5:-0.630137 6:-1 7:-1 8:0.480916 9:1 10:-1 11:-1 12:-1 13:1
|
||||
+1 1:0.458333 2:1 3:1 4:-0.0377358 5:-0.607306 6:-1 7:1 8:-0.0687023 9:-1 10:-0.354839 12:0.333333 13:0.5
|
||||
-1 1:0.25 2:1 3:1 4:-0.169811 5:-0.3379 6:-1 7:1 8:0.694656 9:-1 10:-1 11:-1 12:-1 13:-1
|
||||
+1 1:-0.125 2:1 3:0.333333 4:-0.132075 5:-0.511416 6:-1 7:-1 8:0.40458 9:-1 10:-0.806452 12:-0.333333 13:1
|
||||
-1 1:-0.0833333 2:1 3:-1 4:-0.415094 5:-0.60274 6:-1 7:1 8:-0.175573 9:1 10:-0.548387 11:-1 12:-0.333333 13:-1
|
||||
+1 1:0.0416667 2:1 3:-0.333333 4:0.849057 5:-0.283105 6:-1 7:1 8:0.89313 9:-1 10:-1 11:-1 12:-0.333333 13:1
|
||||
+1 2:1 3:1 4:-0.45283 5:-0.287671 6:-1 7:-1 8:-0.633588 9:1 10:-0.354839 12:0.333333 13:1
|
||||
+1 1:-0.0416667 2:1 3:1 4:-0.660377 5:-0.525114 6:-1 7:-1 8:0.358779 9:-1 10:-1 11:-1 12:-0.333333 13:-1
|
||||
+1 1:-0.541667 2:1 3:1 4:-0.698113 5:-0.812785 6:-1 7:1 8:-0.343511 9:1 10:-0.354839 12:-1 13:1
|
||||
+1 1:0.208333 2:1 3:0.333333 4:-0.283019 5:-0.552511 6:-1 7:1 8:0.557252 9:-1 10:0.0322581 11:-1 12:0.333333 13:1
|
||||
-1 1:-0.5 2:-1 3:0.333333 4:-0.660377 5:-0.351598 6:-1 7:1 8:0.541985 9:1 10:-1 11:-1 12:-1 13:-1
|
||||
-1 1:-0.5 2:1 3:0.333333 4:-0.660377 5:-0.43379 6:-1 7:-1 8:0.648855 9:-1 10:-1 11:-1 12:-1 13:-1
|
||||
-1 1:-0.125 2:-1 3:0.333333 4:-0.509434 5:-0.575342 6:-1 7:-1 8:0.328244 9:-1 10:-0.483871 12:-1 13:-1
|
||||
-1 1:0.0416667 2:-1 3:0.333333 4:-0.735849 5:-0.356164 6:-1 7:1 8:0.465649 9:-1 10:-1 11:-1 12:-1 13:-1
|
||||
-1 1:0.458333 2:-1 3:1 4:-0.320755 5:-0.191781 6:-1 7:-1 8:-0.221374 9:-1 10:-0.354839 12:0.333333 13:-1
|
||||
-1 1:-0.0833333 2:-1 3:0.333333 4:-0.320755 5:-0.406393 6:-1 7:1 8:0.19084 9:-1 10:-0.83871 11:-1 12:-1 13:-1
|
||||
-1 1:-0.291667 2:-1 3:-0.333333 4:-0.792453 5:-0.643836 6:-1 7:-1 8:0.541985 9:-1 10:-1 11:-1 12:-1 13:-1
|
||||
+1 1:0.0833333 2:1 3:1 4:-0.132075 5:-0.584475 6:-1 7:-1 8:-0.389313 9:1 10:0.806452 11:1 12:-1 13:1
|
||||
-1 1:-0.333333 2:1 3:-0.333333 4:-0.358491 5:-0.16895 6:-1 7:1 8:0.51145 9:-1 10:-1 11:-1 12:-1 13:-1
|
||||
-1 1:0.125 2:1 3:-1 4:-0.509434 5:-0.694064 6:-1 7:1 8:0.389313 9:-1 10:-0.387097 12:-1 13:1
|
||||
+1 1:0.541667 2:-1 3:1 4:0.584906 5:-0.534247 6:1 7:-1 8:0.435115 9:1 10:-0.677419 12:0.333333 13:1
|
||||
+1 1:-0.625 2:1 3:-1 4:-0.509434 5:-0.520548 6:-1 7:-1 8:0.694656 9:1 10:0.225806 12:-1 13:1
|
||||
+1 1:0.375 2:-1 3:1 4:0.0566038 5:-0.461187 6:-1 7:-1 8:0.267176 9:1 10:-0.548387 12:-1 13:-1
|
||||
-1 1:0.0833333 2:1 3:-0.333333 4:-0.320755 5:-0.378995 6:-1 7:-1 8:0.282443 9:-1 10:-1 11:-1 12:-1 13:-1
|
||||
+1 1:0.208333 2:1 3:1 4:-0.358491 5:-0.392694 6:-1 7:1 8:-0.0992366 9:1 10:-0.0322581 12:0.333333 13:1
|
||||
-1 1:-0.416667 2:1 3:1 4:-0.698113 5:-0.611872 6:-1 7:-1 8:0.374046 9:-1 10:-1 11:-1 12:-1 13:1
|
||||
-1 1:0.458333 2:-1 3:1 4:0.622642 5:-0.0913242 6:-1 7:-1 8:0.267176 9:1 10:-1 11:-1 12:-1 13:-1
|
||||
-1 1:-0.125 2:-1 3:1 4:-0.698113 5:-0.415525 6:-1 7:1 8:0.343511 9:-1 10:-1 11:-1 12:-1 13:-1
|
||||
-1 2:1 3:0.333333 4:-0.320755 5:-0.675799 6:1 7:1 8:0.236641 9:-1 10:-0.612903 11:1 12:-1 13:-1
|
||||
-1 1:-0.333333 2:-1 3:1 4:-0.169811 5:-0.497717 6:-1 7:1 8:0.236641 9:1 10:-0.935484 12:-1 13:-1
|
||||
+1 1:0.5 2:1 3:-1 4:-0.169811 5:-0.287671 6:1 7:1 8:0.572519 9:-1 10:-0.548387 12:-0.333333 13:-1
|
||||
-1 1:0.666667 2:1 3:-1 4:0.245283 5:-0.506849 6:1 7:1 8:-0.0839695 9:-1 10:-0.967742 12:-0.333333 13:-1
|
||||
+1 1:0.666667 2:1 3:0.333333 4:-0.132075 5:-0.415525 6:-1 7:1 8:0.145038 9:-1 10:-0.354839 12:1 13:1
|
||||
+1 1:0.583333 2:1 3:1 4:-0.886792 5:-0.210046 6:-1 7:1 8:-0.175573 9:1 10:-0.709677 12:0.333333 13:-1
|
||||
-1 1:0.625 2:-1 3:0.333333 4:-0.509434 5:-0.611872 6:-1 7:1 8:-0.328244 9:-1 10:-0.516129 12:-1 13:-1
|
||||
-1 1:-0.791667 2:1 3:-1 4:-0.54717 5:-0.744292 6:-1 7:1 8:0.572519 9:-1 10:-1 11:-1 12:-1 13:-1
|
||||
+1 1:0.375 2:-1 3:1 4:-0.169811 5:-0.232877 6:1 7:-1 8:-0.465649 9:-1 10:-0.387097 12:1 13:-1
|
||||
+1 1:-0.0833333 2:1 3:1 4:-0.132075 5:-0.214612 6:-1 7:-1 8:-0.221374 9:1 10:0.354839 12:1 13:1
|
||||
+1 1:-0.291667 2:1 3:0.333333 4:0.0566038 5:-0.520548 6:-1 7:-1 8:0.160305 9:-1 10:0.16129 12:-1 13:-1
|
||||
+1 1:0.583333 2:1 3:1 4:-0.415094 5:-0.415525 6:1 7:-1 8:0.40458 9:-1 10:-0.935484 12:0.333333 13:1
|
||||
-1 1:-0.125 2:1 3:0.333333 4:-0.339623 5:-0.680365 6:-1 7:-1 8:0.40458 9:-1 10:-1 11:-1 12:-1 13:-1
|
||||
-1 1:-0.458333 2:1 3:0.333333 4:-0.509434 5:-0.479452 6:1 7:-1 8:0.877863 9:-1 10:-0.741935 11:1 12:-1 13:1
|
||||
+1 1:0.125 2:-1 3:1 4:-0.245283 5:0.292237 6:-1 7:1 8:0.206107 9:1 10:-0.387097 12:0.333333 13:1
|
||||
+1 1:-0.5 2:1 3:1 4:-0.698113 5:-0.789954 6:-1 7:1 8:0.328244 9:-1 10:-1 11:-1 12:-1 13:1
|
||||
-1 1:-0.458333 2:-1 3:1 4:-0.849057 5:-0.365297 6:-1 7:1 8:-0.221374 9:-1 10:-0.806452 12:-1 13:-1
|
||||
-1 2:1 3:0.333333 4:-0.320755 5:-0.452055 6:1 7:1 8:0.557252 9:-1 10:-1 11:-1 12:1 13:-1
|
||||
-1 1:-0.416667 2:1 3:0.333333 4:-0.320755 5:-0.136986 6:-1 7:-1 8:0.389313 9:-1 10:-0.387097 11:-1 12:-0.333333 13:-1
|
||||
+1 1:0.125 2:1 3:1 4:-0.283019 5:-0.73516 6:-1 7:1 8:-0.480916 9:1 10:-0.322581 12:-0.333333 13:0.5
|
||||
-1 1:-0.0416667 2:1 3:1 4:-0.735849 5:-0.511416 6:1 7:-1 8:0.160305 9:-1 10:-0.967742 11:-1 12:1 13:1
|
||||
-1 1:0.375 2:-1 3:1 4:-0.132075 5:0.223744 6:-1 7:1 8:0.312977 9:-1 10:-0.612903 12:-1 13:-1
|
||||
+1 1:0.708333 2:1 3:0.333333 4:0.245283 5:-0.347032 6:-1 7:-1 8:-0.374046 9:1 10:-0.0645161 12:-0.333333 13:1
|
||||
-1 1:0.0416667 2:1 3:1 4:-0.132075 5:-0.484018 6:-1 7:-1 8:0.358779 9:-1 10:-0.612903 11:-1 12:-1 13:-1
|
||||
+1 1:0.708333 2:1 3:1 4:-0.0377358 5:-0.780822 6:-1 7:-1 8:-0.175573 9:1 10:-0.16129 11:1 12:-1 13:1
|
||||
-1 1:0.0416667 2:1 3:-0.333333 4:-0.735849 5:-0.164384 6:-1 7:-1 8:0.29771 9:-1 10:-1 11:-1 12:-1 13:1
|
||||
+1 1:-0.75 2:1 3:1 4:-0.396226 5:-0.287671 6:-1 7:1 8:0.29771 9:1 10:-1 11:-1 12:-1 13:1
|
||||
-1 1:-0.208333 2:1 3:0.333333 4:-0.433962 5:-0.410959 6:1 7:-1 8:0.587786 9:-1 10:-1 11:-1 12:0.333333 13:-1
|
||||
-1 1:0.0833333 2:-1 3:-0.333333 4:-0.226415 5:-0.43379 6:-1 7:1 8:0.374046 9:-1 10:-0.548387 12:-1 13:-1
|
||||
-1 1:0.208333 2:-1 3:1 4:-0.886792 5:-0.442922 6:-1 7:1 8:-0.221374 9:-1 10:-0.677419 12:-1 13:-1
|
||||
-1 1:0.0416667 2:-1 3:0.333333 4:-0.698113 5:-0.598174 6:-1 7:-1 8:0.328244 9:-1 10:-0.483871 12:-1 13:-1
|
||||
-1 1:0.666667 2:-1 3:-1 4:-0.132075 5:-0.484018 6:-1 7:-1 8:0.221374 9:-1 10:-0.419355 11:-1 12:0.333333 13:-1
|
||||
+1 1:1 2:1 3:1 4:-0.415094 5:-0.187215 6:-1 7:1 8:0.389313 9:1 10:-1 11:-1 12:1 13:-1
|
||||
-1 1:0.625 2:1 3:0.333333 4:-0.54717 5:-0.310502 6:-1 7:-1 8:0.221374 9:-1 10:-0.677419 11:-1 12:-0.333333 13:1
|
||||
+1 1:0.208333 2:1 3:1 4:-0.415094 5:-0.205479 6:-1 7:1 8:0.526718 9:-1 10:-1 11:-1 12:0.333333 13:1
|
||||
+1 1:0.291667 2:1 3:1 4:-0.415094 5:-0.39726 6:-1 7:1 8:0.0687023 9:1 10:-0.0967742 12:-0.333333 13:1
|
||||
+1 1:-0.0833333 2:1 3:1 4:-0.132075 5:-0.210046 6:-1 7:-1 8:0.557252 9:1 10:-0.483871 11:-1 12:-1 13:1
|
||||
+1 1:0.0833333 2:1 3:1 4:0.245283 5:-0.255708 6:-1 7:1 8:0.129771 9:1 10:-0.741935 12:-0.333333 13:1
|
||||
-1 1:-0.0416667 2:1 3:-1 4:0.0943396 5:-0.214612 6:1 7:-1 8:0.633588 9:-1 10:-0.612903 12:-1 13:1
|
||||
-1 1:0.291667 2:-1 3:0.333333 4:-0.849057 5:-0.123288 6:-1 7:-1 8:0.358779 9:-1 10:-1 11:-1 12:-0.333333 13:-1
|
||||
-1 1:0.208333 2:1 3:0.333333 4:-0.792453 5:-0.479452 6:-1 7:1 8:0.267176 9:1 10:-0.806452 12:-1 13:1
|
||||
+1 1:0.458333 2:1 3:0.333333 4:-0.415094 5:-0.164384 6:-1 7:-1 8:-0.0839695 9:1 10:-0.419355 12:-1 13:1
|
||||
-1 1:-0.666667 2:1 3:0.333333 4:-0.320755 5:-0.43379 6:-1 7:-1 8:0.770992 9:-1 10:0.129032 11:1 12:-1 13:-1
|
||||
+1 1:0.25 2:1 3:-1 4:0.433962 5:-0.260274 6:-1 7:1 8:0.343511 9:-1 10:-0.935484 12:-1 13:1
|
||||
-1 1:-0.0833333 2:1 3:0.333333 4:-0.415094 5:-0.456621 6:1 7:1 8:0.450382 9:-1 10:-0.225806 12:-1 13:-1
|
||||
-1 1:-0.416667 2:-1 3:0.333333 4:-0.471698 5:-0.60274 6:-1 7:-1 8:0.435115 9:-1 10:-0.935484 12:-1 13:-1
|
||||
+1 1:0.208333 2:1 3:1 4:-0.358491 5:-0.589041 6:-1 7:1 8:-0.0839695 9:1 10:-0.290323 12:1 13:1
|
||||
-1 1:-1 2:1 3:-0.333333 4:-0.320755 5:-0.643836 6:-1 7:1 8:1 9:-1 10:-1 11:-1 12:-1 13:-1
|
||||
-1 1:-0.5 2:-1 3:-0.333333 4:-0.320755 5:-0.643836 6:-1 7:1 8:0.541985 9:-1 10:-0.548387 11:-1 12:-1 13:-1
|
||||
-1 1:0.416667 2:-1 3:0.333333 4:-0.226415 5:-0.424658 6:-1 7:1 8:0.541985 9:-1 10:-1 11:-1 12:-1 13:-1
|
||||
-1 1:-0.0833333 2:1 3:0.333333 4:-1 5:-0.538813 6:-1 7:-1 8:0.267176 9:1 10:-1 11:-1 12:-0.333333 13:1
|
||||
-1 1:0.0416667 2:1 3:0.333333 4:-0.509434 5:-0.39726 6:-1 7:1 8:0.160305 9:-1 10:-0.870968 12:-1 13:1
|
||||
-1 1:-0.375 2:1 3:-0.333333 4:-0.509434 5:-0.570776 6:-1 7:-1 8:0.51145 9:-1 10:-1 11:-1 12:-1 13:-1
|
||||
+1 1:0.0416667 2:1 3:1 4:-0.698113 5:-0.484018 6:-1 7:-1 8:-0.160305 9:1 10:-0.0967742 12:-0.333333 13:1
|
||||
+1 1:0.5 2:1 3:1 4:-0.226415 5:-0.415525 6:-1 7:1 8:-0.145038 9:-1 10:-0.0967742 12:-0.333333 13:1
|
||||
-1 1:0.166667 2:1 3:0.333333 4:0.0566038 5:-0.808219 6:-1 7:-1 8:0.572519 9:-1 10:-0.483871 11:-1 12:-1 13:-1
|
||||
+1 1:0.416667 2:1 3:1 4:-0.320755 5:-0.0684932 6:1 7:1 8:-0.0687023 9:1 10:-0.419355 11:-1 12:1 13:1
|
||||
-1 1:-0.75 2:-1 3:1 4:-0.169811 5:-0.739726 6:-1 7:-1 8:0.694656 9:-1 10:-0.548387 11:-1 12:-1 13:-1
|
||||
-1 1:-0.5 2:1 3:-0.333333 4:-0.226415 5:-0.648402 6:-1 7:-1 8:-0.0687023 9:-1 10:-1 12:-1 13:0.5
|
||||
+1 1:0.375 2:-1 3:0.333333 4:-0.320755 5:-0.374429 6:-1 7:-1 8:-0.603053 9:-1 10:-0.612903 12:-0.333333 13:1
|
||||
+1 1:-0.416667 2:-1 3:1 4:-0.283019 5:-0.0182648 6:1 7:1 8:-0.00763359 9:1 10:-0.0322581 12:-1 13:1
|
||||
-1 1:0.208333 2:-1 3:-1 4:0.0566038 5:-0.283105 6:1 7:1 8:0.389313 9:-1 10:-0.677419 11:-1 12:-1 13:-1
|
||||
-1 1:-0.0416667 2:1 3:-1 4:-0.54717 5:-0.726027 6:-1 7:1 8:0.816794 9:-1 10:-1 12:-1 13:0.5
|
||||
+1 1:0.333333 2:-1 3:1 4:-0.0377358 5:-0.173516 6:-1 7:1 8:0.145038 9:1 10:-0.677419 12:-1 13:1
|
||||
+1 1:-0.583333 2:1 3:1 4:-0.54717 5:-0.575342 6:-1 7:-1 8:0.0534351 9:-1 10:-0.612903 12:-1 13:1
|
||||
-1 1:-0.333333 2:1 3:1 4:-0.603774 5:-0.388128 6:-1 7:1 8:0.740458 9:-1 10:-1 11:-1 12:-1 13:-1
|
||||
+1 1:-0.0416667 2:1 3:1 4:-0.358491 5:-0.410959 6:-1 7:-1 8:0.374046 9:1 10:-1 11:-1 12:-0.333333 13:1
|
||||
-1 1:0.375 2:1 3:0.333333 4:-0.320755 5:-0.520548 6:-1 7:-1 8:0.145038 9:-1 10:-0.419355 12:1 13:1
|
||||
+1 1:0.375 2:-1 3:1 4:0.245283 5:-0.826484 6:-1 7:1 8:0.129771 9:-1 10:1 11:1 12:1 13:1
|
||||
-1 2:-1 3:1 4:-0.169811 5:-0.506849 6:-1 7:1 8:0.358779 9:-1 10:-1 11:-1 12:-1 13:-1
|
||||
+1 1:-0.416667 2:1 3:1 4:-0.509434 5:-0.767123 6:-1 7:1 8:-0.251908 9:1 10:-0.193548 12:-1 13:1
|
||||
-1 1:-0.25 2:1 3:0.333333 4:-0.169811 5:-0.401826 6:-1 7:1 8:0.29771 9:-1 10:-1 11:-1 12:-1 13:-1
|
||||
-1 1:-0.0416667 2:1 3:-0.333333 4:-0.509434 5:-0.0913242 6:-1 7:-1 8:0.541985 9:-1 10:-0.935484 11:-1 12:-1 13:-1
|
||||
+1 1:0.625 2:1 3:0.333333 4:0.622642 5:-0.324201 6:1 7:1 8:0.206107 9:1 10:-0.483871 12:-1 13:1
|
||||
-1 1:-0.583333 2:1 3:0.333333 4:-0.132075 5:-0.109589 6:-1 7:1 8:0.694656 9:-1 10:-1 11:-1 12:-1 13:-1
|
||||
-1 2:-1 3:1 4:-0.320755 5:-0.369863 6:-1 7:1 8:0.0992366 9:-1 10:-0.870968 12:-1 13:-1
|
||||
+1 1:0.375 2:-1 3:1 4:-0.132075 5:-0.351598 6:-1 7:1 8:0.358779 9:-1 10:0.16129 11:1 12:0.333333 13:-1
|
||||
-1 1:-0.0833333 2:-1 3:0.333333 4:-0.132075 5:-0.16895 6:-1 7:1 8:0.0839695 9:-1 10:-0.516129 11:-1 12:-0.333333 13:-1
|
||||
+1 1:0.291667 2:1 3:1 4:-0.320755 5:-0.420091 6:-1 7:-1 8:0.114504 9:1 10:-0.548387 11:-1 12:-0.333333 13:1
|
||||
+1 1:0.5 2:1 3:1 4:-0.698113 5:-0.442922 6:-1 7:1 8:0.328244 9:-1 10:-0.806452 11:-1 12:0.333333 13:0.5
|
||||
-1 1:0.5 2:-1 3:0.333333 4:0.150943 5:-0.347032 6:-1 7:-1 8:0.175573 9:-1 10:-0.741935 11:-1 12:-1 13:-1
|
||||
+1 1:0.291667 2:1 3:0.333333 4:-0.132075 5:-0.730594 6:-1 7:1 8:0.282443 9:-1 10:-0.0322581 12:-1 13:-1
|
||||
+1 1:0.291667 2:1 3:1 4:-0.0377358 5:-0.287671 6:-1 7:1 8:0.0839695 9:1 10:-0.0967742 12:0.333333 13:1
|
||||
+1 1:0.0416667 2:1 3:1 4:-0.509434 5:-0.716895 6:-1 7:-1 8:-0.358779 9:-1 10:-0.548387 12:-0.333333 13:1
|
||||
-1 1:-0.375 2:1 3:-0.333333 4:-0.320755 5:-0.575342 6:-1 7:1 8:0.78626 9:-1 10:-1 11:-1 12:-1 13:-1
|
||||
+1 1:-0.375 2:1 3:1 4:-0.660377 5:-0.251142 6:-1 7:1 8:0.251908 9:-1 10:-1 11:-1 12:-0.333333 13:-1
|
||||
-1 1:-0.0833333 2:1 3:0.333333 4:-0.698113 5:-0.776256 6:-1 7:-1 8:-0.206107 9:-1 10:-0.806452 11:-1 12:-1 13:-1
|
||||
-1 1:0.25 2:1 3:0.333333 4:0.0566038 5:-0.607306 6:1 7:-1 8:0.312977 9:-1 10:-0.483871 11:-1 12:-1 13:-1
|
||||
-1 1:0.75 2:-1 3:-0.333333 4:0.245283 5:-0.196347 6:-1 7:-1 8:0.389313 9:-1 10:-0.870968 11:-1 12:0.333333 13:-1
|
||||
-1 1:0.333333 2:1 3:0.333333 4:0.0566038 5:-0.465753 6:1 7:-1 8:0.00763359 9:1 10:-0.677419 12:-1 13:-1
|
||||
+1 1:0.0833333 2:1 3:1 4:-0.283019 5:0.0365297 6:-1 7:-1 8:-0.0687023 9:1 10:-0.612903 12:-0.333333 13:1
|
||||
+1 1:0.458333 2:1 3:0.333333 4:-0.132075 5:-0.0456621 6:-1 7:-1 8:0.328244 9:-1 10:-1 11:-1 12:-1 13:-1
|
||||
-1 1:-0.416667 2:1 3:1 4:0.0566038 5:-0.447489 6:-1 7:-1 8:0.526718 9:-1 10:-0.516129 11:-1 12:-1 13:-1
|
||||
-1 1:0.208333 2:-1 3:0.333333 4:-0.509434 5:-0.0228311 6:-1 7:-1 8:0.541985 9:-1 10:-1 11:-1 12:-1 13:-1
|
||||
+1 1:0.291667 2:1 3:1 4:-0.320755 5:-0.634703 6:-1 7:1 8:-0.0687023 9:1 10:-0.225806 12:0.333333 13:1
|
||||
+1 1:0.208333 2:1 3:-0.333333 4:-0.509434 5:-0.278539 6:-1 7:1 8:0.358779 9:-1 10:-0.419355 12:-1 13:-1
|
||||
-1 1:-0.166667 2:1 3:-0.333333 4:-0.320755 5:-0.360731 6:-1 7:-1 8:0.526718 9:-1 10:-0.806452 11:-1 12:-1 13:-1
|
||||
+1 1:-0.208333 2:1 3:-0.333333 4:-0.698113 5:-0.52968 6:-1 7:-1 8:0.480916 9:-1 10:-0.677419 11:1 12:-1 13:1
|
||||
-1 1:-0.0416667 2:1 3:0.333333 4:0.471698 5:-0.666667 6:1 7:-1 8:0.389313 9:-1 10:-0.83871 11:-1 12:-1 13:1
|
||||
-1 1:-0.375 2:1 3:-0.333333 4:-0.509434 5:-0.374429 6:-1 7:-1 8:0.557252 9:-1 10:-1 11:-1 12:-1 13:1
|
||||
-1 1:0.125 2:-1 3:-0.333333 4:-0.132075 5:-0.232877 6:-1 7:1 8:0.251908 9:-1 10:-0.580645 12:-1 13:-1
|
||||
-1 1:0.166667 2:1 3:1 4:-0.132075 5:-0.69863 6:-1 7:-1 8:0.175573 9:-1 10:-0.870968 12:-1 13:0.5
|
||||
+1 1:0.583333 2:1 3:1 4:0.245283 5:-0.269406 6:-1 7:1 8:-0.435115 9:1 10:-0.516129 12:1 13:-1
|
26
libsvm-3.36/java/Makefile
Normal file
26
libsvm-3.36/java/Makefile
Normal file
@@ -0,0 +1,26 @@
|
||||
# Build the Java port of LIBSVM and package everything into libsvm.jar.
.SUFFIXES: .class .java

FILES = libsvm/svm.class libsvm/svm_model.class libsvm/svm_node.class \
	libsvm/svm_parameter.class libsvm/svm_problem.class \
	libsvm/svm_print_interface.class \
	svm_train.class svm_predict.class svm_toy.class svm_scale.class

#JAVAC = jikes
JAVAC_FLAGS = --release 11
JAVAC = javac
# JAVAC_FLAGS =

# Make the current directory visible so the top-level driver classes
# can resolve the libsvm package during compilation.
export CLASSPATH := .:$(CLASSPATH)

# all/clean/dist name actions, not files; declare them phony so a stray
# file with the same name cannot shadow the target.
.PHONY: all clean dist

all: $(FILES)
	jar cvf libsvm.jar *.class libsvm/*.class

.java.class:
	$(JAVAC) $(JAVAC_FLAGS) $<

# svm.java is generated from the shared m4 template.
libsvm/svm.java: libsvm/svm.m4
	m4 libsvm/svm.m4 > libsvm/svm.java

clean:
	rm -f libsvm/*.class *.class *.jar libsvm/*~ *~ libsvm/svm.java

# -f keeps dist idempotent even if the class files are already gone
dist: clean all
	rm -f *.class libsvm/*.class
|
BIN
libsvm-3.36/java/libsvm.jar
Normal file
BIN
libsvm-3.36/java/libsvm.jar
Normal file
Binary file not shown.
2972
libsvm-3.36/java/libsvm/svm.java
Normal file
2972
libsvm-3.36/java/libsvm/svm.java
Normal file
File diff suppressed because it is too large
Load Diff
2972
libsvm-3.36/java/libsvm/svm.m4
Normal file
2972
libsvm-3.36/java/libsvm/svm.m4
Normal file
File diff suppressed because it is too large
Load Diff
23
libsvm-3.36/java/libsvm/svm_model.java
Normal file
23
libsvm-3.36/java/libsvm/svm_model.java
Normal file
@@ -0,0 +1,23 @@
|
||||
//
|
||||
// svm_model
|
||||
//
|
||||
package libsvm;
|
||||
public class svm_model implements java.io.Serializable
|
||||
{
|
||||
public svm_parameter param; // parameter
|
||||
public int nr_class; // number of classes, = 2 in regression/one class svm
|
||||
public int l; // total #SV
|
||||
public svm_node[][] SV; // SVs (SV[l])
|
||||
public double[][] sv_coef; // coefficients for SVs in decision functions (sv_coef[k-1][l])
|
||||
public double[] rho; // constants in decision functions (rho[k*(k-1)/2])
|
||||
public double[] probA; // pariwise probability information
|
||||
public double[] probB;
|
||||
public double[] prob_density_marks; // probability information for ONE_CLASS
|
||||
public int[] sv_indices; // sv_indices[0,...,nSV-1] are values in [1,...,num_traning_data] to indicate SVs in the training set
|
||||
|
||||
// for classification only
|
||||
|
||||
public int[] label; // label of each class (label[k])
|
||||
public int[] nSV; // number of SVs for each class (nSV[k])
|
||||
// nSV[0] + nSV[1] + ... + nSV[k-1] = l
|
||||
};
|
6
libsvm-3.36/java/libsvm/svm_node.java
Normal file
6
libsvm-3.36/java/libsvm/svm_node.java
Normal file
@@ -0,0 +1,6 @@
|
||||
package libsvm;
|
||||
public class svm_node implements java.io.Serializable
|
||||
{
|
||||
public int index;
|
||||
public double value;
|
||||
}
|
47
libsvm-3.36/java/libsvm/svm_parameter.java
Normal file
47
libsvm-3.36/java/libsvm/svm_parameter.java
Normal file
@@ -0,0 +1,47 @@
|
||||
package libsvm;
|
||||
public class svm_parameter implements Cloneable,java.io.Serializable
|
||||
{
|
||||
/* svm_type */
|
||||
public static final int C_SVC = 0;
|
||||
public static final int NU_SVC = 1;
|
||||
public static final int ONE_CLASS = 2;
|
||||
public static final int EPSILON_SVR = 3;
|
||||
public static final int NU_SVR = 4;
|
||||
|
||||
/* kernel_type */
|
||||
public static final int LINEAR = 0;
|
||||
public static final int POLY = 1;
|
||||
public static final int RBF = 2;
|
||||
public static final int SIGMOID = 3;
|
||||
public static final int PRECOMPUTED = 4;
|
||||
|
||||
public int svm_type;
|
||||
public int kernel_type;
|
||||
public int degree; // for poly
|
||||
public double gamma; // for poly/rbf/sigmoid
|
||||
public double coef0; // for poly/sigmoid
|
||||
|
||||
// these are for training only
|
||||
public double cache_size; // in MB
|
||||
public double eps; // stopping criteria
|
||||
public double C; // for C_SVC, EPSILON_SVR and NU_SVR
|
||||
public int nr_weight; // for C_SVC
|
||||
public int[] weight_label; // for C_SVC
|
||||
public double[] weight; // for C_SVC
|
||||
public double nu; // for NU_SVC, ONE_CLASS, and NU_SVR
|
||||
public double p; // for EPSILON_SVR
|
||||
public int shrinking; // use the shrinking heuristics
|
||||
public int probability; // do probability estimates
|
||||
|
||||
public Object clone()
|
||||
{
|
||||
try
|
||||
{
|
||||
return super.clone();
|
||||
} catch (CloneNotSupportedException e)
|
||||
{
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
}
|
5
libsvm-3.36/java/libsvm/svm_print_interface.java
Normal file
5
libsvm-3.36/java/libsvm/svm_print_interface.java
Normal file
@@ -0,0 +1,5 @@
|
||||
package libsvm;
|
||||
public interface svm_print_interface
|
||||
{
|
||||
public void print(String s);
|
||||
}
|
7
libsvm-3.36/java/libsvm/svm_problem.java
Normal file
7
libsvm-3.36/java/libsvm/svm_problem.java
Normal file
@@ -0,0 +1,7 @@
|
||||
package libsvm;
|
||||
public class svm_problem implements java.io.Serializable
|
||||
{
|
||||
public int l;
|
||||
public double[] y;
|
||||
public svm_node[][] x;
|
||||
}
|
200
libsvm-3.36/java/svm_predict.java
Normal file
200
libsvm-3.36/java/svm_predict.java
Normal file
@@ -0,0 +1,200 @@
|
||||
import libsvm.*;
|
||||
import java.io.*;
|
||||
import java.util.*;
|
||||
|
||||
class svm_predict {
|
||||
private static svm_print_interface svm_print_null = new svm_print_interface()
|
||||
{
|
||||
public void print(String s) {}
|
||||
};
|
||||
|
||||
private static svm_print_interface svm_print_stdout = new svm_print_interface()
|
||||
{
|
||||
public void print(String s)
|
||||
{
|
||||
System.out.print(s);
|
||||
}
|
||||
};
|
||||
|
||||
private static svm_print_interface svm_print_string = svm_print_stdout;
|
||||
|
||||
static void info(String s)
|
||||
{
|
||||
svm_print_string.print(s);
|
||||
}
|
||||
|
||||
private static double atof(String s)
|
||||
{
|
||||
return Double.valueOf(s).doubleValue();
|
||||
}
|
||||
|
||||
private static int atoi(String s)
|
||||
{
|
||||
return Integer.parseInt(s);
|
||||
}
|
||||
|
||||
private static void predict(BufferedReader input, DataOutputStream output, svm_model model, int predict_probability) throws IOException
|
||||
{
|
||||
int correct = 0;
|
||||
int total = 0;
|
||||
double error = 0;
|
||||
double sump = 0, sumt = 0, sumpp = 0, sumtt = 0, sumpt = 0;
|
||||
|
||||
int svm_type=svm.svm_get_svm_type(model);
|
||||
int nr_class=svm.svm_get_nr_class(model);
|
||||
double[] prob_estimates=null;
|
||||
|
||||
if(predict_probability == 1)
|
||||
{
|
||||
if(svm_type == svm_parameter.EPSILON_SVR ||
|
||||
svm_type == svm_parameter.NU_SVR)
|
||||
{
|
||||
svm_predict.info("Prob. model for test data: target value = predicted value + z,\nz: Laplace distribution e^(-|z|/sigma)/(2sigma),sigma="+svm.svm_get_svr_probability(model)+"\n");
|
||||
}
|
||||
else if(svm_type == svm_parameter.ONE_CLASS)
|
||||
{
|
||||
// nr_class = 2 for ONE_CLASS
|
||||
prob_estimates = new double[nr_class];
|
||||
output.writeBytes("label normal outlier\n");
|
||||
}
|
||||
else
|
||||
{
|
||||
int[] labels=new int[nr_class];
|
||||
svm.svm_get_labels(model,labels);
|
||||
prob_estimates = new double[nr_class];
|
||||
output.writeBytes("labels");
|
||||
for(int j=0;j<nr_class;j++)
|
||||
output.writeBytes(" "+labels[j]);
|
||||
output.writeBytes("\n");
|
||||
}
|
||||
}
|
||||
while(true)
|
||||
{
|
||||
String line = input.readLine();
|
||||
if(line == null) break;
|
||||
|
||||
StringTokenizer st = new StringTokenizer(line," \t\n\r\f:");
|
||||
|
||||
double target_label = atof(st.nextToken());
|
||||
int m = st.countTokens()/2;
|
||||
svm_node[] x = new svm_node[m];
|
||||
for(int j=0;j<m;j++)
|
||||
{
|
||||
x[j] = new svm_node();
|
||||
x[j].index = atoi(st.nextToken());
|
||||
x[j].value = atof(st.nextToken());
|
||||
}
|
||||
|
||||
double predict_label;
|
||||
if (predict_probability==1 && (svm_type==svm_parameter.C_SVC || svm_type==svm_parameter.NU_SVC || svm_type==svm_parameter.ONE_CLASS))
|
||||
{
|
||||
predict_label = svm.svm_predict_probability(model,x,prob_estimates);
|
||||
output.writeBytes(predict_label+" ");
|
||||
for(int j=0;j<nr_class;j++)
|
||||
output.writeBytes(prob_estimates[j]+" ");
|
||||
output.writeBytes("\n");
|
||||
}
|
||||
else
|
||||
{
|
||||
predict_label = svm.svm_predict(model,x);
|
||||
output.writeBytes(predict_label+"\n");
|
||||
}
|
||||
|
||||
if(predict_label == target_label)
|
||||
++correct;
|
||||
error += (predict_label-target_label)*(predict_label-target_label);
|
||||
sump += predict_label;
|
||||
sumt += target_label;
|
||||
sumpp += predict_label*predict_label;
|
||||
sumtt += target_label*target_label;
|
||||
sumpt += predict_label*target_label;
|
||||
++total;
|
||||
}
|
||||
if(svm_type == svm_parameter.EPSILON_SVR ||
|
||||
svm_type == svm_parameter.NU_SVR)
|
||||
{
|
||||
svm_predict.info("Mean squared error = "+error/total+" (regression)\n");
|
||||
svm_predict.info("Squared correlation coefficient = "+
|
||||
((total*sumpt-sump*sumt)*(total*sumpt-sump*sumt))/
|
||||
((total*sumpp-sump*sump)*(total*sumtt-sumt*sumt))+
|
||||
" (regression)\n");
|
||||
}
|
||||
else
|
||||
svm_predict.info("Accuracy = "+(double)correct/total*100+
|
||||
"% ("+correct+"/"+total+") (classification)\n");
|
||||
}
|
||||
|
||||
private static void exit_with_help()
|
||||
{
|
||||
System.err.print("usage: svm_predict [options] test_file model_file output_file\n"
|
||||
+"options:\n"
|
||||
+"-b probability_estimates: whether to predict probability estimates, 0 or 1 (default 0); one-class SVM not supported yet\n"
|
||||
+"-q : quiet mode (no outputs)\n");
|
||||
System.exit(1);
|
||||
}
|
||||
|
||||
public static void main(String argv[]) throws IOException
|
||||
{
|
||||
int i, predict_probability=0;
|
||||
svm_print_string = svm_print_stdout;
|
||||
|
||||
// parse options
|
||||
for(i=0;i<argv.length;i++)
|
||||
{
|
||||
if(argv[i].charAt(0) != '-') break;
|
||||
++i;
|
||||
switch(argv[i-1].charAt(1))
|
||||
{
|
||||
case 'b':
|
||||
predict_probability = atoi(argv[i]);
|
||||
break;
|
||||
case 'q':
|
||||
svm_print_string = svm_print_null;
|
||||
i--;
|
||||
break;
|
||||
default:
|
||||
System.err.print("Unknown option: " + argv[i-1] + "\n");
|
||||
exit_with_help();
|
||||
}
|
||||
}
|
||||
if(i>=argv.length-2)
|
||||
exit_with_help();
|
||||
try
|
||||
{
|
||||
BufferedReader input = new BufferedReader(new FileReader(argv[i]));
|
||||
DataOutputStream output = new DataOutputStream(new BufferedOutputStream(new FileOutputStream(argv[i+2])));
|
||||
svm_model model = svm.svm_load_model(argv[i+1]);
|
||||
if (model == null)
|
||||
{
|
||||
System.err.print("can't open model file "+argv[i+1]+"\n");
|
||||
System.exit(1);
|
||||
}
|
||||
if(predict_probability == 1)
|
||||
{
|
||||
if(svm.svm_check_probability_model(model)==0)
|
||||
{
|
||||
System.err.print("Model does not support probabiliy estimates\n");
|
||||
System.exit(1);
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
if(svm.svm_check_probability_model(model)!=0)
|
||||
{
|
||||
svm_predict.info("Model supports probability estimates, but disabled in prediction.\n");
|
||||
}
|
||||
}
|
||||
predict(input,output,model,predict_probability);
|
||||
input.close();
|
||||
output.close();
|
||||
}
|
||||
catch(FileNotFoundException e)
|
||||
{
|
||||
exit_with_help();
|
||||
}
|
||||
catch(ArrayIndexOutOfBoundsException e)
|
||||
{
|
||||
exit_with_help();
|
||||
}
|
||||
}
|
||||
}
|
350
libsvm-3.36/java/svm_scale.java
Normal file
350
libsvm-3.36/java/svm_scale.java
Normal file
@@ -0,0 +1,350 @@
|
||||
import libsvm.*;
|
||||
import java.io.*;
|
||||
import java.util.*;
|
||||
import java.text.DecimalFormat;
|
||||
|
||||
class svm_scale
|
||||
{
|
||||
private String line = null;
|
||||
private double lower = -1.0;
|
||||
private double upper = 1.0;
|
||||
private double y_lower;
|
||||
private double y_upper;
|
||||
private boolean y_scaling = false;
|
||||
private double[] feature_max;
|
||||
private double[] feature_min;
|
||||
private double y_max = -Double.MAX_VALUE;
|
||||
private double y_min = Double.MAX_VALUE;
|
||||
private int max_index;
|
||||
private long num_nonzeros = 0;
|
||||
private long new_num_nonzeros = 0;
|
||||
|
||||
private static void exit_with_help()
|
||||
{
|
||||
System.out.print(
|
||||
"Usage: svm-scale [options] data_filename\n"
|
||||
+"options:\n"
|
||||
+"-l lower : x scaling lower limit (default -1)\n"
|
||||
+"-u upper : x scaling upper limit (default +1)\n"
|
||||
+"-y y_lower y_upper : y scaling limits (default: no y scaling)\n"
|
||||
+"-s save_filename : save scaling parameters to save_filename\n"
|
||||
+"-r restore_filename : restore scaling parameters from restore_filename\n"
|
||||
);
|
||||
System.exit(1);
|
||||
}
|
||||
|
||||
private BufferedReader rewind(BufferedReader fp, String filename) throws IOException
|
||||
{
|
||||
fp.close();
|
||||
return new BufferedReader(new FileReader(filename));
|
||||
}
|
||||
|
||||
private void output_target(double value)
|
||||
{
|
||||
if(y_scaling)
|
||||
{
|
||||
if(value == y_min)
|
||||
value = y_lower;
|
||||
else if(value == y_max)
|
||||
value = y_upper;
|
||||
else
|
||||
value = y_lower + (y_upper-y_lower) *
|
||||
(value-y_min) / (y_max-y_min);
|
||||
}
|
||||
|
||||
System.out.print(value + " ");
|
||||
}
|
||||
|
||||
private void output(int index, double value)
|
||||
{
|
||||
/* skip single-valued attribute */
|
||||
if(feature_max[index] == feature_min[index])
|
||||
return;
|
||||
|
||||
if(value == feature_min[index])
|
||||
value = lower;
|
||||
else if(value == feature_max[index])
|
||||
value = upper;
|
||||
else
|
||||
value = lower + (upper-lower) *
|
||||
(value-feature_min[index])/
|
||||
(feature_max[index]-feature_min[index]);
|
||||
|
||||
if(value != 0)
|
||||
{
|
||||
System.out.print(index + ":" + value + " ");
|
||||
new_num_nonzeros++;
|
||||
}
|
||||
}
|
||||
|
||||
private String readline(BufferedReader fp) throws IOException
|
||||
{
|
||||
line = fp.readLine();
|
||||
return line;
|
||||
}
|
||||
|
||||
private void run(String []argv) throws IOException
|
||||
{
|
||||
int i,index;
|
||||
BufferedReader fp = null, fp_restore = null;
|
||||
String save_filename = null;
|
||||
String restore_filename = null;
|
||||
String data_filename = null;
|
||||
|
||||
|
||||
for(i=0;i<argv.length;i++)
|
||||
{
|
||||
if (argv[i].charAt(0) != '-') break;
|
||||
++i;
|
||||
switch(argv[i-1].charAt(1))
|
||||
{
|
||||
case 'l': lower = Double.parseDouble(argv[i]); break;
|
||||
case 'u': upper = Double.parseDouble(argv[i]); break;
|
||||
case 'y':
|
||||
y_lower = Double.parseDouble(argv[i]);
|
||||
++i;
|
||||
y_upper = Double.parseDouble(argv[i]);
|
||||
y_scaling = true;
|
||||
break;
|
||||
case 's': save_filename = argv[i]; break;
|
||||
case 'r': restore_filename = argv[i]; break;
|
||||
default:
|
||||
System.err.println("unknown option");
|
||||
exit_with_help();
|
||||
}
|
||||
}
|
||||
|
||||
if(!(upper > lower) || (y_scaling && !(y_upper > y_lower)))
|
||||
{
|
||||
System.err.println("inconsistent lower/upper specification");
|
||||
System.exit(1);
|
||||
}
|
||||
if(restore_filename != null && save_filename != null)
|
||||
{
|
||||
System.err.println("cannot use -r and -s simultaneously");
|
||||
System.exit(1);
|
||||
}
|
||||
|
||||
if(argv.length != i+1)
|
||||
exit_with_help();
|
||||
|
||||
data_filename = argv[i];
|
||||
try {
|
||||
fp = new BufferedReader(new FileReader(data_filename));
|
||||
} catch (Exception e) {
|
||||
System.err.println("can't open file " + data_filename);
|
||||
System.exit(1);
|
||||
}
|
||||
|
||||
/* assumption: min index of attributes is 1 */
|
||||
/* pass 1: find out max index of attributes */
|
||||
max_index = 0;
|
||||
|
||||
if(restore_filename != null)
|
||||
{
|
||||
int idx, c;
|
||||
|
||||
try {
|
||||
fp_restore = new BufferedReader(new FileReader(restore_filename));
|
||||
}
|
||||
catch (Exception e) {
|
||||
System.err.println("can't open file " + restore_filename);
|
||||
System.exit(1);
|
||||
}
|
||||
if((c = fp_restore.read()) == 'y')
|
||||
{
|
||||
fp_restore.readLine();
|
||||
fp_restore.readLine();
|
||||
fp_restore.readLine();
|
||||
}
|
||||
fp_restore.readLine();
|
||||
fp_restore.readLine();
|
||||
|
||||
String restore_line = null;
|
||||
while((restore_line = fp_restore.readLine())!=null)
|
||||
{
|
||||
StringTokenizer st2 = new StringTokenizer(restore_line);
|
||||
idx = Integer.parseInt(st2.nextToken());
|
||||
max_index = Math.max(max_index, idx);
|
||||
}
|
||||
fp_restore = rewind(fp_restore, restore_filename);
|
||||
}
|
||||
|
||||
while (readline(fp) != null)
|
||||
{
|
||||
StringTokenizer st = new StringTokenizer(line," \t\n\r\f:");
|
||||
st.nextToken();
|
||||
while(st.hasMoreTokens())
|
||||
{
|
||||
index = Integer.parseInt(st.nextToken());
|
||||
max_index = Math.max(max_index, index);
|
||||
st.nextToken();
|
||||
num_nonzeros++;
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
feature_max = new double[(max_index+1)];
|
||||
feature_min = new double[(max_index+1)];
|
||||
} catch(OutOfMemoryError e) {
|
||||
System.err.println("can't allocate enough memory");
|
||||
System.exit(1);
|
||||
}
|
||||
|
||||
for(i=0;i<=max_index;i++)
|
||||
{
|
||||
feature_max[i] = -Double.MAX_VALUE;
|
||||
feature_min[i] = Double.MAX_VALUE;
|
||||
}
|
||||
|
||||
fp = rewind(fp, data_filename);
|
||||
|
||||
/* pass 2: find out min/max value */
|
||||
while(readline(fp) != null)
|
||||
{
|
||||
int next_index = 1;
|
||||
double target;
|
||||
double value;
|
||||
|
||||
StringTokenizer st = new StringTokenizer(line," \t\n\r\f:");
|
||||
target = Double.parseDouble(st.nextToken());
|
||||
y_max = Math.max(y_max, target);
|
||||
y_min = Math.min(y_min, target);
|
||||
|
||||
while (st.hasMoreTokens())
|
||||
{
|
||||
index = Integer.parseInt(st.nextToken());
|
||||
value = Double.parseDouble(st.nextToken());
|
||||
|
||||
for (i = next_index; i<index; i++)
|
||||
{
|
||||
feature_max[i] = Math.max(feature_max[i], 0);
|
||||
feature_min[i] = Math.min(feature_min[i], 0);
|
||||
}
|
||||
|
||||
feature_max[index] = Math.max(feature_max[index], value);
|
||||
feature_min[index] = Math.min(feature_min[index], value);
|
||||
next_index = index + 1;
|
||||
}
|
||||
|
||||
for(i=next_index;i<=max_index;i++)
|
||||
{
|
||||
feature_max[i] = Math.max(feature_max[i], 0);
|
||||
feature_min[i] = Math.min(feature_min[i], 0);
|
||||
}
|
||||
}
|
||||
|
||||
fp = rewind(fp, data_filename);
|
||||
|
||||
/* pass 2.5: save/restore feature_min/feature_max */
|
||||
if(restore_filename != null)
|
||||
{
|
||||
// fp_restore rewinded in finding max_index
|
||||
int idx, c;
|
||||
double fmin, fmax;
|
||||
|
||||
fp_restore.mark(2); // for reset
|
||||
if((c = fp_restore.read()) == 'y')
|
||||
{
|
||||
fp_restore.readLine(); // pass the '\n' after 'y'
|
||||
StringTokenizer st = new StringTokenizer(fp_restore.readLine());
|
||||
y_lower = Double.parseDouble(st.nextToken());
|
||||
y_upper = Double.parseDouble(st.nextToken());
|
||||
st = new StringTokenizer(fp_restore.readLine());
|
||||
y_min = Double.parseDouble(st.nextToken());
|
||||
y_max = Double.parseDouble(st.nextToken());
|
||||
y_scaling = true;
|
||||
}
|
||||
else
|
||||
fp_restore.reset();
|
||||
|
||||
if(fp_restore.read() == 'x') {
|
||||
fp_restore.readLine(); // pass the '\n' after 'x'
|
||||
StringTokenizer st = new StringTokenizer(fp_restore.readLine());
|
||||
lower = Double.parseDouble(st.nextToken());
|
||||
upper = Double.parseDouble(st.nextToken());
|
||||
String restore_line = null;
|
||||
while((restore_line = fp_restore.readLine())!=null)
|
||||
{
|
||||
StringTokenizer st2 = new StringTokenizer(restore_line);
|
||||
idx = Integer.parseInt(st2.nextToken());
|
||||
fmin = Double.parseDouble(st2.nextToken());
|
||||
fmax = Double.parseDouble(st2.nextToken());
|
||||
if (idx <= max_index)
|
||||
{
|
||||
feature_min[idx] = fmin;
|
||||
feature_max[idx] = fmax;
|
||||
}
|
||||
}
|
||||
}
|
||||
fp_restore.close();
|
||||
}
|
||||
|
||||
if(save_filename != null)
|
||||
{
|
||||
Formatter formatter = new Formatter(new StringBuilder());
|
||||
BufferedWriter fp_save = null;
|
||||
|
||||
try {
|
||||
fp_save = new BufferedWriter(new FileWriter(save_filename));
|
||||
} catch(IOException e) {
|
||||
System.err.println("can't open file " + save_filename);
|
||||
System.exit(1);
|
||||
}
|
||||
|
||||
if(y_scaling)
|
||||
{
|
||||
formatter.format("y\n");
|
||||
formatter.format("%.16g %.16g\n", y_lower, y_upper);
|
||||
formatter.format("%.16g %.16g\n", y_min, y_max);
|
||||
}
|
||||
formatter.format("x\n");
|
||||
formatter.format("%.16g %.16g\n", lower, upper);
|
||||
for(i=1;i<=max_index;i++)
|
||||
{
|
||||
if(feature_min[i] != feature_max[i])
|
||||
formatter.format("%d %.16g %.16g\n", i, feature_min[i], feature_max[i]);
|
||||
}
|
||||
fp_save.write(formatter.toString());
|
||||
fp_save.close();
|
||||
}
|
||||
|
||||
/* pass 3: scale */
|
||||
while(readline(fp) != null)
|
||||
{
|
||||
int next_index = 1;
|
||||
double target;
|
||||
double value;
|
||||
|
||||
StringTokenizer st = new StringTokenizer(line," \t\n\r\f:");
|
||||
target = Double.parseDouble(st.nextToken());
|
||||
output_target(target);
|
||||
while(st.hasMoreElements())
|
||||
{
|
||||
index = Integer.parseInt(st.nextToken());
|
||||
value = Double.parseDouble(st.nextToken());
|
||||
for (i = next_index; i<index; i++)
|
||||
output(i, 0);
|
||||
output(index, value);
|
||||
next_index = index + 1;
|
||||
}
|
||||
|
||||
for(i=next_index;i<= max_index;i++)
|
||||
output(i, 0);
|
||||
System.out.print("\n");
|
||||
}
|
||||
if (new_num_nonzeros > num_nonzeros)
|
||||
System.err.print(
|
||||
"WARNING: original #nonzeros " + num_nonzeros+"\n"
|
||||
+" new #nonzeros " + new_num_nonzeros+"\n"
|
||||
+"Use -l 0 if many original feature values are zeros\n");
|
||||
|
||||
fp.close();
|
||||
}
|
||||
|
||||
public static void main(String argv[]) throws IOException
|
||||
{
|
||||
svm_scale s = new svm_scale();
|
||||
s.run(argv);
|
||||
}
|
||||
}
|
487
libsvm-3.36/java/svm_toy.java
Normal file
487
libsvm-3.36/java/svm_toy.java
Normal file
@@ -0,0 +1,487 @@
|
||||
import libsvm.*;
|
||||
import java.awt.*;
|
||||
import java.util.*;
|
||||
import java.awt.event.*;
|
||||
import java.io.*;
|
||||
|
||||
public class svm_toy {
|
||||
public static void main(String[] args) {
|
||||
svm_toy_frame frame = new svm_toy_frame("svm_toy", 500, 500+50);
|
||||
}
|
||||
}
|
||||
class svm_toy_frame extends Frame {
|
||||
|
||||
static final String DEFAULT_PARAM="-t 2 -c 100";
|
||||
int XLEN;
|
||||
int YLEN;
|
||||
|
||||
// off-screen buffer
|
||||
|
||||
Image buffer;
|
||||
Graphics buffer_gc;
|
||||
|
||||
// pre-allocated colors
|
||||
|
||||
final static Color colors[] =
|
||||
{
|
||||
new Color(0,0,0),
|
||||
new Color(0,120,120),
|
||||
new Color(120,120,0),
|
||||
new Color(120,0,120),
|
||||
new Color(0,200,200),
|
||||
new Color(200,200,0),
|
||||
new Color(200,0,200)
|
||||
};
|
||||
|
||||
class point {
|
||||
point(double x, double y, byte value)
|
||||
{
|
||||
this.x = x;
|
||||
this.y = y;
|
||||
this.value = value;
|
||||
}
|
||||
double x, y;
|
||||
byte value;
|
||||
}
|
||||
|
||||
Vector<point> point_list = new Vector<point>();
|
||||
byte current_value = 1;
|
||||
|
||||
svm_toy_frame(String title, int width, int height)
|
||||
{
|
||||
super(title);
|
||||
this.addWindowListener(new WindowAdapter() {
|
||||
public void windowClosing(WindowEvent e) {
|
||||
System.exit(0);
|
||||
}
|
||||
});
|
||||
this.init();
|
||||
this.setSize(width, height);
|
||||
XLEN = width;
|
||||
YLEN = height-50;
|
||||
this.clear_all();
|
||||
this.setVisible(true);
|
||||
}
|
||||
|
||||
void init()
|
||||
{
|
||||
final Button button_change = new Button("Change");
|
||||
Button button_run = new Button("Run");
|
||||
Button button_clear = new Button("Clear");
|
||||
Button button_save = new Button("Save");
|
||||
Button button_load = new Button("Load");
|
||||
final TextField input_line = new TextField(DEFAULT_PARAM);
|
||||
|
||||
BorderLayout layout = new BorderLayout();
|
||||
this.setLayout(layout);
|
||||
|
||||
Panel p = new Panel();
|
||||
GridBagLayout gridbag = new GridBagLayout();
|
||||
p.setLayout(gridbag);
|
||||
|
||||
GridBagConstraints c = new GridBagConstraints();
|
||||
c.fill = GridBagConstraints.HORIZONTAL;
|
||||
c.weightx = 1;
|
||||
c.gridwidth = 1;
|
||||
gridbag.setConstraints(button_change,c);
|
||||
gridbag.setConstraints(button_run,c);
|
||||
gridbag.setConstraints(button_clear,c);
|
||||
gridbag.setConstraints(button_save,c);
|
||||
gridbag.setConstraints(button_load,c);
|
||||
c.weightx = 5;
|
||||
c.gridwidth = 5;
|
||||
gridbag.setConstraints(input_line,c);
|
||||
|
||||
button_change.setBackground(colors[current_value]);
|
||||
|
||||
p.add(button_change);
|
||||
p.add(button_run);
|
||||
p.add(button_clear);
|
||||
p.add(button_save);
|
||||
p.add(button_load);
|
||||
p.add(input_line);
|
||||
this.add(p,BorderLayout.SOUTH);
|
||||
|
||||
button_change.addActionListener(new ActionListener()
|
||||
{ public void actionPerformed (ActionEvent e)
|
||||
{ button_change_clicked(); button_change.setBackground(colors[current_value]); }});
|
||||
|
||||
button_run.addActionListener(new ActionListener()
|
||||
{ public void actionPerformed (ActionEvent e)
|
||||
{ button_run_clicked(input_line.getText()); }});
|
||||
|
||||
button_clear.addActionListener(new ActionListener()
|
||||
{ public void actionPerformed (ActionEvent e)
|
||||
{ button_clear_clicked(); }});
|
||||
|
||||
button_save.addActionListener(new ActionListener()
|
||||
{ public void actionPerformed (ActionEvent e)
|
||||
{ button_save_clicked(input_line.getText()); }});
|
||||
|
||||
button_load.addActionListener(new ActionListener()
|
||||
{ public void actionPerformed (ActionEvent e)
|
||||
{ button_load_clicked(); }});
|
||||
|
||||
input_line.addActionListener(new ActionListener()
|
||||
{ public void actionPerformed (ActionEvent e)
|
||||
{ button_run_clicked(input_line.getText()); }});
|
||||
|
||||
this.enableEvents(AWTEvent.MOUSE_EVENT_MASK);
|
||||
}
|
||||
|
||||
void draw_point(point p)
|
||||
{
|
||||
Color c = colors[p.value+3];
|
||||
|
||||
Graphics window_gc = getGraphics();
|
||||
buffer_gc.setColor(c);
|
||||
buffer_gc.fillRect((int)(p.x*XLEN),(int)(p.y*YLEN),4,4);
|
||||
window_gc.setColor(c);
|
||||
window_gc.fillRect((int)(p.x*XLEN),(int)(p.y*YLEN),4,4);
|
||||
}
|
||||
|
||||
void clear_all()
|
||||
{
|
||||
point_list.removeAllElements();
|
||||
if(buffer != null)
|
||||
{
|
||||
buffer_gc.setColor(colors[0]);
|
||||
buffer_gc.fillRect(0,0,XLEN,YLEN);
|
||||
}
|
||||
repaint();
|
||||
}
|
||||
|
||||
void draw_all_points()
|
||||
{
|
||||
int n = point_list.size();
|
||||
for(int i=0;i<n;i++)
|
||||
draw_point(point_list.elementAt(i));
|
||||
}
|
||||
|
||||
void button_change_clicked()
|
||||
{
|
||||
++current_value;
|
||||
if(current_value > 3) current_value = 1;
|
||||
}
|
||||
|
||||
private static double atof(String s)
|
||||
{
|
||||
return Double.valueOf(s).doubleValue();
|
||||
}
|
||||
|
||||
// Parse a decimal string into an int (throws NumberFormatException on
// malformed input).
private static int atoi(String s)
{
	return Integer.parseInt(s);
}
|
||||
|
||||
// "Run" button handler: parse svm_train-style options from `args`, build an
// svm_problem from the clicked points, train a model, and paint the decision
// regions (classification) or the regression curve (SVR) onto both the
// offscreen buffer and the live window.
void button_run_clicked(String args)
{
	// guard
	if(point_list.isEmpty()) return;

	svm_parameter param = new svm_parameter();

	// default values
	param.svm_type = svm_parameter.C_SVC;
	param.kernel_type = svm_parameter.RBF;
	param.degree = 3;
	param.gamma = 0;	// 0 means "pick a default later, per branch below"
	param.coef0 = 0;
	param.nu = 0.5;
	param.cache_size = 40;
	param.C = 1;
	param.eps = 1e-3;
	param.p = 0.1;
	param.shrinking = 1;
	param.probability = 0;
	param.nr_weight = 0;
	param.weight_label = new int[0];
	param.weight = new double[0];

	// parse options
	StringTokenizer st = new StringTokenizer(args);
	String[] argv = new String[st.countTokens()];
	for(int i=0;i<argv.length;i++)
		argv[i] = st.nextToken();

	// Each option is a "-X value" pair; `i` is advanced past the value
	// inside the loop, so argv[i-1] is the flag and argv[i] its argument.
	for(int i=0;i<argv.length;i++)
	{
		if(argv[i].charAt(0) != '-') break;
		if(++i>=argv.length)
		{
			System.err.print("unknown option\n");
			break;
		}
		switch(argv[i-1].charAt(1))
		{
			case 's':
				param.svm_type = atoi(argv[i]);
				break;
			case 't':
				param.kernel_type = atoi(argv[i]);
				break;
			case 'd':
				param.degree = atoi(argv[i]);
				break;
			case 'g':
				param.gamma = atof(argv[i]);
				break;
			case 'r':
				param.coef0 = atof(argv[i]);
				break;
			case 'n':
				param.nu = atof(argv[i]);
				break;
			case 'm':
				param.cache_size = atof(argv[i]);
				break;
			case 'c':
				param.C = atof(argv[i]);
				break;
			case 'e':
				param.eps = atof(argv[i]);
				break;
			case 'p':
				param.p = atof(argv[i]);
				break;
			case 'h':
				param.shrinking = atoi(argv[i]);
				break;
			case 'b':
				param.probability = atoi(argv[i]);
				break;
			case 'w':
				// -wi weight: grow both weight arrays by one slot,
				// preserving existing entries.
				++param.nr_weight;
				{
					int[] old = param.weight_label;
					param.weight_label = new int[param.nr_weight];
					System.arraycopy(old,0,param.weight_label,0,param.nr_weight-1);
				}

				{
					double[] old = param.weight;
					param.weight = new double[param.nr_weight];
					System.arraycopy(old,0,param.weight,0,param.nr_weight-1);
				}

				// class label i is the suffix of the flag itself ("-wi")
				param.weight_label[param.nr_weight-1] = atoi(argv[i-1].substring(2));
				param.weight[param.nr_weight-1] = atof(argv[i]);
				break;
			default:
				System.err.print("unknown option\n");
		}
	}

	// build problem
	svm_problem prob = new svm_problem();
	prob.l = point_list.size();
	prob.y = new double[prob.l];

	if(param.kernel_type == svm_parameter.PRECOMPUTED)
	{
		// Precomputed kernels cannot be built from toy points; do nothing.
	}
	else if(param.svm_type == svm_parameter.EPSILON_SVR ||
		param.svm_type == svm_parameter.NU_SVR)
	{
		// Regression: each point contributes x as the single feature and
		// its screen y as the target value.
		if(param.gamma == 0) param.gamma = 1;
		prob.x = new svm_node[prob.l][1];
		for(int i=0;i<prob.l;i++)
		{
			point p = point_list.elementAt(i);
			prob.x[i][0] = new svm_node();
			prob.x[i][0].index = 1;
			prob.x[i][0].value = p.x;
			prob.y[i] = p.y;
		}

		// build model & classify
		svm_model model = svm.svm_train(prob, param);
		svm_node[] x = new svm_node[1];
		x[0] = new svm_node();
		x[0].index = 1;
		int[] j = new int[XLEN];

		// Predict one y for every screen column.
		Graphics window_gc = getGraphics();
		for (int i = 0; i < XLEN; i++)
		{
			x[0].value = (double) i / XLEN;
			j[i] = (int)(YLEN*svm.svm_predict(model, x));
		}

		// Clear column 0 (the i=1.. loop below clears the rest).
		buffer_gc.setColor(colors[0]);
		buffer_gc.drawLine(0,0,0,YLEN-1);
		window_gc.setColor(colors[0]);
		window_gc.drawLine(0,0,0,YLEN-1);

		// Epsilon tube half-width, in pixels.
		int p = (int)(param.p * YLEN);
		for(int i=1;i<XLEN;i++)
		{
			buffer_gc.setColor(colors[0]);
			buffer_gc.drawLine(i,0,i,YLEN-1);
			window_gc.setColor(colors[0]);
			window_gc.drawLine(i,0,i,YLEN-1);

			// Regression curve segment for this column.
			buffer_gc.setColor(colors[5]);
			window_gc.setColor(colors[5]);
			buffer_gc.drawLine(i-1,j[i-1],i,j[i]);
			window_gc.drawLine(i-1,j[i-1],i,j[i]);

			if(param.svm_type == svm_parameter.EPSILON_SVR)
			{
				// Upper and lower edges of the epsilon-insensitive tube.
				buffer_gc.setColor(colors[2]);
				window_gc.setColor(colors[2]);
				buffer_gc.drawLine(i-1,j[i-1]+p,i,j[i]+p);
				window_gc.drawLine(i-1,j[i-1]+p,i,j[i]+p);

				buffer_gc.setColor(colors[2]);
				window_gc.setColor(colors[2]);
				buffer_gc.drawLine(i-1,j[i-1]-p,i,j[i]-p);
				window_gc.drawLine(i-1,j[i-1]-p,i,j[i]-p);
			}
		}
	}
	else
	{
		// Classification / one-class: two features (x, y), label = brush value.
		if(param.gamma == 0) param.gamma = 0.5;
		prob.x = new svm_node [prob.l][2];
		for(int i=0;i<prob.l;i++)
		{
			point p = point_list.elementAt(i);
			prob.x[i][0] = new svm_node();
			prob.x[i][0].index = 1;
			prob.x[i][0].value = p.x;
			prob.x[i][1] = new svm_node();
			prob.x[i][1].index = 2;
			prob.x[i][1].value = p.y;
			prob.y[i] = p.value;
		}

		// build model & classify
		svm_model model = svm.svm_train(prob, param);
		svm_node[] x = new svm_node[2];
		x[0] = new svm_node();
		x[1] = new svm_node();
		x[0].index = 1;
		x[1].index = 2;

		// Predict each pixel and paint it with its class color.
		Graphics window_gc = getGraphics();
		for (int i = 0; i < XLEN; i++)
			for (int j = 0; j < YLEN ; j++) {
				x[0].value = (double) i / XLEN;
				x[1].value = (double) j / YLEN;
				double d = svm.svm_predict(model, x);
				// one-class outliers predict -1; remap to color slot 2
				if (param.svm_type == svm_parameter.ONE_CLASS && d<0) d=2;
				buffer_gc.setColor(colors[(int)d]);
				window_gc.setColor(colors[(int)d]);
				buffer_gc.drawLine(i,j,i,j);
				window_gc.drawLine(i,j,i,j);
			}
	}

	draw_all_points();
}
|
||||
|
||||
// "Clear" button handler: remove all points and reset the canvas.
void button_clear_clicked()
{
	clear_all();
}
|
||||
|
||||
void button_save_clicked(String args)
|
||||
{
|
||||
FileDialog dialog = new FileDialog(new Frame(),"Save",FileDialog.SAVE);
|
||||
dialog.setVisible(true);
|
||||
String filename = dialog.getDirectory() + dialog.getFile();
|
||||
if (filename == null) return;
|
||||
try {
|
||||
DataOutputStream fp = new DataOutputStream(new BufferedOutputStream(new FileOutputStream(filename)));
|
||||
|
||||
int svm_type = svm_parameter.C_SVC;
|
||||
int svm_type_idx = args.indexOf("-s ");
|
||||
if(svm_type_idx != -1)
|
||||
{
|
||||
StringTokenizer svm_str_st = new StringTokenizer(args.substring(svm_type_idx+2).trim());
|
||||
svm_type = atoi(svm_str_st.nextToken());
|
||||
}
|
||||
|
||||
int n = point_list.size();
|
||||
if(svm_type == svm_parameter.EPSILON_SVR || svm_type == svm_parameter.NU_SVR)
|
||||
{
|
||||
for(int i=0;i<n;i++)
|
||||
{
|
||||
point p = point_list.elementAt(i);
|
||||
fp.writeBytes(p.y+" 1:"+p.x+"\n");
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
for(int i=0;i<n;i++)
|
||||
{
|
||||
point p = point_list.elementAt(i);
|
||||
fp.writeBytes(p.value+" 1:"+p.x+" 2:"+p.y+"\n");
|
||||
}
|
||||
}
|
||||
fp.close();
|
||||
} catch (IOException e) { System.err.print(e); }
|
||||
}
|
||||
|
||||
void button_load_clicked()
|
||||
{
|
||||
FileDialog dialog = new FileDialog(new Frame(),"Load",FileDialog.LOAD);
|
||||
dialog.setVisible(true);
|
||||
String filename = dialog.getDirectory() + dialog.getFile();
|
||||
if (filename == null) return;
|
||||
clear_all();
|
||||
try {
|
||||
BufferedReader fp = new BufferedReader(new FileReader(filename));
|
||||
String line;
|
||||
while((line = fp.readLine()) != null)
|
||||
{
|
||||
StringTokenizer st = new StringTokenizer(line," \t\n\r\f:");
|
||||
if(st.countTokens() == 5)
|
||||
{
|
||||
byte value = (byte)atoi(st.nextToken());
|
||||
st.nextToken();
|
||||
double x = atof(st.nextToken());
|
||||
st.nextToken();
|
||||
double y = atof(st.nextToken());
|
||||
point_list.addElement(new point(x,y,value));
|
||||
}
|
||||
else if(st.countTokens() == 3)
|
||||
{
|
||||
double y = atof(st.nextToken());
|
||||
st.nextToken();
|
||||
double x = atof(st.nextToken());
|
||||
point_list.addElement(new point(x,y,current_value));
|
||||
}else
|
||||
break;
|
||||
}
|
||||
fp.close();
|
||||
} catch (IOException e) { System.err.print(e); }
|
||||
draw_all_points();
|
||||
}
|
||||
|
||||
protected void processMouseEvent(MouseEvent e)
|
||||
{
|
||||
if(e.getID() == MouseEvent.MOUSE_PRESSED)
|
||||
{
|
||||
if(e.getX() >= XLEN || e.getY() >= YLEN) return;
|
||||
point p = new point((double)e.getX()/XLEN,
|
||||
(double)e.getY()/YLEN,
|
||||
current_value);
|
||||
point_list.addElement(p);
|
||||
draw_point(p);
|
||||
}
|
||||
}
|
||||
|
||||
public void paint(Graphics g)
|
||||
{
|
||||
// create buffer first time
|
||||
if(buffer == null) {
|
||||
buffer = this.createImage(XLEN,YLEN);
|
||||
buffer_gc = buffer.getGraphics();
|
||||
buffer_gc.setColor(colors[0]);
|
||||
buffer_gc.fillRect(0,0,XLEN,YLEN);
|
||||
}
|
||||
g.drawImage(buffer,0,0,this);
|
||||
}
|
||||
}
|
318
libsvm-3.36/java/svm_train.java
Normal file
318
libsvm-3.36/java/svm_train.java
Normal file
@@ -0,0 +1,318 @@
|
||||
import libsvm.*;
|
||||
import java.io.*;
|
||||
import java.util.*;
|
||||
|
||||
// Command-line training front end for LIBSVM: parses svm_train options,
// reads a problem file in svmlight format, and either trains a model
// (saved to model_file) or runs n-fold cross validation (-v).
class svm_train {
	private svm_parameter param;		// set by parse_command_line
	private svm_problem prob;		// set by read_problem
	private svm_model model;
	private String input_file_name;		// set by parse_command_line
	private String model_file_name;		// set by parse_command_line
	private String error_msg;
	private int cross_validation;		// non-zero iff -v was given
	private int nr_fold;

	// Print function that swallows all output; installed by -q (quiet mode).
	private static svm_print_interface svm_print_null = new svm_print_interface()
	{
		public void print(String s) {}
	};

	// Print the usage text and terminate with a non-zero exit status.
	private static void exit_with_help()
	{
		System.out.print(
		 "Usage: svm_train [options] training_set_file [model_file]\n"
		+"options:\n"
		+"-s svm_type : set type of SVM (default 0)\n"
		+"	0 -- C-SVC		(multi-class classification)\n"
		+"	1 -- nu-SVC		(multi-class classification)\n"
		+"	2 -- one-class SVM\n"
		+"	3 -- epsilon-SVR	(regression)\n"
		+"	4 -- nu-SVR		(regression)\n"
		+"-t kernel_type : set type of kernel function (default 2)\n"
		+"	0 -- linear: u'*v\n"
		+"	1 -- polynomial: (gamma*u'*v + coef0)^degree\n"
		+"	2 -- radial basis function: exp(-gamma*|u-v|^2)\n"
		+"	3 -- sigmoid: tanh(gamma*u'*v + coef0)\n"
		+"	4 -- precomputed kernel (kernel values in training_set_file)\n"
		+"-d degree : set degree in kernel function (default 3)\n"
		+"-g gamma : set gamma in kernel function (default 1/num_features)\n"
		+"-r coef0 : set coef0 in kernel function (default 0)\n"
		+"-c cost : set the parameter C of C-SVC, epsilon-SVR, and nu-SVR (default 1)\n"
		+"-n nu : set the parameter nu of nu-SVC, one-class SVM, and nu-SVR (default 0.5)\n"
		+"-p epsilon : set the epsilon in loss function of epsilon-SVR (default 0.1)\n"
		+"-m cachesize : set cache memory size in MB (default 100)\n"
		+"-e epsilon : set tolerance of termination criterion (default 0.001)\n"
		+"-h shrinking : whether to use the shrinking heuristics, 0 or 1 (default 1)\n"
		+"-b probability_estimates : whether to train a SVC or SVR model for probability estimates, 0 or 1 (default 0)\n"
		+"-wi weight : set the parameter C of class i to weight*C, for C-SVC (default 1)\n"
		+"-v n : n-fold cross validation mode\n"
		+"-q : quiet mode (no outputs)\n"
		);
		System.exit(1);
	}

	// Run nr_fold cross validation over prob and print either accuracy
	// (classification) or MSE plus squared correlation (regression).
	private void do_cross_validation()
	{
		int i;
		int total_correct = 0;
		double total_error = 0;
		double sumv = 0, sumy = 0, sumvv = 0, sumyy = 0, sumvy = 0;
		double[] target = new double[prob.l];

		svm.svm_cross_validation(prob,param,nr_fold,target);
		if(param.svm_type == svm_parameter.EPSILON_SVR ||
		   param.svm_type == svm_parameter.NU_SVR)
		{
			for(i=0;i<prob.l;i++)
			{
				double y = prob.y[i];
				double v = target[i];
				total_error += (v-y)*(v-y);
				sumv += v;
				sumy += y;
				sumvv += v*v;
				sumyy += y*y;
				sumvy += v*y;
			}
			System.out.print("Cross Validation Mean squared error = "+total_error/prob.l+"\n");
			// squared Pearson correlation between predictions and targets
			System.out.print("Cross Validation Squared correlation coefficient = "+
				((prob.l*sumvy-sumv*sumy)*(prob.l*sumvy-sumv*sumy))/
				((prob.l*sumvv-sumv*sumv)*(prob.l*sumyy-sumy*sumy))+"\n"
				);
		}
		else
		{
			for(i=0;i<prob.l;i++)
				if(target[i] == prob.y[i])
					++total_correct;
			System.out.print("Cross Validation Accuracy = "+100.0*total_correct/prob.l+"%\n");
		}
	}

	// Full pipeline: parse options, load data, validate parameters, then
	// either cross-validate or train-and-save.
	private void run(String argv[]) throws IOException
	{
		parse_command_line(argv);
		read_problem();
		error_msg = svm.svm_check_parameter(prob,param);

		if(error_msg != null)
		{
			System.err.print("ERROR: "+error_msg+"\n");
			System.exit(1);
		}

		if(cross_validation != 0)
		{
			do_cross_validation();
		}
		else
		{
			model = svm.svm_train(prob,param);
			svm.svm_save_model(model_file_name,model);
		}
	}

	public static void main(String argv[]) throws IOException
	{
		svm_train t = new svm_train();
		t.run(argv);
	}

	// Parse a double; rejects NaN/Infinity because they would silently
	// corrupt training, and exits instead.
	private static double atof(String s)
	{
		double d = Double.valueOf(s).doubleValue();
		if (Double.isNaN(d) || Double.isInfinite(d))
		{
			System.err.print("NaN or Infinity in input\n");
			System.exit(1);
		}
		return(d);
	}

	private static int atoi(String s)
	{
		return Integer.parseInt(s);
	}

	// Fill `param`, `cross_validation`/`nr_fold`, and the two file names
	// from the command line.  Options are "-X value" pairs; `i` is advanced
	// past the value inside the loop, so argv[i-1] is the flag.
	private void parse_command_line(String argv[])
	{
		int i;
		svm_print_interface print_func = null;	// default printing to stdout

		param = new svm_parameter();
		// default values
		param.svm_type = svm_parameter.C_SVC;
		param.kernel_type = svm_parameter.RBF;
		param.degree = 3;
		param.gamma = 0;	// 1/num_features
		param.coef0 = 0;
		param.nu = 0.5;
		param.cache_size = 100;
		param.C = 1;
		param.eps = 1e-3;
		param.p = 0.1;
		param.shrinking = 1;
		param.probability = 0;
		param.nr_weight = 0;
		param.weight_label = new int[0];
		param.weight = new double[0];
		cross_validation = 0;

		// parse options
		for(i=0;i<argv.length;i++)
		{
			if(argv[i].charAt(0) != '-') break;
			if(++i>=argv.length)
				exit_with_help();
			switch(argv[i-1].charAt(1))
			{
				case 's':
					param.svm_type = atoi(argv[i]);
					break;
				case 't':
					param.kernel_type = atoi(argv[i]);
					break;
				case 'd':
					param.degree = atoi(argv[i]);
					break;
				case 'g':
					param.gamma = atof(argv[i]);
					break;
				case 'r':
					param.coef0 = atof(argv[i]);
					break;
				case 'n':
					param.nu = atof(argv[i]);
					break;
				case 'm':
					param.cache_size = atof(argv[i]);
					break;
				case 'c':
					param.C = atof(argv[i]);
					break;
				case 'e':
					param.eps = atof(argv[i]);
					break;
				case 'p':
					param.p = atof(argv[i]);
					break;
				case 'h':
					param.shrinking = atoi(argv[i]);
					break;
				case 'b':
					param.probability = atoi(argv[i]);
					break;
				case 'q':
					print_func = svm_print_null;
					i--;	// -q takes no argument; undo the advance
					break;
				case 'v':
					cross_validation = 1;
					nr_fold = atoi(argv[i]);
					if(nr_fold < 2)
					{
						System.err.print("n-fold cross validation: n must >= 2\n");
						exit_with_help();
					}
					break;
				case 'w':
					// -wi weight: grow both weight arrays by one,
					// keeping the existing entries.
					++param.nr_weight;
					{
						int[] old = param.weight_label;
						param.weight_label = new int[param.nr_weight];
						System.arraycopy(old,0,param.weight_label,0,param.nr_weight-1);
					}

					{
						double[] old = param.weight;
						param.weight = new double[param.nr_weight];
						System.arraycopy(old,0,param.weight,0,param.nr_weight-1);
					}

					// class label i is the suffix of the flag ("-wi")
					param.weight_label[param.nr_weight-1] = atoi(argv[i-1].substring(2));
					param.weight[param.nr_weight-1] = atof(argv[i]);
					break;
				default:
					System.err.print("Unknown option: " + argv[i-1] + "\n");
					exit_with_help();
			}
		}

		svm.svm_set_print_string_function(print_func);

		// determine filenames

		if(i>=argv.length)
			exit_with_help();

		input_file_name = argv[i];

		if(i<argv.length-1)
			model_file_name = argv[i+1];
		else
		{
			// default model name: basename of the input file + ".model"
			int p = argv[i].lastIndexOf('/');
			++p;	// whew...
			model_file_name = argv[i].substring(p)+".model";
		}
	}

	// read in a problem (in svmlight format)

	private void read_problem() throws IOException
	{
		BufferedReader fp = new BufferedReader(new FileReader(input_file_name));
		Vector<Double> vy = new Vector<Double>();
		Vector<svm_node[]> vx = new Vector<svm_node[]>();
		int max_index = 0;

		while(true)
		{
			String line = fp.readLine();
			if(line == null) break;

			// ':' is a delimiter, so "index:value" splits into two tokens
			StringTokenizer st = new StringTokenizer(line," \t\n\r\f:");

			vy.addElement(atof(st.nextToken()));
			int m = st.countTokens()/2;
			svm_node[] x = new svm_node[m];
			for(int j=0;j<m;j++)
			{
				x[j] = new svm_node();
				x[j].index = atoi(st.nextToken());
				x[j].value = atof(st.nextToken());
			}
			// indices are ascending within a line, so the last one is
			// the largest feature index of this instance
			if(m>0) max_index = Math.max(max_index, x[m-1].index);
			vx.addElement(x);
		}

		prob = new svm_problem();
		prob.l = vy.size();
		prob.x = new svm_node[prob.l][];
		for(int i=0;i<prob.l;i++)
			prob.x[i] = vx.elementAt(i);
		prob.y = new double[prob.l];
		for(int i=0;i<prob.l;i++)
			prob.y[i] = vy.elementAt(i);

		// apply the documented default gamma = 1/num_features
		if(param.gamma == 0 && max_index > 0)
			param.gamma = 1.0/max_index;

		// precomputed kernels require "0:serial_number" as the first column
		if(param.kernel_type == svm_parameter.PRECOMPUTED)
			for(int i=0;i<prob.l;i++)
			{
				if (prob.x[i][0].index != 0)
				{
					System.err.print("Wrong kernel matrix: first column must be 0:sample_serial_number\n");
					System.exit(1);
				}
				if ((int)prob.x[i][0].value <= 0 || (int)prob.x[i][0].value > max_index)
				{
					System.err.print("Wrong input format: sample_serial_number out of range\n");
					System.exit(1);
				}
			}

		fp.close();
	}
}
|
39
libsvm-3.36/matlab/Makefile
Normal file
39
libsvm-3.36/matlab/Makefile
Normal file
@@ -0,0 +1,39 @@
|
||||
# This Makefile is used under Linux

MATLABDIR ?= /usr/local/matlab
# for Mac
# MATLABDIR ?= /opt/local/matlab

CXX ?= g++
#CXX = g++-4.1
CFLAGS = -Wall -Wconversion -O3 -fPIC -I$(MATLABDIR)/extern/include -I..

MEX = $(MATLABDIR)/bin/mex
MEX_OPTION = CC="$(CXX)" CXX="$(CXX)" CFLAGS="$(CFLAGS)" CXXFLAGS="$(CFLAGS)"
# comment the following line if you use MATLAB on 32-bit computer
MEX_OPTION += -largeArrayDims
# := so `mexext` runs once at parse time instead of on every expansion
MEX_EXT := $(shell $(MATLABDIR)/bin/mexext)

# These targets do not name files they create; declare them phony so a
# stray file called e.g. `clean` cannot shadow them.
.PHONY: all matlab octave binary clean

all: matlab

matlab: binary

octave:
	@echo "please type make under Octave"

binary: svmpredict.$(MEX_EXT) svmtrain.$(MEX_EXT) libsvmread.$(MEX_EXT) libsvmwrite.$(MEX_EXT)

svmpredict.$(MEX_EXT): svmpredict.c ../svm.h ../svm.cpp svm_model_matlab.c
	$(MEX) $(MEX_OPTION) svmpredict.c ../svm.cpp svm_model_matlab.c

svmtrain.$(MEX_EXT): svmtrain.c ../svm.h ../svm.cpp svm_model_matlab.c
	$(MEX) $(MEX_OPTION) svmtrain.c ../svm.cpp svm_model_matlab.c

libsvmread.$(MEX_EXT): libsvmread.c
	$(MEX) $(MEX_OPTION) libsvmread.c

libsvmwrite.$(MEX_EXT): libsvmwrite.c
	$(MEX) $(MEX_OPTION) libsvmwrite.c

clean:
	rm -f *~ *.o *.mex* *.obj
|
243
libsvm-3.36/matlab/README
Normal file
243
libsvm-3.36/matlab/README
Normal file
@@ -0,0 +1,243 @@
|
||||
-----------------------------------------
|
||||
--- MATLAB/OCTAVE interface of LIBSVM ---
|
||||
-----------------------------------------
|
||||
|
||||
Table of Contents
|
||||
=================
|
||||
|
||||
- Introduction
|
||||
- Installation
|
||||
- Usage
|
||||
- Returned Model Structure
|
||||
- Other Utilities
|
||||
- Examples
|
||||
- Additional Information
|
||||
|
||||
|
||||
Introduction
|
||||
============
|
||||
|
||||
This tool provides a simple interface to LIBSVM, a library for support vector
|
||||
machines (http://www.csie.ntu.edu.tw/~cjlin/libsvm). It is very easy to use as
|
||||
the usage and the way of specifying parameters are the same as that of LIBSVM.
|
||||
|
||||
Installation
|
||||
============
|
||||
|
||||
On Windows systems, pre-built mex files are already in the
|
||||
directory '..\windows', so please just copy them to the matlab
|
||||
directory. Now we provide binary files only for 64bit MATLAB on
|
||||
Windows. If you would like to re-build the package, please rely on the
|
||||
following steps.
|
||||
|
||||
We recommend using make.m on both MATLAB and OCTAVE. Just type 'make'
|
||||
to build 'libsvmread.mex', 'libsvmwrite.mex', 'svmtrain.mex', and
|
||||
'svmpredict.mex'.
|
||||
|
||||
On MATLAB or Octave:
|
||||
|
||||
>> make
|
||||
|
||||
If make.m does not work on MATLAB (especially for Windows), try 'mex
|
||||
-setup' to choose a suitable compiler for mex. Make sure your compiler
|
||||
is accessible and workable. Then type 'make' to do the installation.
|
||||
|
||||
Example:
|
||||
|
||||
matlab>> mex -setup
|
||||
|
||||
MATLAB will choose the default compiler. If you have multiple compilers,
|
||||
a list is given and you can choose one from the list. For more details,
|
||||
please check the following page:
|
||||
|
||||
https://www.mathworks.com/help/matlab/matlab_external/choose-c-or-c-compilers.html
|
||||
|
||||
On Windows, make.m has been tested via using Visual C++.
|
||||
|
||||
On Unix systems, if neither make.m nor 'mex -setup' works, please use
|
||||
Makefile and type 'make' in a command window. Note that we assume
|
||||
your MATLAB is installed in '/usr/local/matlab'. If not, please change
|
||||
MATLABDIR in Makefile.
|
||||
|
||||
Example:
|
||||
linux> make
|
||||
|
||||
To use octave, type 'make octave':
|
||||
|
||||
Example:
|
||||
linux> make octave
|
||||
|
||||
For a list of supported/compatible compilers for MATLAB, please check
|
||||
the following page:
|
||||
|
||||
http://www.mathworks.com/support/compilers/current_release/
|
||||
|
||||
Usage
|
||||
=====
|
||||
|
||||
matlab> model = svmtrain(training_label_vector, training_instance_matrix [, 'libsvm_options']);
|
||||
|
||||
-training_label_vector:
|
||||
An m by 1 vector of training labels (type must be double).
|
||||
-training_instance_matrix:
|
||||
An m by n matrix of m training instances with n features.
|
||||
It can be dense or sparse (type must be double).
|
||||
-libsvm_options:
|
||||
A string of training options in the same format as that of LIBSVM.
|
||||
|
||||
matlab> [predicted_label, accuracy, decision_values/prob_estimates] = svmpredict(testing_label_vector, testing_instance_matrix, model [, 'libsvm_options']);
|
||||
matlab> [predicted_label] = svmpredict(testing_label_vector, testing_instance_matrix, model [, 'libsvm_options']);
|
||||
|
||||
-testing_label_vector:
|
||||
An m by 1 vector of prediction labels. If labels of test
|
||||
data are unknown, simply use any random values. (type must be double)
|
||||
-testing_instance_matrix:
|
||||
An m by n matrix of m testing instances with n features.
|
||||
It can be dense or sparse. (type must be double)
|
||||
-model:
|
||||
The output of svmtrain.
|
||||
-libsvm_options:
|
||||
A string of testing options in the same format as that of LIBSVM.
|
||||
|
||||
Returned Model Structure
|
||||
========================
|
||||
|
||||
The 'svmtrain' function returns a model which can be used for future
|
||||
prediction. It is a structure and is organized as [Parameters, nr_class,
|
||||
totalSV, rho, Label, ProbA, ProbB, Prob_density_marks, nSV, sv_coef, SVs]:
|
||||
|
||||
-Parameters: parameters
|
||||
-nr_class: number of classes; = 2 for regression/one-class svm
|
||||
-totalSV: total #SV
|
||||
-rho: -b of the decision function(s) wx+b
|
||||
-Label: label of each class; empty for regression/one-class SVM
|
||||
-sv_indices: values in [1,...,num_training_data] to indicate SVs in the training set
|
||||
-ProbA: pairwise probability information; empty if -b 0 or in one-class SVM
|
||||
-ProbB: pairwise probability information; empty if -b 0 or in one-class SVM
|
||||
-Prob_density_marks: probability information for one-class SVM; empty if -b 0 or not in one-class SVM
|
||||
-nSV: number of SVs for each class; empty for regression/one-class SVM
|
||||
-sv_coef: coefficients for SVs in decision functions
|
||||
-SVs: support vectors
|
||||
|
||||
If you do not use the option '-b 1', ProbA and ProbB are empty
|
||||
matrices. If the '-v' option is specified, cross validation is
|
||||
conducted and the returned model is just a scalar: cross-validation
|
||||
accuracy for classification and mean-squared error for regression.
|
||||
|
||||
More details about this model can be found in LIBSVM FAQ
|
||||
(http://www.csie.ntu.edu.tw/~cjlin/libsvm/faq.html) and LIBSVM
|
||||
implementation document
|
||||
(http://www.csie.ntu.edu.tw/~cjlin/papers/libsvm.pdf).
|
||||
|
||||
Result of Prediction
|
||||
====================
|
||||
|
||||
The function 'svmpredict' has three outputs. The first one,
|
||||
predicted_label, is a vector of predicted labels. The second output,
|
||||
accuracy, is a vector including accuracy (for classification), mean
|
||||
squared error, and squared correlation coefficient (for regression).
|
||||
The third is a matrix containing decision values or probability
|
||||
estimates (if '-b 1' is specified). If k is the number of classes
|
||||
in training data, for decision values, each row includes results of
|
||||
predicting k(k-1)/2 binary-class SVMs. For classification, k = 1 is a
|
||||
special case. Decision value +1 is returned for each testing instance,
|
||||
instead of an empty vector. For probabilities, each row contains k values
|
||||
indicating the probability that the testing instance is in each class.
|
||||
Note that the order of classes here is the same as 'Label' field
|
||||
in the model structure.
|
||||
For one-class SVM, each row contains two elements for probabilities
|
||||
of normal instance/outlier.
|
||||
|
||||
Other Utilities
|
||||
===============
|
||||
|
||||
A matlab function libsvmread reads files in LIBSVM format:
|
||||
|
||||
[label_vector, instance_matrix] = libsvmread('data.txt');
|
||||
|
||||
Two outputs are labels and instances, which can then be used as inputs
|
||||
of svmtrain or svmpredict.
|
||||
|
||||
A matlab function libsvmwrite writes Matlab matrix to a file in LIBSVM format:
|
||||
|
||||
libsvmwrite('data.txt', label_vector, instance_matrix)
|
||||
|
||||
The instance_matrix must be a sparse matrix. (type must be double)
|
||||
For 32bit and 64bit MATLAB on Windows, pre-built binary files are ready
|
||||
in the directory `..\windows', but in future releases, we will only
|
||||
include 64bit MATLAB binary files.
|
||||
|
||||
These codes are prepared by Rong-En Fan and Kai-Wei Chang from National
|
||||
Taiwan University.
|
||||
|
||||
Examples
|
||||
========
|
||||
|
||||
Train and test on the provided data heart_scale:
|
||||
|
||||
matlab> [heart_scale_label, heart_scale_inst] = libsvmread('../heart_scale');
|
||||
matlab> model = svmtrain(heart_scale_label, heart_scale_inst, '-c 1 -g 0.07');
|
||||
matlab> [predict_label, accuracy, dec_values] = svmpredict(heart_scale_label, heart_scale_inst, model); % test the training data
|
||||
|
||||
For probability estimates, you need '-b 1' for training and testing:
|
||||
|
||||
matlab> [heart_scale_label, heart_scale_inst] = libsvmread('../heart_scale');
|
||||
matlab> model = svmtrain(heart_scale_label, heart_scale_inst, '-c 1 -g 0.07 -b 1');
|
||||
matlab> [heart_scale_label, heart_scale_inst] = libsvmread('../heart_scale');
|
||||
matlab> [predict_label, accuracy, prob_estimates] = svmpredict(heart_scale_label, heart_scale_inst, model, '-b 1');
|
||||
|
||||
To use precomputed kernel, you must include sample serial number as
|
||||
the first column of the training and testing data (assume your kernel
|
||||
matrix is K, # of instances is n):
|
||||
|
||||
matlab> K1 = [(1:n)', K]; % include sample serial number as first column
|
||||
matlab> model = svmtrain(label_vector, K1, '-t 4');
|
||||
matlab> [predict_label, accuracy, dec_values] = svmpredict(label_vector, K1, model); % test the training data
|
||||
|
||||
We give the following detailed example by splitting heart_scale into
|
||||
150 training and 120 testing data. Constructing a linear kernel
|
||||
matrix and then using the precomputed kernel gives exactly the same
|
||||
testing error as using the LIBSVM built-in linear kernel.
|
||||
|
||||
matlab> [heart_scale_label, heart_scale_inst] = libsvmread('../heart_scale');
|
||||
matlab>
|
||||
matlab> % Split Data
|
||||
matlab> train_data = heart_scale_inst(1:150,:);
|
||||
matlab> train_label = heart_scale_label(1:150,:);
|
||||
matlab> test_data = heart_scale_inst(151:270,:);
|
||||
matlab> test_label = heart_scale_label(151:270,:);
|
||||
matlab>
|
||||
matlab> % Linear Kernel
|
||||
matlab> model_linear = svmtrain(train_label, train_data, '-t 0');
|
||||
matlab> [predict_label_L, accuracy_L, dec_values_L] = svmpredict(test_label, test_data, model_linear);
|
||||
matlab>
|
||||
matlab> % Precomputed Kernel
|
||||
matlab> model_precomputed = svmtrain(train_label, [(1:150)', train_data*train_data'], '-t 4');
|
||||
matlab> [predict_label_P, accuracy_P, dec_values_P] = svmpredict(test_label, [(1:120)', test_data*train_data'], model_precomputed);
|
||||
matlab>
|
||||
matlab> accuracy_L % Display the accuracy using linear kernel
|
||||
matlab> accuracy_P % Display the accuracy using precomputed kernel
|
||||
|
||||
Note that for testing, you can put anything in the
|
||||
testing_label_vector. For more details of precomputed kernels, please
|
||||
read the section ``Precomputed Kernels'' in the README of the LIBSVM
|
||||
package.
|
||||
|
||||
Additional Information
|
||||
======================
|
||||
|
||||
This interface was initially written by Jun-Cheng Chen, Kuan-Jen Peng,
|
||||
Chih-Yuan Yang and Chih-Huai Cheng from Department of Computer
|
||||
Science, National Taiwan University. The current version was prepared
|
||||
by Rong-En Fan and Ting-Fan Wu. If you find this tool useful, please
|
||||
cite LIBSVM as follows
|
||||
|
||||
Chih-Chung Chang and Chih-Jen Lin, LIBSVM : a library for support
|
||||
vector machines. ACM Transactions on Intelligent Systems and
|
||||
Technology, 2:27:1--27:27, 2011. Software available at
|
||||
http://www.csie.ntu.edu.tw/~cjlin/libsvm
|
||||
|
||||
For any question, please contact Chih-Jen Lin <cjlin@csie.ntu.edu.tw>,
|
||||
or check the FAQ page:
|
||||
|
||||
http://www.csie.ntu.edu.tw/~cjlin/libsvm/faq.html#/Q10:_MATLAB_interface
|
212
libsvm-3.36/matlab/libsvmread.c
Normal file
212
libsvm-3.36/matlab/libsvmread.c
Normal file
@@ -0,0 +1,212 @@
|
||||
#include <stdio.h>
|
||||
#include <string.h>
|
||||
#include <stdlib.h>
|
||||
#include <ctype.h>
|
||||
#include <errno.h>
|
||||
|
||||
#include "mex.h"
|
||||
|
||||
#ifdef MX_API_VER
|
||||
#if MX_API_VER < 0x07030000
|
||||
typedef int mwIndex;
|
||||
#endif
|
||||
#endif
|
||||
#ifndef max
|
||||
#define max(x,y) (((x)>(y))?(x):(y))
|
||||
#endif
|
||||
#ifndef min
|
||||
#define min(x,y) (((x)<(y))?(x):(y))
|
||||
#endif
|
||||
|
||||
/* Print the calling convention of libsvmread to the MATLAB console. */
void exit_with_help()
{
	const char *usage =
		"Usage: [label_vector, instance_matrix] = libsvmread('filename');\n";
	mexPrintf(usage);
}
|
||||
|
||||
/* Fill every requested output slot with an empty 0x0 double matrix so
 * the caller always receives well-defined values after a failure. */
static void fake_answer(int nlhs, mxArray *plhs[])
{
	int out;
	for(out = 0; out < nlhs; ++out)
		plhs[out] = mxCreateDoubleMatrix(0, 0, mxREAL);
}
|
||||
|
||||
/* Growable buffer owned by readline(); reallocated on demand. */
static char *line;
/* Current capacity of `line` in bytes; doubled whenever a line does not fit. */
static int max_line_len;
|
||||
|
||||
static char* readline(FILE *input)
|
||||
{
|
||||
int len;
|
||||
|
||||
if(fgets(line,max_line_len,input) == NULL)
|
||||
return NULL;
|
||||
|
||||
while(strrchr(line,'\n') == NULL)
|
||||
{
|
||||
max_line_len *= 2;
|
||||
line = (char *) realloc(line, max_line_len);
|
||||
len = (int) strlen(line);
|
||||
if(fgets(line+len,max_line_len-len,input) == NULL)
|
||||
break;
|
||||
}
|
||||
return line;
|
||||
}
|
||||
|
||||
// read in a problem (in libsvm format)
|
||||
/* Read a problem in libsvm text format from `filename` and return it as
 * two MATLAB arrays: plhs[0] = l-by-1 dense label vector and plhs[1] =
 * sparse instance matrix (one row per instance after the final
 * transpose).  On any failure an error is printed and all outputs are
 * set to empty matrices via fake_answer(). */
void read_problem(const char *filename, int nlhs, mxArray *plhs[])
{
	int max_index, min_index, inst_max_index;
	size_t elements, k, i, l=0;
	FILE *fp = fopen(filename,"r");
	char *endptr;
	mwIndex *ir, *jc;
	double *labels, *samples;

	if(fp == NULL)
	{
		mexPrintf("can't open input file %s\n",filename);
		fake_answer(nlhs, plhs);
		return;
	}

	max_line_len = 1024;
	line = (char *) malloc(max_line_len*sizeof(char));

	/* First pass: count instances (l) and nonzeros (elements), and find
	   the feature-index range so the sparse matrix can be sized up front. */
	max_index = 0;
	min_index = 1; // our index starts from 1
	elements = 0;
	while(readline(fp) != NULL)
	{
		char *idx, *val;
		// features
		int index = 0;

		inst_max_index = -1; // strtol gives 0 if wrong format, and precomputed kernel has <index> start from 0
		strtok(line," \t"); // label
		while (1)
		{
			idx = strtok(NULL,":"); // index:value
			val = strtok(NULL," \t");
			if(val == NULL)
				break;

			errno = 0;
			index = (int) strtol(idx,&endptr,10);
			if(endptr == idx || errno != 0 || *endptr != '\0' || index <= inst_max_index)
			{
				/* BUGFIX: l is size_t, so use a matching conversion
				   instead of %d, and release the buffer and file
				   handle that the old code leaked on this path. */
				mexPrintf("Wrong input format at line %lu\n",(unsigned long)(l+1));
				free(line);
				fclose(fp);
				fake_answer(nlhs, plhs);
				return;
			}
			else
				inst_max_index = index;

			min_index = min(min_index, index);
			elements++;
		}
		max_index = max(max_index, inst_max_index);
		l++;
	}
	rewind(fp);

	// y
	plhs[0] = mxCreateDoubleMatrix(l, 1, mxREAL);
	// x^T (built transposed; MATLAB sparse storage is column-compressed)
	if (min_index <= 0)
		plhs[1] = mxCreateSparse(max_index-min_index+1, l, elements, mxREAL);
	else
		plhs[1] = mxCreateSparse(max_index, l, elements, mxREAL);

	labels = mxGetPr(plhs[0]);
	samples = mxGetPr(plhs[1]);
	ir = mxGetIr(plhs[1]);
	jc = mxGetJc(plhs[1]);

	/* Second pass: parse labels and index:value pairs into the
	   column-compressed arrays. */
	k=0;
	for(i=0;i<l;i++)
	{
		char *idx, *val, *label;
		jc[i] = k;

		readline(fp);

		label = strtok(line," \t\n");
		if(label == NULL)
		{
			/* BUGFIX: matching format for size_t i; free resources. */
			mexPrintf("Empty line at line %lu\n",(unsigned long)(i+1));
			free(line);
			fclose(fp);
			fake_answer(nlhs, plhs);
			return;
		}
		labels[i] = strtod(label,&endptr);
		if(endptr == label || *endptr != '\0')
		{
			mexPrintf("Wrong input format at line %lu\n",(unsigned long)(i+1));
			free(line);
			fclose(fp);
			fake_answer(nlhs, plhs);
			return;
		}

		// features
		while(1)
		{
			idx = strtok(NULL,":");
			val = strtok(NULL," \t");
			if(val == NULL)
				break;

			ir[k] = (mwIndex) (strtol(idx,&endptr,10) - min_index); // precomputed kernel has <index> start from 0

			errno = 0;
			samples[k] = strtod(val,&endptr);
			if (endptr == val || errno != 0 || (*endptr != '\0' && !isspace(*endptr)))
			{
				mexPrintf("Wrong input format at line %lu\n",(unsigned long)(i+1));
				free(line);
				fclose(fp);
				fake_answer(nlhs, plhs);
				return;
			}
			++k;
		}
	}
	jc[l] = k;

	fclose(fp);
	free(line);

	/* The data was assembled as x^T; transpose so each row is one
	   instance, as callers expect. */
	{
		mxArray *rhs[1], *lhs[1];
		rhs[0] = plhs[1];
		if(mexCallMATLAB(1, lhs, 1, rhs, "transpose"))
		{
			mexPrintf("Error: cannot transpose problem\n");
			fake_answer(nlhs, plhs);
			return;
		}
		plhs[1] = lhs[0];
	}
}
|
||||
|
||||
/* MATLAB entry point: [labels, instances] = libsvmread('filename').
 * Validates argument counts, extracts the filename, and delegates to
 * read_problem(); on any failure all outputs become empty matrices. */
void mexFunction( int nlhs, mxArray *plhs[],
		int nrhs, const mxArray *prhs[] )
{
#define filename_size 256

	char filename[filename_size];

	/* Exactly one input (the filename) and two outputs are required. */
	if(nrhs == 1 && nlhs == 2)
	{
		/* mxGetString returns 0 on success */
		if(mxGetString(prhs[0], filename, filename_size) == 0)
		{
			read_problem(filename, nlhs, plhs);
			return;
		}
		mexPrintf("Error: wrong or too long filename\n");
	}
	else
		exit_with_help();

	fake_answer(nlhs, plhs);
}
|
||||
|
119
libsvm-3.36/matlab/libsvmwrite.c
Normal file
119
libsvm-3.36/matlab/libsvmwrite.c
Normal file
@@ -0,0 +1,119 @@
|
||||
#include <stdio.h>
|
||||
#include <stdlib.h>
|
||||
#include <string.h>
|
||||
#include "mex.h"
|
||||
|
||||
#ifdef MX_API_VER
|
||||
#if MX_API_VER < 0x07030000
|
||||
typedef int mwIndex;
|
||||
#endif
|
||||
#endif
|
||||
|
||||
/* Print the calling convention of libsvmwrite to the MATLAB console. */
void exit_with_help()
{
	const char *usage =
		"Usage: libsvmwrite('filename', label_vector, instance_matrix);\n";
	mexPrintf(usage);
}
|
||||
|
||||
/* Give every requested output an empty 0x0 double matrix; called on
 * error paths so MATLAB never sees uninitialized outputs. */
static void fake_answer(int nlhs, mxArray *plhs[])
{
	int out = 0;
	while(out < nlhs)
	{
		plhs[out] = mxCreateDoubleMatrix(0, 0, mxREAL);
		++out;
	}
}
|
||||
|
||||
void libsvmwrite(const char *filename, const mxArray *label_vec, const mxArray *instance_mat)
|
||||
{
|
||||
FILE *fp = fopen(filename,"w");
|
||||
mwIndex *ir, *jc, k, low, high;
|
||||
size_t i, l, label_vector_row_num;
|
||||
double *samples, *labels;
|
||||
mxArray *instance_mat_col; // instance sparse matrix in column format
|
||||
|
||||
if(fp ==NULL)
|
||||
{
|
||||
mexPrintf("can't open output file %s\n",filename);
|
||||
return;
|
||||
}
|
||||
|
||||
// transpose instance matrix
|
||||
{
|
||||
mxArray *prhs[1], *plhs[1];
|
||||
prhs[0] = mxDuplicateArray(instance_mat);
|
||||
if(mexCallMATLAB(1, plhs, 1, prhs, "transpose"))
|
||||
{
|
||||
mexPrintf("Error: cannot transpose instance matrix\n");
|
||||
return;
|
||||
}
|
||||
instance_mat_col = plhs[0];
|
||||
mxDestroyArray(prhs[0]);
|
||||
}
|
||||
|
||||
// the number of instance
|
||||
l = mxGetN(instance_mat_col);
|
||||
label_vector_row_num = mxGetM(label_vec);
|
||||
|
||||
if(label_vector_row_num!=l)
|
||||
{
|
||||
mexPrintf("Length of label vector does not match # of instances.\n");
|
||||
return;
|
||||
}
|
||||
|
||||
// each column is one instance
|
||||
labels = mxGetPr(label_vec);
|
||||
samples = mxGetPr(instance_mat_col);
|
||||
ir = mxGetIr(instance_mat_col);
|
||||
jc = mxGetJc(instance_mat_col);
|
||||
|
||||
for(i=0;i<l;i++)
|
||||
{
|
||||
fprintf(fp,"%.17g", labels[i]);
|
||||
|
||||
low = jc[i], high = jc[i+1];
|
||||
for(k=low;k<high;k++)
|
||||
fprintf(fp," %lu:%g", (size_t)ir[k]+1, samples[k]);
|
||||
|
||||
fprintf(fp,"\n");
|
||||
}
|
||||
|
||||
fclose(fp);
|
||||
return;
|
||||
}
|
||||
|
||||
/* MATLAB entry point: libsvmwrite('filename', label_vector,
 * instance_matrix).  Validates argument counts and types, then
 * dispatches to libsvmwrite().  No outputs are produced. */
void mexFunction( int nlhs, mxArray *plhs[],
		int nrhs, const mxArray *prhs[] )
{
	if(nlhs > 0)
	{
		exit_with_help();
		fake_answer(nlhs, plhs);
		return;
	}

	// Transform the input Matrix to libsvm format
	if(nrhs == 3)
	{
		char filename[256];
		if(!mxIsDouble(prhs[1]) || !mxIsDouble(prhs[2]))
		{
			mexPrintf("Error: label vector and instance matrix must be double\n");
			return;
		}

		/* BUGFIX: bound the copy by the destination size.  The old call
		   passed mxGetN(prhs[0])+1 as the buffer length, overflowing
		   filename[256] for paths longer than 255 characters.
		   mxGetString returns nonzero when the string does not fit. */
		if(mxGetString(prhs[0], filename, sizeof(filename)) != 0)
		{
			mexPrintf("Error: wrong or too long filename\n");
			return;
		}

		if(mxIsSparse(prhs[2]))
			libsvmwrite(filename, prhs[1], prhs[2]);
		else
		{
			mexPrintf("Instance_matrix must be sparse\n");
			return;
		}
	}
	else
	{
		exit_with_help();
		return;
	}
}
|
22
libsvm-3.36/matlab/make.m
Normal file
22
libsvm-3.36/matlab/make.m
Normal file
@@ -0,0 +1,22 @@
|
||||
% This make.m is for MATLAB and OCTAVE under Windows, Mac, and Unix
% It builds the four libsvm MEX binaries (libsvmread, libsvmwrite,
% svmtrain, svmpredict) with the toolchain of the running environment,
% choosing Octave- or MATLAB-specific mex flags automatically.
function make()
try
	% This part is for OCTAVE
	if (exist ('OCTAVE_VERSION', 'builtin'))
		mex libsvmread.c
		mex libsvmwrite.c
		mex -I.. svmtrain.c ../svm.cpp svm_model_matlab.c
		mex -I.. svmpredict.c ../svm.cpp svm_model_matlab.c
	% This part is for MATLAB
	% Add -largeArrayDims on 64-bit machines of MATLAB
	else
		mex -largeArrayDims libsvmread.c
		mex -largeArrayDims libsvmwrite.c
		mex -I.. -largeArrayDims svmtrain.c ../svm.cpp svm_model_matlab.c
		mex -I.. -largeArrayDims svmpredict.c ../svm.cpp svm_model_matlab.c
	end
catch err
	% Report which file/line failed and point the user at the README.
	fprintf('Error: %s failed (line %d)\n', err.stack(1).file, err.stack(1).line);
	disp(err.message);
	fprintf('=> Please check README for detailed instructions.\n');
end
|
400
libsvm-3.36/matlab/svm_model_matlab.c
Normal file
400
libsvm-3.36/matlab/svm_model_matlab.c
Normal file
@@ -0,0 +1,400 @@
|
||||
#include <stdlib.h>
|
||||
#include <string.h>
|
||||
#include "svm.h"
|
||||
|
||||
#include "mex.h"
|
||||
|
||||
#ifdef MX_API_VER
|
||||
#if MX_API_VER < 0x07030000
|
||||
typedef int mwIndex;
|
||||
#endif
|
||||
#endif
|
||||
|
||||
/* Number of fields in the MATLAB model struct; must match field_names[]. */
#define NUM_OF_RETURN_FIELD 12

#define Malloc(type,n) (type *)malloc((n)*sizeof(type))

/* Field names of the MATLAB struct produced by model_to_matlab_structure
 * and consumed by matlab_matrix_to_model.  The order here defines the
 * field ids both functions iterate over. */
static const char *field_names[] = {
	"Parameters",
	"nr_class",
	"totalSV",
	"rho",
	"Label",
	"sv_indices",
	"ProbA",
	"ProbB",
	"Prob_density_marks",
	"nSV",
	"sv_coef",
	"SVs"
};
|
||||
|
||||
/* Convert a trained libsvm `model` into a MATLAB struct stored in
 * plhs[0].  The struct carries the NUM_OF_RETURN_FIELD fields listed in
 * field_names[], with SVs stored as a sparse matrix (one row per
 * support vector).  Returns NULL on success or a static error string.
 * `num_of_feature` is the training feature count; for precomputed
 * kernels it is overridden to 1 since each SV is a single column id. */
const char *model_to_matlab_structure(mxArray *plhs[], int num_of_feature, struct svm_model *model)
{
	int i, j, n;
	double *ptr;
	mxArray *return_model, **rhs;
	int out_id = 0;	/* index into rhs[], advanced once per field */

	rhs = (mxArray **)mxMalloc(sizeof(mxArray *)*NUM_OF_RETURN_FIELD);

	// Parameters: [svm_type; kernel_type; degree; gamma; coef0]
	rhs[out_id] = mxCreateDoubleMatrix(5, 1, mxREAL);
	ptr = mxGetPr(rhs[out_id]);
	ptr[0] = model->param.svm_type;
	ptr[1] = model->param.kernel_type;
	ptr[2] = model->param.degree;
	ptr[3] = model->param.gamma;
	ptr[4] = model->param.coef0;
	out_id++;

	// nr_class
	rhs[out_id] = mxCreateDoubleMatrix(1, 1, mxREAL);
	ptr = mxGetPr(rhs[out_id]);
	ptr[0] = model->nr_class;
	out_id++;

	// total SV
	rhs[out_id] = mxCreateDoubleMatrix(1, 1, mxREAL);
	ptr = mxGetPr(rhs[out_id]);
	ptr[0] = model->l;
	out_id++;

	// rho: one bias term per class pair
	n = model->nr_class*(model->nr_class-1)/2;
	rhs[out_id] = mxCreateDoubleMatrix(n, 1, mxREAL);
	ptr = mxGetPr(rhs[out_id]);
	for(i = 0; i < n; i++)
		ptr[i] = model->rho[i];
	out_id++;

	// Label (may be absent, e.g. for regression models -> empty matrix)
	if(model->label)
	{
		rhs[out_id] = mxCreateDoubleMatrix(model->nr_class, 1, mxREAL);
		ptr = mxGetPr(rhs[out_id]);
		for(i = 0; i < model->nr_class; i++)
			ptr[i] = model->label[i];
	}
	else
		rhs[out_id] = mxCreateDoubleMatrix(0, 0, mxREAL);
	out_id++;

	// sv_indices (may be absent -> empty matrix)
	if(model->sv_indices)
	{
		rhs[out_id] = mxCreateDoubleMatrix(model->l, 1, mxREAL);
		ptr = mxGetPr(rhs[out_id]);
		for(i = 0; i < model->l; i++)
			ptr[i] = model->sv_indices[i];
	}
	else
		rhs[out_id] = mxCreateDoubleMatrix(0, 0, mxREAL);
	out_id++;

	// probA: pairwise probability parameters (empty unless trained -b 1)
	if(model->probA != NULL)
	{
		rhs[out_id] = mxCreateDoubleMatrix(n, 1, mxREAL);
		ptr = mxGetPr(rhs[out_id]);
		for(i = 0; i < n; i++)
			ptr[i] = model->probA[i];
	}
	else
		rhs[out_id] = mxCreateDoubleMatrix(0, 0, mxREAL);
	out_id ++;

	// probB
	if(model->probB != NULL)
	{
		rhs[out_id] = mxCreateDoubleMatrix(n, 1, mxREAL);
		ptr = mxGetPr(rhs[out_id]);
		for(i = 0; i < n; i++)
			ptr[i] = model->probB[i];
	}
	else
		rhs[out_id] = mxCreateDoubleMatrix(0, 0, mxREAL);
	out_id++;

	// prob_density_marks: fixed-size (10) one-class probability marks
	if(model->prob_density_marks != NULL)
	{
		int nr_marks = 10;
		rhs[out_id] = mxCreateDoubleMatrix(nr_marks, 1, mxREAL);
		ptr = mxGetPr(rhs[out_id]);
		for(i = 0; i < nr_marks; i++)
			ptr[i] = model->prob_density_marks[i];
	}
	else
		rhs[out_id] = mxCreateDoubleMatrix(0, 0, mxREAL);
	out_id++;

	// nSV: number of support vectors per class
	if(model->nSV)
	{
		rhs[out_id] = mxCreateDoubleMatrix(model->nr_class, 1, mxREAL);
		ptr = mxGetPr(rhs[out_id]);
		for(i = 0; i < model->nr_class; i++)
			ptr[i] = model->nSV[i];
	}
	else
		rhs[out_id] = mxCreateDoubleMatrix(0, 0, mxREAL);
	out_id++;

	// sv_coef: l-by-(nr_class-1), copied column-major into the mxArray
	rhs[out_id] = mxCreateDoubleMatrix(model->l, model->nr_class-1, mxREAL);
	ptr = mxGetPr(rhs[out_id]);
	for(i = 0; i < model->nr_class-1; i++)
		for(j = 0; j < model->l; j++)
			ptr[(i*(model->l))+j] = model->sv_coef[i][j];
	out_id++;

	// SVs
	{
		int ir_index, nonzero_element;
		mwIndex *ir, *jc;
		mxArray *pprhs[1], *pplhs[1];

		if(model->param.kernel_type == PRECOMPUTED)
		{
			/* precomputed kernel: each SV is a single column id value */
			nonzero_element = model->l;
			num_of_feature = 1;
		}
		else
		{
			/* count nonzeros across all (-1)-terminated SV lists */
			nonzero_element = 0;
			for(i = 0; i < model->l; i++) {
				j = 0;
				while(model->SV[i][j].index != -1)
				{
					nonzero_element++;
					j++;
				}
			}
		}

		// SV in column, easier accessing
		rhs[out_id] = mxCreateSparse(num_of_feature, model->l, nonzero_element, mxREAL);
		ir = mxGetIr(rhs[out_id]);
		jc = mxGetJc(rhs[out_id]);
		ptr = mxGetPr(rhs[out_id]);
		jc[0] = ir_index = 0;
		for(i = 0;i < model->l; i++)
		{
			if(model->param.kernel_type == PRECOMPUTED)
			{
				// make a (1 x model->l) matrix
				ir[ir_index] = 0;
				ptr[ir_index] = model->SV[i][0].value;
				ir_index++;
				jc[i+1] = jc[i] + 1;
			}
			else
			{
				/* 1-based svm_node indices become 0-based row ids */
				int x_index = 0;
				while (model->SV[i][x_index].index != -1)
				{
					ir[ir_index] = model->SV[i][x_index].index - 1;
					ptr[ir_index] = model->SV[i][x_index].value;
					ir_index++, x_index++;
				}
				jc[i+1] = jc[i] + x_index;
			}
		}
		// transpose back to SV in row
		pprhs[0] = rhs[out_id];
		if(mexCallMATLAB(1, pplhs, 1, pprhs, "transpose"))
			return "cannot transpose SV matrix";
		rhs[out_id] = pplhs[0];
		out_id++;
	}

	/* Create a struct matrix contains NUM_OF_RETURN_FIELD fields */
	return_model = mxCreateStructMatrix(1, 1, NUM_OF_RETURN_FIELD, field_names);

	/* Fill struct matrix with input arguments */
	for(i = 0; i < NUM_OF_RETURN_FIELD; i++)
		mxSetField(return_model,0,field_names[i],mxDuplicateArray(rhs[i]));
	/* return */
	plhs[0] = return_model;
	mxFree(rhs);

	return NULL;
}
|
||||
|
||||
struct svm_model *matlab_matrix_to_model(const mxArray *matlab_struct, const char **msg)
|
||||
{
|
||||
int i, j, n, num_of_fields;
|
||||
double *ptr;
|
||||
int id = 0;
|
||||
struct svm_node *x_space;
|
||||
struct svm_model *model;
|
||||
mxArray **rhs;
|
||||
|
||||
num_of_fields = mxGetNumberOfFields(matlab_struct);
|
||||
if(num_of_fields != NUM_OF_RETURN_FIELD)
|
||||
{
|
||||
*msg = "number of return field is not correct";
|
||||
return NULL;
|
||||
}
|
||||
rhs = (mxArray **) mxMalloc(sizeof(mxArray *)*num_of_fields);
|
||||
|
||||
for(i=0;i<num_of_fields;i++)
|
||||
rhs[i] = mxGetFieldByNumber(matlab_struct, 0, i);
|
||||
|
||||
model = Malloc(struct svm_model, 1);
|
||||
model->rho = NULL;
|
||||
model->probA = NULL;
|
||||
model->probB = NULL;
|
||||
model->prob_density_marks = NULL;
|
||||
model->label = NULL;
|
||||
model->sv_indices = NULL;
|
||||
model->nSV = NULL;
|
||||
model->free_sv = 1; // XXX
|
||||
|
||||
ptr = mxGetPr(rhs[id]);
|
||||
model->param.svm_type = (int)ptr[0];
|
||||
model->param.kernel_type = (int)ptr[1];
|
||||
model->param.degree = (int)ptr[2];
|
||||
model->param.gamma = ptr[3];
|
||||
model->param.coef0 = ptr[4];
|
||||
id++;
|
||||
|
||||
ptr = mxGetPr(rhs[id]);
|
||||
model->nr_class = (int)ptr[0];
|
||||
id++;
|
||||
|
||||
ptr = mxGetPr(rhs[id]);
|
||||
model->l = (int)ptr[0];
|
||||
id++;
|
||||
|
||||
// rho
|
||||
n = model->nr_class * (model->nr_class-1)/2;
|
||||
model->rho = (double*) malloc(n*sizeof(double));
|
||||
ptr = mxGetPr(rhs[id]);
|
||||
for(i=0;i<n;i++)
|
||||
model->rho[i] = ptr[i];
|
||||
id++;
|
||||
|
||||
// label
|
||||
if(mxIsEmpty(rhs[id]) == 0)
|
||||
{
|
||||
model->label = (int*) malloc(model->nr_class*sizeof(int));
|
||||
ptr = mxGetPr(rhs[id]);
|
||||
for(i=0;i<model->nr_class;i++)
|
||||
model->label[i] = (int)ptr[i];
|
||||
}
|
||||
id++;
|
||||
|
||||
// sv_indices
|
||||
if(mxIsEmpty(rhs[id]) == 0)
|
||||
{
|
||||
model->sv_indices = (int*) malloc(model->l*sizeof(int));
|
||||
ptr = mxGetPr(rhs[id]);
|
||||
for(i=0;i<model->l;i++)
|
||||
model->sv_indices[i] = (int)ptr[i];
|
||||
}
|
||||
id++;
|
||||
|
||||
// probA
|
||||
if(mxIsEmpty(rhs[id]) == 0)
|
||||
{
|
||||
model->probA = (double*) malloc(n*sizeof(double));
|
||||
ptr = mxGetPr(rhs[id]);
|
||||
for(i=0;i<n;i++)
|
||||
model->probA[i] = ptr[i];
|
||||
}
|
||||
id++;
|
||||
|
||||
// probB
|
||||
if(mxIsEmpty(rhs[id]) == 0)
|
||||
{
|
||||
model->probB = (double*) malloc(n*sizeof(double));
|
||||
ptr = mxGetPr(rhs[id]);
|
||||
for(i=0;i<n;i++)
|
||||
model->probB[i] = ptr[i];
|
||||
}
|
||||
id++;
|
||||
|
||||
// prob_density_marks
|
||||
if(mxIsEmpty(rhs[id]) == 0)
|
||||
{
|
||||
int nr_marks = 10;
|
||||
model->prob_density_marks = (double*) malloc(nr_marks*sizeof(double));
|
||||
ptr = mxGetPr(rhs[id]);
|
||||
for(i=0;i<nr_marks;i++)
|
||||
model->prob_density_marks[i] = ptr[i];
|
||||
}
|
||||
id++;
|
||||
|
||||
// nSV
|
||||
if(mxIsEmpty(rhs[id]) == 0)
|
||||
{
|
||||
model->nSV = (int*) malloc(model->nr_class*sizeof(int));
|
||||
ptr = mxGetPr(rhs[id]);
|
||||
for(i=0;i<model->nr_class;i++)
|
||||
model->nSV[i] = (int)ptr[i];
|
||||
}
|
||||
id++;
|
||||
|
||||
// sv_coef
|
||||
ptr = mxGetPr(rhs[id]);
|
||||
model->sv_coef = (double**) malloc((model->nr_class-1)*sizeof(double));
|
||||
for( i=0 ; i< model->nr_class -1 ; i++ )
|
||||
model->sv_coef[i] = (double*) malloc((model->l)*sizeof(double));
|
||||
for(i = 0; i < model->nr_class - 1; i++)
|
||||
for(j = 0; j < model->l; j++)
|
||||
model->sv_coef[i][j] = ptr[i*(model->l)+j];
|
||||
id++;
|
||||
|
||||
// SV
|
||||
{
|
||||
int sr, elements;
|
||||
int num_samples;
|
||||
mwIndex *ir, *jc;
|
||||
mxArray *pprhs[1], *pplhs[1];
|
||||
|
||||
// transpose SV
|
||||
pprhs[0] = rhs[id];
|
||||
if(mexCallMATLAB(1, pplhs, 1, pprhs, "transpose"))
|
||||
{
|
||||
svm_free_and_destroy_model(&model);
|
||||
*msg = "cannot transpose SV matrix";
|
||||
return NULL;
|
||||
}
|
||||
rhs[id] = pplhs[0];
|
||||
|
||||
sr = (int)mxGetN(rhs[id]);
|
||||
|
||||
ptr = mxGetPr(rhs[id]);
|
||||
ir = mxGetIr(rhs[id]);
|
||||
jc = mxGetJc(rhs[id]);
|
||||
|
||||
num_samples = (int)mxGetNzmax(rhs[id]);
|
||||
|
||||
elements = num_samples + sr;
|
||||
|
||||
model->SV = (struct svm_node **) malloc(sr * sizeof(struct svm_node *));
|
||||
x_space = (struct svm_node *)malloc(elements * sizeof(struct svm_node));
|
||||
|
||||
// SV is in column
|
||||
for(i=0;i<sr;i++)
|
||||
{
|
||||
int low = (int)jc[i], high = (int)jc[i+1];
|
||||
int x_index = 0;
|
||||
model->SV[i] = &x_space[low+i];
|
||||
for(j=low;j<high;j++)
|
||||
{
|
||||
model->SV[i][x_index].index = (int)ir[j] + 1;
|
||||
model->SV[i][x_index].value = ptr[j];
|
||||
x_index++;
|
||||
}
|
||||
model->SV[i][x_index].index = -1;
|
||||
}
|
||||
|
||||
id++;
|
||||
}
|
||||
mxFree(rhs);
|
||||
|
||||
return model;
|
||||
}
|
2
libsvm-3.36/matlab/svm_model_matlab.h
Normal file
2
libsvm-3.36/matlab/svm_model_matlab.h
Normal file
@@ -0,0 +1,2 @@
|
||||
/* Convert a libsvm model into a MATLAB struct in plhs[0]; returns NULL
 * on success or a static error-message string. */
const char *model_to_matlab_structure(mxArray *plhs[], int num_of_feature, struct svm_model *model);
/* Rebuild a libsvm model from a MATLAB struct; returns NULL and sets
 * *error_message on failure. */
struct svm_model *matlab_matrix_to_model(const mxArray *matlab_struct, const char **error_message);
|
373
libsvm-3.36/matlab/svmpredict.c
Normal file
373
libsvm-3.36/matlab/svmpredict.c
Normal file
@@ -0,0 +1,373 @@
|
||||
#include <stdio.h>
|
||||
#include <stdlib.h>
|
||||
#include <string.h>
|
||||
#include "svm.h"
|
||||
|
||||
#include "mex.h"
|
||||
#include "svm_model_matlab.h"
|
||||
|
||||
#ifdef MX_API_VER
|
||||
#if MX_API_VER < 0x07030000
|
||||
typedef int mwIndex;
|
||||
#endif
|
||||
#endif
|
||||
|
||||
/* Maximum length of the option string accepted from MATLAB. */
#define CMD_LEN 2048

/* No-op printf replacement used to silence output in quiet mode (-q). */
int print_null(const char *s,...) {return 0;}
/* Output channel for informational messages; points at mexPrintf by
 * default and is switched to print_null when -q is given. */
int (*info)(const char *fmt,...) = &mexPrintf;
|
||||
|
||||
void read_sparse_instance(const mxArray *prhs, int index, struct svm_node *x)
|
||||
{
|
||||
int i, j, low, high;
|
||||
mwIndex *ir, *jc;
|
||||
double *samples;
|
||||
|
||||
ir = mxGetIr(prhs);
|
||||
jc = mxGetJc(prhs);
|
||||
samples = mxGetPr(prhs);
|
||||
|
||||
// each column is one instance
|
||||
j = 0;
|
||||
low = (int)jc[index], high = (int)jc[index+1];
|
||||
for(i=low;i<high;i++)
|
||||
{
|
||||
x[j].index = (int)ir[i] + 1;
|
||||
x[j].value = samples[i];
|
||||
j++;
|
||||
}
|
||||
x[j].index = -1;
|
||||
}
|
||||
|
||||
/* Assign an empty 0x0 double matrix to every requested output so the
 * MATLAB caller gets well-defined values after an error. */
static void fake_answer(int nlhs, mxArray *plhs[])
{
	int slot;
	for(slot = nlhs - 1; slot >= 0; --slot)
		plhs[slot] = mxCreateDoubleMatrix(0, 0, mxREAL);
}
|
||||
|
||||
/* Run `model` over the test data in prhs[1] with true labels prhs[0].
 * Populates up to three outputs:
 *   plhs[0] predicted labels (always),
 *   plhs[1] [accuracy; MSE; squared correlation] (when nlhs == 3),
 *   plhs[2] probability estimates (predict_probability != 0) or
 *           decision values otherwise (when nlhs == 3).
 * On argument errors, outputs are set to empty via fake_answer(). */
void predict(int nlhs, mxArray *plhs[], const mxArray *prhs[], struct svm_model *model, const int predict_probability)
{
	int label_vector_row_num, label_vector_col_num;
	int feature_number, testing_instance_number;
	int instance_index;
	double *ptr_instance, *ptr_label, *ptr_predict_label;
	double *ptr_prob_estimates, *ptr_dec_values, *ptr;
	struct svm_node *x;
	mxArray *pplhs[1]; // transposed instance sparse matrix
	mxArray *tplhs[3]; // temporary storage for plhs[]

	/* running accuracy / regression statistics */
	int correct = 0;
	int total = 0;
	double error = 0;
	double sump = 0, sumt = 0, sumpp = 0, sumtt = 0, sumpt = 0;

	int svm_type=svm_get_svm_type(model);
	int nr_class=svm_get_nr_class(model);
	double *prob_estimates=NULL;

	// prhs[1] = testing instance matrix
	feature_number = (int)mxGetN(prhs[1]);
	testing_instance_number = (int)mxGetM(prhs[1]);
	label_vector_row_num = (int)mxGetM(prhs[0]);
	label_vector_col_num = (int)mxGetN(prhs[0]);

	if(label_vector_row_num!=testing_instance_number)
	{
		mexPrintf("Length of label vector does not match # of instances.\n");
		fake_answer(nlhs, plhs);
		return;
	}
	if(label_vector_col_num!=1)
	{
		mexPrintf("label (1st argument) should be a vector (# of column is 1).\n");
		fake_answer(nlhs, plhs);
		return;
	}

	ptr_instance = mxGetPr(prhs[1]);
	ptr_label = mxGetPr(prhs[0]);

	// transpose instance matrix
	if(mxIsSparse(prhs[1]))
	{
		if(model->param.kernel_type == PRECOMPUTED)
		{
			// precomputed kernel requires dense matrix, so we make one
			mxArray *rhs[1], *lhs[1];
			rhs[0] = mxDuplicateArray(prhs[1]);
			if(mexCallMATLAB(1, lhs, 1, rhs, "full"))
			{
				mexPrintf("Error: cannot full testing instance matrix\n");
				fake_answer(nlhs, plhs);
				return;
			}
			ptr_instance = mxGetPr(lhs[0]);
			mxDestroyArray(rhs[0]);
		}
		else
		{
			/* transpose so each column of pplhs[0] is one instance */
			mxArray *pprhs[1];
			pprhs[0] = mxDuplicateArray(prhs[1]);
			if(mexCallMATLAB(1, pplhs, 1, pprhs, "transpose"))
			{
				mexPrintf("Error: cannot transpose testing instance matrix\n");
				fake_answer(nlhs, plhs);
				return;
			}
		}
	}

	if(predict_probability)
	{
		if(svm_type==NU_SVR || svm_type==EPSILON_SVR)
			info("Prob. model for test data: target value = predicted value + z,\nz: Laplace distribution e^(-|z|/sigma)/(2sigma),sigma=%g\n",svm_get_svr_probability(model));
		else
			prob_estimates = (double *) malloc(nr_class*sizeof(double));
	}

	/* size the output arrays before the prediction loop */
	tplhs[0] = mxCreateDoubleMatrix(testing_instance_number, 1, mxREAL);
	if(predict_probability)
	{
		// prob estimates are in plhs[2]
		if(svm_type==C_SVC || svm_type==NU_SVC || svm_type==ONE_CLASS)
		{
			// nr_class = 2 for ONE_CLASS
			tplhs[2] = mxCreateDoubleMatrix(testing_instance_number, nr_class, mxREAL);
		}
		else
			tplhs[2] = mxCreateDoubleMatrix(0, 0, mxREAL);
	}
	else
	{
		// decision values are in plhs[2]
		if(svm_type == ONE_CLASS ||
		   svm_type == EPSILON_SVR ||
		   svm_type == NU_SVR ||
		   nr_class == 1) // if only one class in training data, decision values are still returned.
			tplhs[2] = mxCreateDoubleMatrix(testing_instance_number, 1, mxREAL);
		else
			tplhs[2] = mxCreateDoubleMatrix(testing_instance_number, nr_class*(nr_class-1)/2, mxREAL);
	}

	ptr_predict_label = mxGetPr(tplhs[0]);
	/* both pointers alias tplhs[2]; only one is used per mode */
	ptr_prob_estimates = mxGetPr(tplhs[2]);
	ptr_dec_values = mxGetPr(tplhs[2]);
	x = (struct svm_node*)malloc((feature_number+1)*sizeof(struct svm_node) );
	for(instance_index=0;instance_index<testing_instance_number;instance_index++)
	{
		int i;
		double target_label, predict_label;

		target_label = ptr_label[instance_index];

		if(mxIsSparse(prhs[1]) && model->param.kernel_type != PRECOMPUTED) // prhs[1]^T is still sparse
			read_sparse_instance(pplhs[0], instance_index, x);
		else
		{
			/* dense path: gather one row from column-major storage */
			for(i=0;i<feature_number;i++)
			{
				x[i].index = i+1;
				x[i].value = ptr_instance[testing_instance_number*i+instance_index];
			}
			x[feature_number].index = -1;
		}

		if(predict_probability)
		{
			if(svm_type==C_SVC || svm_type==NU_SVC || svm_type==ONE_CLASS)
			{
				predict_label = svm_predict_probability(model, x, prob_estimates);
				ptr_predict_label[instance_index] = predict_label;
				for(i=0;i<nr_class;i++)
					ptr_prob_estimates[instance_index + i * testing_instance_number] = prob_estimates[i];
			} else {
				predict_label = svm_predict(model,x);
				ptr_predict_label[instance_index] = predict_label;
			}
		}
		else
		{
			if(svm_type == ONE_CLASS ||
			   svm_type == EPSILON_SVR ||
			   svm_type == NU_SVR)
			{
				double res;
				predict_label = svm_predict_values(model, x, &res);
				ptr_dec_values[instance_index] = res;
			}
			else
			{
				double *dec_values = (double *) malloc(sizeof(double) * nr_class*(nr_class-1)/2);
				predict_label = svm_predict_values(model, x, dec_values);
				if(nr_class == 1)
					ptr_dec_values[instance_index] = 1;
				else
					for(i=0;i<(nr_class*(nr_class-1))/2;i++)
						ptr_dec_values[instance_index + i * testing_instance_number] = dec_values[i];
				free(dec_values);
			}
			ptr_predict_label[instance_index] = predict_label;
		}

		if(predict_label == target_label)
			++correct;
		error += (predict_label-target_label)*(predict_label-target_label);
		sump += predict_label;
		sumt += target_label;
		sumpp += predict_label*predict_label;
		sumtt += target_label*target_label;
		sumpt += predict_label*target_label;
		++total;
	}
	if(svm_type==NU_SVR || svm_type==EPSILON_SVR)
	{
		info("Mean squared error = %g (regression)\n",error/total);
		info("Squared correlation coefficient = %g (regression)\n",
			((total*sumpt-sump*sumt)*(total*sumpt-sump*sumt))/
			((total*sumpp-sump*sump)*(total*sumtt-sumt*sumt))
			);
	}
	else
		info("Accuracy = %g%% (%d/%d) (classification)\n",
			(double)correct/total*100,correct,total);

	// return accuracy, mean squared error, squared correlation coefficient
	tplhs[1] = mxCreateDoubleMatrix(3, 1, mxREAL);
	ptr = mxGetPr(tplhs[1]);
	ptr[0] = (double)correct/total*100;
	ptr[1] = error/total;
	ptr[2] = ((total*sumpt-sump*sumt)*(total*sumpt-sump*sumt))/
		((total*sumpp-sump*sump)*(total*sumtt-sumt*sumt));

	free(x);
	if(prob_estimates != NULL)
		free(prob_estimates);

	/* Intentional fallthrough: nlhs == 3 copies all three outputs,
	 * nlhs == 0 or 1 copies only the predicted labels. */
	switch(nlhs)
	{
		case 3:
			plhs[2] = tplhs[2];
			plhs[1] = tplhs[1];
		case 1:
		case 0:
			plhs[0] = tplhs[0];
	}
}
|
||||
|
||||
/* Print the svmpredict usage text to the MATLAB console. */
void exit_with_help()
{
	/* usage lines */
	mexPrintf(
	"Usage: [predicted_label, accuracy, decision_values/prob_estimates] = svmpredict(testing_label_vector, testing_instance_matrix, model, 'libsvm_options')\n"
	" [predicted_label] = svmpredict(testing_label_vector, testing_instance_matrix, model, 'libsvm_options')\n"
	);
	/* parameter and return-value descriptions */
	mexPrintf(
	"Parameters:\n"
	" model: SVM model structure from svmtrain.\n"
	" libsvm_options:\n"
	" -b probability_estimates: whether to predict probability estimates, 0 or 1 (default 0); one-class SVM not supported yet\n"
	" -q : quiet mode (no outputs)\n"
	"Returns:\n"
	" predicted_label: SVM prediction output vector.\n"
	" accuracy: a vector with accuracy, mean squared error, squared correlation coefficient.\n"
	" prob_estimates: If selected, probability estimate vector.\n"
	);
}
|
||||
|
||||
/* MATLAB entry point for svmpredict.  Expects
 *   prhs[0]: test label vector, prhs[1]: test instance matrix,
 *   prhs[2]: model struct, prhs[3] (optional): option string.
 * Parses '-b' / '-q', rebuilds the C model from the struct, runs
 * predict(), and destroys the model before returning. */
void mexFunction( int nlhs, mxArray *plhs[],
		int nrhs, const mxArray *prhs[] )
{
	int prob_estimate_flag = 0;
	struct svm_model *model;
	info = &mexPrintf;

	/* accept 3 or 4 inputs and 0, 1 or 3 outputs */
	if(nlhs == 2 || nlhs > 3 || nrhs > 4 || nrhs < 3)
	{
		exit_with_help();
		fake_answer(nlhs, plhs);
		return;
	}

	if(!mxIsDouble(prhs[0]) || !mxIsDouble(prhs[1])) {
		mexPrintf("Error: label vector and instance matrix must be double\n");
		fake_answer(nlhs, plhs);
		return;
	}

	if(mxIsStruct(prhs[2]))
	{
		const char *error_msg;

		// parse options
		if(nrhs==4)
		{
			int i, argc = 1;
			char cmd[CMD_LEN], *argv[CMD_LEN/2];

			// put options in argv[]
			mxGetString(prhs[3], cmd, mxGetN(prhs[3]) + 1);
			if((argv[argc] = strtok(cmd, " ")) != NULL)
				while((argv[++argc] = strtok(NULL, " ")) != NULL)
					;

			for(i=1;i<argc;i++)
			{
				if(argv[i][0] != '-') break;
				/* every option except -q consumes a value argument */
				if((++i>=argc) && argv[i-1][1] != 'q')
				{
					exit_with_help();
					fake_answer(nlhs, plhs);
					return;
				}
				switch(argv[i-1][1])
				{
					case 'b':
						prob_estimate_flag = atoi(argv[i]);
						break;
					case 'q':
						i--;	/* -q takes no value: undo the consume above */
						info = &print_null;
						break;
					default:
						mexPrintf("Unknown option: -%c\n", argv[i-1][1]);
						exit_with_help();
						fake_answer(nlhs, plhs);
						return;
				}
			}
		}

		model = matlab_matrix_to_model(prhs[2], &error_msg);
		if (model == NULL)
		{
			mexPrintf("Error: can't read model: %s\n", error_msg);
			fake_answer(nlhs, plhs);
			return;
		}

		if(prob_estimate_flag)
		{
			/* -b 1 requires a model trained with probability info */
			if(svm_check_probability_model(model)==0)
			{
				mexPrintf("Model does not support probabiliy estimates\n");
				fake_answer(nlhs, plhs);
				svm_free_and_destroy_model(&model);
				return;
			}
		}
		else
		{
			if(svm_check_probability_model(model)!=0)
				info("Model supports probability estimates, but disabled in predicton.\n");
		}

		predict(nlhs, plhs, prhs, model, prob_estimate_flag);
		// destroy model
		svm_free_and_destroy_model(&model);
	}
	else
	{
		mexPrintf("model file should be a struct array\n");
		fake_answer(nlhs, plhs);
	}

	return;
}
|
495
libsvm-3.36/matlab/svmtrain.c
Normal file
495
libsvm-3.36/matlab/svmtrain.c
Normal file
@@ -0,0 +1,495 @@
|
||||
#include <stdio.h>
|
||||
#include <stdlib.h>
|
||||
#include <string.h>
|
||||
#include <ctype.h>
|
||||
#include "svm.h"
|
||||
|
||||
#include "mex.h"
|
||||
#include "svm_model_matlab.h"
|
||||
|
||||
#ifdef MX_API_VER
|
||||
#if MX_API_VER < 0x07030000
|
||||
typedef int mwIndex;
|
||||
#endif
|
||||
#endif
|
||||
|
||||
#define CMD_LEN 2048
#define Malloc(type,n) (type *)malloc((n)*sizeof(type))

// Discard libsvm output; installed as the print handler when -q (quiet) is given.
void print_null(const char *s) {}
// Route libsvm's textual output to the MATLAB console instead of stdout.
void print_string_matlab(const char *s) {mexPrintf(s);}
|
||||
|
||||
// Print the svmtrain usage/option summary to the MATLAB console.
// Called on any argument error before returning empty outputs.
void exit_with_help()
{
	mexPrintf(
	"Usage: model = svmtrain(training_label_vector, training_instance_matrix, 'libsvm_options');\n"
	"libsvm_options:\n"
	"-s svm_type : set type of SVM (default 0)\n"
	" 0 -- C-SVC (multi-class classification)\n"
	" 1 -- nu-SVC (multi-class classification)\n"
	" 2 -- one-class SVM\n"
	" 3 -- epsilon-SVR (regression)\n"
	" 4 -- nu-SVR (regression)\n"
	"-t kernel_type : set type of kernel function (default 2)\n"
	" 0 -- linear: u'*v\n"
	" 1 -- polynomial: (gamma*u'*v + coef0)^degree\n"
	" 2 -- radial basis function: exp(-gamma*|u-v|^2)\n"
	" 3 -- sigmoid: tanh(gamma*u'*v + coef0)\n"
	" 4 -- precomputed kernel (kernel values in training_instance_matrix)\n"
	"-d degree : set degree in kernel function (default 3)\n"
	"-g gamma : set gamma in kernel function (default 1/num_features)\n"
	"-r coef0 : set coef0 in kernel function (default 0)\n"
	"-c cost : set the parameter C of C-SVC, epsilon-SVR, and nu-SVR (default 1)\n"
	"-n nu : set the parameter nu of nu-SVC, one-class SVM, and nu-SVR (default 0.5)\n"
	"-p epsilon : set the epsilon in loss function of epsilon-SVR (default 0.1)\n"
	"-m cachesize : set cache memory size in MB (default 100)\n"
	"-e epsilon : set tolerance of termination criterion (default 0.001)\n"
	"-h shrinking : whether to use the shrinking heuristics, 0 or 1 (default 1)\n"
	"-b probability_estimates : whether to train a SVC or SVR model for probability estimates, 0 or 1 (default 0)\n"
	"-wi weight : set the parameter C of class i to weight*C, for C-SVC (default 1)\n"
	"-v n: n-fold cross validation mode\n"
	"-q : quiet mode (no outputs)\n"
	);
}
|
||||
|
||||
// svm arguments (file-scope state shared by the helpers below)
struct svm_parameter param;	// set by parse_command_line
struct svm_problem prob;	// set by read_problem
struct svm_model *model;	// trained model (non cross-validation path)
struct svm_node *x_space;	// backing storage for all feature nodes referenced by prob.x
int cross_validation;		// nonzero when the -v option was given
int nr_fold;			// number of folds for -v
|
||||
|
||||
|
||||
// Run nr_fold-fold cross validation on the global problem `prob` with the
// global parameters `param` (both must already be populated).
// Prints the results to the MATLAB console and returns:
//   - mean squared error for regression (EPSILON_SVR / NU_SVR), or
//   - accuracy in percent otherwise.
double do_cross_validation()
{
	int i;
	int total_correct = 0;
	double total_error = 0;
	double sumv = 0, sumy = 0, sumvv = 0, sumyy = 0, sumvy = 0;
	double *target = Malloc(double,prob.l);	// per-instance CV predictions
	double retval = 0.0;

	svm_cross_validation(&prob,&param,nr_fold,target);
	if(param.svm_type == EPSILON_SVR ||
	   param.svm_type == NU_SVR)
	{
		// accumulate sums for MSE and the squared correlation coefficient
		for(i=0;i<prob.l;i++)
		{
			double y = prob.y[i];
			double v = target[i];
			total_error += (v-y)*(v-y);
			sumv += v;
			sumy += y;
			sumvv += v*v;
			sumyy += y*y;
			sumvy += v*y;
		}
		mexPrintf("Cross Validation Mean squared error = %g\n",total_error/prob.l);
		mexPrintf("Cross Validation Squared correlation coefficient = %g\n",
			((prob.l*sumvy-sumv*sumy)*(prob.l*sumvy-sumv*sumy))/
			((prob.l*sumvv-sumv*sumv)*(prob.l*sumyy-sumy*sumy))
			);
		retval = total_error/prob.l;
	}
	else
	{
		// classification / one-class: count exact label matches
		for(i=0;i<prob.l;i++)
			if(target[i] == prob.y[i])
				++total_correct;
		mexPrintf("Cross Validation Accuracy = %g%%\n",100.0*total_correct/prob.l);
		retval = 100.0*total_correct/prob.l;
	}
	free(target);
	return retval;
}
|
||||
|
||||
// nrhs should be 3
// Parse the option string in prhs[2] into the global `param`,
// `cross_validation` and `nr_fold` settings.
// model_file_name is unused by the MATLAB interface and may be NULL.
// Returns 0 on success, 1 on any parsing error (the caller prints usage).
int parse_command_line(int nrhs, const mxArray *prhs[], char *model_file_name)
{
	int i, argc = 1;
	char cmd[CMD_LEN];
	char *argv[CMD_LEN/2];
	void (*print_func)(const char *) = print_string_matlab;	// default printing to matlab display

	// default values
	param.svm_type = C_SVC;
	param.kernel_type = RBF;
	param.degree = 3;
	param.gamma = 0;	// 1/num_features
	param.coef0 = 0;
	param.nu = 0.5;
	param.cache_size = 100;
	param.C = 1;
	param.eps = 1e-3;
	param.p = 0.1;
	param.shrinking = 1;
	param.probability = 0;
	param.nr_weight = 0;
	param.weight_label = NULL;
	param.weight = NULL;
	cross_validation = 0;

	if(nrhs <= 1)
		return 1;

	if(nrhs > 2)
	{
		// put options in argv[]; tokens are space-separated
		mxGetString(prhs[2], cmd, mxGetN(prhs[2]) + 1);
		if((argv[argc] = strtok(cmd, " ")) != NULL)
			while((argv[++argc] = strtok(NULL, " ")) != NULL)
				;
	}

	// parse options: each iteration consumes "-X value" as a pair,
	// except -q which takes no value (i is stepped back in its case)
	for(i=1;i<argc;i++)
	{
		if(argv[i][0] != '-') break;
		++i;
		if(i>=argc && argv[i-1][1] != 'q') // since option -q has no parameter
			return 1;
		switch(argv[i-1][1])
		{
			case 's':
				param.svm_type = atoi(argv[i]);
				break;
			case 't':
				param.kernel_type = atoi(argv[i]);
				break;
			case 'd':
				param.degree = atoi(argv[i]);
				break;
			case 'g':
				param.gamma = atof(argv[i]);
				break;
			case 'r':
				param.coef0 = atof(argv[i]);
				break;
			case 'n':
				param.nu = atof(argv[i]);
				break;
			case 'm':
				param.cache_size = atof(argv[i]);
				break;
			case 'c':
				param.C = atof(argv[i]);
				break;
			case 'e':
				param.eps = atof(argv[i]);
				break;
			case 'p':
				param.p = atof(argv[i]);
				break;
			case 'h':
				param.shrinking = atoi(argv[i]);
				break;
			case 'b':
				param.probability = atoi(argv[i]);
				break;
			case 'q':
				// -q has no argument: silence libsvm and undo the ++i above
				print_func = &print_null;
				i--;
				break;
			case 'v':
				cross_validation = 1;
				nr_fold = atoi(argv[i]);
				if(nr_fold < 2)
				{
					mexPrintf("n-fold cross validation: n must >= 2\n");
					return 1;
				}
				break;
			case 'w':
				// -wi weight: the class label i is embedded in the option itself
				++param.nr_weight;
				param.weight_label = (int *)realloc(param.weight_label,sizeof(int)*param.nr_weight);
				param.weight = (double *)realloc(param.weight,sizeof(double)*param.nr_weight);
				param.weight_label[param.nr_weight-1] = atoi(&argv[i-1][2]);
				param.weight[param.nr_weight-1] = atof(argv[i]);
				break;
			default:
				mexPrintf("Unknown option -%c\n", argv[i-1][1]);
				return 1;
		}
	}

	svm_set_print_string_function(print_func);

	return 0;
}
|
||||
|
||||
// read in a problem (in svmlight format)
// Convert a dense MATLAB label vector / instance matrix (one row per
// instance) into the global `prob` and `x_space` structures.
// Returns 0 on success, -1 on error (message already printed).
int read_problem_dense(const mxArray *label_vec, const mxArray *instance_mat)
{
	// using size_t due to the output type of matlab functions
	size_t i, j, k, l;
	size_t elements, max_index, sc, label_vector_row_num;
	double *samples, *labels;

	prob.x = NULL;
	prob.y = NULL;
	x_space = NULL;

	labels = mxGetPr(label_vec);
	samples = mxGetPr(instance_mat);
	sc = mxGetN(instance_mat);	// number of features (columns)

	elements = 0;
	// number of instances
	l = mxGetM(instance_mat);
	label_vector_row_num = mxGetM(label_vec);
	prob.l = (int)l;

	if(label_vector_row_num!=l)
	{
		mexPrintf("Length of label vector does not match # of instances.\n");
		return -1;
	}

	// Count the svm_node slots needed: a precomputed kernel stores every
	// entry plus one terminator per row; otherwise only nonzeros are kept.
	if(param.kernel_type == PRECOMPUTED)
		elements = l * (sc + 1);
	else
	{
		for(i = 0; i < l; i++)
		{
			for(k = 0; k < sc; k++)
				if(samples[k * l + i] != 0)	// MATLAB arrays are column-major
					elements++;
			// count the '-1' element
			elements++;
		}
	}

	prob.y = Malloc(double,l);
	prob.x = Malloc(struct svm_node *,l);
	x_space = Malloc(struct svm_node, elements);

	max_index = sc;
	j = 0;
	for(i = 0; i < l; i++)
	{
		prob.x[i] = &x_space[j];
		prob.y[i] = labels[i];

		for(k = 0; k < sc; k++)
		{
			if(param.kernel_type == PRECOMPUTED || samples[k * l + i] != 0)
			{
				x_space[j].index = (int)k + 1;	// libsvm feature indices are 1-based
				x_space[j].value = samples[k * l + i];
				j++;
			}
		}
		x_space[j++].index = -1;	// end-of-instance marker
	}

	if(param.gamma == 0 && max_index > 0)
		param.gamma = (double)(1.0/max_index);

	// For a precomputed kernel the first entry of each row must be a valid
	// sample serial number in [1, max_index].
	if(param.kernel_type == PRECOMPUTED)
		for(i=0;i<l;i++)
		{
			if((int)prob.x[i][0].value <= 0 || (int)prob.x[i][0].value > (int)max_index)
			{
				mexPrintf("Wrong input format: sample_serial_number out of range\n");
				return -1;
			}
		}

	return 0;
}
|
||||
|
||||
// Convert a sparse MATLAB instance matrix into the global `prob` and
// `x_space` structures.  The matrix is transposed first so that each
// compressed column of the transposed matrix is one training instance.
// Returns 0 on success, -1 on error (message already printed).
int read_problem_sparse(const mxArray *label_vec, const mxArray *instance_mat)
{
	mwIndex *ir, *jc, low, high, k;
	// using size_t due to the output type of matlab functions
	size_t i, j, l, elements, max_index, label_vector_row_num;
	mwSize num_samples;
	double *samples, *labels;
	mxArray *instance_mat_col; // transposed instance sparse matrix

	prob.x = NULL;
	prob.y = NULL;
	x_space = NULL;

	// transpose instance matrix
	{
		mxArray *prhs[1], *plhs[1];
		prhs[0] = mxDuplicateArray(instance_mat);
		if(mexCallMATLAB(1, plhs, 1, prhs, "transpose"))
		{
			mexPrintf("Error: cannot transpose training instance matrix\n");
			return -1;
		}
		instance_mat_col = plhs[0];
		mxDestroyArray(prhs[0]);
	}

	// each column is one instance
	labels = mxGetPr(label_vec);
	samples = mxGetPr(instance_mat_col);
	ir = mxGetIr(instance_mat_col);	// row indices of the nonzeros
	jc = mxGetJc(instance_mat_col);	// per-column start offsets into ir/samples

	num_samples = mxGetNzmax(instance_mat_col);

	// number of instances
	l = mxGetN(instance_mat_col);
	label_vector_row_num = mxGetM(label_vec);
	prob.l = (int) l;

	if(label_vector_row_num!=l)
	{
		mexPrintf("Length of label vector does not match # of instances.\n");
		return -1;
	}

	// one svm_node per stored entry plus one '-1' terminator per instance
	elements = num_samples + l;
	max_index = mxGetM(instance_mat_col);

	prob.y = Malloc(double,l);
	prob.x = Malloc(struct svm_node *,l);
	x_space = Malloc(struct svm_node, elements);

	j = 0;
	for(i=0;i<l;i++)
	{
		prob.x[i] = &x_space[j];
		prob.y[i] = labels[i];
		low = jc[i], high = jc[i+1];
		for(k=low;k<high;k++)
		{
			x_space[j].index = (int)ir[k] + 1;	// libsvm feature indices are 1-based
			x_space[j].value = samples[k];
			j++;
		}
		x_space[j++].index = -1;	// end-of-instance marker
	}

	if(param.gamma == 0 && max_index > 0)
		param.gamma = (double)(1.0/max_index);

	return 0;
}
|
||||
|
||||
// Fill every requested left-hand-side slot with an empty 0x0 double
// matrix so MATLAB receives well-defined outputs after an error.
static void fake_answer(int nlhs, mxArray *plhs[])
{
	int slot;
	for(slot = 0; slot < nlhs; slot++)
		plhs[slot] = mxCreateDoubleMatrix(0, 0, mxREAL);
}
|
||||
|
||||
// Interface function of matlab
// now assume prhs[0]: label prhs[1]: features
//
// MATLAB entry point:
//   model = svmtrain(label_vector, instance_matrix [, 'libsvm_options'])
// On any error a usage/diagnostic message is printed, already-allocated
// state is released, and empty matrices are returned via fake_answer().
void mexFunction( int nlhs, mxArray *plhs[],
		int nrhs, const mxArray *prhs[] )
{
	const char *error_msg;

	// fix random seed to have same results for each run
	// (for cross validation and probability estimation)
	srand(1);

	if(nlhs > 1)
	{
		exit_with_help();
		fake_answer(nlhs, plhs);
		return;
	}

	// Transform the input Matrix to libsvm format
	if(nrhs > 1 && nrhs < 4)
	{
		int err;

		if(!mxIsDouble(prhs[0]) || !mxIsDouble(prhs[1]))
		{
			mexPrintf("Error: label vector and instance matrix must be double\n");
			fake_answer(nlhs, plhs);
			return;
		}

		if(mxIsSparse(prhs[0]))
		{
			mexPrintf("Error: label vector should not be in sparse format\n");
			fake_answer(nlhs, plhs);
			return;
		}

		if(parse_command_line(nrhs, prhs, NULL))
		{
			exit_with_help();
			svm_destroy_param(&param);
			fake_answer(nlhs, plhs);
			return;
		}

		if(mxIsSparse(prhs[1]))
		{
			if(param.kernel_type == PRECOMPUTED)
			{
				// precomputed kernel requires dense matrix, so we make one
				mxArray *rhs[1], *lhs[1];

				rhs[0] = mxDuplicateArray(prhs[1]);
				if(mexCallMATLAB(1, lhs, 1, rhs, "full"))
				{
					mexPrintf("Error: cannot generate a full training instance matrix\n");
					svm_destroy_param(&param);
					fake_answer(nlhs, plhs);
					return;
				}
				err = read_problem_dense(prhs[0], lhs[0]);
				mxDestroyArray(lhs[0]);
				mxDestroyArray(rhs[0]);
			}
			else
				err = read_problem_sparse(prhs[0], prhs[1]);
		}
		else
			err = read_problem_dense(prhs[0], prhs[1]);

		// svmtrain's original code
		error_msg = svm_check_parameter(&prob, &param);

		if(err || error_msg)
		{
			if (error_msg != NULL)
				mexPrintf("Error: %s\n", error_msg);
			svm_destroy_param(&param);
			free(prob.y);
			free(prob.x);
			free(x_space);
			fake_answer(nlhs, plhs);
			return;
		}

		if(cross_validation)
		{
			// -v given: return the scalar CV result instead of a model struct
			double *ptr;
			plhs[0] = mxCreateDoubleMatrix(1, 1, mxREAL);
			ptr = mxGetPr(plhs[0]);
			ptr[0] = do_cross_validation();
		}
		else
		{
			int nr_feat = (int)mxGetN(prhs[1]);
			const char *error_msg;	// shadows outer error_msg intentionally
			model = svm_train(&prob, &param);
			error_msg = model_to_matlab_structure(plhs, nr_feat, model);
			if(error_msg)
				mexPrintf("Error: can't convert libsvm model to matrix structure: %s\n", error_msg);
			svm_free_and_destroy_model(&model);
		}
		svm_destroy_param(&param);
		free(prob.y);
		free(prob.x);
		free(x_space);
	}
	else
	{
		exit_with_help();
		fake_answer(nlhs, plhs);
		return;
	}
}
|
2
libsvm-3.36/python/MANIFEST.in
Normal file
2
libsvm-3.36/python/MANIFEST.in
Normal file
@@ -0,0 +1,2 @@
|
||||
include cpp-source/*
|
||||
include cpp-source/*/*
|
4
libsvm-3.36/python/Makefile
Normal file
4
libsvm-3.36/python/Makefile
Normal file
@@ -0,0 +1,4 @@
|
||||
# Convenience wrapper for the Python interface: build the LIBSVM shared
# library from the parent directory (where the real Makefile lives).
# Neither target produces a file named after itself, so both are phony.
.PHONY: all lib

all: lib

lib:
	$(MAKE) -C .. lib
|
511
libsvm-3.36/python/README
Normal file
511
libsvm-3.36/python/README
Normal file
@@ -0,0 +1,511 @@
|
||||
----------------------------------
|
||||
--- Python interface of LIBSVM ---
|
||||
----------------------------------
|
||||
|
||||
Table of Contents
|
||||
=================
|
||||
|
||||
- Introduction
|
||||
- Installation via PyPI
|
||||
- Installation via Sources
|
||||
- Quick Start
|
||||
- Quick Start with Scipy
|
||||
- Design Description
|
||||
- Data Structures
|
||||
- Utility Functions
|
||||
- Additional Information
|
||||
|
||||
Introduction
|
||||
============
|
||||
|
||||
Python (http://www.python.org/) is a programming language suitable for rapid
|
||||
development. This tool provides a simple Python interface to LIBSVM, a library
|
||||
for support vector machines (http://www.csie.ntu.edu.tw/~cjlin/libsvm). The
|
||||
interface is very easy to use as the usage is the same as that of LIBSVM. The
|
||||
interface is developed with the built-in Python library "ctypes."
|
||||
|
||||
Installation via PyPI
|
||||
=====================
|
||||
|
||||
To install the interface from PyPI, execute the following command:
|
||||
|
||||
> pip install -U libsvm-official
|
||||
|
||||
Installation via Sources
|
||||
========================
|
||||
|
||||
Alternatively, you may install the interface from sources by
|
||||
generating the LIBSVM shared library.
|
||||
|
||||
Depending on your use cases, you can choose between local-directory
|
||||
and system-wide installation.
|
||||
|
||||
- Local-directory installation:
|
||||
|
||||
On Unix systems, type
|
||||
|
||||
> make
|
||||
|
||||
This generates a .so file in the LIBSVM main directory and you
|
||||
can run the interface in the current python directory.
|
||||
|
||||
For Windows, the shared library libsvm.dll is ready in the
|
||||
directory `..\windows' and you can directly run the interface in
|
||||
the current python directory. You can copy libsvm.dll to the
|
||||
system directory (e.g., `C:\WINDOWS\system32\') to make it
|
||||
system-widely available. To regenerate libsvm.dll, please
|
||||
follow the instruction of building Windows binaries in LIBSVM
|
||||
README.
|
||||
|
||||
- System-wide installation:
|
||||
|
||||
Type
|
||||
|
||||
> pip install -e .
|
||||
|
||||
or
|
||||
|
||||
> pip install --user -e .
|
||||
|
||||
The option --user would install the package in the home directory
|
||||
instead of the system directory, and thus does not require the
|
||||
root privilege.
|
||||
|
||||
Please note that you must keep the sources after the installation.
|
||||
|
||||
For Windows, to run the above command, Microsoft Visual C++ and
|
||||
other tools are needed.
|
||||
|
||||
In addition, DON'T use the following FAILED commands
|
||||
|
||||
> python setup.py install (failed to run at the python directory)
|
||||
> pip install .
|
||||
|
||||
Quick Start
|
||||
===========
|
||||
|
||||
"Quick Start with Scipy" is in the next section.
|
||||
|
||||
There are two levels of usage. The high-level one uses utility
|
||||
functions in svmutil.py and commonutil.py (shared with LIBLINEAR and
|
||||
imported by svmutil.py). The usage is the same as the LIBSVM MATLAB
|
||||
interface.
|
||||
|
||||
>>> from libsvm.svmutil import *
|
||||
# Read data in LIBSVM format
|
||||
>>> y, x = svm_read_problem('../heart_scale')
|
||||
>>> m = svm_train(y[:200], x[:200], '-c 4')
|
||||
>>> p_label, p_acc, p_val = svm_predict(y[200:], x[200:], m)
|
||||
|
||||
# Construct problem in python format
|
||||
# Dense data
|
||||
>>> y, x = [1,-1], [[1,0,1], [-1,0,-1]]
|
||||
# Sparse data
|
||||
>>> y, x = [1,-1], [{1:1, 3:1}, {1:-1,3:-1}]
|
||||
>>> prob = svm_problem(y, x)
|
||||
>>> param = svm_parameter('-t 0 -c 4 -b 1')
|
||||
>>> m = svm_train(prob, param)
|
||||
|
||||
# Precomputed kernel data (-t 4)
|
||||
# Dense data
|
||||
>>> y, x = [1,-1], [[1, 2, -2], [2, -2, 2]]
|
||||
# Sparse data
|
||||
>>> y, x = [1,-1], [{0:1, 1:2, 2:-2}, {0:2, 1:-2, 2:2}]
|
||||
# isKernel=True must be set for precomputed kernel
|
||||
>>> prob = svm_problem(y, x, isKernel=True)
|
||||
>>> param = svm_parameter('-t 4 -c 4 -b 1')
|
||||
>>> m = svm_train(prob, param)
|
||||
# For the format of precomputed kernel, please read LIBSVM README.
|
||||
|
||||
|
||||
# Other utility functions
|
||||
>>> svm_save_model('heart_scale.model', m)
|
||||
>>> m = svm_load_model('heart_scale.model')
|
||||
>>> p_label, p_acc, p_val = svm_predict(y, x, m, '-b 1')
|
||||
>>> ACC, MSE, SCC = evaluations(y, p_label)
|
||||
|
||||
# Getting online help
|
||||
>>> help(svm_train)
|
||||
|
||||
The low-level use directly calls C interfaces imported by svm.py. Note that
|
||||
all arguments and return values are in ctypes format. You need to handle them
|
||||
carefully.
|
||||
|
||||
>>> from libsvm.svm import *
|
||||
>>> prob = svm_problem([1,-1], [{1:1, 3:1}, {1:-1,3:-1}])
|
||||
>>> param = svm_parameter('-c 4')
|
||||
>>> m = libsvm.svm_train(prob, param) # m is a ctype pointer to an svm_model
|
||||
# Convert a Python-format instance to svm_nodearray, a ctypes structure
|
||||
>>> x0, max_idx = gen_svm_nodearray({1:1, 3:1})
|
||||
>>> label = libsvm.svm_predict(m, x0)
|
||||
|
||||
Quick Start with Scipy
|
||||
======================
|
||||
|
||||
Make sure you have Scipy installed to proceed in this section.
|
||||
If numba (http://numba.pydata.org) is installed, some operations will be much faster.
|
||||
|
||||
There are two levels of usage. The high-level one uses utility functions
|
||||
in svmutil.py and the usage is the same as the LIBSVM MATLAB interface.
|
||||
|
||||
>>> import numpy as np
|
||||
>>> import scipy
|
||||
>>> from libsvm.svmutil import *
|
||||
# Read data in LIBSVM format
|
||||
>>> y, x = svm_read_problem('../heart_scale', return_scipy = True) # y: ndarray, x: csr_matrix
|
||||
>>> m = svm_train(y[:200], x[:200, :], '-c 4')
|
||||
>>> p_label, p_acc, p_val = svm_predict(y[200:], x[200:, :], m)
|
||||
|
||||
# Construct problem in Scipy format
|
||||
# Dense data: numpy ndarray
|
||||
>>> y, x = np.asarray([1,-1]), np.asarray([[1,0,1], [-1,0,-1]])
|
||||
# Sparse data: scipy csr_matrix((data, (row_ind, col_ind)))
|
||||
>>> y, x = np.asarray([1,-1]), scipy.sparse.csr_matrix(([1, 1, -1, -1], ([0, 0, 1, 1], [0, 2, 0, 2])))
|
||||
>>> prob = svm_problem(y, x)
|
||||
>>> param = svm_parameter('-t 0 -c 4 -b 1')
|
||||
>>> m = svm_train(prob, param)
|
||||
|
||||
# Precomputed kernel data (-t 4)
|
||||
# Dense data: numpy ndarray
|
||||
>>> y, x = np.asarray([1,-1]), np.asarray([[1,2,-2], [2,-2,2]])
|
||||
# Sparse data: scipy csr_matrix((data, (row_ind, col_ind)))
|
||||
>>> y, x = np.asarray([1,-1]), scipy.sparse.csr_matrix(([1, 2, -2, 2, -2, 2], ([0, 0, 0, 1, 1, 1], [0, 1, 2, 0, 1, 2])))
|
||||
# isKernel=True must be set for precomputed kernel
|
||||
>>> prob = svm_problem(y, x, isKernel=True)
|
||||
>>> param = svm_parameter('-t 4 -c 4 -b 1')
|
||||
>>> m = svm_train(prob, param)
|
||||
# For the format of precomputed kernel, please read LIBSVM README.
|
||||
|
||||
# Apply data scaling in Scipy format
|
||||
>>> y, x = svm_read_problem('../heart_scale', return_scipy=True)
|
||||
>>> scale_param = csr_find_scale_param(x, lower=0)
|
||||
>>> scaled_x = csr_scale(x, scale_param)
|
||||
|
||||
# Other utility functions
|
||||
>>> svm_save_model('heart_scale.model', m)
|
||||
>>> m = svm_load_model('heart_scale.model')
|
||||
>>> p_label, p_acc, p_val = svm_predict(y, x, m, '-b 1')
|
||||
>>> ACC, MSE, SCC = evaluations(y, p_label)
|
||||
|
||||
# Getting online help
|
||||
>>> help(svm_train)
|
||||
|
||||
The low-level use directly calls C interfaces imported by svm.py. Note that
|
||||
all arguments and return values are in ctypes format. You need to handle them
|
||||
carefully.
|
||||
|
||||
>>> from libsvm.svm import *
|
||||
>>> prob = svm_problem(np.asarray([1,-1]), scipy.sparse.csr_matrix(([1, 1, -1, -1], ([0, 0, 1, 1], [0, 2, 0, 2]))))
|
||||
>>> param = svm_parameter('-c 4')
|
||||
>>> m = libsvm.svm_train(prob, param) # m is a ctype pointer to an svm_model
|
||||
# Convert a tuple of ndarray (index, data) to feature_nodearray, a ctypes structure
|
||||
# Note that index starts from 0, though the following example will be changed to 1:1, 3:1 internally
|
||||
>>> x0, max_idx = gen_svm_nodearray((np.asarray([0,2]), np.asarray([1,1])))
|
||||
>>> label = libsvm.svm_predict(m, x0)
|
||||
|
||||
Design Description
|
||||
==================
|
||||
|
||||
There are two files svm.py and svmutil.py, which respectively correspond to
|
||||
low-level and high-level use of the interface.
|
||||
|
||||
In svm.py, we adopt the Python built-in library "ctypes," so that
|
||||
Python can directly access C structures and interface functions defined
|
||||
in svm.h.
|
||||
|
||||
While advanced users can use structures/functions in svm.py, to
|
||||
avoid handling ctypes structures, in svmutil.py we provide some easy-to-use
|
||||
functions. The usage is similar to LIBSVM MATLAB interface.
|
||||
|
||||
Data Structures
|
||||
===============
|
||||
|
||||
Four data structures derived from svm.h are svm_node, svm_problem, svm_parameter,
|
||||
and svm_model. They all contain fields with the same names in svm.h. Access
|
||||
these fields carefully because you directly use a C structure instead of a
|
||||
Python object. For svm_model, accessing the field directly is not recommended.
|
||||
Programmers should use the interface functions or methods of svm_model class
|
||||
in Python to get the values. The following description introduces additional
|
||||
fields and methods.
|
||||
|
||||
Before using the data structures, execute the following command to load the
|
||||
LIBSVM shared library:
|
||||
|
||||
>>> from libsvm.svm import *
|
||||
|
||||
- class svm_node:
|
||||
|
||||
Construct an svm_node.
|
||||
|
||||
>>> node = svm_node(idx, val)
|
||||
|
||||
idx: an integer indicates the feature index.
|
||||
|
||||
val: a float indicates the feature value.
|
||||
|
||||
Show the index and the value of a node.
|
||||
|
||||
>>> print(node)
|
||||
|
||||
- Function: gen_svm_nodearray(xi [,feature_max=None [,isKernel=False]])
|
||||
|
||||
Generate a feature vector from a Python list/tuple/dictionary, numpy ndarray or tuple of (index, data):
|
||||
|
||||
>>> xi_ctype, max_idx = gen_svm_nodearray({1:1, 3:1, 5:-2})
|
||||
|
||||
xi_ctype: the returned svm_nodearray (a ctypes structure)
|
||||
|
||||
max_idx: the maximal feature index of xi
|
||||
|
||||
feature_max: if feature_max is assigned, features with indices larger than
|
||||
feature_max are removed.
|
||||
|
||||
isKernel: if isKernel == True, the list index starts from 0 for precomputed
|
||||
kernel. Otherwise, the list index starts from 1. The default
|
||||
value is False.
|
||||
|
||||
- class svm_problem:
|
||||
|
||||
Construct an svm_problem instance
|
||||
|
||||
>>> prob = svm_problem(y, x)
|
||||
|
||||
y: a Python list/tuple/ndarray of l labels (type must be int/double).
|
||||
|
||||
x: 1. a list/tuple of l training instances. Feature vector of
|
||||
each training instance is a list/tuple or dictionary.
|
||||
|
||||
2. an l * n numpy ndarray or scipy spmatrix (n: number of features).
|
||||
|
||||
Note that if your x contains sparse data (i.e., dictionary), the internal
|
||||
ctypes data format is still sparse.
|
||||
|
||||
For pre-computed kernel, the isKernel flag should be set to True:
|
||||
|
||||
>>> prob = svm_problem(y, x, isKernel=True)
|
||||
|
||||
Please read LIBSVM README for more details of pre-computed kernel.
|
||||
|
||||
- class svm_parameter:
|
||||
|
||||
Construct an svm_parameter instance
|
||||
|
||||
>>> param = svm_parameter('training_options')
|
||||
|
||||
If 'training_options' is empty, LIBSVM default values are applied.
|
||||
|
||||
Set param to LIBSVM default values.
|
||||
|
||||
>>> param.set_to_default_values()
|
||||
|
||||
Parse a string of options.
|
||||
|
||||
>>> param.parse_options('training_options')
|
||||
|
||||
Show values of parameters.
|
||||
|
||||
>>> print(param)
|
||||
|
||||
- class svm_model:
|
||||
|
||||
There are two ways to obtain an instance of svm_model:
|
||||
|
||||
>>> model = svm_train(y, x)
|
||||
>>> model = svm_load_model('model_file_name')
|
||||
|
||||
Note that the returned structure of interface functions
|
||||
libsvm.svm_train and libsvm.svm_load_model is a ctypes pointer of
|
||||
svm_model, which is different from the svm_model object returned
|
||||
by svm_train and svm_load_model in svmutil.py. We provide a
|
||||
function toPyModel for the conversion:
|
||||
|
||||
>>> model_ptr = libsvm.svm_train(prob, param)
|
||||
>>> model = toPyModel(model_ptr)
|
||||
|
||||
If you obtain a model in a way other than the above approaches,
|
||||
handle it carefully to avoid memory leak or segmentation fault.
|
||||
|
||||
Some interface functions to access LIBSVM models are wrapped as
|
||||
members of the class svm_model:
|
||||
|
||||
>>> svm_type = model.get_svm_type()
|
||||
>>> nr_class = model.get_nr_class()
|
||||
>>> svr_probability = model.get_svr_probability()
|
||||
>>> class_labels = model.get_labels()
|
||||
>>> sv_indices = model.get_sv_indices()
|
||||
>>> nr_sv = model.get_nr_sv()
|
||||
>>> is_prob_model = model.is_probability_model()
|
||||
>>> support_vector_coefficients = model.get_sv_coef()
|
||||
>>> support_vectors = model.get_SV()
|
||||
|
||||
Utility Functions
|
||||
=================
|
||||
|
||||
To use utility functions, type
|
||||
|
||||
>>> from libsvm.svmutil import *
|
||||
|
||||
The above command loads
|
||||
svm_train() : train an SVM model
|
||||
svm_predict() : predict testing data
|
||||
svm_read_problem() : read the data from a LIBSVM-format file or object.
|
||||
svm_load_model() : load a LIBSVM model.
|
||||
svm_save_model() : save model to a file.
|
||||
evaluations() : evaluate prediction results.
|
||||
csr_find_scale_param() : find scaling parameter for data in csr format.
|
||||
csr_scale() : apply data scaling to data in csr format.
|
||||
|
||||
- Function: svm_train
|
||||
|
||||
There are three ways to call svm_train()
|
||||
|
||||
>>> model = svm_train(y, x [, 'training_options'])
|
||||
>>> model = svm_train(prob [, 'training_options'])
|
||||
>>> model = svm_train(prob, param)
|
||||
|
||||
y: a list/tuple/ndarray of l training labels (type must be int/double).
|
||||
|
||||
x: 1. a list/tuple of l training instances. Feature vector of
|
||||
each training instance is a list/tuple or dictionary.
|
||||
|
||||
2. an l * n numpy ndarray or scipy spmatrix (n: number of features).
|
||||
|
||||
training_options: a string in the same form as that for LIBSVM command
|
||||
mode.
|
||||
|
||||
prob: an svm_problem instance generated by calling
|
||||
svm_problem(y, x).
|
||||
For pre-computed kernel, you should use
|
||||
svm_problem(y, x, isKernel=True)
|
||||
|
||||
param: an svm_parameter instance generated by calling
|
||||
svm_parameter('training_options')
|
||||
|
||||
model: the returned svm_model instance. See svm.h for details of this
|
||||
structure. If '-v' is specified, cross validation is
|
||||
conducted and the returned model is just a scalar: cross-validation
|
||||
accuracy for classification and mean-squared error for regression.
|
||||
|
||||
To train the same data many times with different
|
||||
parameters, the second and the third ways should be faster.
|
||||
|
||||
Examples:
|
||||
|
||||
>>> y, x = svm_read_problem('../heart_scale')
|
||||
>>> prob = svm_problem(y, x)
|
||||
>>> param = svm_parameter('-s 3 -c 5 -h 0')
|
||||
>>> m = svm_train(y, x, '-c 5')
|
||||
>>> m = svm_train(prob, '-t 2 -c 5')
|
||||
>>> m = svm_train(prob, param)
|
||||
>>> CV_ACC = svm_train(y, x, '-v 3')
|
||||
|
||||
- Function: svm_predict
|
||||
|
||||
To predict testing data with a model, use
|
||||
|
||||
>>> p_labs, p_acc, p_vals = svm_predict(y, x, model [,'predicting_options'])
|
||||
|
||||
y: a list/tuple/ndarray of l true labels (type must be int/double).
|
||||
It is used for calculating the accuracy. Use [] if true labels are
|
||||
unavailable.
|
||||
|
||||
x: 1. a list/tuple of l training instances. Feature vector of
|
||||
each training instance is a list/tuple or dictionary.
|
||||
|
||||
2. an l * n numpy ndarray or scipy spmatrix (n: number of features).
|
||||
|
||||
predicting_options: a string of predicting options in the same format as
|
||||
that of LIBSVM.
|
||||
|
||||
model: an svm_model instance.
|
||||
|
||||
p_labels: a list of predicted labels
|
||||
|
||||
p_acc: a tuple including accuracy (for classification), mean
|
||||
squared error, and squared correlation coefficient (for
|
||||
regression).
|
||||
|
||||
p_vals: a list of decision values or probability estimates (if '-b 1'
|
||||
is specified). If k is the number of classes in training data,
|
||||
for decision values, each element includes results of predicting
|
||||
k(k-1)/2 binary-class SVMs. For classification, k = 1 is a
|
||||
special case. Decision value [+1] is returned for each testing
|
||||
instance, instead of an empty list.
|
||||
For probabilities, each element contains k values indicating
|
||||
the probability that the testing instance is in each class.
|
||||
For one-class SVM, the list has two elements indicating the
|
||||
probabilities of normal instance/outlier.
|
||||
Note that the order of classes is the same as the 'model.label'
|
||||
field in the model structure.
|
||||
|
||||
Example:
|
||||
|
||||
>>> m = svm_train(y, x, '-c 5')
|
||||
>>> p_labels, p_acc, p_vals = svm_predict(y, x, m)
|
||||
|
||||
- Functions: svm_read_problem/svm_load_model/svm_save_model
|
||||
|
||||
See the usage by examples:
|
||||
|
||||
>>> y, x = svm_read_problem('data.txt')
|
||||
>>> with open('data.txt') as f:
|
||||
>>> y, x = svm_read_problem(f)
|
||||
>>> m = svm_load_model('model_file')
|
||||
>>> svm_save_model('model_file', m)
|
||||
|
||||
- Function: evaluations
|
||||
|
||||
Calculate some evaluations using the true values (ty) and the predicted
|
||||
values (pv):
|
||||
|
||||
>>> (ACC, MSE, SCC) = evaluations(ty, pv, useScipy)
|
||||
|
||||
ty: a list/tuple/ndarray of true values.
|
||||
|
||||
pv: a list/tuple/ndarray of predicted values.
|
||||
|
||||
useScipy: convert ty, pv to ndarray, and use scipy functions to do the evaluation
|
||||
|
||||
ACC: accuracy.
|
||||
|
||||
MSE: mean squared error.
|
||||
|
||||
SCC: squared correlation coefficient.
|
||||
|
||||
- Function: csr_find_scale_param/csr_scale
|
||||
|
||||
Scale data in csr format.
|
||||
|
||||
>>> param = csr_find_scale_param(x [, lower=l, upper=u])
|
||||
>>> x = csr_scale(x, param)
|
||||
|
||||
x: a csr_matrix of data.
|
||||
|
||||
l: x scaling lower limit; default -1.
|
||||
|
||||
u: x scaling upper limit; default 1.
|
||||
|
||||
The scaling process is: x * diag(coef) + ones(l, 1) * offset'
|
||||
|
||||
param: a dictionary of scaling parameters, where param['coef'] = coef and param['offset'] = offset.
|
||||
|
||||
coef: a scipy array of scaling coefficients.
|
||||
|
||||
offset: a scipy array of scaling offsets.
|
||||
|
||||
Additional Information
|
||||
======================
|
||||
|
||||
This interface was originally written by Hsiang-Fu Yu from Department of Computer
|
||||
Science, National Taiwan University. If you find this tool useful, please
|
||||
cite LIBSVM as follows
|
||||
|
||||
Chih-Chung Chang and Chih-Jen Lin, LIBSVM : a library for support
|
||||
vector machines. ACM Transactions on Intelligent Systems and
|
||||
Technology, 2:27:1--27:27, 2011. Software available at
|
||||
http://www.csie.ntu.edu.tw/~cjlin/libsvm
|
||||
|
||||
For any question, please contact Chih-Jen Lin <cjlin@csie.ntu.edu.tw>,
|
||||
or check the FAQ page:
|
||||
|
||||
http://www.csie.ntu.edu.tw/~cjlin/libsvm/faq.html
|
0
libsvm-3.36/python/libsvm/__init__.py
Normal file
0
libsvm-3.36/python/libsvm/__init__.py
Normal file
189
libsvm-3.36/python/libsvm/commonutil.py
Normal file
189
libsvm-3.36/python/libsvm/commonutil.py
Normal file
@@ -0,0 +1,189 @@
|
||||
from __future__ import print_function
|
||||
from array import array
|
||||
import sys
|
||||
|
||||
try:
|
||||
import numpy as np
|
||||
import scipy
|
||||
from scipy import sparse
|
||||
except:
|
||||
scipy = None
|
||||
|
||||
|
||||
__all__ = ['svm_read_problem', 'evaluations', 'csr_find_scale_param', 'csr_scale']
|
||||
|
||||
def svm_read_problem(data_source, return_scipy=False):
	"""
	svm_read_problem(data_source, return_scipy=False) -> [y, x], y: list, x: list of dictionary
	svm_read_problem(data_source, return_scipy=True) -> [y, x], y: ndarray, x: csr_matrix

	Read LIBSVM-format data from data_source (a file path, or any object
	with a .read attribute, e.g. an open file or io.StringIO) and return
	labels y and data instances x.

	With return_scipy=True (and scipy importable), values are accumulated
	in compact array('d')/array('l') buffers and converted zero-copy via
	np.frombuffer into an ndarray y and a csr_matrix x.
	"""
	# Decide the storage strategy once instead of re-testing per line.
	use_scipy = scipy != None and return_scipy
	if use_scipy:
		prob_y = array('d')
		prob_x = array('d')
		row_ptr = array('l', [0])
		col_idx = array('l')
	else:
		prob_y = []
		prob_x = []
		row_ptr = [0]
		col_idx = []
	# LIBSVM indices normally start at 1; seeing an explicit index '0'
	# (precomputed-kernel format) switches the base to 0.
	indx_start = 1

	if hasattr(data_source, "read"):
		file = data_source
	else:
		file = open(data_source)
	try:
		for line in file:
			line = line.split(None, 1)
			# In case an instance with all zero features
			if len(line) == 1: line += ['']
			label, features = line
			prob_y.append(float(label))
			if use_scipy:
				nz = 0
				for e in features.split():
					ind, val = e.split(":")
					if ind == '0':
						indx_start = 0
					val = float(val)
					# Explicit zeros are dropped from the sparse matrix.
					if val != 0:
						col_idx.append(int(ind)-indx_start)
						prob_x.append(val)
						nz += 1
				row_ptr.append(row_ptr[-1]+nz)
			else:
				xi = {}
				for e in features.split():
					ind, val = e.split(":")
					xi[int(ind)] = float(val)
				prob_x += [xi]
	finally:
		# close file only if it was created by us; a pointless
		# "except Exception as err_msg: raise err_msg" re-raise was removed.
		if not hasattr(data_source, "read"):
			file.close()

	if use_scipy:
		prob_y = np.frombuffer(prob_y, dtype='d')
		prob_x = np.frombuffer(prob_x, dtype='d')
		col_idx = np.frombuffer(col_idx, dtype='l')
		row_ptr = np.frombuffer(row_ptr, dtype='l')
		prob_x = sparse.csr_matrix((prob_x, col_idx, row_ptr))
	return (prob_y, prob_x)
|
||||
|
||||
def evaluations_scipy(ty, pv):
	"""
	evaluations_scipy(ty, pv) -> (ACC, MSE, SCC)
	ty, pv: ndarray

	Compute classification accuracy (in percent), mean squared error and
	squared correlation coefficient from true values ty and predicted
	values pv.  SCC is NaN when either sequence has zero variance.
	"""
	if not (scipy != None and isinstance(ty, np.ndarray) and isinstance(pv, np.ndarray)):
		raise TypeError("type of ty and pv must be ndarray")
	if len(ty) != len(pv):
		raise ValueError("len(ty) must be equal to len(pv)")
	n = len(ty)
	acc = 100.0 * (ty == pv).mean()
	mse = ((ty - pv) ** 2).mean()
	sv = pv.sum()
	sy = ty.sum()
	svy = (pv * ty).sum()
	svv = (pv * pv).sum()
	syy = (ty * ty).sum()
	# Raise on divide-by-zero/invalid so a zero-variance input falls
	# through to NaN instead of producing a warning and inf.
	with np.errstate(all = 'raise'):
		try:
			num = n * svy - sv * sy
			scc = (num * num) / ((n * svv - sv * sv) * (n * syy - sy * sy))
		except:
			scc = float('nan')
	return (float(acc), float(mse), float(scc))
|
||||
|
||||
def evaluations(ty, pv, useScipy = True):
	"""
	evaluations(ty, pv, useScipy) -> (ACC, MSE, SCC)
	ty, pv: list, tuple or ndarray
	useScipy: convert ty, pv to ndarray, and use scipy functions for the evaluation

	Calculate accuracy, mean squared error and squared correlation
	coefficient using the true values (ty) and predicted values (pv).
	"""
	if scipy != None and useScipy:
		return evaluations_scipy(np.asarray(ty), np.asarray(pv))
	if len(ty) != len(pv):
		raise ValueError("len(ty) must be equal to len(pv)")
	n = len(ty)
	n_correct = 0
	sq_err = 0
	sv = sy = svv = syy = svy = 0
	for pred, truth in zip(pv, ty):
		if truth == pred:
			n_correct += 1
		diff = pred - truth
		sq_err += diff * diff
		sv += pred
		sy += truth
		svv += pred * pred
		syy += truth * truth
		svy += pred * truth
	ACC = 100.0 * n_correct / n
	MSE = sq_err / n
	try:
		num = n * svy - sv * sy
		# ZeroDivisionError (zero variance) yields NaN, matching the
		# scipy code path.
		SCC = (num * num) / ((n * svv - sv * sv) * (n * syy - sy * sy))
	except:
		SCC = float('nan')
	return (float(ACC), float(MSE), float(SCC))
|
||||
|
||||
def csr_find_scale_param(x, lower=-1, upper=1):
	"""
	Compute per-feature parameters that scale each feature of the
	csr_matrix x into [lower, upper]; constant features get coef 0 and
	offset 0 (i.e. they are left unscaled at zero).

	Returns {'coef': coef, 'offset': offset} so that the scaled data is
	x * diag(coef) + ones(l, 1) * offset'.
	"""
	assert isinstance(x, sparse.csr_matrix)
	assert lower < upper
	n_rows, n_feats = x.shape
	col_min = x.min(axis=0).toarray().flatten()
	col_max = x.max(axis=0).toarray().flatten()
	coef = (col_max - col_min) / (upper - lower)
	nonconst = coef != 0
	coef[nonconst] = 1.0 / coef[nonconst]

	# (x - ones(l,1) * col_min') * diag(coef) + lower
	# = x * diag(coef) + ones(l, 1) * (-col_min' * diag(coef) + lower)
	# = x * diag(coef) + ones(l, 1) * offset'
	offset = -col_min * coef + lower
	offset[~nonconst] = 0

	# A dense offset on many columns would destroy sparsity; warn.
	if sum(offset != 0) * n_rows > 3 * x.getnnz():
		print(
			"WARNING: The #nonzeros of the scaled data is at least 2 times larger than the original one.\n"
			"If feature values are non-negative and sparse, set lower=0 rather than the default lower=-1.",
			file=sys.stderr)

	return {'coef':coef, 'offset':offset}
|
||||
|
||||
def csr_scale(x, scale_param):
	"""
	Scale the csr_matrix x with parameters produced by
	csr_find_scale_param: scaled_x = x * diag(coef) + ones(l, 1) * offset'.
	"""
	assert isinstance(x, sparse.csr_matrix)

	offset = scale_param['offset']
	coef = scale_param['coef']
	assert len(coef) == len(offset)

	l, n = x.shape

	if not n == len(coef):
		print("WARNING: The dimension of scaling parameters and feature number do not match.", file=sys.stderr)
		# BUG FIX: ndarray.resize() resizes in place and returns None;
		# the original "coef = coef.resize(n)" left coef/offset as None
		# and crashed below.  Copy first (resize cannot grow a referenced
		# array), then resize in place: zeros padded if n > len(coef),
		# truncated otherwise.
		coef = np.copy(coef)
		coef.resize(n, refcheck=False)
		offset = np.copy(offset)
		offset.resize(n, refcheck=False)

	# scaled_x = x * diag(coef) + ones(l, 1) * offset'
	offset = sparse.csr_matrix(offset.reshape(1, n))
	offset = sparse.vstack([offset] * l, format='csr', dtype=x.dtype)
	scaled_x = x.dot(sparse.diags(coef, 0, shape=(n, n))) + offset

	if scaled_x.getnnz() > x.getnnz():
		print(
			"WARNING: original #nonzeros %d\n" % x.getnnz() +
			" > new #nonzeros %d\n" % scaled_x.getnnz() +
			"If feature values are non-negative and sparse, get scale_param by setting lower=0 rather than the default lower=-1.",
			file=sys.stderr)

	return scaled_x
|
465
libsvm-3.36/python/libsvm/svm.py
Normal file
465
libsvm-3.36/python/libsvm/svm.py
Normal file
@@ -0,0 +1,465 @@
|
||||
from ctypes import *
|
||||
from ctypes.util import find_library
|
||||
from os import path
|
||||
from glob import glob
|
||||
from enum import IntEnum
|
||||
import sys
|
||||
|
||||
try:
|
||||
import numpy as np
|
||||
import scipy
|
||||
from scipy import sparse
|
||||
except:
|
||||
scipy = None
|
||||
|
||||
|
||||
if sys.version_info[0] < 3:
|
||||
range = xrange
|
||||
from itertools import izip as zip
|
||||
|
||||
__all__ = ['libsvm', 'svm_problem', 'svm_parameter',
|
||||
'toPyModel', 'gen_svm_nodearray', 'print_null', 'svm_node', 'svm_forms',
|
||||
'PRINT_STRING_FUN', 'kernel_names', 'c_double', 'svm_model']
|
||||
|
||||
# Locate and load the shared libsvm library, trying in order:
#   1) the wheel-installed extension next to this file (clib.cp*),
#   2) the in-tree build (windows\libsvm.dll or ../../libsvm.so.4),
#   3) a system-wide install found via ctypes.util.find_library.
try:
	dirname = path.dirname(path.abspath(__file__))
	dynamic_lib_name = 'clib.cp*'
	path_to_so = glob(path.join(dirname, dynamic_lib_name))[0]
	libsvm = CDLL(path_to_so)
except:
	try:
		if sys.platform == 'win32':
			libsvm = CDLL(path.join(dirname, r'..\..\windows\libsvm.dll'))
		else:
			libsvm = CDLL(path.join(dirname, '../../libsvm.so.4'))
	except:
		# For unix the prefix 'lib' is not considered.
		if find_library('svm'):
			libsvm = CDLL(find_library('svm'))
		elif find_library('libsvm'):
			libsvm = CDLL(find_library('libsvm'))
		else:
			raise Exception('LIBSVM library not found.')
|
||||
|
||||
class svm_forms(IntEnum):
	"""SVM formulation types; values match svm_type in the C header."""
	C_SVC = 0        # multi-class classification
	NU_SVC = 1       # multi-class classification (nu parameterization)
	ONE_CLASS = 2    # one-class SVM
	EPSILON_SVR = 3  # regression
	NU_SVR = 4       # regression (nu parameterization)
|
||||
|
||||
class kernel_names(IntEnum):
	"""Kernel types; values match kernel_type in the C header."""
	LINEAR = 0       # u'*v
	POLY = 1         # (gamma*u'*v + coef0)^degree
	RBF = 2          # exp(-gamma*|u-v|^2)
	SIGMOID = 3      # tanh(gamma*u'*v + coef0)
	PRECOMPUTED = 4  # kernel values supplied directly in the data
|
||||
|
||||
# C callback signature used by svm_set_print_string_function.
PRINT_STRING_FUN = CFUNCTYPE(None, c_char_p)

def print_null(s):
	"""Discard libsvm's C-side log output (used for quiet mode, -q)."""
	pass

# In multi-threading, all threads share the dynamic library's memory
# space, so keep a module-level reference to the ctypes callback:
# otherwise Python could garbage-collect it in thread B while thread A
# still needs it.  See svm_set_print_string_function() in the LIBSVM
# README for details.
ctypes_print_null = PRINT_STRING_FUN(print_null)
|
||||
|
||||
def genFields(names, types):
	"""Pair up field names and ctypes types for a Structure's _fields_."""
	return [(name, ctype) for name, ctype in zip(names, types)]
|
||||
|
||||
def fillprototype(f, restype, argtypes):
	"""Declare the C return type and argument types of ctypes function f."""
	f.restype = restype
	f.argtypes = argtypes
|
||||
|
||||
class svm_node(Structure):
	"""ctypes mirror of C struct svm_node: one (index, value) feature pair.

	A feature vector is an array of svm_node terminated by a node whose
	index is -1.
	"""
	_names = ["index", "value"]
	_types = [c_int, c_double]
	_fields_ = genFields(_names, _types)

	def __init__(self, index=-1, value=0):
		self.index, self.value = index, value

	def __str__(self):
		# LIBSVM text format: "index:value"
		return '%d:%g' % (self.index, self.value)
|
||||
|
||||
def gen_svm_nodearray(xi, feature_max=None, isKernel=False):
	"""Convert one instance xi into a ctypes (svm_node * k) array.

	xi may be a dict {index: value}, a list/tuple of values, a 1-d
	ndarray, or a tuple (indices, values) of two ndarrays for a sparse
	vector.  Returns (node_array, max_index); the array is terminated by
	a node with index == -1.  Indices are 1-based unless isKernel is
	true (precomputed kernel, 0-based).  feature_max, if given, drops
	features with a larger index.
	"""
	if feature_max:
		assert(isinstance(feature_max, int))

	xi_shift = 0 # ensure correct indices of xi
	if scipy and isinstance(xi, tuple) and len(xi) == 2\
			and isinstance(xi[0], np.ndarray) and isinstance(xi[1], np.ndarray): # for a sparse vector
		if not isKernel:
			index_range = xi[0] + 1 # index starts from 1
		else:
			index_range = xi[0] # index starts from 0 for precomputed kernel
		if feature_max:
			index_range = index_range[np.where(index_range <= feature_max)]
	elif scipy and isinstance(xi, np.ndarray):
		if not isKernel:
			xi_shift = 1
			index_range = xi.nonzero()[0] + 1 # index starts from 1
		else:
			index_range = np.arange(0, len(xi)) # index starts from 0 for precomputed kernel
		if feature_max:
			index_range = index_range[np.where(index_range <= feature_max)]
	elif isinstance(xi, (dict, list, tuple)):
		if isinstance(xi, dict):
			index_range = sorted(xi.keys())
		elif isinstance(xi, (list, tuple)):
			if not isKernel:
				xi_shift = 1
				index_range = range(1, len(xi) + 1) # index starts from 1
			else:
				index_range = range(0, len(xi)) # index starts from 0 for precomputed kernel

		if feature_max:
			index_range = list(filter(lambda j: j <= feature_max, index_range))
		if not isKernel:
			# zero-valued features are omitted (sparse representation)
			index_range = list(filter(lambda j:xi[j-xi_shift] != 0, index_range))
	else:
		raise TypeError('xi should be a dictionary, list, tuple, 1-d numpy array, or tuple of (index, data)')

	# +1 slot for the index = -1 terminator expected by the C library
	ret = (svm_node*(len(index_range)+1))()
	ret[-1].index = -1

	if scipy and isinstance(xi, tuple) and len(xi) == 2\
			and isinstance(xi[0], np.ndarray) and isinstance(xi[1], np.ndarray): # for a sparse vector
		# since xi=(indices, values), we must sort them simultaneously.
		for idx, arg in enumerate(np.argsort(index_range)):
			ret[idx].index = index_range[arg]
			ret[idx].value = (xi[1])[arg]
	else:
		for idx, j in enumerate(index_range):
			ret[idx].index = j
			ret[idx].value = xi[j - xi_shift]

	max_idx = 0
	if len(index_range) > 0:
		max_idx = index_range[-1]
	return ret, max_idx
|
||||
|
||||
# Optional numba acceleration: when numba is unavailable, fall back to a
# no-op decorator so @jit-decorated functions still run as plain Python.
try:
	from numba import jit
	jit_enabled = True
except:
	# We need to support two cases: when jit is called with no arguments, and when jit is called with
	# a keyword argument.
	def jit(func=None, *args, **kwargs):
		if func is None:
			# This handles the case where jit is used with parentheses: @jit(nopython=True)
			return lambda x: x
		else:
			# This handles the case where jit is used without parentheses: @jit
			return func
	jit_enabled = False
|
||||
|
||||
@jit(nopython=True)
def csr_to_problem_jit(l, x_val, x_ind, x_rowptr, prob_val, prob_ind, prob_rowptr, indx_start):
	# Copy each CSR row's values/indices into the svm_node arrays,
	# shifting column indices by indx_start (1 normally, 0 for a
	# precomputed kernel).  The last slot of every output row is left
	# untouched for the index = -1 terminator.
	for i in range(l):
		b1,e1 = x_rowptr[i], x_rowptr[i+1]
		b2,e2 = prob_rowptr[i], prob_rowptr[i+1]-1
		for j in range(b1,e1):
			prob_ind[j-b1+b2] = x_ind[j]+indx_start
			prob_val[j-b1+b2] = x_val[j]
|
||||
def csr_to_problem_nojit(l, x_val, x_ind, x_rowptr, prob_val, prob_ind, prob_rowptr, indx_start):
	"""Pure-numpy fallback of csr_to_problem_jit: copy each CSR row's
	values/indices into the svm_node arrays, shifting column indices by
	indx_start.  The last slot of each output row is left untouched for
	the index = -1 terminator."""
	for row in range(l):
		src = slice(x_rowptr[row], x_rowptr[row + 1])
		dst = slice(prob_rowptr[row], prob_rowptr[row + 1] - 1)
		prob_ind[dst] = x_ind[src] + indx_start
		prob_val[dst] = x_val[src]
|
||||
|
||||
def csr_to_problem(x, prob, isKernel):
	"""Fill svm_problem prob's x_space/rowptr from csr_matrix x.

	Each row gets one extra svm_node for the index = -1 terminator;
	indices are 1-based unless isKernel (precomputed kernel) is true.
	"""
	if not x.has_sorted_indices:
		x.sort_indices()

	# Extra space for termination node and (possibly) bias term
	x_space = prob.x_space = np.empty((x.nnz+x.shape[0]), dtype=svm_node)
	# rowptr has to be a 64bit integer because it will later be used for pointer arithmetic,
	# which overflows when the added pointer points to an address that is numerically high.
	prob.rowptr = x.indptr.astype(np.int64, copy=True)
	# shift each row start by +1 per preceding row to make room for the
	# per-row terminator node
	prob.rowptr[1:] += np.arange(1,x.shape[0]+1)
	prob_ind = x_space["index"]
	prob_val = x_space["value"]
	# pre-fill with -1 so the reserved last slot of each row terminates it
	prob_ind[:] = -1
	if not isKernel:
		indx_start = 1 # index starts from 1
	else:
		indx_start = 0 # index starts from 0 for precomputed kernel
	if jit_enabled:
		csr_to_problem_jit(x.shape[0], x.data, x.indices, x.indptr, prob_val, prob_ind, prob.rowptr, indx_start)
	else:
		csr_to_problem_nojit(x.shape[0], x.data, x.indices, x.indptr, prob_val, prob_ind, prob.rowptr, indx_start)
|
||||
|
||||
class svm_problem(Structure):
	"""ctypes mirror of C struct svm_problem: l labeled training instances.

	self.y / self.x are the ctypes arrays handed to the C library;
	self.x_space (and self.rowptr for csr input) keep the underlying
	node storage alive for the model's lifetime.
	"""
	_names = ["l", "y", "x"]
	_types = [c_int, POINTER(c_double), POINTER(POINTER(svm_node))]
	_fields_ = genFields(_names, _types)

	def __init__(self, y, x, isKernel=False):
		if (not isinstance(y, (list, tuple))) and (not (scipy and isinstance(y, np.ndarray))):
			raise TypeError("type of y: {0} is not supported!".format(type(y)))

		if isinstance(x, (list, tuple)):
			if len(y) != len(x):
				raise ValueError("len(y) != len(x)")
		elif scipy != None and isinstance(x, (np.ndarray, sparse.spmatrix)):
			if len(y) != x.shape[0]:
				raise ValueError("len(y) != len(x)")
			if isinstance(x, np.ndarray):
				x = np.ascontiguousarray(x) # enforce row-major
			if isinstance(x, sparse.spmatrix):
				x = x.tocsr()
				pass
		else:
			raise TypeError("type of x: {0} is not supported!".format(type(x)))
		self.l = l = len(y)

		max_idx = 0
		x_space = self.x_space = []
		if scipy != None and isinstance(x, sparse.csr_matrix):
			# fast path: bulk conversion of the whole CSR matrix
			csr_to_problem(x, self, isKernel)
			max_idx = x.shape[1]
		else:
			# generic path: one svm_node array per instance
			for i, xi in enumerate(x):
				tmp_xi, tmp_idx = gen_svm_nodearray(xi,isKernel=isKernel)
				x_space += [tmp_xi]
				max_idx = max(max_idx, tmp_idx)
		self.n = max_idx

		self.y = (c_double * l)()
		if scipy != None and isinstance(y, np.ndarray):
			# bulk-copy labels through a numpy view of the ctypes array
			np.ctypeslib.as_array(self.y, (self.l,))[:] = y
		else:
			for i, yi in enumerate(y): self.y[i] = yi

		self.x = (POINTER(svm_node) * l)()
		if scipy != None and isinstance(x, sparse.csr_matrix):
			# build the per-row pointers with pointer arithmetic:
			# base address of x_space + rowptr offset * sizeof(svm_node)
			base = addressof(self.x_space.ctypes.data_as(POINTER(svm_node))[0])
			x_ptr = cast(self.x, POINTER(c_uint64))
			x_ptr = np.ctypeslib.as_array(x_ptr,(self.l,))
			x_ptr[:] = self.rowptr[:-1]*sizeof(svm_node)+base
		else:
			for i, xi in enumerate(self.x_space): self.x[i] = xi
|
||||
|
||||
class svm_parameter(Structure):
	"""ctypes mirror of C struct svm_parameter: training parameters.

	parse_options() fills the structure from a libsvm command-line
	option string or list (same syntax as the svm-train tool).
	"""
	_names = ["svm_type", "kernel_type", "degree", "gamma", "coef0",
			"cache_size", "eps", "C", "nr_weight", "weight_label", "weight",
			"nu", "p", "shrinking", "probability"]
	_types = [c_int, c_int, c_int, c_double, c_double,
			c_double, c_double, c_double, c_int, POINTER(c_int), POINTER(c_double),
			c_double, c_double, c_int, c_int]
	_fields_ = genFields(_names, _types)

	def __init__(self, options = None):
		if options == None:
			options = ''
		self.parse_options(options)

	def __str__(self):
		# Dump both the C fields and any Python-side attributes
		# (cross_validation, nr_fold, print_func, ...).
		s = ''
		attrs = svm_parameter._names + list(self.__dict__.keys())
		values = map(lambda attr: getattr(self, attr), attrs)
		for attr, val in zip(attrs, values):
			s += (' %s: %s\n' % (attr, val))
		s = s.strip()

		return s

	def set_to_default_values(self):
		# Defaults match the svm-train command-line tool.
		self.svm_type = svm_forms.C_SVC;
		self.kernel_type = kernel_names.RBF
		self.degree = 3
		self.gamma = 0           # 0 means "replace with 1/num_features at train time"
		self.coef0 = 0
		self.nu = 0.5
		self.cache_size = 100
		self.C = 1
		self.eps = 0.001
		self.p = 0.1
		self.shrinking = 1
		self.probability = 0
		self.nr_weight = 0
		self.weight_label = None
		self.weight = None
		self.cross_validation = False
		self.nr_fold = 0
		self.print_func = cast(None, PRINT_STRING_FUN)

	def parse_options(self, options):
		"""Parse an option string or argv-style list (svm-train syntax)."""
		if isinstance(options, list):
			argv = options
		elif isinstance(options, str):
			argv = options.split()
		else:
			raise TypeError("arg 1 should be a list or a str.")
		self.set_to_default_values()
		self.print_func = cast(None, PRINT_STRING_FUN)
		weight_label = []
		weight = []

		i = 0
		while i < len(argv):
			if argv[i] == "-s":
				i = i + 1
				self.svm_type = svm_forms(int(argv[i]))
			elif argv[i] == "-t":
				i = i + 1
				self.kernel_type = kernel_names(int(argv[i]))
			elif argv[i] == "-d":
				i = i + 1
				self.degree = int(argv[i])
			elif argv[i] == "-g":
				i = i + 1
				self.gamma = float(argv[i])
			elif argv[i] == "-r":
				i = i + 1
				self.coef0 = float(argv[i])
			elif argv[i] == "-n":
				i = i + 1
				self.nu = float(argv[i])
			elif argv[i] == "-m":
				i = i + 1
				self.cache_size = float(argv[i])
			elif argv[i] == "-c":
				i = i + 1
				self.C = float(argv[i])
			elif argv[i] == "-e":
				i = i + 1
				self.eps = float(argv[i])
			elif argv[i] == "-p":
				i = i + 1
				self.p = float(argv[i])
			elif argv[i] == "-h":
				i = i + 1
				self.shrinking = int(argv[i])
			elif argv[i] == "-b":
				i = i + 1
				self.probability = int(argv[i])
			elif argv[i] == "-q":
				# quiet mode: route libsvm's output to the no-op callback
				self.print_func = ctypes_print_null
			elif argv[i] == "-v":
				i = i + 1
				self.cross_validation = 1
				self.nr_fold = int(argv[i])
				if self.nr_fold < 2:
					raise ValueError("n-fold cross validation: n must >= 2")
			elif argv[i].startswith("-w"):
				# "-wi weight": the class label i is embedded in the
				# option name itself (e.g. "-w1 10").
				i = i + 1
				self.nr_weight += 1
				weight_label += [int(argv[i-1][2:])]
				weight += [float(argv[i])]
			else:
				raise ValueError("Wrong options")
			i += 1

		libsvm.svm_set_print_string_function(self.print_func)
		# copy collected per-class weights into ctypes arrays
		self.weight_label = (c_int*self.nr_weight)()
		self.weight = (c_double*self.nr_weight)()
		for i in range(self.nr_weight):
			self.weight[i] = weight[i]
			self.weight_label[i] = weight_label[i]
|
||||
|
||||
class svm_model(Structure):
	"""ctypes mirror of C struct svm_model with convenience accessors.

	__createfrom__ records whether the memory was allocated by Python
	('python') or by the C library ('C', freed in __del__).
	"""
	_names = ['param', 'nr_class', 'l', 'SV', 'sv_coef', 'rho',
			'probA', 'probB', 'prob_density_marks', 'sv_indices',
			'label', 'nSV', 'free_sv']
	_types = [svm_parameter, c_int, c_int, POINTER(POINTER(svm_node)),
			POINTER(POINTER(c_double)), POINTER(c_double),
			POINTER(c_double), POINTER(c_double), POINTER(c_double),
			POINTER(c_int), POINTER(c_int), POINTER(c_int), c_int]
	_fields_ = genFields(_names, _types)

	def __init__(self):
		self.__createfrom__ = 'python'

	def __del__(self):
		# free memory created by C to avoid memory leak
		if hasattr(self, '__createfrom__') and self.__createfrom__ == 'C':
			libsvm.svm_free_and_destroy_model(pointer(pointer(self)))

	def get_svm_type(self):
		# One of the svm_forms values.
		return libsvm.svm_get_svm_type(self)

	def get_nr_class(self):
		return libsvm.svm_get_nr_class(self)

	def get_svr_probability(self):
		# sigma of the Laplace noise model for SVR probability estimates.
		return libsvm.svm_get_svr_probability(self)

	def get_labels(self):
		nr_class = self.get_nr_class()
		labels = (c_int * nr_class)()
		libsvm.svm_get_labels(self, labels)
		return labels[:nr_class]

	def get_sv_indices(self):
		total_sv = self.get_nr_sv()
		sv_indices = (c_int * total_sv)()
		libsvm.svm_get_sv_indices(self, sv_indices)
		return sv_indices[:total_sv]

	def get_nr_sv(self):
		return libsvm.svm_get_nr_sv(self)

	def is_probability_model(self):
		return (libsvm.svm_check_probability_model(self) == 1)

	def get_sv_coef(self):
		# One (nr_class - 1)-tuple of coefficients per support vector.
		return [tuple(self.sv_coef[j][i] for j in range(self.nr_class - 1))
				for i in range(self.l)]

	def get_SV(self):
		# Support vectors as sparse dicts {index: value}; each C node
		# array is terminated by a node with index == -1.
		result = []
		for sparse_sv in self.SV[:self.l]:
			row = dict()

			i = 0
			while True:
				if sparse_sv[i].index == -1:
					break
				row[sparse_sv[i].index] = sparse_sv[i].value
				i += 1

			result.append(row)
		return result
|
||||
|
||||
def toPyModel(model_ptr):
	"""
	toPyModel(model_ptr) -> svm_model

	Convert a ctypes POINTER(svm_model) to a Python svm_model
	"""
	if not model_ptr:
		raise ValueError("Null pointer")
	model = model_ptr.contents
	# Mark as C-allocated so __del__ frees it through the C library.
	model.__createfrom__ = 'C'
	return model
|
||||
|
||||
# Declare the C signatures of the libsvm entry points so ctypes performs
# the correct argument conversion and return-type handling.

# training / cross validation
fillprototype(libsvm.svm_train, POINTER(svm_model), [POINTER(svm_problem), POINTER(svm_parameter)])
fillprototype(libsvm.svm_cross_validation, None, [POINTER(svm_problem), POINTER(svm_parameter), c_int, POINTER(c_double)])

# model persistence
fillprototype(libsvm.svm_save_model, c_int, [c_char_p, POINTER(svm_model)])
fillprototype(libsvm.svm_load_model, POINTER(svm_model), [c_char_p])

# model introspection
fillprototype(libsvm.svm_get_svm_type, c_int, [POINTER(svm_model)])
fillprototype(libsvm.svm_get_nr_class, c_int, [POINTER(svm_model)])
fillprototype(libsvm.svm_get_labels, None, [POINTER(svm_model), POINTER(c_int)])
fillprototype(libsvm.svm_get_sv_indices, None, [POINTER(svm_model), POINTER(c_int)])
fillprototype(libsvm.svm_get_nr_sv, c_int, [POINTER(svm_model)])
fillprototype(libsvm.svm_get_svr_probability, c_double, [POINTER(svm_model)])

# prediction
fillprototype(libsvm.svm_predict_values, c_double, [POINTER(svm_model), POINTER(svm_node), POINTER(c_double)])
fillprototype(libsvm.svm_predict, c_double, [POINTER(svm_model), POINTER(svm_node)])
fillprototype(libsvm.svm_predict_probability, c_double, [POINTER(svm_model), POINTER(svm_node), POINTER(c_double)])

# memory management
fillprototype(libsvm.svm_free_model_content, None, [POINTER(svm_model)])
fillprototype(libsvm.svm_free_and_destroy_model, None, [POINTER(POINTER(svm_model))])
fillprototype(libsvm.svm_destroy_param, None, [POINTER(svm_parameter)])

# validation / logging
fillprototype(libsvm.svm_check_parameter, c_char_p, [POINTER(svm_problem), POINTER(svm_parameter)])
fillprototype(libsvm.svm_check_probability_model, c_int, [POINTER(svm_model)])
fillprototype(libsvm.svm_set_print_string_function, None, [PRINT_STRING_FUN])
|
263
libsvm-3.36/python/libsvm/svmutil.py
Normal file
263
libsvm-3.36/python/libsvm/svmutil.py
Normal file
@@ -0,0 +1,263 @@
|
||||
import os, sys
|
||||
from .svm import *
|
||||
from .svm import __all__ as svm_all
|
||||
from .commonutil import *
|
||||
from .commonutil import __all__ as common_all
|
||||
|
||||
try:
|
||||
import numpy as np
|
||||
import scipy
|
||||
from scipy import sparse
|
||||
except:
|
||||
scipy = None
|
||||
|
||||
|
||||
if sys.version_info[0] < 3:
	# Python 2 compatibility: iterator-based range/zip, and a helper that
	# converts Python strings to the byte strings ctypes char* expects.
	range = xrange
	from itertools import izip as zip
	_cstr = lambda s: s.encode("utf-8") if isinstance(s,unicode) else str(s)
else:
	# Python 3: str must be encoded to bytes for ctypes char* arguments.
	_cstr = lambda s: bytes(s, "utf-8")
|
||||
|
||||
__all__ = ['svm_load_model', 'svm_predict', 'svm_save_model', 'svm_train'] + svm_all + common_all
|
||||
|
||||
|
||||
def svm_load_model(model_file_name):
	"""
	svm_load_model(model_file_name) -> model

	Load a LIBSVM model from model_file_name and return.
	"""
	m = libsvm.svm_load_model(_cstr(model_file_name))
	if not m:
		print("can't open model file %s" % model_file_name)
		return None
	return toPyModel(m)
|
||||
|
||||
def svm_save_model(model_file_name, model):
	"""
	svm_save_model(model_file_name, model) -> None

	Save a LIBSVM model to the file model_file_name.
	"""
	# Delegates to the C library; the file name is encoded to bytes first.
	libsvm.svm_save_model(_cstr(model_file_name), model)
|
||||
|
||||
def svm_train(arg1, arg2=None, arg3=None):
	"""
	svm_train(y, x [, options]) -> model | ACC | MSE

	y: a list/tuple/ndarray of l true labels (type must be int/double).

	x: 1. a list/tuple of l training instances. Feature vector of
	      each training instance is a list/tuple or dictionary.

	   2. an l * n numpy ndarray or scipy spmatrix (n: number of features).

	svm_train(prob [, options]) -> model | ACC | MSE
	svm_train(prob, param) -> model | ACC | MSE

	Train an SVM model from data (y, x) or an svm_problem prob using
	'options' or an svm_parameter param.
	If '-v' is specified in 'options' (i.e., cross validation)
	either accuracy (ACC) or mean-squared error (MSE) is returned.
	options:
	    -s svm_type : set type of SVM (default 0)
	        0 -- C-SVC (multi-class classification)
	        1 -- nu-SVC (multi-class classification)
	        2 -- one-class SVM
	        3 -- epsilon-SVR (regression)
	        4 -- nu-SVR (regression)
	    -t kernel_type : set type of kernel function (default 2)
	        0 -- linear: u'*v
	        1 -- polynomial: (gamma*u'*v + coef0)^degree
	        2 -- radial basis function: exp(-gamma*|u-v|^2)
	        3 -- sigmoid: tanh(gamma*u'*v + coef0)
	        4 -- precomputed kernel (kernel values in training_set_file)
	    -d degree : set degree in kernel function (default 3)
	    -g gamma : set gamma in kernel function (default 1/num_features)
	    -r coef0 : set coef0 in kernel function (default 0)
	    -c cost : set the parameter C of C-SVC, epsilon-SVR, and nu-SVR (default 1)
	    -n nu : set the parameter nu of nu-SVC, one-class SVM, and nu-SVR (default 0.5)
	    -p epsilon : set the epsilon in loss function of epsilon-SVR (default 0.1)
	    -m cachesize : set cache memory size in MB (default 100)
	    -e epsilon : set tolerance of termination criterion (default 0.001)
	    -h shrinking : whether to use the shrinking heuristics, 0 or 1 (default 1)
	    -b probability_estimates : whether to train a model for probability estimates, 0 or 1 (default 0)
	    -wi weight : set the parameter C of class i to weight*C, for C-SVC (default 1)
	    -v n: n-fold cross validation mode
	    -q : quiet mode (no outputs)
	"""
	prob, param = None, None
	# Accept either (y, x, options) or (svm_problem, options/param).
	if isinstance(arg1, (list, tuple)) or (scipy and isinstance(arg1, np.ndarray)):
		assert isinstance(arg2, (list, tuple)) or (scipy and isinstance(arg2, (np.ndarray, sparse.spmatrix)))
		y, x, options = arg1, arg2, arg3
		param = svm_parameter(options)
		prob = svm_problem(y, x, isKernel=(param.kernel_type == kernel_names.PRECOMPUTED))
	elif isinstance(arg1, svm_problem):
		prob = arg1
		if isinstance(arg2, svm_parameter):
			param = arg2
		else:
			param = svm_parameter(arg2)
	if prob == None or param == None:
		raise TypeError("Wrong types for the arguments")

	# Precomputed-kernel rows must start with 0:serial_number, where the
	# serial number lies in [1, prob.n].
	if param.kernel_type == kernel_names.PRECOMPUTED:
		for i in range(prob.l):
			xi = prob.x[i]
			idx, val = xi[0].index, xi[0].value
			if idx != 0:
				raise ValueError('Wrong input format: first column must be 0:sample_serial_number')
			if val <= 0 or val > prob.n:
				raise ValueError('Wrong input format: sample_serial_number out of range')

	# gamma == 0 means the documented default of 1/num_features.
	if param.gamma == 0 and prob.n > 0:
		param.gamma = 1.0 / prob.n
	libsvm.svm_set_print_string_function(param.print_func)
	err_msg = libsvm.svm_check_parameter(prob, param)
	if err_msg:
		raise ValueError('Error: %s' % err_msg)

	if param.cross_validation:
		l, nr_fold = prob.l, param.nr_fold
		target = (c_double * l)()
		libsvm.svm_cross_validation(prob, param, nr_fold, target)
		ACC, MSE, SCC = evaluations(prob.y[:l], target[:l])
		if param.svm_type in [svm_forms.EPSILON_SVR, svm_forms.NU_SVR]:
			print("Cross Validation Mean squared error = %g" % MSE)
			print("Cross Validation Squared correlation coefficient = %g" % SCC)
			return MSE
		else:
			print("Cross Validation Accuracy = %g%%" % ACC)
			return ACC
	else:
		m = libsvm.svm_train(prob, param)
		m = toPyModel(m)

		# If prob is destroyed, data including SVs pointed by m can remain.
		m.x_space = prob.x_space
		return m
|
||||
|
||||
def svm_predict(y, x, m, options=""):
	"""
	svm_predict(y, x, m [, options]) -> (p_labels, p_acc, p_vals)

	y: a list/tuple/ndarray of l true labels (type must be int/double).
	   It is used for calculating the accuracy. Use [] if true labels are
	   unavailable.

	x: 1. a list/tuple of l training instances. Feature vector of
	      each training instance is a list/tuple or dictionary.

	   2. an l * n numpy ndarray or scipy spmatrix (n: number of features).

	Predict data (y, x) with the SVM model m.
	options:
	    -b probability_estimates: whether to predict probability estimates,
	        0 or 1 (default 0).
	    -q : quiet mode (no outputs).

	The return tuple contains
	p_labels: a list of predicted labels
	p_acc: a tuple including accuracy (for classification), mean-squared
	       error, and squared correlation coefficient (for regression).
	p_vals: a list of decision values or probability estimates (if '-b 1'
	        is specified). If k is the number of classes, for decision values,
	        each element includes results of predicting k(k-1)/2 binary-class
	        SVMs. For probabilities, each element contains k values indicating
	        the probability that the testing instance is in each class.
	        Note that the order of classes here is the same as 'model.label'
	        field in the model structure.
	"""

	def info(s):
		# Default reporter; rebound to print_null when '-q' is given.
		print(s)

	# Normalize x to one of the supported container types.
	if scipy and isinstance(x, np.ndarray):
		x = np.ascontiguousarray(x)  # enforce row-major
	elif sparse and isinstance(x, sparse.spmatrix):
		x = x.tocsr()
	elif not isinstance(x, (list, tuple)):
		raise TypeError("type of x: {0} is not supported!".format(type(x)))

	if (not isinstance(y, (list, tuple))) and (not (scipy and isinstance(y, np.ndarray))):
		raise TypeError("type of y: {0} is not supported!".format(type(y)))

	# Parse the option string.
	predict_probability = 0
	argv = options.split()
	i = 0
	while i < len(argv):
		if argv[i] == '-b':
			i += 1
			predict_probability = int(argv[i])
		elif argv[i] == '-q':
			info = print_null
		else:
			raise ValueError("Wrong options")
		i += 1

	svm_type = m.get_svm_type()
	is_prob_model = m.is_probability_model()
	nr_class = m.get_nr_class()
	pred_labels = []
	pred_values = []

	if scipy and isinstance(x, sparse.spmatrix):
		nr_instance = x.shape[0]
	else:
		nr_instance = len(x)

	if predict_probability:
		if not is_prob_model:
			# BUGFIX: corrected misspelling "probabiliy" in the message.
			raise ValueError("Model does not support probability estimates")

		if svm_type in [svm_forms.NU_SVR, svm_forms.EPSILON_SVR]:
			info("Prob. model for test data: target value = predicted value + z,\n"
			"z: Laplace distribution e^(-|z|/sigma)/(2sigma),sigma=%g" % m.get_svr_probability());
			nr_class = 0  # regression: no per-class probability outputs

		prob_estimates = (c_double * nr_class)()
		for i in range(nr_instance):
			if scipy and isinstance(x, sparse.spmatrix):
				# One CSR row without densifying the matrix.
				indslice = slice(x.indptr[i], x.indptr[i+1])
				xi, idx = gen_svm_nodearray((x.indices[indslice], x.data[indslice]), isKernel=(m.param.kernel_type == kernel_names.PRECOMPUTED))
			else:
				xi, idx = gen_svm_nodearray(x[i], isKernel=(m.param.kernel_type == kernel_names.PRECOMPUTED))
			label = libsvm.svm_predict_probability(m, xi, prob_estimates)
			values = prob_estimates[:nr_class]
			pred_labels += [label]
			pred_values += [values]
	else:
		if is_prob_model:
			# BUGFIX: corrected misspelling "predicton" (message now matches
			# the one printed by svm-predict.c).
			info("Model supports probability estimates, but disabled in prediction.")
		# One decision value for one-class/regression models; k(k-1)/2
		# pairwise values for classification.
		# BUGFIX: this list previously contained NU_SVC instead of NU_SVR,
		# so multi-class nu-SVC got a 1-element dec_values buffer while the
		# C side writes k(k-1)/2 decision values (out-of-bounds write for
		# nr_class >= 3).
		if svm_type in [svm_forms.ONE_CLASS, svm_forms.EPSILON_SVR, svm_forms.NU_SVR]:
			nr_classifier = 1
		else:
			nr_classifier = nr_class*(nr_class-1)//2
		dec_values = (c_double * nr_classifier)()
		for i in range(nr_instance):
			if scipy and isinstance(x, sparse.spmatrix):
				indslice = slice(x.indptr[i], x.indptr[i+1])
				xi, idx = gen_svm_nodearray((x.indices[indslice], x.data[indslice]), isKernel=(m.param.kernel_type == kernel_names.PRECOMPUTED))
			else:
				xi, idx = gen_svm_nodearray(x[i], isKernel=(m.param.kernel_type == kernel_names.PRECOMPUTED))
			label = libsvm.svm_predict_values(m, xi, dec_values)
			if(nr_class == 1):
				values = [1]
			else:
				values = dec_values[:nr_classifier]
			pred_labels += [label]
			pred_values += [values]

	if len(y) == 0:
		y = [0] * nr_instance  # dummy labels so evaluations() can run
	ACC, MSE, SCC = evaluations(y, pred_labels)

	if svm_type in [svm_forms.EPSILON_SVR, svm_forms.NU_SVR]:
		info("Mean squared error = %g (regression)" % MSE)
		info("Squared correlation coefficient = %g (regression)" % SCC)
	else:
		info("Accuracy = %g%% (%d/%d) (classification)" % (ACC, int(round(nr_instance*ACC/100)), nr_instance))

	return pred_labels, (ACC, MSE, SCC), pred_values
|
123
libsvm-3.36/python/setup.py
Normal file
123
libsvm-3.36/python/setup.py
Normal file
@@ -0,0 +1,123 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
import sys, os
|
||||
from os import path
|
||||
from shutil import copyfile, rmtree
|
||||
from glob import glob
|
||||
|
||||
from setuptools import setup, Extension
|
||||
from distutils.command.clean import clean as clean_cmd
|
||||
|
||||
# a technique to build a shared library on windows
|
||||
from distutils.command.build_ext import build_ext
|
||||
|
||||
# Windows builds would otherwise fail looking for an init symbol to export;
# returning no symbols lets us build a plain shared library instead of a
# Python extension module.
build_ext.get_export_symbols = lambda x, y: []


PACKAGE_DIR = "libsvm"
PACKAGE_NAME = "libsvm-official"
VERSION = "3.36.0"
# staging directory the C++ sources are copied into (see create_cpp_source)
cpp_dir = "cpp-source"
# should be consistent with dynamic_lib_name in libsvm/svm.py
dynamic_lib_name = "clib"

# sources to be included to build the shared library
source_codes = [
    "svm.cpp",
]
headers = [
    "svm.h",
    "svm.def",
]

# license parameters
license_source = path.join("..", "COPYRIGHT")
license_file = "LICENSE"
license_name = "BSD-3-Clause"

# keyword arguments passed to the Extension() constructed in main()
kwargs_for_extension = {
    "sources": [path.join(cpp_dir, f) for f in source_codes],
    "depends": [path.join(cpp_dir, f) for f in headers],
    "include_dirs": [cpp_dir],
    "language": "c++",
}

# see ../Makefile.win and enable openmp
if sys.platform == "win32":
    kwargs_for_extension.update(
        {
            "define_macros": [("_WIN64", ""), ("_CRT_SECURE_NO_DEPRECATE", "")],
            # export the symbols listed in svm.def from the DLL
            "extra_link_args": [r"-DEF:{}\svm.def".format(cpp_dir)],
            "extra_compile_args": ["/openmp"],
        }
    )
else:
    kwargs_for_extension.update(
        {
            "extra_compile_args": ["-fopenmp"],
            "extra_link_args": ["-fopenmp"],
        }
    )
|
||||
|
||||
|
||||
def create_cpp_source():
    # Copy the C/C++ sources and headers from the package root into
    # `cpp_dir` so the extension build is self-contained.
    for fname in source_codes + headers:
        origin = path.join("..", fname)
        destination = path.join(cpp_dir, fname)
        # make sure the destination directory hierarchy exists first
        os.makedirs(path.dirname(destination), exist_ok=True)
        copyfile(origin, destination)
|
||||
|
||||
|
||||
class CleanCommand(clean_cmd):
    """`setup.py clean` extended to also remove generated artifacts:
    build/dist trees, MANIFEST, the staged C++ sources, egg-info, the
    copied LICENSE, the built shared library, and Python bytecode."""

    def run(self):
        clean_cmd.run(self)
        to_be_removed = ["build/", "dist/", "MANIFEST", cpp_dir, "{}.egg-info".format(PACKAGE_NAME), license_file]
        # the compiled shared library (clib.*) inside the package directory
        to_be_removed += glob("./{}/{}.*".format(PACKAGE_DIR, dynamic_lib_name))
        for root, dirs, files in os.walk(os.curdir, topdown=False):
            if "__pycache__" in dirs:
                to_be_removed.append(path.join(root, "__pycache__"))
            # NOTE(review): these are bare filenames (no `root` joined), so
            # .pyc files outside the cwd fail the isfile/isdir checks below
            # and are skipped — confirm whether that is intended; the
            # __pycache__ removal above covers the common case.
            to_be_removed += [f for f in files if f.endswith(".pyc")]

        for f in to_be_removed:
            print("remove {}".format(f))
            if f == ".":
                continue
            elif path.isfile(f):
                os.remove(f)
            elif path.isdir(f):
                rmtree(f)
|
||||
|
||||
def main():
    """Stage sources/license on first run, then invoke setuptools.setup()."""
    # The C++ sources and LICENSE are copied once; later runs reuse them.
    if not path.exists(cpp_dir):
        create_cpp_source()

    if not path.exists(license_file):
        copyfile(license_source, license_file)

    with open("README") as f:
        long_description = f.read()

    setup(
        name=PACKAGE_NAME,
        packages=[PACKAGE_DIR],
        version=VERSION,
        description="Python binding of LIBSVM",
        long_description=long_description,
        long_description_content_type="text/plain",
        author="ML group @ National Taiwan University",
        author_email="cjlin@csie.ntu.edu.tw",
        url="https://www.csie.ntu.edu.tw/~cjlin/libsvm",
        license=license_name,
        install_requires=["scipy"],
        ext_modules=[
            Extension(
                "{}.{}".format(PACKAGE_DIR, dynamic_lib_name), **kwargs_for_extension
            )
        ],
        cmdclass={"clean": CleanCommand},
    )


main()
|
||||
|
BIN
libsvm-3.36/svm-predict
Executable file
BIN
libsvm-3.36/svm-predict
Executable file
Binary file not shown.
245
libsvm-3.36/svm-predict.c
Normal file
245
libsvm-3.36/svm-predict.c
Normal file
@@ -0,0 +1,245 @@
|
||||
#include <stdio.h>
|
||||
#include <ctype.h>
|
||||
#include <stdlib.h>
|
||||
#include <string.h>
|
||||
#include <errno.h>
|
||||
#include "svm.h"
|
||||
|
||||
/* Sink for messages when -q (quiet mode) is selected. */
int print_null(const char *s,...) {return 0;}

/* Message reporter; points at printf by default, print_null under -q. */
static int (*info)(const char *fmt,...) = &printf;

struct svm_node *x;          /* reusable buffer for one instance's features */
int max_nr_attr = 64;        /* current capacity of x; doubled on demand */

struct svm_model* model;     /* model loaded in main() */
int predict_probability=0;   /* set by the -b option */

static char *line = NULL;    /* growable input-line buffer for readline() */
static int max_line_len;
|
||||
|
||||
/* Read one full line from `input` into the global `line` buffer, doubling
 * max_line_len (and reallocating) until the trailing '\n' fits.
 * Returns `line`, or NULL when nothing could be read (EOF/error). */
static char* readline(FILE *input)
{
	int len;

	if(fgets(line,max_line_len,input) == NULL)
		return NULL;

	while(strrchr(line,'\n') == NULL)
	{
		max_line_len *= 2;
		line = (char *) realloc(line,max_line_len);
		len = (int) strlen(line);
		if(fgets(line+len,max_line_len-len,input) == NULL)
			break;	/* EOF before a newline: return what we have */
	}
	return line;
}
|
||||
|
||||
/* Report a malformed input line (1-based line number) and terminate. */
void exit_input_error(int line_num)
{
	fprintf(stderr,"Wrong input format at line %d\n", line_num);
	exit(1);
}
|
||||
|
||||
/* Read instances from `input` (LIBSVM text format: "label idx:val ..."),
 * predict each with the global `model`, and write predictions to `output`.
 * When predict_probability is set, writes per-class probabilities after a
 * header line.  Finally prints MSE/SCC (regression) or accuracy
 * (classification) via info(). */
void predict(FILE *input, FILE *output)
{
	int correct = 0;
	int total = 0;
	double error = 0;
	/* running sums for the squared correlation coefficient */
	double sump = 0, sumt = 0, sumpp = 0, sumtt = 0, sumpt = 0;

	int svm_type=svm_get_svm_type(model);
	int nr_class=svm_get_nr_class(model);
	double *prob_estimates=NULL;
	int j;

	if(predict_probability)
	{
		if (svm_type==NU_SVR || svm_type==EPSILON_SVR)
			info("Prob. model for test data: target value = predicted value + z,\nz: Laplace distribution e^(-|z|/sigma)/(2sigma),sigma=%g\n",svm_get_svr_probability(model));
		else if(svm_type==ONE_CLASS)
		{
			// nr_class = 2 for ONE_CLASS
			prob_estimates = (double *) malloc(nr_class*sizeof(double));
			fprintf(output,"label normal outlier\n");
		}
		else
		{
			/* classification: header lists class labels in model order */
			int *labels=(int *) malloc(nr_class*sizeof(int));
			svm_get_labels(model,labels);
			prob_estimates = (double *) malloc(nr_class*sizeof(double));
			fprintf(output,"labels");
			for(j=0;j<nr_class;j++)
				fprintf(output," %d",labels[j]);
			fprintf(output,"\n");
			free(labels);
		}
	}

	max_line_len = 1024;
	line = (char *)malloc(max_line_len*sizeof(char));
	while(readline(input) != NULL)
	{
		int i = 0;
		double target_label, predict_label;
		char *idx, *val, *label, *endptr;
		int inst_max_index = -1; // strtol gives 0 if wrong format, and precomputed kernel has <index> start from 0

		label = strtok(line," \t\n");
		if(label == NULL) // empty line
			exit_input_error(total+1);

		target_label = strtod(label,&endptr);
		if(endptr == label || *endptr != '\0')
			exit_input_error(total+1);

		/* parse "index:value" pairs into the global x[] buffer */
		while(1)
		{
			if(i>=max_nr_attr-1)	// need one more for index = -1
			{
				max_nr_attr *= 2;
				x = (struct svm_node *) realloc(x,max_nr_attr*sizeof(struct svm_node));
			}

			idx = strtok(NULL,":");
			val = strtok(NULL," \t");

			if(val == NULL)
				break;
			errno = 0;
			/* indices must be strictly increasing within an instance */
			x[i].index = (int) strtol(idx,&endptr,10);
			if(endptr == idx || errno != 0 || *endptr != '\0' || x[i].index <= inst_max_index)
				exit_input_error(total+1);
			else
				inst_max_index = x[i].index;

			errno = 0;
			x[i].value = strtod(val,&endptr);
			if(endptr == val || errno != 0 || (*endptr != '\0' && !isspace(*endptr)))
				exit_input_error(total+1);

			++i;
		}
		x[i].index = -1;	/* sentinel terminating the node array */

		if (predict_probability && (svm_type==C_SVC || svm_type==NU_SVC || svm_type==ONE_CLASS))
		{
			predict_label = svm_predict_probability(model,x,prob_estimates);
			fprintf(output,"%g",predict_label);
			for(j=0;j<nr_class;j++)
				fprintf(output," %g",prob_estimates[j]);
			fprintf(output,"\n");
		}
		else
		{
			predict_label = svm_predict(model,x);
			fprintf(output,"%.17g\n",predict_label);
		}

		if(predict_label == target_label)
			++correct;
		error += (predict_label-target_label)*(predict_label-target_label);
		sump += predict_label;
		sumt += target_label;
		sumpp += predict_label*predict_label;
		sumtt += target_label*target_label;
		sumpt += predict_label*target_label;
		++total;
	}
	if (svm_type==NU_SVR || svm_type==EPSILON_SVR)
	{
		info("Mean squared error = %g (regression)\n",error/total);
		info("Squared correlation coefficient = %g (regression)\n",
			((total*sumpt-sump*sumt)*(total*sumpt-sump*sumt))/
			((total*sumpp-sump*sump)*(total*sumtt-sumt*sumt))
			);
	}
	else
		info("Accuracy = %g%% (%d/%d) (classification)\n",
			(double)correct/total*100,correct,total);
	if(predict_probability)
		free(prob_estimates);
}
|
||||
|
||||
/* Print usage for svm-predict and terminate with exit status 1. */
void exit_with_help()
{
	printf(
	"Usage: svm-predict [options] test_file model_file output_file\n"
	"options:\n"
	"-b probability_estimates: whether to predict probability estimates, 0 or 1 (default 0); for one-class SVM only 0 is supported\n"
	"-q : quiet mode (no outputs)\n"
	);
	exit(1);
}
|
||||
|
||||
/* Entry point: parse -b/-q options, open the test, model and output files,
 * run predict(), and release all resources.  Exits non-zero on any usage
 * or I/O error. */
int main(int argc, char **argv)
{
	FILE *input, *output;
	int i;
	// parse options
	for(i=1;i<argc;i++)
	{
		if(argv[i][0] != '-') break;
		++i;	/* assume "-X value"; adjusted below for flag-only -q */
		switch(argv[i-1][1])
		{
			case 'b':
				predict_probability = atoi(argv[i]);
				break;
			case 'q':
				info = &print_null;
				i--;	/* -q takes no argument */
				break;
			default:
				fprintf(stderr,"Unknown option: -%c\n", argv[i-1][1]);
				exit_with_help();
		}
	}

	/* need exactly three positional arguments after the options */
	if(i>=argc-2)
		exit_with_help();

	input = fopen(argv[i],"r");
	if(input == NULL)
	{
		fprintf(stderr,"can't open input file %s\n",argv[i]);
		exit(1);
	}

	output = fopen(argv[i+2],"w");
	if(output == NULL)
	{
		fprintf(stderr,"can't open output file %s\n",argv[i+2]);
		exit(1);
	}

	if((model=svm_load_model(argv[i+1]))==0)
	{
		fprintf(stderr,"can't open model file %s\n",argv[i+1]);
		exit(1);
	}

	x = (struct svm_node *) malloc(max_nr_attr*sizeof(struct svm_node));
	if(predict_probability)
	{
		if(svm_check_probability_model(model)==0)
		{
			fprintf(stderr,"Model does not support probabiliy estimates\n");
			exit(1);
		}
	}
	else
	{
		if(svm_check_probability_model(model)!=0)
			info("Model supports probability estimates, but disabled in prediction.\n");
	}

	predict(input,output);
	svm_free_and_destroy_model(&model);
	free(x);
	free(line);
	fclose(input);
	fclose(output);
	return 0;
}
|
BIN
libsvm-3.36/svm-scale
Executable file
BIN
libsvm-3.36/svm-scale
Executable file
Binary file not shown.
405
libsvm-3.36/svm-scale.c
Normal file
405
libsvm-3.36/svm-scale.c
Normal file
@@ -0,0 +1,405 @@
|
||||
#include <float.h>
|
||||
#include <stdio.h>
|
||||
#include <stdlib.h>
|
||||
#include <ctype.h>
|
||||
#include <string.h>
|
||||
|
||||
/* Print usage for svm-scale and terminate with exit status 1. */
void exit_with_help()
{
	printf(
	"Usage: svm-scale [options] data_filename\n"
	"options:\n"
	"-l lower : x scaling lower limit (default -1)\n"
	"-u upper : x scaling upper limit (default +1)\n"
	"-y y_lower y_upper : y scaling limits (default: no y scaling)\n"
	"-s save_filename : save scaling parameters to save_filename\n"
	"-r restore_filename : restore scaling parameters from restore_filename\n"
	);
	exit(1);
}
|
||||
|
||||
char *line = NULL;                 /* growable input-line buffer (readline) */
int max_line_len = 1024;           /* current capacity of line */
double lower=-1.0,upper=1.0,y_lower,y_upper;  /* target scaling ranges */
int y_scaling = 0;                 /* 1 when -y (or a 'y' section in -r) given */
double *feature_max;               /* per-feature observed max, index 1..max_index */
double *feature_min;               /* per-feature observed min */
double y_max = -DBL_MAX;           /* observed label range */
double y_min = DBL_MAX;
int max_index;                     /* largest feature index seen */
int min_index;                     /* smallest feature index seen (should be >= 1) */
long int num_nonzeros = 0;         /* nonzeros in the input */
long int new_num_nonzeros = 0;     /* nonzeros emitted after scaling */

#define max(x,y) (((x)>(y))?(x):(y))
#define min(x,y) (((x)<(y))?(x):(y))

void output_target(double value);
void output(int index, double value);
char* readline(FILE *input);
int clean_up(FILE *fp_restore, FILE *fp, const char *msg);
|
||||
|
||||
/* Entry point.  Three passes over the data file:
 *   pass 1   - find the max/min feature index (and, with -r, pre-scan the
 *              restore file for its max index);
 *   pass 2   - record per-feature min/max and the label range;
 *   pass 2.5 - save (-s) or restore (-r) the scaling parameters;
 *   pass 3   - print the scaled data to stdout.
 * Returns 0 on success, or clean_up()'s -1 on a parse error. */
int main(int argc,char **argv)
{
	int i,index;
	FILE *fp, *fp_restore = NULL;
	char *save_filename = NULL;
	char *restore_filename = NULL;

	/* parse "-X value" option pairs (-y consumes two values) */
	for(i=1;i<argc;i++)
	{
		if(argv[i][0] != '-') break;
		++i;
		switch(argv[i-1][1])
		{
			case 'l': lower = atof(argv[i]); break;
			case 'u': upper = atof(argv[i]); break;
			case 'y':
				y_lower = atof(argv[i]);
				++i;
				y_upper = atof(argv[i]);
				y_scaling = 1;
				break;
			case 's': save_filename = argv[i]; break;
			case 'r': restore_filename = argv[i]; break;
			default:
				fprintf(stderr,"unknown option\n");
				exit_with_help();
		}
	}

	if(!(upper > lower) || (y_scaling && !(y_upper > y_lower)))
	{
		fprintf(stderr,"inconsistent lower/upper specification\n");
		exit(1);
	}

	if(restore_filename && save_filename)
	{
		fprintf(stderr,"cannot use -r and -s simultaneously\n");
		exit(1);
	}

	if(argc != i+1)
		exit_with_help();

	fp=fopen(argv[i],"r");

	if(fp==NULL)
	{
		fprintf(stderr,"can't open file %s\n", argv[i]);
		exit(1);
	}

	line = (char *) malloc(max_line_len*sizeof(char));

/* advance p past the leading label token */
#define SKIP_TARGET\
	while(isspace(*p)) ++p;\
	while(!isspace(*p)) ++p;

/* advance p past one "index:value" element */
#define SKIP_ELEMENT\
	while(*p!=':') ++p;\
	++p;\
	while(isspace(*p)) ++p;\
	while(*p && !isspace(*p)) ++p;

	/* assumption: min index of attributes is 1 */
	/* pass 1: find out max index of attributes */
	max_index = 0;
	min_index = 1;

	if(restore_filename)
	{
		int idx, c;

		fp_restore = fopen(restore_filename,"r");
		if(fp_restore==NULL)
		{
			fprintf(stderr,"can't open file %s\n", restore_filename);
			exit(1);
		}

		/* skip the optional 'y' section (3 lines) and the 'x' header
		 * (2 lines), then scan the per-feature entries for max_index */
		c = fgetc(fp_restore);
		if(c == 'y')
		{
			readline(fp_restore);
			readline(fp_restore);
			readline(fp_restore);
		}
		readline(fp_restore);
		readline(fp_restore);

		while(fscanf(fp_restore,"%d %*f %*f\n",&idx) == 1)
			max_index = max(idx,max_index);
		rewind(fp_restore);
	}

	while(readline(fp)!=NULL)
	{
		char *p=line;

		SKIP_TARGET

		while(sscanf(p,"%d:%*f",&index)==1)
		{
			max_index = max(max_index, index);
			min_index = min(min_index, index);
			SKIP_ELEMENT
			num_nonzeros++;
		}
	}

	if(min_index < 1)
		fprintf(stderr,
			"WARNING: minimal feature index is %d, but indices should start from 1\n", min_index);

	rewind(fp);

	feature_max = (double *)malloc((max_index+1)* sizeof(double));
	feature_min = (double *)malloc((max_index+1)* sizeof(double));

	if(feature_max == NULL || feature_min == NULL)
	{
		fprintf(stderr,"can't allocate enough memory\n");
		exit(1);
	}

	for(i=0;i<=max_index;i++)
	{
		feature_max[i]=-DBL_MAX;
		feature_min[i]=DBL_MAX;
	}

	/* pass 2: find out min/max value */
	while(readline(fp)!=NULL)
	{
		char *p=line;
		int next_index=1;
		double target;
		double value;

		if (sscanf(p,"%lf",&target) != 1)
			return clean_up(fp_restore, fp, "ERROR: failed to read labels\n");
		y_max = max(y_max,target);
		y_min = min(y_min,target);

		SKIP_TARGET

		while(sscanf(p,"%d:%lf",&index,&value)==2)
		{
			/* features omitted from the sparse line have value 0 */
			for(i=next_index;i<index;i++)
			{
				feature_max[i]=max(feature_max[i],0);
				feature_min[i]=min(feature_min[i],0);
			}

			feature_max[index]=max(feature_max[index],value);
			feature_min[index]=min(feature_min[index],value);

			SKIP_ELEMENT
			next_index=index+1;
		}

		for(i=next_index;i<=max_index;i++)
		{
			feature_max[i]=max(feature_max[i],0);
			feature_min[i]=min(feature_min[i],0);
		}
	}

	rewind(fp);

	/* pass 2.5: save/restore feature_min/feature_max */

	if(restore_filename)
	{
		/* fp_restore rewinded in finding max_index */
		int idx, c;
		double fmin, fmax;
		int next_index = 1;

		if((c = fgetc(fp_restore)) == 'y')
		{
			if(fscanf(fp_restore, "%lf %lf\n", &y_lower, &y_upper) != 2 ||
			   fscanf(fp_restore, "%lf %lf\n", &y_min, &y_max) != 2)
				return clean_up(fp_restore, fp, "ERROR: failed to read scaling parameters\n");
			y_scaling = 1;
		}
		else
			ungetc(c, fp_restore);

		if (fgetc(fp_restore) == 'x')
		{
			if(fscanf(fp_restore, "%lf %lf\n", &lower, &upper) != 2)
				return clean_up(fp_restore, fp, "ERROR: failed to read scaling parameters\n");
			while(fscanf(fp_restore,"%d %lf %lf\n",&idx,&fmin,&fmax)==3)
			{
				/* indices present in the data but missing from the
				 * restore file are forced to scale to 0 */
				for(i = next_index;i<idx;i++)
					if(feature_min[i] != feature_max[i])
					{
						fprintf(stderr,
							"WARNING: feature index %d appeared in file %s was not seen in the scaling factor file %s. The feature is scaled to 0.\n",
							i, argv[argc-1], restore_filename);
						feature_min[i] = 0;
						feature_max[i] = 0;
					}

				feature_min[idx] = fmin;
				feature_max[idx] = fmax;

				next_index = idx + 1;
			}

			for(i=next_index;i<=max_index;i++)
				if(feature_min[i] != feature_max[i])
				{
					fprintf(stderr,
						"WARNING: feature index %d appeared in file %s was not seen in the scaling factor file %s. The feature is scaled to 0.\n",
						i, argv[argc-1], restore_filename);
					feature_min[i] = 0;
					feature_max[i] = 0;
				}
		}
		fclose(fp_restore);
	}

	if(save_filename)
	{
		FILE *fp_save = fopen(save_filename,"w");
		if(fp_save==NULL)
		{
			fprintf(stderr,"can't open file %s\n", save_filename);
			exit(1);
		}
		if(y_scaling)
		{
			fprintf(fp_save, "y\n");
			fprintf(fp_save, "%.17g %.17g\n", y_lower, y_upper);
			fprintf(fp_save, "%.17g %.17g\n", y_min, y_max);
		}
		fprintf(fp_save, "x\n");
		fprintf(fp_save, "%.17g %.17g\n", lower, upper);
		for(i=1;i<=max_index;i++)
		{
			/* single-valued features are skipped (nothing to scale) */
			if(feature_min[i]!=feature_max[i])
				fprintf(fp_save,"%d %.17g %.17g\n",i,feature_min[i],feature_max[i]);
		}

		if(min_index < 1)
			fprintf(stderr,
				"WARNING: scaling factors with indices smaller than 1 are not stored to the file %s.\n", save_filename);

		fclose(fp_save);
	}

	/* pass 3: scale */
	while(readline(fp)!=NULL)
	{
		char *p=line;
		int next_index=1;
		double target;
		double value;

		if (sscanf(p,"%lf",&target) != 1)
			return clean_up(NULL, fp, "ERROR: failed to read labels\n");
		output_target(target);

		SKIP_TARGET

		while(sscanf(p,"%d:%lf",&index,&value)==2)
		{
			for(i=next_index;i<index;i++)
				output(i,0);

			output(index,value);

			SKIP_ELEMENT
			next_index=index+1;
		}

		for(i=next_index;i<=max_index;i++)
			output(i,0);

		printf("\n");
	}

	if (new_num_nonzeros > num_nonzeros)
		fprintf(stderr,
			"WARNING: original #nonzeros %ld\n"
			" > new #nonzeros %ld\n"
			"If feature values are non-negative and sparse, use -l 0 rather than the default -l -1\n",
			num_nonzeros, new_num_nonzeros);

	free(line);
	free(feature_max);
	free(feature_min);
	fclose(fp);
	return 0;
}
|
||||
|
||||
/* Read one full line from `input` into the global `line` buffer, doubling
 * max_line_len (and reallocating) until the trailing '\n' fits.
 * Returns `line`, or NULL when nothing could be read (EOF/error). */
char* readline(FILE *input)
{
	int len;

	if(fgets(line,max_line_len,input) == NULL)
		return NULL;

	while(strrchr(line,'\n') == NULL)
	{
		max_line_len *= 2;
		line = (char *) realloc(line, max_line_len);
		len = (int) strlen(line);
		if(fgets(line+len,max_line_len-len,input) == NULL)
			break;	/* EOF before a newline: return what we have */
	}
	return line;
}
|
||||
|
||||
/* Print the (optionally y-scaled) label followed by a space.  The extremes
 * map exactly onto y_lower/y_upper; everything else is linearly
 * interpolated between them. */
void output_target(double value)
{
	if(y_scaling)
	{
		if(value == y_min)
			value = y_lower;
		else if(value == y_max)
			value = y_upper;
		else value = y_lower + (y_upper-y_lower) *
			     (value - y_min)/(y_max-y_min);
	}
	printf("%.17g ",value);
}
|
||||
|
||||
/* Print one scaled "index:value" pair (sparse format: values that scale
 * to exactly 0 are suppressed, and new_num_nonzeros counts what is kept).
 * Min/max map exactly onto lower/upper; other values are interpolated. */
void output(int index, double value)
{
	/* skip single-valued attribute */
	if(feature_max[index] == feature_min[index])
		return;

	if(value == feature_min[index])
		value = lower;
	else if(value == feature_max[index])
		value = upper;
	else
		value = lower + (upper-lower) *
			(value-feature_min[index])/
			(feature_max[index]-feature_min[index]);

	if(value != 0)
	{
		printf("%d:%g ",index, value);
		new_num_nonzeros++;
	}
}
|
||||
|
||||
/* Print `msg` to stderr, free the global buffers, close the open files
 * (fp_restore may be NULL), and return -1 for main() to propagate. */
int clean_up(FILE *fp_restore, FILE *fp, const char* msg)
{
	fprintf(stderr, "%s", msg);
	free(line);
	free(feature_max);
	free(feature_min);
	fclose(fp);
	if (fp_restore)
		fclose(fp_restore);
	return -1;
}
|
||||
|
20
libsvm-3.36/svm-toy/qt/Makefile
Normal file
20
libsvm-3.36/svm-toy/qt/Makefile
Normal file
@@ -0,0 +1,20 @@
|
||||
# use ``export QT_SELECT=qt5'' in a command window for using qt5
# may need to adjust the path of header files
# BUGFIX: was "CXX? = g++", which defines a variable literally named
# "CXX?" and never sets CXX; "?=" assigns only when CXX is unset so the
# user can still override it from the environment or command line.
CXX ?= g++
#INCLUDE = /usr/include/x86_64-linux-gnu/qt5
INCLUDE = /usr/include/qt5
CFLAGS = -Wall -O3 -I$(INCLUDE) -I$(INCLUDE)/QtWidgets -I$(INCLUDE)/QtGui -I$(INCLUDE)/QtCore -fPIC -std=c++11
LIB = -lQt5Widgets -lQt5Gui -lQt5Core
# NOTE(review): this path names the Qt4 moc although the include/link
# flags above target Qt5 — confirm; on most systems the Qt5 moc is
# /usr/bin/moc (with QT_SELECT=qt5) or moc-qt5.
MOC = /usr/bin/moc-qt4

svm-toy: svm-toy.cpp svm-toy.moc ../../svm.o
	$(CXX) $(CFLAGS) svm-toy.cpp ../../svm.o -o svm-toy $(LIB)

# svm-toy.cpp contains Q_OBJECT and #includes its own moc output
svm-toy.moc: svm-toy.cpp
	$(MOC) svm-toy.cpp -o svm-toy.moc

../../svm.o: ../../svm.cpp ../../svm.h
	$(MAKE) -C ../.. svm.o

.PHONY: clean
clean:
	rm -f *~ svm-toy svm-toy.moc ../../svm.o
|
437
libsvm-3.36/svm-toy/qt/svm-toy.cpp
Normal file
437
libsvm-3.36/svm-toy/qt/svm-toy.cpp
Normal file
@@ -0,0 +1,437 @@
|
||||
#include <QtWidgets>
|
||||
#include <stdio.h>
|
||||
#include <stdlib.h>
|
||||
#include <string.h>
|
||||
#include <ctype.h>
|
||||
#include <list>
|
||||
#include "../../svm.h"
|
||||
using namespace std;
|
||||
|
||||
#define DEFAULT_PARAM "-t 2 -c 100"	// default svm_train options (RBF, C=100)
#define XLEN 500	// canvas width in pixels
#define YLEN 500	// canvas height in pixels

// Drawing palette; colors[0] is used as the background/erase pen, and the
// painter elsewhere indexes this array by point class / predicted value.
QRgb colors[] =
{
	qRgb(0,0,0),
	qRgb(0,120,120),
	qRgb(120,120,0),
	qRgb(120,0,120),
	qRgb(0,200,200),
	qRgb(200,200,0),
	qRgb(200,0,200)
};
|
||||
|
||||
class SvmToyWindow : public QWidget
|
||||
{
|
||||
|
||||
Q_OBJECT
|
||||
|
||||
public:
|
||||
SvmToyWindow();
|
||||
~SvmToyWindow();
|
||||
protected:
|
||||
virtual void mousePressEvent( QMouseEvent* );
|
||||
virtual void paintEvent( QPaintEvent* );
|
||||
|
||||
private:
|
||||
QPixmap buffer;
|
||||
QPixmap icon1;
|
||||
QPixmap icon2;
|
||||
QPixmap icon3;
|
||||
QPushButton button_change_icon;
|
||||
QPushButton button_run;
|
||||
QPushButton button_clear;
|
||||
QPushButton button_save;
|
||||
QPushButton button_load;
|
||||
QLineEdit input_line;
|
||||
QPainter buffer_painter;
|
||||
struct point {
|
||||
double x, y;
|
||||
signed char value;
|
||||
};
|
||||
list<point> point_list;
|
||||
int current_value;
|
||||
const QPixmap& choose_icon(int v)
|
||||
{
|
||||
if(v==1) return icon1;
|
||||
else if(v==2) return icon2;
|
||||
else return icon3;
|
||||
}
|
||||
void clear_all()
|
||||
{
|
||||
point_list.clear();
|
||||
buffer.fill(Qt::black);
|
||||
repaint();
|
||||
}
|
||||
void draw_point(const point& p)
|
||||
{
|
||||
const QPixmap& icon = choose_icon(p.value);
|
||||
buffer_painter.drawPixmap((int)(p.x*XLEN),(int)(p.y*YLEN),icon);
|
||||
repaint();
|
||||
}
|
||||
void draw_all_points()
|
||||
{
|
||||
for(list<point>::iterator p = point_list.begin(); p != point_list.end();p++)
|
||||
draw_point(*p);
|
||||
}
|
||||
private slots:
|
||||
void button_change_icon_clicked()
|
||||
{
|
||||
++current_value;
|
||||
if(current_value > 3) current_value = 1;
|
||||
button_change_icon.setIcon(choose_icon(current_value));
|
||||
}
|
||||
void button_run_clicked()
|
||||
{
|
||||
// guard
|
||||
if(point_list.empty()) return;
|
||||
|
||||
svm_parameter param;
|
||||
int i,j;
|
||||
|
||||
// default values
|
||||
param.svm_type = C_SVC;
|
||||
param.kernel_type = RBF;
|
||||
param.degree = 3;
|
||||
param.gamma = 0;
|
||||
param.coef0 = 0;
|
||||
param.nu = 0.5;
|
||||
param.cache_size = 100;
|
||||
param.C = 1;
|
||||
param.eps = 1e-3;
|
||||
param.p = 0.1;
|
||||
param.shrinking = 1;
|
||||
param.probability = 0;
|
||||
param.nr_weight = 0;
|
||||
param.weight_label = NULL;
|
||||
param.weight = NULL;
|
||||
|
||||
// parse options
|
||||
const char *p = input_line.text().toLatin1().constData();
|
||||
|
||||
while (1) {
|
||||
while (*p && *p != '-')
|
||||
p++;
|
||||
|
||||
if (*p == '\0')
|
||||
break;
|
||||
|
||||
p++;
|
||||
switch (*p++) {
|
||||
case 's':
|
||||
param.svm_type = atoi(p);
|
||||
break;
|
||||
case 't':
|
||||
param.kernel_type = atoi(p);
|
||||
break;
|
||||
case 'd':
|
||||
param.degree = atoi(p);
|
||||
break;
|
||||
case 'g':
|
||||
param.gamma = atof(p);
|
||||
break;
|
||||
case 'r':
|
||||
param.coef0 = atof(p);
|
||||
break;
|
||||
case 'n':
|
||||
param.nu = atof(p);
|
||||
break;
|
||||
case 'm':
|
||||
param.cache_size = atof(p);
|
||||
break;
|
||||
case 'c':
|
||||
param.C = atof(p);
|
||||
break;
|
||||
case 'e':
|
||||
param.eps = atof(p);
|
||||
break;
|
||||
case 'p':
|
||||
param.p = atof(p);
|
||||
break;
|
||||
case 'h':
|
||||
param.shrinking = atoi(p);
|
||||
break;
|
||||
case 'b':
|
||||
param.probability = atoi(p);
|
||||
break;
|
||||
case 'w':
|
||||
++param.nr_weight;
|
||||
param.weight_label = (int *)realloc(param.weight_label,sizeof(int)*param.nr_weight);
|
||||
param.weight = (double *)realloc(param.weight,sizeof(double)*param.nr_weight);
|
||||
param.weight_label[param.nr_weight-1] = atoi(p);
|
||||
while(*p && !isspace(*p)) ++p;
|
||||
param.weight[param.nr_weight-1] = atof(p);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
// build problem
|
||||
svm_problem prob;
|
||||
|
||||
prob.l = point_list.size();
|
||||
prob.y = new double[prob.l];
|
||||
|
||||
if(param.kernel_type == PRECOMPUTED)
|
||||
{
|
||||
}
|
||||
else if(param.svm_type == EPSILON_SVR ||
|
||||
param.svm_type == NU_SVR)
|
||||
{
|
||||
if(param.gamma == 0) param.gamma = 1;
|
||||
svm_node *x_space = new svm_node[2 * prob.l];
|
||||
prob.x = new svm_node *[prob.l];
|
||||
|
||||
i = 0;
|
||||
for (list <point>::iterator q = point_list.begin(); q != point_list.end(); q++, i++)
|
||||
{
|
||||
x_space[2 * i].index = 1;
|
||||
x_space[2 * i].value = q->x;
|
||||
x_space[2 * i + 1].index = -1;
|
||||
prob.x[i] = &x_space[2 * i];
|
||||
prob.y[i] = q->y;
|
||||
}
|
||||
|
||||
// build model & classify
|
||||
svm_model *model = svm_train(&prob, ¶m);
|
||||
svm_node x[2];
|
||||
x[0].index = 1;
|
||||
x[1].index = -1;
|
||||
int *j = new int[XLEN];
|
||||
|
||||
for (i = 0; i < XLEN; i++)
|
||||
{
|
||||
x[0].value = (double) i / XLEN;
|
||||
j[i] = (int)(YLEN*svm_predict(model, x));
|
||||
}
|
||||
|
||||
buffer_painter.setPen(colors[0]);
|
||||
buffer_painter.drawLine(0,0,0,YLEN-1);
|
||||
|
||||
int p = (int)(param.p * YLEN);
|
||||
for(i = 1; i < XLEN; i++)
|
||||
{
|
||||
buffer_painter.setPen(colors[0]);
|
||||
buffer_painter.drawLine(i,0,i,YLEN-1);
|
||||
|
||||
buffer_painter.setPen(colors[5]);
|
||||
buffer_painter.drawLine(i-1,j[i-1],i,j[i]);
|
||||
|
||||
if(param.svm_type == EPSILON_SVR)
|
||||
{
|
||||
buffer_painter.setPen(colors[2]);
|
||||
buffer_painter.drawLine(i-1,j[i-1]+p,i,j[i]+p);
|
||||
|
||||
buffer_painter.setPen(colors[2]);
|
||||
buffer_painter.drawLine(i-1,j[i-1]-p,i,j[i]-p);
|
||||
}
|
||||
}
|
||||
|
||||
svm_free_and_destroy_model(&model);
|
||||
delete[] j;
|
||||
delete[] x_space;
|
||||
delete[] prob.x;
|
||||
delete[] prob.y;
|
||||
}
|
||||
else
|
||||
{
|
||||
if(param.gamma == 0) param.gamma = 0.5;
|
||||
svm_node *x_space = new svm_node[3 * prob.l];
|
||||
prob.x = new svm_node *[prob.l];
|
||||
|
||||
i = 0;
|
||||
for (list <point>::iterator q = point_list.begin(); q != point_list.end(); q++, i++)
|
||||
{
|
||||
x_space[3 * i].index = 1;
|
||||
x_space[3 * i].value = q->x;
|
||||
x_space[3 * i + 1].index = 2;
|
||||
x_space[3 * i + 1].value = q->y;
|
||||
x_space[3 * i + 2].index = -1;
|
||||
prob.x[i] = &x_space[3 * i];
|
||||
prob.y[i] = q->value;
|
||||
}
|
||||
|
||||
// build model & classify
|
||||
svm_model *model = svm_train(&prob, ¶m);
|
||||
svm_node x[3];
|
||||
x[0].index = 1;
|
||||
x[1].index = 2;
|
||||
x[2].index = -1;
|
||||
|
||||
for (i = 0; i < XLEN; i++)
|
||||
for (j = 0; j < YLEN ; j++) {
|
||||
x[0].value = (double) i / XLEN;
|
||||
x[1].value = (double) j / YLEN;
|
||||
double d = svm_predict(model, x);
|
||||
if (param.svm_type == ONE_CLASS && d<0) d=2;
|
||||
buffer_painter.setPen(colors[(int)d]);
|
||||
buffer_painter.drawPoint(i,j);
|
||||
}
|
||||
|
||||
svm_free_and_destroy_model(&model);
|
||||
delete[] x_space;
|
||||
delete[] prob.x;
|
||||
delete[] prob.y;
|
||||
}
|
||||
free(param.weight_label);
|
||||
free(param.weight);
|
||||
draw_all_points();
|
||||
}
|
||||
// Slot for the "Clear" button: remove all points and blank the canvas.
void button_clear_clicked()
{
	clear_all();
}
|
||||
void button_save_clicked()
|
||||
{
|
||||
QString filename = QFileDialog::getSaveFileName();
|
||||
if(!filename.isNull())
|
||||
{
|
||||
FILE *fp = fopen(filename.toLatin1().constData(),"w");
|
||||
|
||||
const char *p = input_line.text().toLatin1().constData();
|
||||
const char* svm_type_str = strstr(p, "-s ");
|
||||
int svm_type = C_SVC;
|
||||
if(svm_type_str != NULL)
|
||||
sscanf(svm_type_str, "-s %d", &svm_type);
|
||||
|
||||
if(fp)
|
||||
{
|
||||
if(svm_type == EPSILON_SVR || svm_type == NU_SVR)
|
||||
{
|
||||
for(list<point>::iterator p = point_list.begin(); p != point_list.end();p++)
|
||||
fprintf(fp,"%f 1:%f\n", p->y, p->x);
|
||||
}
|
||||
else
|
||||
{
|
||||
for(list<point>::iterator p = point_list.begin(); p != point_list.end();p++)
|
||||
fprintf(fp,"%d 1:%f 2:%f\n", p->value, p->x, p->y);
|
||||
}
|
||||
fclose(fp);
|
||||
}
|
||||
}
|
||||
}
|
||||
// Slot for the "Load" button: read points back from a libsvm-format file.
// Classification lines look like "label 1:x 2:y"; regression lines look
// like "y 1:x" (loaded regression points get the current pen value).
void button_load_clicked()
{
	QString filename = QFileDialog::getOpenFileName();
	if(!filename.isNull())
	{
		FILE *fp = fopen(filename.toLatin1().constData(),"r");
		if(fp)
		{
			clear_all();
			char buf[4096];
			while(fgets(buf,sizeof(buf),fp))
			{
				int v;
				double x,y;
				// "%*d:" skips the feature index ("1:" / "2:") before each value
				if(sscanf(buf,"%d%*d:%lf%*d:%lf",&v,&x,&y)==3)
				{
					point p = {x,y,v};
					point_list.push_back(p);
				}
				else if(sscanf(buf,"%lf%*d:%lf",&y,&x)==2)
				{
					point p = {x,y,current_value};
					point_list.push_back(p);
				}
				else
					break;	// stop at the first malformed line
			}
			fclose(fp);
			draw_all_points();
		}
	}

}
|
||||
};
|
||||
|
||||
#include "svm-toy.moc"
|
||||
|
||||
// Construct the main window: set up the off-screen drawing buffer, wire
// the buttons/edit line to their slots, build the three 4x4 color icons
// for the "Change" button, and lay out the control bar below the canvas.
SvmToyWindow::SvmToyWindow()
:button_change_icon(this)
,button_run("Run",this)
,button_clear("Clear",this)
,button_save("Save",this)
,button_load("Load",this)
,input_line(this)
,current_value(1)
{
	buffer = QPixmap(XLEN,YLEN);
	buffer.fill(Qt::black);

	// buffer_painter stays open for the window's lifetime; it is
	// closed in the destructor.
	buffer_painter.begin(&buffer);

	QObject::connect(&button_change_icon, SIGNAL(clicked()), this,
			SLOT(button_change_icon_clicked()));
	QObject::connect(&button_run, SIGNAL(clicked()), this,
			SLOT(button_run_clicked()));
	QObject::connect(&button_clear, SIGNAL(clicked()), this,
			SLOT(button_clear_clicked()));
	QObject::connect(&button_save, SIGNAL(clicked()), this,
			SLOT(button_save_clicked()));
	QObject::connect(&button_load, SIGNAL(clicked()), this,
			SLOT(button_load_clicked()));
	// pressing Return in the parameter line is the same as clicking "Run"
	QObject::connect(&input_line, SIGNAL(returnPressed()), this,
			SLOT(button_run_clicked()));

	// don't blank the window before repainting
	setAttribute(Qt::WA_NoBackground);

	icon1 = QPixmap(4,4);
	icon2 = QPixmap(4,4);
	icon3 = QPixmap(4,4);


	// fill each icon with the brush color of its class (colors[4..6])
	QPainter painter;
	painter.begin(&icon1);
	painter.fillRect(0,0,4,4,QBrush(colors[4]));
	painter.end();

	painter.begin(&icon2);
	painter.fillRect(0,0,4,4,QBrush(colors[5]));
	painter.end();

	painter.begin(&icon3);
	painter.fillRect(0,0,4,4,QBrush(colors[6]));
	painter.end();

	// control bar: a 25px strip directly below the YLEN-tall canvas
	button_change_icon.setGeometry( 0, YLEN, 50, 25 );
	button_run.setGeometry( 50, YLEN, 50, 25 );
	button_clear.setGeometry( 100, YLEN, 50, 25 );
	button_save.setGeometry( 150, YLEN, 50, 25);
	button_load.setGeometry( 200, YLEN, 50, 25);
	input_line.setGeometry( 250, YLEN, 250, 25);

	input_line.setText(DEFAULT_PARAM);
	button_change_icon.setIcon(icon1);
}
|
||||
|
||||
// Stop painting on the off-screen buffer before the pixmap is destroyed.
SvmToyWindow::~SvmToyWindow()
{
	buffer_painter.end();
}
|
||||
|
||||
// Mouse handler: record a new point with coordinates normalized to [0,1)
// and the current pen value, then draw it immediately.
void SvmToyWindow::mousePressEvent( QMouseEvent* event )
{
	point p = {(double)event->x()/XLEN, (double)event->y()/YLEN, current_value};
	point_list.push_back(p);
	draw_point(p);
}
|
||||
|
||||
// Repaint handler: blit the off-screen buffer onto the window.
void SvmToyWindow::paintEvent( QPaintEvent* )
{
	// copy the image from the buffer pixmap to the window
	QPainter p(this);
	p.drawPixmap(0, 0, buffer);
}
|
||||
|
||||
int main( int argc, char* argv[] )
|
||||
{
|
||||
QApplication myapp( argc, argv );
|
||||
|
||||
SvmToyWindow* mywidget = new SvmToyWindow();
|
||||
mywidget->setGeometry( 100, 100, XLEN, YLEN+25 );
|
||||
|
||||
mywidget->show();
|
||||
return myapp.exec();
|
||||
}
|
102
libsvm-3.36/svm-toy/qt/svm-toy.moc
Normal file
102
libsvm-3.36/svm-toy/qt/svm-toy.moc
Normal file
@@ -0,0 +1,102 @@
|
||||
/****************************************************************************
|
||||
** Meta object code from reading C++ file 'svm-toy.cpp'
|
||||
**
|
||||
** Created by: The Qt Meta Object Compiler version 63 (Qt 4.8.7)
|
||||
**
|
||||
** WARNING! All changes made in this file will be lost!
|
||||
*****************************************************************************/
|
||||
|
||||
#if !defined(Q_MOC_OUTPUT_REVISION)
|
||||
#error "The header file 'svm-toy.cpp' doesn't include <QObject>."
|
||||
#elif Q_MOC_OUTPUT_REVISION != 63
|
||||
#error "This file was generated using the moc from 4.8.7. It"
|
||||
#error "cannot be used with the include files from this version of Qt."
|
||||
#error "(The moc has changed too much.)"
|
||||
#endif
|
||||
|
||||
QT_BEGIN_MOC_NAMESPACE
|
||||
static const uint qt_meta_data_SvmToyWindow[] = {
|
||||
|
||||
// content:
|
||||
6, // revision
|
||||
0, // classname
|
||||
0, 0, // classinfo
|
||||
5, 14, // methods
|
||||
0, 0, // properties
|
||||
0, 0, // enums/sets
|
||||
0, 0, // constructors
|
||||
0, // flags
|
||||
0, // signalCount
|
||||
|
||||
// slots: signature, parameters, type, tag, flags
|
||||
14, 13, 13, 13, 0x08,
|
||||
43, 13, 13, 13, 0x08,
|
||||
64, 13, 13, 13, 0x08,
|
||||
87, 13, 13, 13, 0x08,
|
||||
109, 13, 13, 13, 0x08,
|
||||
|
||||
0 // eod
|
||||
};
|
||||
|
||||
static const char qt_meta_stringdata_SvmToyWindow[] = {
|
||||
"SvmToyWindow\0\0button_change_icon_clicked()\0"
|
||||
"button_run_clicked()\0button_clear_clicked()\0"
|
||||
"button_save_clicked()\0button_load_clicked()\0"
|
||||
};
|
||||
|
||||
void SvmToyWindow::qt_static_metacall(QObject *_o, QMetaObject::Call _c, int _id, void **_a)
|
||||
{
|
||||
if (_c == QMetaObject::InvokeMetaMethod) {
|
||||
Q_ASSERT(staticMetaObject.cast(_o));
|
||||
SvmToyWindow *_t = static_cast<SvmToyWindow *>(_o);
|
||||
switch (_id) {
|
||||
case 0: _t->button_change_icon_clicked(); break;
|
||||
case 1: _t->button_run_clicked(); break;
|
||||
case 2: _t->button_clear_clicked(); break;
|
||||
case 3: _t->button_save_clicked(); break;
|
||||
case 4: _t->button_load_clicked(); break;
|
||||
default: ;
|
||||
}
|
||||
}
|
||||
Q_UNUSED(_a);
|
||||
}
|
||||
|
||||
const QMetaObjectExtraData SvmToyWindow::staticMetaObjectExtraData = {
|
||||
0, qt_static_metacall
|
||||
};
|
||||
|
||||
const QMetaObject SvmToyWindow::staticMetaObject = {
|
||||
{ &QWidget::staticMetaObject, qt_meta_stringdata_SvmToyWindow,
|
||||
qt_meta_data_SvmToyWindow, &staticMetaObjectExtraData }
|
||||
};
|
||||
|
||||
#ifdef Q_NO_DATA_RELOCATION
|
||||
const QMetaObject &SvmToyWindow::getStaticMetaObject() { return staticMetaObject; }
|
||||
#endif //Q_NO_DATA_RELOCATION
|
||||
|
||||
const QMetaObject *SvmToyWindow::metaObject() const
|
||||
{
|
||||
return QObject::d_ptr->metaObject ? QObject::d_ptr->metaObject : &staticMetaObject;
|
||||
}
|
||||
|
||||
void *SvmToyWindow::qt_metacast(const char *_clname)
|
||||
{
|
||||
if (!_clname) return 0;
|
||||
if (!strcmp(_clname, qt_meta_stringdata_SvmToyWindow))
|
||||
return static_cast<void*>(const_cast< SvmToyWindow*>(this));
|
||||
return QWidget::qt_metacast(_clname);
|
||||
}
|
||||
|
||||
int SvmToyWindow::qt_metacall(QMetaObject::Call _c, int _id, void **_a)
|
||||
{
|
||||
_id = QWidget::qt_metacall(_c, _id, _a);
|
||||
if (_id < 0)
|
||||
return _id;
|
||||
if (_c == QMetaObject::InvokeMetaMethod) {
|
||||
if (_id < 5)
|
||||
qt_static_metacall(this, _c, _id, _a);
|
||||
_id -= 5;
|
||||
}
|
||||
return _id;
|
||||
}
|
||||
QT_END_MOC_NAMESPACE
|
482
libsvm-3.36/svm-toy/windows/svm-toy.cpp
Normal file
482
libsvm-3.36/svm-toy/windows/svm-toy.cpp
Normal file
@@ -0,0 +1,482 @@
|
||||
#include <windows.h>
|
||||
#include <windowsx.h>
|
||||
#include <stdio.h>
|
||||
#include <string.h>
|
||||
#include <ctype.h>
|
||||
#include <list>
|
||||
#include "../../svm.h"
|
||||
using namespace std;
|
||||
|
||||
#define DEFAULT_PARAM "-t 2 -c 100"
|
||||
#define XLEN 500
|
||||
#define YLEN 500
|
||||
#define DrawLine(dc,x1,y1,x2,y2,c) \
|
||||
do { \
|
||||
HPEN hpen = CreatePen(PS_SOLID,0,c); \
|
||||
HPEN horig = SelectPen(dc,hpen); \
|
||||
MoveToEx(dc,x1,y1,NULL); \
|
||||
LineTo(dc,x2,y2); \
|
||||
SelectPen(dc,horig); \
|
||||
DeletePen(hpen); \
|
||||
} while(0)
|
||||
|
||||
using namespace std;
|
||||
|
||||
COLORREF colors[] =
|
||||
{
|
||||
RGB(0,0,0),
|
||||
RGB(0,120,120),
|
||||
RGB(120,120,0),
|
||||
RGB(120,0,120),
|
||||
RGB(0,200,200),
|
||||
RGB(200,200,0),
|
||||
RGB(200,0,200)
|
||||
};
|
||||
|
||||
HWND main_window;
|
||||
HBITMAP buffer;
|
||||
HDC window_dc;
|
||||
HDC buffer_dc;
|
||||
HBRUSH brush1, brush2, brush3;
|
||||
HWND edit;
|
||||
|
||||
enum {
|
||||
ID_BUTTON_CHANGE, ID_BUTTON_RUN, ID_BUTTON_CLEAR,
|
||||
ID_BUTTON_LOAD, ID_BUTTON_SAVE, ID_EDIT
|
||||
};
|
||||
|
||||
// One training sample: canvas coordinates normalized to [0,1),
// plus the class value assigned when it was clicked (1..3).
struct point {
	double x, y;
	signed char value;
};
|
||||
|
||||
list<point> point_list;
|
||||
int current_value = 1;
|
||||
|
||||
LRESULT CALLBACK WndProc(HWND, UINT, WPARAM, LPARAM);
|
||||
|
||||
// Win32 entry point: register the window class, create the main window
// with its button/edit controls, set up the off-screen bitmap used for
// double buffering, then run the message loop until WM_QUIT.
int WINAPI WinMain(HINSTANCE hInstance, HINSTANCE hPrevInstance,
		   PSTR szCmdLine, int iCmdShow)
{
	static char szAppName[] = "SvmToy";
	MSG msg;
	WNDCLASSEX wndclass;

	wndclass.cbSize = sizeof(wndclass);
	wndclass.style = CS_HREDRAW | CS_VREDRAW;
	wndclass.lpfnWndProc = WndProc;
	wndclass.cbClsExtra = 0;
	wndclass.cbWndExtra = 0;
	wndclass.hInstance = hInstance;
	wndclass.hIcon = LoadIcon(NULL, IDI_APPLICATION);
	wndclass.hCursor = LoadCursor(NULL, IDC_ARROW);
	wndclass.hbrBackground = (HBRUSH) GetStockObject(BLACK_BRUSH);
	wndclass.lpszMenuName = NULL;
	wndclass.lpszClassName = szAppName;
	wndclass.hIconSm = LoadIcon(NULL, IDI_APPLICATION);

	RegisterClassEx(&wndclass);

	main_window = CreateWindow(szAppName, // window class name
			"SVM Toy", // window caption
			WS_OVERLAPPEDWINDOW,// window style
			CW_USEDEFAULT, // initial x position
			CW_USEDEFAULT, // initial y position
			XLEN, // initial x size
			YLEN+52, // initial y size
			NULL, // parent window handle
			NULL, // window menu handle
			hInstance, // program instance handle
			NULL); // creation parameters

	ShowWindow(main_window, iCmdShow);
	UpdateWindow(main_window);

	// control bar: a 25px strip of buttons directly below the canvas
	CreateWindow("button", "Change", WS_CHILD | WS_VISIBLE | BS_PUSHBUTTON,
			0, YLEN, 50, 25, main_window, (HMENU) ID_BUTTON_CHANGE, hInstance, NULL);
	CreateWindow("button", "Run", WS_CHILD | WS_VISIBLE | BS_PUSHBUTTON,
			50, YLEN, 50, 25, main_window, (HMENU) ID_BUTTON_RUN, hInstance, NULL);
	CreateWindow("button", "Clear", WS_CHILD | WS_VISIBLE | BS_PUSHBUTTON,
			100, YLEN, 50, 25, main_window, (HMENU) ID_BUTTON_CLEAR, hInstance, NULL);
	CreateWindow("button", "Save", WS_CHILD | WS_VISIBLE | BS_PUSHBUTTON,
			150, YLEN, 50, 25, main_window, (HMENU) ID_BUTTON_SAVE, hInstance, NULL);
	CreateWindow("button", "Load", WS_CHILD | WS_VISIBLE | BS_PUSHBUTTON,
			200, YLEN, 50, 25, main_window, (HMENU) ID_BUTTON_LOAD, hInstance, NULL);

	edit = CreateWindow("edit", NULL, WS_CHILD | WS_VISIBLE,
			250, YLEN, 250, 25, main_window, (HMENU) ID_EDIT, hInstance, NULL);

	Edit_SetText(edit,DEFAULT_PARAM);

	// brushes for the three point classes (colors[4..6])
	brush1 = CreateSolidBrush(colors[4]);
	brush2 = CreateSolidBrush(colors[5]);
	brush3 = CreateSolidBrush(colors[6]);

	// off-screen bitmap used for double buffering; start all-black
	window_dc = GetDC(main_window);
	buffer = CreateCompatibleBitmap(window_dc, XLEN, YLEN);
	buffer_dc = CreateCompatibleDC(window_dc);
	SelectObject(buffer_dc, buffer);
	PatBlt(buffer_dc, 0, 0, XLEN, YLEN, BLACKNESS);

	while (GetMessage(&msg, NULL, 0, 0)) {
		TranslateMessage(&msg);
		DispatchMessage(&msg);
	}
	return msg.wParam;
}
|
||||
|
||||
// Show the common Save/Open file dialog and fill `filename` (capacity
// `len`) with the chosen path.  `save` selects the Save variant.
// Returns nonzero if the user picked a file, 0 on cancel or error.
int getfilename( HWND hWnd , char *filename, int len, int save)
{
	OPENFILENAME ofn;
	memset(&ofn,0,sizeof(ofn));
	filename[0]='\0';

	ofn.lStructSize = sizeof(OPENFILENAME);
	ofn.hwndOwner = hWnd;
	ofn.lpstrFile = filename;
	ofn.nMaxFile = len;
	ofn.Flags = 0;

	if(save)
		return GetSaveFileName(&ofn);
	return GetOpenFileName(&ofn);
}
|
||||
|
||||
// Remove every stored point, blank the off-screen buffer, and invalidate
// the whole window so the blank buffer gets blitted on the next WM_PAINT.
void clear_all()
{
	point_list.clear();
	PatBlt(buffer_dc, 0, 0, XLEN, YLEN, BLACKNESS);
	InvalidateRect(main_window, 0, 0);
}
|
||||
|
||||
// Map a point's class value to its drawing brush:
// 1 -> brush1, 2 -> brush2, anything else -> brush3.
HBRUSH choose_brush(int v)
{
	switch(v)
	{
		case 1:  return brush1;
		case 2:  return brush2;
		default: return brush3;
	}
}
|
||||
|
||||
void draw_point(const point & p)
|
||||
{
|
||||
RECT rect;
|
||||
rect.left = int(p.x*XLEN);
|
||||
rect.top = int(p.y*YLEN);
|
||||
rect.right = int(p.x*XLEN) + 3;
|
||||
rect.bottom = int(p.y*YLEN) + 3;
|
||||
FillRect(window_dc, &rect, choose_brush(p.value));
|
||||
FillRect(buffer_dc, &rect, choose_brush(p.value));
|
||||
}
|
||||
|
||||
void draw_all_points()
|
||||
{
|
||||
for(list<point>::iterator p = point_list.begin(); p != point_list.end(); p++)
|
||||
draw_point(*p);
|
||||
}
|
||||
|
||||
// "Run" button handler: parse svm-train style options from the edit box,
// build an svm_problem from the clicked points, train a model, and paint
// either the per-pixel decision regions (classification/one-class) or
// the regression curve (SVR) into both the window and the back buffer.
void button_run_clicked()
{
	// guard
	if(point_list.empty()) return;

	svm_parameter param;
	int i,j;

	// default values (mirroring svm-train's defaults)
	param.svm_type = C_SVC;
	param.kernel_type = RBF;
	param.degree = 3;
	param.gamma = 0;
	param.coef0 = 0;
	param.nu = 0.5;
	param.cache_size = 100;
	param.C = 1;
	param.eps = 1e-3;
	param.p = 0.1;
	param.shrinking = 1;
	param.probability = 0;
	param.nr_weight = 0;
	param.weight_label = NULL;
	param.weight = NULL;

	// parse options typed into the edit control (e.g. "-t 2 -c 100")
	char str[1024];
	Edit_GetLine(edit, 0, str, sizeof(str));
	const char *p = str;

	while (1) {
		// advance to the next '-' option marker
		while (*p && *p != '-')
			p++;

		if (*p == '\0')
			break;

		p++;
		switch (*p++) {
			case 's':
				param.svm_type = atoi(p);
				break;
			case 't':
				param.kernel_type = atoi(p);
				break;
			case 'd':
				param.degree = atoi(p);
				break;
			case 'g':
				param.gamma = atof(p);
				break;
			case 'r':
				param.coef0 = atof(p);
				break;
			case 'n':
				param.nu = atof(p);
				break;
			case 'm':
				param.cache_size = atof(p);
				break;
			case 'c':
				param.C = atof(p);
				break;
			case 'e':
				param.eps = atof(p);
				break;
			case 'p':
				param.p = atof(p);
				break;
			case 'h':
				param.shrinking = atoi(p);
				break;
			case 'b':
				param.probability = atoi(p);
				break;
			case 'w': // -wi weight: grow the per-class weight arrays
				++param.nr_weight;
				param.weight_label = (int *)realloc(param.weight_label,sizeof(int)*param.nr_weight);
				param.weight = (double *)realloc(param.weight,sizeof(double)*param.nr_weight);
				param.weight_label[param.nr_weight-1] = atoi(p);
				while(*p && !isspace(*p)) ++p;	// skip the label to reach the weight
				param.weight[param.nr_weight-1] = atof(p);
				break;
		}
	}

	// build problem
	svm_problem prob;

	prob.l = point_list.size();
	prob.y = new double[prob.l];

	if(param.kernel_type == PRECOMPUTED)
	{
		// precomputed kernels cannot be built from raw 2-D clicks; do nothing
	}
	else if(param.svm_type == EPSILON_SVR ||
		param.svm_type == NU_SVR)
	{
		// regression: one feature (x), target value is y
		if(param.gamma == 0) param.gamma = 1;
		svm_node *x_space = new svm_node[2 * prob.l];
		prob.x = new svm_node *[prob.l];

		i = 0;
		for (list<point>::iterator q = point_list.begin(); q != point_list.end(); q++, i++)
		{
			x_space[2 * i].index = 1;
			x_space[2 * i].value = q->x;
			x_space[2 * i + 1].index = -1;	// sparse-vector terminator
			prob.x[i] = &x_space[2 * i];
			prob.y[i] = q->y;
		}

		// build model & classify
		svm_model *model = svm_train(&prob, &param);
		svm_node x[2];
		x[0].index = 1;
		x[1].index = -1;
		int *j = new int[XLEN];	// predicted y (in pixels) for each column

		for (i = 0; i < XLEN; i++)
		{
			x[0].value = (double) i / XLEN;
			j[i] = (int)(YLEN*svm_predict(model, x));
		}

		DrawLine(buffer_dc,0,0,0,YLEN,colors[0]);
		DrawLine(window_dc,0,0,0,YLEN,colors[0]);

		int p = (int)(param.p * YLEN);	// epsilon-tube half-width in pixels
		for(int i=1; i < XLEN; i++)
		{
			// erase this column, then draw the regression curve segment
			DrawLine(buffer_dc,i,0,i,YLEN,colors[0]);
			DrawLine(window_dc,i,0,i,YLEN,colors[0]);

			DrawLine(buffer_dc,i-1,j[i-1],i,j[i],colors[5]);
			DrawLine(window_dc,i-1,j[i-1],i,j[i],colors[5]);

			if(param.svm_type == EPSILON_SVR)
			{
				// epsilon-SVR: also draw the +/- epsilon tube
				DrawLine(buffer_dc,i-1,j[i-1]+p,i,j[i]+p,colors[2]);
				DrawLine(window_dc,i-1,j[i-1]+p,i,j[i]+p,colors[2]);

				DrawLine(buffer_dc,i-1,j[i-1]-p,i,j[i]-p,colors[2]);
				DrawLine(window_dc,i-1,j[i-1]-p,i,j[i]-p,colors[2]);
			}
		}

		svm_free_and_destroy_model(&model);
		delete[] j;
		delete[] x_space;
		delete[] prob.x;
		delete[] prob.y;
	}
	else
	{
		// classification / one-class: two features (x, y), target is the label
		if(param.gamma == 0) param.gamma = 0.5;
		svm_node *x_space = new svm_node[3 * prob.l];
		prob.x = new svm_node *[prob.l];

		i = 0;
		for (list<point>::iterator q = point_list.begin(); q != point_list.end(); q++, i++)
		{
			x_space[3 * i].index = 1;
			x_space[3 * i].value = q->x;
			x_space[3 * i + 1].index = 2;
			x_space[3 * i + 1].value = q->y;
			x_space[3 * i + 2].index = -1;	// sparse-vector terminator
			prob.x[i] = &x_space[3 * i];
			prob.y[i] = q->value;
		}

		// build model & classify every pixel of the canvas
		svm_model *model = svm_train(&prob, &param);
		svm_node x[3];
		x[0].index = 1;
		x[1].index = 2;
		x[2].index = -1;

		for (i = 0; i < XLEN; i++)
			for (j = 0; j < YLEN; j++) {
				x[0].value = (double) i / XLEN;
				x[1].value = (double) j / YLEN;
				double d = svm_predict(model, x);
				// one-class predicts +/-1; map the outlier side to color 2
				if (param.svm_type == ONE_CLASS && d<0) d=2;
				SetPixel(window_dc, i, j, colors[(int)d]);
				SetPixel(buffer_dc, i, j, colors[(int)d]);
			}

		svm_free_and_destroy_model(&model);
		delete[] x_space;
		delete[] prob.x;
		delete[] prob.y;
	}
	free(param.weight_label);
	free(param.weight);
	draw_all_points();	// re-draw the points on top of the painted regions
}
|
||||
|
||||
// Main window procedure: clicks add points, WM_PAINT blits the back
// buffer, and WM_COMMAND dispatches the five control-bar buttons
// (Change pen / Run / Clear / Save / Load).
LRESULT CALLBACK WndProc(HWND hwnd, UINT iMsg, WPARAM wParam, LPARAM lParam)
{
	HDC hdc;
	PAINTSTRUCT ps;

	switch (iMsg) {
	case WM_LBUTTONDOWN:
		{
			// record a new point (coordinates normalized to [0,1))
			// with the current pen value, and draw it immediately
			int x = LOWORD(lParam);
			int y = HIWORD(lParam);
			point p = {(double)x/XLEN, (double)y/YLEN, current_value};
			point_list.push_back(p);
			draw_point(p);
		}
		return 0;
	case WM_PAINT:
		{
			// blit the off-screen buffer onto the window
			hdc = BeginPaint(hwnd, &ps);
			BitBlt(hdc, 0, 0, XLEN, YLEN, buffer_dc, 0, 0, SRCCOPY);
			EndPaint(hwnd, &ps);
		}
		return 0;
	case WM_COMMAND:
		{
			int id = LOWORD(wParam);
			switch (id) {
			case ID_BUTTON_CHANGE:
				// cycle the pen value 1 -> 2 -> 3 -> 1
				++current_value;
				if(current_value > 3) current_value = 1;
				break;
			case ID_BUTTON_RUN:
				button_run_clicked();
				break;
			case ID_BUTTON_CLEAR:
				clear_all();
				break;
			case ID_BUTTON_SAVE:
				{
					char filename[1024];
					if(getfilename(hwnd,filename,1024,1))
					{
						FILE *fp = fopen(filename,"w");

						// sniff the "-s" option to decide the output format
						char str[1024];
						Edit_GetLine(edit, 0, str, sizeof(str));
						const char *p = str;
						const char* svm_type_str = strstr(p, "-s ");
						int svm_type = C_SVC;
						if(svm_type_str != NULL)
							sscanf(svm_type_str, "-s %d", &svm_type);

						if(fp)
						{
							if(svm_type == EPSILON_SVR || svm_type == NU_SVR)
							{
								// regression: "y 1:x"
								for(list<point>::iterator p = point_list.begin(); p != point_list.end();p++)
									fprintf(fp,"%f 1:%f\n", p->y, p->x);
							}
							else
							{
								// classification: "label 1:x 2:y"
								for(list<point>::iterator p = point_list.begin(); p != point_list.end();p++)
									fprintf(fp,"%d 1:%f 2:%f\n", p->value, p->x, p->y);
							}
							fclose(fp);
						}
					}
				}
				break;
			case ID_BUTTON_LOAD:
				{
					char filename[1024];
					if(getfilename(hwnd,filename,1024,0))
					{
						FILE *fp = fopen(filename,"r");
						if(fp)
						{
							clear_all();
							char buf[4096];
							while(fgets(buf,sizeof(buf),fp))
							{
								int v;
								double x,y;
								// "%*d:" skips the feature index before each value
								if(sscanf(buf,"%d%*d:%lf%*d:%lf",&v,&x,&y)==3)
								{
									point p = {x,y,v};
									point_list.push_back(p);
								}
								else if(sscanf(buf,"%lf%*d:%lf",&y,&x)==2)
								{
									point p = {x,y,current_value};
									point_list.push_back(p);
								}
								else
									break;	// stop at the first malformed line
							}
							fclose(fp);
							draw_all_points();
						}
					}
				}
				break;
			}
		}
		return 0;
	case WM_DESTROY:
		PostQuitMessage(0);
		return 0;
	}

	return DefWindowProc(hwnd, iMsg, wParam, lParam);
}
|
BIN
libsvm-3.36/svm-train
Executable file
BIN
libsvm-3.36/svm-train
Executable file
Binary file not shown.
380
libsvm-3.36/svm-train.c
Normal file
380
libsvm-3.36/svm-train.c
Normal file
@@ -0,0 +1,380 @@
|
||||
#include <stdio.h>
|
||||
#include <stdlib.h>
|
||||
#include <string.h>
|
||||
#include <ctype.h>
|
||||
#include <errno.h>
|
||||
#include "svm.h"
|
||||
#define Malloc(type,n) (type *)malloc((n)*sizeof(type))
|
||||
|
||||
void print_null(const char *s) {} /* no-op print function installed by -q (quiet mode) */
|
||||
|
||||
/* Print the command-line usage summary and terminate with status 1. */
void exit_with_help()
{
	printf(
	"Usage: svm-train [options] training_set_file [model_file]\n"
	"options:\n"
	"-s svm_type : set type of SVM (default 0)\n"
	"	0 -- C-SVC		(multi-class classification)\n"
	"	1 -- nu-SVC		(multi-class classification)\n"
	"	2 -- one-class SVM\n"
	"	3 -- epsilon-SVR	(regression)\n"
	"	4 -- nu-SVR		(regression)\n"
	"-t kernel_type : set type of kernel function (default 2)\n"
	"	0 -- linear: u'*v\n"
	"	1 -- polynomial: (gamma*u'*v + coef0)^degree\n"
	"	2 -- radial basis function: exp(-gamma*|u-v|^2)\n"
	"	3 -- sigmoid: tanh(gamma*u'*v + coef0)\n"
	"	4 -- precomputed kernel (kernel values in training_set_file)\n"
	"-d degree : set degree in kernel function (default 3)\n"
	"-g gamma : set gamma in kernel function (default 1/num_features)\n"
	"-r coef0 : set coef0 in kernel function (default 0)\n"
	"-c cost : set the parameter C of C-SVC, epsilon-SVR, and nu-SVR (default 1)\n"
	"-n nu : set the parameter nu of nu-SVC, one-class SVM, and nu-SVR (default 0.5)\n"
	"-p epsilon : set the epsilon in loss function of epsilon-SVR (default 0.1)\n"
	"-m cachesize : set cache memory size in MB (default 100)\n"
	"-e epsilon : set tolerance of termination criterion (default 0.001)\n"
	"-h shrinking : whether to use the shrinking heuristics, 0 or 1 (default 1)\n"
	"-b probability_estimates : whether to train a SVC or SVR model for probability estimates, 0 or 1 (default 0)\n"
	"-wi weight : set the parameter C of class i to weight*C, for C-SVC (default 1)\n"
	"-v n: n-fold cross validation mode\n"
	"-q : quiet mode (no outputs)\n"
	);
	exit(1);
}
|
||||
|
||||
/* Report a malformed input line (1-based line number) and terminate. */
void exit_input_error(int line_num)
{
	fprintf(stderr,"Wrong input format at line %d\n", line_num);
	exit(1);
}
|
||||
|
||||
void parse_command_line(int argc, char **argv, char *input_file_name, char *model_file_name);
|
||||
void read_problem(const char *filename);
|
||||
void do_cross_validation();
|
||||
|
||||
struct svm_parameter param; // set by parse_command_line
|
||||
struct svm_problem prob; // set by read_problem
|
||||
struct svm_model *model;
|
||||
struct svm_node *x_space;
|
||||
int cross_validation;
|
||||
int nr_fold;
|
||||
|
||||
static char *line = NULL;
|
||||
static int max_line_len;
|
||||
|
||||
/* Read one whole line from `input` into the global `line` buffer,
 * doubling `max_line_len` until the terminating '\n' fits (or EOF is
 * reached).  Returns `line`, or NULL if the stream is exhausted. */
static char* readline(FILE *input)
{
	int len;

	if(fgets(line,max_line_len,input) == NULL)
		return NULL;

	for(;;)
	{
		if(strrchr(line,'\n') != NULL)
			break;	/* full line (incl. newline) is in the buffer */
		max_line_len *= 2;
		line = (char *) realloc(line,max_line_len);
		len = (int) strlen(line);
		if(fgets(line+len,max_line_len-len,input) == NULL)
			break;	/* EOF before a newline: return what we have */
	}
	return line;
}
|
||||
|
||||
/* Entry point: parse options, load the training data, validate the
 * parameters, then either run cross validation or train a model and
 * save it to model_file_name.  Frees the globals before returning. */
int main(int argc, char **argv)
{
	char input_file_name[1024];
	char model_file_name[1024];
	const char *error_msg;

	parse_command_line(argc, argv, input_file_name, model_file_name);
	read_problem(input_file_name);
	error_msg = svm_check_parameter(&prob,&param);

	if(error_msg)
	{
		fprintf(stderr,"ERROR: %s\n",error_msg);
		exit(1);
	}

	if(cross_validation)
	{
		do_cross_validation();
	}
	else
	{
		model = svm_train(&prob,&param);
		if(svm_save_model(model_file_name,model))
		{
			fprintf(stderr, "can't save model to file %s\n", model_file_name);
			exit(1);
		}
		svm_free_and_destroy_model(&model);
	}
	svm_destroy_param(&param);
	free(prob.y);
	free(prob.x);
	free(x_space);
	free(line);

	return 0;
}
|
||||
|
||||
/* Run nr_fold cross validation on the global problem/parameters and
 * print either MSE + squared correlation (regression types) or
 * accuracy (classification types). */
void do_cross_validation()
{
	int i;
	int total_correct = 0;
	double total_error = 0;
	double sumv = 0, sumy = 0, sumvv = 0, sumyy = 0, sumvy = 0;
	double *target = Malloc(double,prob.l);	/* per-instance predictions */

	svm_cross_validation(&prob,&param,nr_fold,target);
	if(param.svm_type == EPSILON_SVR ||
	   param.svm_type == NU_SVR)
	{
		/* accumulate sums for MSE and Pearson correlation */
		for(i=0;i<prob.l;i++)
		{
			double y = prob.y[i];
			double v = target[i];
			total_error += (v-y)*(v-y);
			sumv += v;
			sumy += y;
			sumvv += v*v;
			sumyy += y*y;
			sumvy += v*y;
		}
		printf("Cross Validation Mean squared error = %g\n",total_error/prob.l);
		printf("Cross Validation Squared correlation coefficient = %g\n",
			((prob.l*sumvy-sumv*sumy)*(prob.l*sumvy-sumv*sumy))/
			((prob.l*sumvv-sumv*sumv)*(prob.l*sumyy-sumy*sumy))
			);
	}
	else
	{
		for(i=0;i<prob.l;i++)
			if(target[i] == prob.y[i])
				++total_correct;
		printf("Cross Validation Accuracy = %g%%\n",100.0*total_correct/prob.l);
	}
	free(target);
}
|
||||
|
||||
/* Fill the global `param`/`cross_validation`/`nr_fold` from argv and
 * copy the input and model filenames into the caller's buffers.  If no
 * model filename is given, "<basename of input>.model" is used.
 * Exits via exit_with_help() on any malformed option. */
void parse_command_line(int argc, char **argv, char *input_file_name, char *model_file_name)
{
	int i;
	void (*print_func)(const char*) = NULL;	// default printing to stdout

	// default values
	param.svm_type = C_SVC;
	param.kernel_type = RBF;
	param.degree = 3;
	param.gamma = 0;	// 1/num_features
	param.coef0 = 0;
	param.nu = 0.5;
	param.cache_size = 100;
	param.C = 1;
	param.eps = 1e-3;
	param.p = 0.1;
	param.shrinking = 1;
	param.probability = 0;
	param.nr_weight = 0;
	param.weight_label = NULL;
	param.weight = NULL;
	cross_validation = 0;

	// parse options; each "-X" consumes the following argv entry as its
	// value ("-q" takes none, hence the i-- in its case)
	for(i=1;i<argc;i++)
	{
		if(argv[i][0] != '-') break;
		if(++i>=argc)
			exit_with_help();
		switch(argv[i-1][1])
		{
			case 's':
				param.svm_type = atoi(argv[i]);
				break;
			case 't':
				param.kernel_type = atoi(argv[i]);
				break;
			case 'd':
				param.degree = atoi(argv[i]);
				break;
			case 'g':
				param.gamma = atof(argv[i]);
				break;
			case 'r':
				param.coef0 = atof(argv[i]);
				break;
			case 'n':
				param.nu = atof(argv[i]);
				break;
			case 'm':
				param.cache_size = atof(argv[i]);
				break;
			case 'c':
				param.C = atof(argv[i]);
				break;
			case 'e':
				param.eps = atof(argv[i]);
				break;
			case 'p':
				param.p = atof(argv[i]);
				break;
			case 'h':
				param.shrinking = atoi(argv[i]);
				break;
			case 'b':
				param.probability = atoi(argv[i]);
				break;
			case 'q':
				print_func = &print_null;
				i--;	/* -q has no value argument */
				break;
			case 'v':
				cross_validation = 1;
				nr_fold = atoi(argv[i]);
				if(nr_fold < 2)
				{
					fprintf(stderr,"n-fold cross validation: n must >= 2\n");
					exit_with_help();
				}
				break;
			case 'w':	/* -wi weight: label i is embedded in the option itself */
				++param.nr_weight;
				param.weight_label = (int *)realloc(param.weight_label,sizeof(int)*param.nr_weight);
				param.weight = (double *)realloc(param.weight,sizeof(double)*param.nr_weight);
				param.weight_label[param.nr_weight-1] = atoi(&argv[i-1][2]);
				param.weight[param.nr_weight-1] = atof(argv[i]);
				break;
			default:
				fprintf(stderr,"Unknown option: -%c\n", argv[i-1][1]);
				exit_with_help();
		}
	}

	svm_set_print_string_function(print_func);

	// determine filenames

	if(i>=argc)
		exit_with_help();

	strcpy(input_file_name, argv[i]);

	if(i<argc-1)
		strcpy(model_file_name,argv[i+1]);
	else
	{
		// no model file given: derive "<basename>.model" from the input path
		char *p = strrchr(argv[i],'/');
		if(p==NULL)
			p = argv[i];
		else
			++p;
		sprintf(model_file_name,"%s.model",p);
	}
}
|
||||
|
||||
// read in a problem (in svmlight format)
//
// Fills the globals prob (labels/instances), x_space (all sparse nodes)
// and may set param.gamma to its data-dependent default.  Exits the
// process on I/O or format errors (via exit/exit_input_error).

void read_problem(const char *filename)
{
	int max_index, inst_max_index, i;
	size_t elements, j;
	FILE *fp = fopen(filename,"r");
	char *endptr;
	char *idx, *val, *label;

	if(fp == NULL)
	{
		fprintf(stderr,"can't open input file %s\n",filename);
		exit(1);
	}

	prob.l = 0;
	elements = 0;

	// first pass: count instances (prob.l) and total nodes (elements)
	max_line_len = 1024;
	line = Malloc(char,max_line_len);
	while(readline(fp)!=NULL)
	{
		char *p = strtok(line," \t"); // label

		// features
		while(1)
		{
			p = strtok(NULL," \t");
			if(p == NULL || *p == '\n') // check '\n' as ' ' may be after the last feature
				break;
			++elements;
		}
		++elements;	// one extra node per instance for the index = -1 terminator
		++prob.l;
	}
	rewind(fp);

	prob.y = Malloc(double,prob.l);
	prob.x = Malloc(struct svm_node *,prob.l);
	x_space = Malloc(struct svm_node,elements);

	// second pass: parse labels and index:value pairs into x_space
	max_index = 0;
	j=0;
	for(i=0;i<prob.l;i++)
	{
		inst_max_index = -1; // strtol gives 0 if wrong format, and precomputed kernel has <index> start from 0
		readline(fp);
		prob.x[i] = &x_space[j];
		label = strtok(line," \t\n");
		if(label == NULL) // empty line
			exit_input_error(i+1);

		prob.y[i] = strtod(label,&endptr);
		if(endptr == label || *endptr != '\0')
			exit_input_error(i+1);

		while(1)
		{
			idx = strtok(NULL,":");
			val = strtok(NULL," \t");

			if(val == NULL)
				break;

			// feature indices must be integers in strictly ascending order
			errno = 0;
			x_space[j].index = (int) strtol(idx,&endptr,10);
			if(endptr == idx || errno != 0 || *endptr != '\0' || x_space[j].index <= inst_max_index)
				exit_input_error(i+1);
			else
				inst_max_index = x_space[j].index;

			errno = 0;
			x_space[j].value = strtod(val,&endptr);
			if(endptr == val || errno != 0 || (*endptr != '\0' && !isspace(*endptr)))
				exit_input_error(i+1);

			++j;
		}

		if(inst_max_index > max_index)
			max_index = inst_max_index;
		x_space[j++].index = -1;	// terminate this instance's node list
	}

	// default gamma = 1/max_index when the user did not supply -g
	if(param.gamma == 0 && max_index > 0)
		param.gamma = 1.0/max_index;

	// precomputed kernel: first node must be 0:serial with serial in [1, max_index]
	if(param.kernel_type == PRECOMPUTED)
		for(i=0;i<prob.l;i++)
		{
			if (prob.x[i][0].index != 0)
			{
				fprintf(stderr,"Wrong input format: first column must be 0:sample_serial_number\n");
				exit(1);
			}
			if ((int)prob.x[i][0].value <= 0 || (int)prob.x[i][0].value > max_index)
			{
				fprintf(stderr,"Wrong input format: sample_serial_number out of range\n");
				exit(1);
			}
		}

	fclose(fp);
}
|
3312
libsvm-3.36/svm.cpp
Normal file
3312
libsvm-3.36/svm.cpp
Normal file
File diff suppressed because it is too large
Load Diff
21
libsvm-3.36/svm.def
Normal file
21
libsvm-3.36/svm.def
Normal file
@@ -0,0 +1,21 @@
|
||||
; Module-definition file for building libsvm as a Windows DLL.
; Each exported function keeps a fixed ordinal so the DLL ABI stays stable
; across releases.
LIBRARY libsvm
EXPORTS
	svm_train @1
	svm_cross_validation @2
	svm_save_model @3
	svm_load_model @4
	svm_get_svm_type @5
	svm_get_nr_class @6
	svm_get_labels @7
	svm_get_svr_probability @8
	svm_predict_values @9
	svm_predict @10
	svm_predict_probability @11
	svm_free_model_content @12
	svm_free_and_destroy_model @13
	svm_destroy_param @14
	svm_check_parameter @15
	svm_check_probability_model @16
	svm_set_print_string_function @17
	svm_get_sv_indices @18
	svm_get_nr_sv @19
|
105
libsvm-3.36/svm.h
Normal file
105
libsvm-3.36/svm.h
Normal file
@@ -0,0 +1,105 @@
|
||||
#ifndef _LIBSVM_H
#define _LIBSVM_H

#define LIBSVM_VERSION 336

#ifdef __cplusplus
extern "C" {
#endif

extern int libsvm_version;

/* one entry of a sparse feature vector; a vector is an svm_node array
   terminated by an element with index == -1 */
struct svm_node
{
	int index;
	double value;
};

/* a training set: l instances, labels y[i] and sparse vectors x[i] */
struct svm_problem
{
	int l;
	double *y;
	struct svm_node **x;
};

enum { C_SVC, NU_SVC, ONE_CLASS, EPSILON_SVR, NU_SVR };	/* svm_type */
enum { LINEAR, POLY, RBF, SIGMOID, PRECOMPUTED }; /* kernel_type */

/* all user-tunable training parameters (see svm-train command-line options) */
struct svm_parameter
{
	int svm_type;
	int kernel_type;
	int degree;	/* for poly */
	double gamma;	/* for poly/rbf/sigmoid */
	double coef0;	/* for poly/sigmoid */

	/* these are for training only */
	double cache_size; /* in MB */
	double eps;	/* stopping criteria */
	double C;	/* for C_SVC, EPSILON_SVR and NU_SVR */
	int nr_weight;		/* for C_SVC */
	int *weight_label;	/* for C_SVC */
	double* weight;		/* for C_SVC */
	double nu;	/* for NU_SVC, ONE_CLASS, and NU_SVR */
	double p;	/* for EPSILON_SVR */
	int shrinking;	/* use the shrinking heuristics */
	int probability; /* do probability estimates */
};

//
// svm_model
//
struct svm_model
{
	struct svm_parameter param;	/* parameter */
	int nr_class;		/* number of classes, = 2 in regression/one class svm */
	int l;			/* total #SV */
	struct svm_node **SV;		/* SVs (SV[l]) */
	double **sv_coef;	/* coefficients for SVs in decision functions (sv_coef[k-1][l]) */
	double *rho;		/* constants in decision functions (rho[k*(k-1)/2]) */
	double *probA;		/* pairwise probability information */
	double *probB;
	double *prob_density_marks;	/* probability information for ONE_CLASS */
	int *sv_indices;        /* sv_indices[0,...,nSV-1] are values in [1,...,num_training_data] to indicate SVs in the training set */

	/* for classification only */

	int *label;		/* label of each class (label[k]) */
	int *nSV;		/* number of SVs for each class (nSV[k]) */
				/* nSV[0] + nSV[1] + ... + nSV[k-1] = l */
	/* XXX */
	int free_sv;		/* 1 if svm_model is created by svm_load_model*/
				/* 0 if svm_model is created by svm_train */
};

struct svm_model *svm_train(const struct svm_problem *prob, const struct svm_parameter *param);
void svm_cross_validation(const struct svm_problem *prob, const struct svm_parameter *param, int nr_fold, double *target);

int svm_save_model(const char *model_file_name, const struct svm_model *model);
struct svm_model *svm_load_model(const char *model_file_name);

int svm_get_svm_type(const struct svm_model *model);
int svm_get_nr_class(const struct svm_model *model);
void svm_get_labels(const struct svm_model *model, int *label);
void svm_get_sv_indices(const struct svm_model *model, int *sv_indices);
int svm_get_nr_sv(const struct svm_model *model);
double svm_get_svr_probability(const struct svm_model *model);

double svm_predict_values(const struct svm_model *model, const struct svm_node *x, double* dec_values);
double svm_predict(const struct svm_model *model, const struct svm_node *x);
double svm_predict_probability(const struct svm_model *model, const struct svm_node *x, double* prob_estimates);

void svm_free_model_content(struct svm_model *model_ptr);
void svm_free_and_destroy_model(struct svm_model **model_ptr_ptr);
void svm_destroy_param(struct svm_parameter *param);

const char *svm_check_parameter(const struct svm_problem *prob, const struct svm_parameter *param);
int svm_check_probability_model(const struct svm_model *model);

/* redirect libsvm's informational output; pass NULL to restore stdout */
void svm_set_print_string_function(void (*print_func)(const char *));

#ifdef __cplusplus
}
#endif

#endif /* _LIBSVM_H */
|
210
libsvm-3.36/tools/README
Normal file
210
libsvm-3.36/tools/README
Normal file
@@ -0,0 +1,210 @@
|
||||
This directory includes some useful codes:
|
||||
|
||||
1. subset selection tools.
|
||||
2. parameter selection tools.
|
||||
3. LIBSVM format checking tools
|
||||
|
||||
Part I: Subset selection tools
|
||||
|
||||
Introduction
|
||||
============
|
||||
|
||||
Training large data is time consuming. Sometimes one should work on a
|
||||
smaller subset first. The python script subset.py randomly selects a
|
||||
specified number of samples. For classification data, we provide a
|
||||
stratified selection to ensure the same class distribution in the
|
||||
subset.
|
||||
|
||||
Usage: subset.py [options] dataset number [output1] [output2]
|
||||
|
||||
This script selects a subset of the given data set.
|
||||
|
||||
options:
|
||||
-s method : method of selection (default 0)
|
||||
0 -- stratified selection (classification only)
|
||||
1 -- random selection
|
||||
|
||||
output1 : the subset (optional)
|
||||
output2 : the rest of data (optional)
|
||||
|
||||
If output1 is omitted, the subset will be printed on the screen.
|
||||
|
||||
Example
|
||||
=======
|
||||
|
||||
> python subset.py heart_scale 100 file1 file2
|
||||
|
||||
From heart_scale 100 samples are randomly selected and stored in
|
||||
file1. All remaining instances are stored in file2.
|
||||
|
||||
|
||||
Part II: Parameter Selection Tools
|
||||
|
||||
Introduction
|
||||
============
|
||||
|
||||
grid.py is a parameter selection tool for C-SVM classification using
|
||||
the RBF (radial basis function) kernel. It uses cross validation (CV)
|
||||
technique to estimate the accuracy of each parameter combination in
|
||||
the specified range and helps you to decide the best parameters for
|
||||
your problem.
|
||||
|
||||
grid.py directly executes libsvm binaries (so no python binding is needed)
|
||||
for cross validation and then draw contour of CV accuracy using gnuplot.
|
||||
You must have libsvm and gnuplot installed before using it. The package
|
||||
gnuplot is available at http://www.gnuplot.info/
|
||||
|
||||
On Mac OS X, the precompiled gnuplot binary needs the AquaTerm library,
which must therefore be installed as well. In addition, this version of
|
||||
gnuplot does not support png, so you need to change "set term png
|
||||
transparent small" and use other image formats. For example, you may
|
||||
have "set term pbm small color".
|
||||
|
||||
Usage: grid.py [grid_options] [svm_options] dataset
|
||||
|
||||
grid_options :
|
||||
-log2c {begin,end,step | "null"} : set the range of c (default -5,15,2)
|
||||
begin,end,step -- c_range = 2^{begin,...,begin+k*step,...,end}
|
||||
"null" -- do not grid with c
|
||||
-log2g {begin,end,step | "null"} : set the range of g (default 3,-15,-2)
|
||||
begin,end,step -- g_range = 2^{begin,...,begin+k*step,...,end}
|
||||
"null" -- do not grid with g
|
||||
-v n : n-fold cross validation (default 5)
|
||||
-svmtrain pathname : set svm executable path and name
|
||||
-gnuplot {pathname | "null"} :
|
||||
pathname -- set gnuplot executable path and name
|
||||
"null" -- do not plot
|
||||
-out {pathname | "null"} : (default dataset.out)
|
||||
pathname -- set output file path and name
|
||||
"null" -- do not output file
|
||||
-png pathname : set graphic output file path and name (default dataset.png)
|
||||
-resume [pathname] : resume the grid task using an existing output file (default pathname is dataset.out)
|
||||
Use this option only if some parameters have been checked for the SAME data.
|
||||
|
||||
svm_options : additional options for svm-train
|
||||
|
||||
The program conducts v-fold cross validation using parameter C (and gamma)
|
||||
= 2^begin, 2^(begin+step), ..., 2^end.
|
||||
|
||||
You can specify where the libsvm executable and gnuplot are using the
|
||||
-svmtrain and -gnuplot parameters.
|
||||
|
||||
For windows users, please use pgnuplot.exe. If you are using gnuplot
|
||||
3.7.1, please upgrade to version 3.7.3 or higher. The version 3.7.1
|
||||
has a bug. If you use Cygwin on Windows, please use gnuplot-x11.
|
||||
|
||||
If the task is terminated accidentally or you would like to change the
|
||||
range of parameters, you can apply '-resume' to save time by re-using
|
||||
previous results. You may specify the output file of a previous run
|
||||
or use the default (i.e., dataset.out) without giving a name. Please
|
||||
note that the same condition must be used in two runs. For example,
|
||||
you cannot use '-v 10' earlier and resume the task with '-v 5'.
|
||||
|
||||
The value of some options can be "null." For example, `-log2c -1,0,1
-log2g "null"' means that C=2^-1,2^0,2^1 and g=LIBSVM's default gamma
value. That is, you do not conduct parameter selection on gamma.
|
||||
|
||||
Example
|
||||
=======
|
||||
|
||||
> python grid.py -log2c -5,5,1 -log2g -4,0,1 -v 5 -m 300 heart_scale
|
||||
|
||||
Users (in particular MS Windows users) may need to specify the path of
|
||||
executable files. You can either change paths in the beginning of
|
||||
grid.py or specify them in the command line. For example,
|
||||
|
||||
> grid.py -log2c -5,5,1 -svmtrain "c:\Program Files\libsvm\windows\svm-train.exe" -gnuplot c:\tmp\gnuplot\binary\pgnuplot.exe -v 10 heart_scale
|
||||
|
||||
Output: two files
|
||||
dataset.png: the CV accuracy contour plot generated by gnuplot
|
||||
dataset.out: the CV accuracy at each (log2(C),log2(gamma))
|
||||
|
||||
The following example saves running time by loading the output file of a previous run.
|
||||
|
||||
> python grid.py -log2c -7,7,1 -log2g -5,2,1 -v 5 -resume heart_scale.out heart_scale
|
||||
|
||||
Parallel grid search
|
||||
====================
|
||||
|
||||
You can conduct a parallel grid search by dispatching jobs to a
|
||||
cluster of computers which share the same file system. First, you add
|
||||
machine names in grid.py:
|
||||
|
||||
ssh_workers = ["linux1", "linux5", "linux5"]
|
||||
|
||||
and then setup your ssh so that the authentication works without
|
||||
asking a password.
|
||||
|
||||
The same machine (e.g., linux5 here) can be listed more than once if
|
||||
it has multiple CPUs or has more RAM. If the local machine is the
|
||||
best, you can also enlarge the nr_local_worker. For example:
|
||||
|
||||
nr_local_worker = 2
|
||||
|
||||
Example:
|
||||
|
||||
> python grid.py heart_scale
|
||||
[local] -1 -1 78.8889 (best c=0.5, g=0.5, rate=78.8889)
|
||||
[linux5] -1 -7 83.3333 (best c=0.5, g=0.0078125, rate=83.3333)
|
||||
[linux5] 5 -1 77.037 (best c=0.5, g=0.0078125, rate=83.3333)
|
||||
[linux1] 5 -7 83.3333 (best c=0.5, g=0.0078125, rate=83.3333)
|
||||
.
|
||||
.
|
||||
.
|
||||
|
||||
If -log2c, -log2g, or -v is not specified, default values are used.
|
||||
|
||||
If your system uses telnet instead of ssh, you list the computer names
|
||||
in telnet_workers.
|
||||
|
||||
Calling grid in Python
|
||||
======================
|
||||
|
||||
In addition to using grid.py as a command-line tool, you can use it as a
|
||||
Python module.
|
||||
|
||||
>>> rate, param = find_parameters(dataset, options)
|
||||
|
||||
You need to specify `dataset' and `options' (default ''). See the following example.
|
||||
|
||||
> python
|
||||
|
||||
>>> from grid import *
|
||||
>>> rate, param = find_parameters('../heart_scale', '-log2c -1,1,1 -log2g -1,1,1')
|
||||
[local] 0.0 0.0 rate=74.8148 (best c=1.0, g=1.0, rate=74.8148)
|
||||
[local] 0.0 -1.0 rate=77.037 (best c=1.0, g=0.5, rate=77.037)
|
||||
.
|
||||
.
|
||||
[local] -1.0 -1.0 rate=78.8889 (best c=0.5, g=0.5, rate=78.8889)
|
||||
.
|
||||
.
|
||||
>>> rate
|
||||
78.8889
|
||||
>>> param
|
||||
{'c': 0.5, 'g': 0.5}
|
||||
|
||||
|
||||
Part III: LIBSVM format checking tools
|
||||
|
||||
Introduction
|
||||
============
|
||||
|
||||
`svm-train' conducts only a simple check of the input data. To do a
|
||||
detailed check, we provide a python script `checkdata.py.'
|
||||
|
||||
Usage: checkdata.py dataset
|
||||
|
||||
Exit status (returned value): 1 if there are errors, 0 otherwise.
|
||||
|
||||
This tool is written by Rong-En Fan at National Taiwan University.
|
||||
|
||||
Example
|
||||
=======
|
||||
|
||||
> cat bad_data
|
||||
1 3:1 2:4
|
||||
> python checkdata.py bad_data
|
||||
line 1: feature indices must be in an ascending order, previous/current features 3:1 2:4
|
||||
Found 1 lines with error.
|
||||
|
||||
|
108
libsvm-3.36/tools/checkdata.py
Executable file
108
libsvm-3.36/tools/checkdata.py
Executable file
@@ -0,0 +1,108 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
#
|
||||
# A format checker for LIBSVM
|
||||
#
|
||||
|
||||
#
|
||||
# Copyright (c) 2007, Rong-En Fan
|
||||
#
|
||||
# All rights reserved.
|
||||
#
|
||||
# This program is distributed under the same license of the LIBSVM package.
|
||||
#
|
||||
|
||||
from sys import argv, exit
|
||||
import os.path
|
||||
|
||||
def err(line_no, msg):
	# Report one format problem on stdout, tagged with its line number.
	message = "line {0}: {1}".format(line_no, msg)
	print(message)
|
||||
|
||||
# works like float() but does not accept nan and inf
|
||||
# works like float() but does not accept nan and inf
def my_float(x):
	lowered = x.lower()
	if "nan" in lowered or "inf" in lowered:
		raise ValueError

	return float(x)
|
||||
|
||||
def main():
	"""Validate the LIBSVM-format dataset named on the command line.

	Prints one diagnostic per problem found (via err) plus a summary line.
	Returns 1 if any line contains an error, 0 otherwise; exits early with
	status 1 on usage errors or a missing dataset file.
	"""
	if len(argv) != 2:
		print("Usage: {0} dataset".format(argv[0]))
		exit(1)

	dataset = argv[1]

	if not os.path.exists(dataset):
		print("dataset {0} not found".format(dataset))
		exit(1)

	line_no = 1
	error_line_count = 0
	# Use a context manager so the file handle is closed deterministically;
	# the original iterated over a bare open() and leaked the handle.
	with open(dataset, 'r') as data_file:
		for line in data_file:
			line_error = False

			# each line must end with a newline character
			if line[-1] != '\n':
				err(line_no, "missing a newline character in the end")
				line_error = True

			nodes = line.split()

			# check label
			try:
				label = nodes.pop(0)

				if label.find(',') != -1:
					# multi-label format: every comma-separated part must be numeric
					try:
						for l in label.split(','):
							l = my_float(l)
					except:
						err(line_no, "label {0} is not a valid multi-label form".format(label))
						line_error = True
				else:
					try:
						label = my_float(label)
					except:
						err(line_no, "label {0} is not a number".format(label))
						line_error = True
			except:
				err(line_no, "missing label, perhaps an empty line?")
				line_error = True

			# check features
			prev_index = -1
			for i in range(len(nodes)):
				try:
					(index, value) = nodes[i].split(':')

					index = int(index)
					value = my_float(value)

					# precomputed kernel's index starts from 0 and LIBSVM
					# checks it. Hence, don't treat index 0 as an error.
					if index < 0:
						err(line_no, "feature index must be positive; wrong feature {0}".format(nodes[i]))
						line_error = True
					elif index <= prev_index:
						err(line_no, "feature indices must be in an ascending order, previous/current features {0} {1}".format(nodes[i-1], nodes[i]))
						line_error = True
					prev_index = index
				except:
					err(line_no, "feature '{0}' not an <index>:<value> pair, <index> integer, <value> real number ".format(nodes[i]))
					line_error = True

			line_no += 1

			if line_error:
				error_line_count += 1

	if error_line_count > 0:
		print("Found {0} lines with error.".format(error_line_count))
		return 1
	else:
		print("No error.")
		return 0
|
||||
|
||||
if __name__ == "__main__":
	# propagate main()'s status (0 = clean, 1 = errors found) as the exit code
	exit(main())
|
79
libsvm-3.36/tools/easy.py
Executable file
79
libsvm-3.36/tools/easy.py
Executable file
@@ -0,0 +1,79 @@
|
||||
#!/usr/bin/env python

# easy.py: fully automatic pipeline around the libsvm command-line tools:
# scale the training data, grid-search (C, gamma) via grid.py, train a
# model, and (optionally) scale + predict a test set with it.

import sys
import os
from subprocess import *

if len(sys.argv) <= 1:
	print('Usage: {0} training_file [testing_file]'.format(sys.argv[0]))
	raise SystemExit

# svm, grid, and gnuplot executable files

is_win32 = (sys.platform == 'win32')
if not is_win32:
	svmscale_exe = "../svm-scale"
	svmtrain_exe = "../svm-train"
	svmpredict_exe = "../svm-predict"
	grid_py = "./grid.py"
	gnuplot_exe = "/usr/bin/gnuplot"
else:
	# example for windows
	svmscale_exe = r"..\windows\svm-scale.exe"
	svmtrain_exe = r"..\windows\svm-train.exe"
	svmpredict_exe = r"..\windows\svm-predict.exe"
	gnuplot_exe = r"c:\tmp\gnuplot\binary\pgnuplot.exe"
	grid_py = r".\grid.py"

# fail fast if any required tool is missing
assert os.path.exists(svmscale_exe),"svm-scale executable not found"
assert os.path.exists(svmtrain_exe),"svm-train executable not found"
assert os.path.exists(svmpredict_exe),"svm-predict executable not found"
assert os.path.exists(gnuplot_exe),"gnuplot executable not found"
assert os.path.exists(grid_py),"grid.py not found"

# derived output filenames live in the current directory, not next to the input
train_pathname = sys.argv[1]
assert os.path.exists(train_pathname),"training file not found"
file_name = os.path.split(train_pathname)[1]
scaled_file = file_name + ".scale"
model_file = file_name + ".model"
range_file = file_name + ".range"

if len(sys.argv) > 2:
	test_pathname = sys.argv[2]
	file_name = os.path.split(test_pathname)[1]
	assert os.path.exists(test_pathname),"testing file not found"
	scaled_test_file = file_name + ".scale"
	predict_test_file = file_name + ".predict"

# scale training data; -s saves the scaling parameters for reuse on test data
cmd = '{0} -s "{1}" "{2}" > "{3}"'.format(svmscale_exe, range_file, train_pathname, scaled_file)
print('Scaling training data...')
Popen(cmd, shell = True, stdout = PIPE).communicate()

cmd = '{0} -svmtrain "{1}" -gnuplot "{2}" "{3}"'.format(grid_py, svmtrain_exe, gnuplot_exe, scaled_file)
print('Cross validation...')
f = Popen(cmd, shell = True, stdout = PIPE).stdout

# grid.py prints progress lines and ends with "c g rate"; keep the last
# non-empty line, which holds the best parameters found
line = ''
while True:
	last_line = line
	line = f.readline()
	if not line: break
c,g,rate = map(float,last_line.split())

print('Best c={0}, g={1} CV rate={2}'.format(c,g,rate))

# train the final model with the best (c, g)
cmd = '{0} -c {1} -g {2} "{3}" "{4}"'.format(svmtrain_exe,c,g,scaled_file,model_file)
print('Training...')
Popen(cmd, shell = True, stdout = PIPE).communicate()

print('Output model: {0}'.format(model_file))
if len(sys.argv) > 2:
	# scale the test set with the SAME ranges (-r) used for training data
	cmd = '{0} -r "{1}" "{2}" > "{3}"'.format(svmscale_exe, range_file, test_pathname, scaled_test_file)
	print('Scaling testing data...')
	Popen(cmd, shell = True, stdout = PIPE).communicate()

	cmd = '{0} "{1}" "{2}" "{3}"'.format(svmpredict_exe, scaled_test_file, model_file, predict_test_file)
	print('Testing...')
	Popen(cmd, shell = True).communicate()

	print('Output prediction: {0}'.format(predict_test_file))
|
500
libsvm-3.36/tools/grid.py
Executable file
500
libsvm-3.36/tools/grid.py
Executable file
@@ -0,0 +1,500 @@
|
||||
#!/usr/bin/env python
|
||||
__all__ = ['find_parameters']
|
||||
|
||||
import os, sys, traceback, getpass, time, re
|
||||
from threading import Thread
|
||||
from subprocess import *
|
||||
|
||||
if sys.version_info[0] < 3:
|
||||
from Queue import Queue
|
||||
else:
|
||||
from queue import Queue
|
||||
|
||||
telnet_workers = []
|
||||
ssh_workers = []
|
||||
nr_local_worker = 1
|
||||
|
||||
class GridOption:
	"""Configuration for one grid-search run: tool paths, (log2c, log2g)
	search ranges, fold count, output filenames and resume state."""
	def __init__(self, dataset_pathname, options):
		dirname = os.path.dirname(__file__)
		if sys.platform != 'win32':
			self.svmtrain_pathname = os.path.join(dirname, '../svm-train')
			self.gnuplot_pathname = '/usr/bin/gnuplot'
		else:
			# example for windows
			self.svmtrain_pathname = os.path.join(dirname, r'..\windows\svm-train.exe')
			# svmtrain_pathname = r'c:\Program Files\libsvm\windows\svm-train.exe'
			self.gnuplot_pathname = r'c:\tmp\gnuplot\binary\pgnuplot.exe'
		self.fold = 5
		# default ranges: C = 2^-5..2^15 step 2, gamma = 2^3..2^-15 step -2
		self.c_begin, self.c_end, self.c_step = -5,  15,  2
		self.g_begin, self.g_end, self.g_step =  3, -15, -2
		self.grid_with_c, self.grid_with_g = True, True
		self.dataset_pathname = dataset_pathname
		self.dataset_title = os.path.split(dataset_pathname)[1]
		self.out_pathname = '{0}.out'.format(self.dataset_title)
		self.png_pathname = '{0}.png'.format(self.dataset_title)
		self.pass_through_string = ' '
		self.resume_pathname = None
		self.parse_options(options)

	def parse_options(self, options):
		"""Parse grid.py options from a string or list; any unrecognized
		option is collected and passed through verbatim to svm-train."""
		if type(options) == str:
			options = options.split()
		i = 0
		pass_through_options = []

		while i < len(options):
			if options[i] == '-log2c':
				i = i + 1
				if options[i] == 'null':
					self.grid_with_c = False
				else:
					self.c_begin, self.c_end, self.c_step = map(float,options[i].split(','))
			elif options[i] == '-log2g':
				i = i + 1
				if options[i] == 'null':
					self.grid_with_g = False
				else:
					self.g_begin, self.g_end, self.g_step = map(float,options[i].split(','))
			elif options[i] == '-v':
				i = i + 1
				self.fold = options[i]
			elif options[i] in ('-c','-g'):
				raise ValueError('Use -log2c and -log2g.')
			elif options[i] == '-svmtrain':
				i = i + 1
				self.svmtrain_pathname = options[i]
			elif options[i] == '-gnuplot':
				i = i + 1
				if options[i] == 'null':
					self.gnuplot_pathname = None
				else:
					self.gnuplot_pathname = options[i]
			elif options[i] == '-out':
				i = i + 1
				if options[i] == 'null':
					self.out_pathname = None
				else:
					self.out_pathname = options[i]
			elif options[i] == '-png':
				i = i + 1
				self.png_pathname = options[i]
			elif options[i] == '-resume':
				# -resume takes an optional pathname; default to <dataset>.out
				if i == (len(options)-1) or options[i+1].startswith('-'):
					self.resume_pathname = self.dataset_title + '.out'
				else:
					i = i + 1
					self.resume_pathname = options[i]
			else:
				pass_through_options.append(options[i])
			i = i + 1

		self.pass_through_string = ' '.join(pass_through_options)
		# validate the final configuration; gnuplot is optional (warn + disable)
		if not os.path.exists(self.svmtrain_pathname):
			raise IOError('svm-train executable not found')
		if not os.path.exists(self.dataset_pathname):
			raise IOError('dataset not found')
		if self.resume_pathname and not os.path.exists(self.resume_pathname):
			raise IOError('file for resumption not found')
		if not self.grid_with_c and not self.grid_with_g:
			raise ValueError('-log2c and -log2g should not be null simultaneously')
		if self.gnuplot_pathname and not os.path.exists(self.gnuplot_pathname):
			sys.stderr.write('gnuplot executable not found\n')
			self.gnuplot_pathname = None
|
||||
|
||||
def redraw(db,best_param,gnuplot,options,tofile=False):
	"""Redraw the CV-accuracy contour through a gnuplot pipe.

	db is a list of (log2c, log2g, rate) triples; best_param is
	(best_log2c, best_log2g, best_rate).  gnuplot is the stdin of a running
	gnuplot process (bytes-oriented, hence the .encode() calls).  With
	tofile=True the plot is written to options.png_pathname instead of an
	interactive terminal.
	"""
	if len(db) == 0: return
	begin_level = round(max(x[2] for x in db)) - 3
	step_size = 0.5

	best_log2c,best_log2g,best_rate = best_param

	# if newly obtained c, g, or cv values are the same,
	# then stop redrawing the contour.
	if all(x[0] == db[0][0]  for x in db): return
	if all(x[1] == db[0][1]  for x in db): return
	if all(x[2] == db[0][2]  for x in db): return

	if tofile:
		gnuplot.write(b"set term png transparent small linewidth 2 medium enhanced\n")
		gnuplot.write("set output \"{0}\"\n".format(options.png_pathname.replace('\\','\\\\')).encode())
		#gnuplot.write(b"set term postscript color solid\n")
		#gnuplot.write("set output \"{0}.ps\"\n".format(options.dataset_title).encode().encode())
	elif sys.platform == 'win32':
		gnuplot.write(b"set term windows\n")
	else:
		gnuplot.write( b"set term x11\n")
	gnuplot.write(b"set xlabel \"log2(C)\"\n")
	gnuplot.write(b"set ylabel \"log2(gamma)\"\n")
	gnuplot.write("set xrange [{0}:{1}]\n".format(options.c_begin,options.c_end).encode())
	gnuplot.write("set yrange [{0}:{1}]\n".format(options.g_begin,options.g_end).encode())
	gnuplot.write(b"set contour\n")
	gnuplot.write("set cntrparam levels incremental {0},{1},100\n".format(begin_level,step_size).encode())
	gnuplot.write(b"unset surface\n")
	gnuplot.write(b"unset ztics\n")
	gnuplot.write(b"set view 0,0\n")
	gnuplot.write("set title \"{0}\"\n".format(options.dataset_title).encode())
	gnuplot.write(b"unset label\n")
	gnuplot.write("set label \"Best log2(C) = {0}  log2(gamma) = {1}  accuracy = {2}%\" \
at screen 0.5,0.85 center\n". \
format(best_log2c, best_log2g, best_rate).encode())
	gnuplot.write("set label \"C = {0}  gamma = {1}\""
				  " at screen 0.5,0.8 center\n".format(2**best_log2c, 2**best_log2g).encode())
	gnuplot.write(b"set key at screen 0.9,0.9\n")
	gnuplot.write(b"splot \"-\" with lines\n")

	# gnuplot's splot "-" expects rows grouped by the first coordinate,
	# separated by blank lines; sort accordingly before streaming the data
	db.sort(key = lambda x:(x[0], -x[1]))

	prevc = db[0][0]
	for line in db:
		if prevc != line[0]:
			gnuplot.write(b"\n")
			prevc = line[0]
		gnuplot.write("{0[0]} {0[1]} {0[2]}\n".format(line).encode())
	gnuplot.write(b"e\n")
	gnuplot.write(b"\n") # force gnuplot back to prompt when term set failure
	gnuplot.flush()
|
||||
|
||||
|
||||
def calculate_jobs(options):
	"""Build the grid-search work list.

	Returns (jobs, resumed_jobs): jobs is a list of "lines", each a list of
	(log2c, log2g) pairs, ordered coarse-to-fine so early results cover the
	whole grid; resumed_jobs maps (log2c, log2g) -> rate parsed from a
	previous output file when -resume was given (empty dict otherwise).
	A coordinate is None when that dimension is not searched.
	"""

	def range_f(begin,end,step):
		# like range, but works on non-integer too
		seq = []
		while True:
			if step > 0 and begin > end: break
			if step < 0 and begin < end: break
			seq.append(begin)
			begin = begin + step
		return seq

	def permute_sequence(seq):
		# reorder seq so the midpoint comes first, then recursively
		# interleave both halves: a coarse-to-fine visiting order
		n = len(seq)
		if n <= 1: return seq

		mid = int(n/2)
		left = permute_sequence(seq[:mid])
		right = permute_sequence(seq[mid+1:])

		ret = [seq[mid]]
		while left or right:
			if left: ret.append(left.pop(0))
			if right: ret.append(right.pop(0))

		return ret


	c_seq = permute_sequence(range_f(options.c_begin,options.c_end,options.c_step))
	g_seq = permute_sequence(range_f(options.g_begin,options.g_end,options.g_step))

	if not options.grid_with_c:
		c_seq = [None]
	if not options.grid_with_g:
		g_seq = [None]

	# expand the two sequences into grid "lines", alternating between
	# refining the C axis and the gamma axis so resolution grows evenly
	nr_c = float(len(c_seq))
	nr_g = float(len(g_seq))
	i, j = 0, 0
	jobs = []

	while i < nr_c or j < nr_g:
		if i/nr_c < j/nr_g:
			# increase C resolution
			line = []
			for k in range(0,j):
				line.append((c_seq[i],g_seq[k]))
			i = i + 1
			jobs.append(line)
		else:
			# increase g resolution
			line = []
			for k in range(0,i):
				line.append((c_seq[k],g_seq[j]))
			j = j + 1
			jobs.append(line)

	resumed_jobs = {}

	if options.resume_pathname is None:
		return jobs, resumed_jobs

	# harvest already-computed rates from the previous run's output file;
	# lines without a "rate=" entry are ignored
	for line in open(options.resume_pathname, 'r'):
		line = line.strip()
		rst = re.findall(r'rate=([0-9.]+)',line)
		if not rst:
			continue
		rate = float(rst[0])

		c, g = None, None
		rst = re.findall(r'log2c=([0-9.-]+)',line)
		if rst:
			c = float(rst[0])
		rst = re.findall(r'log2g=([0-9.-]+)',line)
		if rst:
			g = float(rst[0])

		resumed_jobs[(c,g)] = rate

	return jobs, resumed_jobs
|
||||
|
||||
|
||||
class WorkerStopToken:
	"""Sentinel placed on the job queue to tell workers to stop,
	or re-queued by a dying worker."""
	pass
|
||||
|
||||
class Worker(Thread):
	"""Base cross-validation worker thread.

	Repeatedly takes a (log2c, log2g) job from job_queue, runs one
	svm-train cross validation via run_one (supplied by subclasses) and
	puts (name, log2c, log2g, rate) on result_queue.  Stops when it sees
	a WorkerStopToken, or re-queues its job and quits on failure.
	"""
	def __init__(self,name,job_queue,result_queue,options):
		Thread.__init__(self)
		self.name = name
		self.job_queue = job_queue
		self.result_queue = result_queue
		self.options = options

	def run(self):
		while True:
			(cexp,gexp) = self.job_queue.get()
			if cexp is WorkerStopToken:
				# push the token back so sibling workers also see it
				self.job_queue.put((cexp,gexp))
				# print('worker {0} stop.'.format(self.name))
				break
			try:
				# a None coordinate means that dimension is not searched
				c, g = None, None
				if cexp != None:
					c = 2.0**cexp
				if gexp != None:
					g = 2.0**gexp
				rate = self.run_one(c,g)
				if rate is None: raise RuntimeError('get no rate')
			except:
				# we failed, let others do that and we just quit

				traceback.print_exception(sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2])

				self.job_queue.put((cexp,gexp))
				sys.stderr.write('worker {0} quit.\n'.format(self.name))
				break
			else:
				self.result_queue.put((self.name,cexp,gexp,rate))

	def get_cmd(self,c,g):
		# Build the svm-train command line for one (c, g) combination;
		# paths are quoted to survive spaces.
		options=self.options
		cmdline = '"' + options.svmtrain_pathname + '"'
		if options.grid_with_c:
			cmdline += ' -c {0} '.format(c)
		if options.grid_with_g:
			cmdline += ' -g {0} '.format(g)
		cmdline += ' -v {0} {1} {2} '.format\
			(options.fold,options.pass_through_string,'"' + options.dataset_pathname + '"')
		return cmdline
|
||||
|
||||
class LocalWorker(Worker):
    """Worker that runs svm-train as a subprocess on the local machine."""

    def run_one(self, c, g):
        """Run one cross-validation locally.

        Returns the cross-validation rate parsed from the line
        containing 'Cross', or None if no such line appears.
        """
        command = self.get_cmd(c, g)
        pipe = Popen(command, shell=True, stdout=PIPE, stderr=PIPE, stdin=PIPE).stdout
        for raw in pipe.readlines():
            if 'Cross' in str(raw):
                # Last token looks like '85.2%'; drop the trailing '%'.
                return float(raw.split()[-1][0:-1])
|
||||
|
||||
class SSHWorker(Worker):
    """Worker that runs svm-train on a remote host over ssh.

    Assumes the dataset directory is visible at the same path on the
    remote host (e.g. a shared filesystem), since it cd's into the
    local working directory remotely.
    """

    def __init__(self, name, job_queue, result_queue, host, options):
        Worker.__init__(self, name, job_queue, result_queue, options)
        self.host = host
        self.cwd = os.getcwd()

    def run_one(self, c, g):
        """Run one cross-validation on self.host; return the rate or None."""
        remote = 'ssh -x -t -t {0} "cd {1}; {2}"'.format(
            self.host, self.cwd, self.get_cmd(c, g))
        pipe = Popen(remote, shell=True, stdout=PIPE, stderr=PIPE, stdin=PIPE).stdout
        for raw in pipe.readlines():
            if 'Cross' in str(raw):
                # Last token looks like '85.2%'; drop the trailing '%'.
                return float(raw.split()[-1][0:-1])
|
||||
|
||||
class TelnetWorker(Worker):
    """Worker that runs svm-train on a remote host over telnet.

    NOTE(review): telnetlib's read_until()/write() require bytes on
    Python 3, but this class passes str (Python-2 style) — confirm the
    intended interpreter. Also note telnetlib was removed from the
    standard library in Python 3.13.
    """
    def __init__(self,name,job_queue,result_queue,host,username,password,options):
        Worker.__init__(self,name,job_queue,result_queue,options)
        self.host = host
        self.username = username
        self.password = password

    def run(self):
        # Log in first, then hand control to the generic Worker loop;
        # run_one() reuses the authenticated telnet session.
        import telnetlib
        self.tn = tn = telnetlib.Telnet(self.host)
        tn.read_until('login: ')
        tn.write(self.username + '\n')
        tn.read_until('Password: ')
        tn.write(self.password + '\n')

        # XXX: how to know whether login is successful?
        tn.read_until(self.username)
        #
        print('login ok', self.host)
        # Work from the same directory as the local process so relative
        # dataset paths resolve (assumes a shared filesystem).
        tn.write('cd '+os.getcwd()+'\n')
        Worker.run(self)
        tn.write('exit\n')

    def run_one(self,c,g):
        # Send the command over the session and wait for the line
        # starting with 'Cross' (the cross-validation result).
        cmdline = self.get_cmd(c,g)
        result = self.tn.write(cmdline+'\n')
        (idx,matchm,output) = self.tn.expect(['Cross.*\n'])
        for line in output.split('\n'):
            if str(line).find('Cross') != -1:
                # Last token looks like '85.2%'; drop the trailing '%'.
                return float(line.split()[-1][0:-1])
|
||||
|
||||
def find_parameters(dataset_pathname, options=''):
    """Grid-search (C, gamma) for svm-train via n-fold cross validation.

    Spawns telnet/ssh/local workers (configured by the module-level
    telnet_workers / ssh_workers / nr_local_worker variables), farms out
    one cross-validation per (log2c, log2g) pair, optionally plots
    progress with gnuplot, and writes per-job results to an output file.

    Returns (best_rate, best_param) where best_param maps 'c' and/or 'g'
    to the winning 2**log2 values.
    """

    def update_param(c,g,rate,best_c,best_g,best_rate,worker,resumed):
        # Accept a new best on a strictly higher rate; break ties by
        # preferring a smaller C at the same gamma.
        if (rate > best_rate) or (rate==best_rate and g==best_g and c<best_c):
            best_rate,best_c,best_g = rate,c,g
        stdout_str = '[{0}] {1} {2} (best '.format\
            (worker,' '.join(str(x) for x in [c,g] if x is not None),rate)
        output_str = ''
        if c != None:
            stdout_str += 'c={0}, '.format(2.0**best_c)
            output_str += 'log2c={0} '.format(c)
        if g != None:
            stdout_str += 'g={0}, '.format(2.0**best_g)
            output_str += 'log2g={0} '.format(g)
        stdout_str += 'rate={0})'.format(best_rate)
        print(stdout_str)
        # Results already read from a resume file are not re-written.
        if options.out_pathname and not resumed:
            output_str += 'rate={0}\n'.format(rate)
            result_file.write(output_str)
            result_file.flush()

        return best_c,best_g,best_rate

    options = GridOption(dataset_pathname, options);

    if options.gnuplot_pathname:
        gnuplot = Popen(options.gnuplot_pathname,stdin = PIPE,stdout=PIPE,stderr=PIPE).stdin
    else:
        gnuplot = None

    # put jobs in queue

    jobs,resumed_jobs = calculate_jobs(options)
    job_queue = Queue(0)
    result_queue = Queue(0)

    # Jobs recovered from a resume file go straight to the result queue.
    for (c,g) in resumed_jobs:
        result_queue.put(('resumed',c,g,resumed_jobs[(c,g)]))

    for line in jobs:
        for (c,g) in line:
            if (c,g) not in resumed_jobs:
                job_queue.put((c,g))

    # hack the queue to become a stack --
    # this is important when some thread
    # failed and re-put a job. If we still
    # use FIFO, the job will be put
    # into the end of the queue, and the graph
    # will only be updated in the end

    job_queue._put = job_queue.queue.appendleft

    # fire telnet workers

    if telnet_workers:
        nr_telnet_worker = len(telnet_workers)
        username = getpass.getuser()
        password = getpass.getpass()
        for host in telnet_workers:
            worker = TelnetWorker(host,job_queue,result_queue,
                     host,username,password,options)
            worker.start()

    # fire ssh workers

    if ssh_workers:
        for host in ssh_workers:
            worker = SSHWorker(host,job_queue,result_queue,host,options)
            worker.start()

    # fire local workers

    for i in range(nr_local_worker):
        worker = LocalWorker('local',job_queue,result_queue,options)
        worker.start()

    # gather results

    done_jobs = {}

    # Append when resuming so previously recorded results are kept.
    if options.out_pathname:
        if options.resume_pathname:
            result_file = open(options.out_pathname, 'a')
        else:
            result_file = open(options.out_pathname, 'w')


    db = []
    best_rate = -1
    best_c,best_g = None,None

    for (c,g) in resumed_jobs:
        rate = resumed_jobs[(c,g)]
        best_c,best_g,best_rate = update_param(c,g,rate,best_c,best_g,best_rate,'resumed',True)

    # Consume results in job order; results may arrive out of order, so
    # buffer them in done_jobs until the one we are waiting for appears.
    for line in jobs:
        for (c,g) in line:
            while (c,g) not in done_jobs:
                (worker,c1,g1,rate1) = result_queue.get()
                done_jobs[(c1,g1)] = rate1
                if (c1,g1) not in resumed_jobs:
                    best_c,best_g,best_rate = update_param(c1,g1,rate1,best_c,best_g,best_rate,worker,False)
            db.append((c,g,done_jobs[(c,g)]))
        # Redraw the contour after each completed resolution line.
        if gnuplot and options.grid_with_c and options.grid_with_g:
            redraw(db,[best_c, best_g, best_rate],gnuplot,options)
            redraw(db,[best_c, best_g, best_rate],gnuplot,options,True)


    if options.out_pathname:
        result_file.close()
    # One stop token suffices: each worker re-queues it before exiting.
    job_queue.put((WorkerStopToken,None))
    best_param, best_cg = {}, []
    if best_c != None:
        best_param['c'] = 2.0**best_c
        best_cg += [2.0**best_c]
    if best_g != None:
        best_param['g'] = 2.0**best_g
        best_cg += [2.0**best_g]
    print('{0} {1}'.format(' '.join(map(str,best_cg)), best_rate))

    return best_rate, best_param
|
||||
|
||||
|
||||
if __name__ == '__main__':

    def exit_with_help():
        # Print CLI usage and terminate with a non-zero status.
        print("""\
Usage: grid.py [grid_options] [svm_options] dataset

grid_options :
-log2c {begin,end,step | "null"} : set the range of c (default -5,15,2)
    begin,end,step -- c_range = 2^{begin,...,begin+k*step,...,end}
    "null" -- do not grid with c
-log2g {begin,end,step | "null"} : set the range of g (default 3,-15,-2)
    begin,end,step -- g_range = 2^{begin,...,begin+k*step,...,end}
    "null" -- do not grid with g
-v n : n-fold cross validation (default 5)
-svmtrain pathname : set svm executable path and name
-gnuplot {pathname | "null"} :
    pathname -- set gnuplot executable path and name
    "null" -- do not plot
-out {pathname | "null"} : (default dataset.out)
    pathname -- set output file path and name
    "null" -- do not output file
-png pathname : set graphic output file path and name (default dataset.png)
-resume [pathname] : resume the grid task using an existing output file (default pathname is dataset.out)
    This is experimental. Try this option only if some parameters have been checked for the SAME data.

svm_options : additional options for svm-train""")
        sys.exit(1)

    if len(sys.argv) < 2:
        exit_with_help()
    # The dataset is the last argument; everything before it is passed
    # to find_parameters as grid/svm options.
    dataset_pathname = sys.argv[-1]
    options = sys.argv[1:-1]
    try:
        find_parameters(dataset_pathname, options)
    except (IOError,ValueError) as e:
        sys.stderr.write(str(e) + '\n')
        sys.stderr.write('Try "grid.py" for more information.\n')
        sys.exit(1)
|
120
libsvm-3.36/tools/subset.py
Executable file
120
libsvm-3.36/tools/subset.py
Executable file
@@ -0,0 +1,120 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
import os, sys, math, random
|
||||
from collections import defaultdict
|
||||
|
||||
if sys.version_info[0] >= 3:
|
||||
xrange = range
|
||||
|
||||
def exit_with_help(argv):
    """Print usage information for subset.py and terminate with status 1."""
    usage = """\
Usage: {0} [options] dataset subset_size [output1] [output2]

This script randomly selects a subset of the dataset.

options:
-s method : method of selection (default 0)
    0 -- stratified selection (classification only)
    1 -- random selection

output1 : the subset (optional)
output2 : rest of the data (optional)
If output1 is omitted, the subset will be printed on the screen.""".format(argv[0])
    print(usage)
    sys.exit(1)
|
||||
|
||||
def process_options(argv):
    """Parse subset.py's command line.

    Returns (dataset, subset_size, method, subset_file, rest_file):
    method is 0 (stratified) or 1 (random); subset_file defaults to
    stdout and rest_file to None when the optional output paths are
    omitted.
    """
    argc = len(argv)
    if argc < 3:
        exit_with_help(argv)

    method = 0               # default: stratified selection
    subset_file = sys.stdout
    rest_file = None

    # Scan leading "-"-style options; stop at the first positional arg.
    i = 1
    while i < argc:
        if argv[i][0] != "-":
            break
        if argv[i] == "-s":
            i += 1
            method = int(argv[i])
            if method not in (0, 1):
                print("Unknown selection method {0}".format(method))
                exit_with_help(argv)
        i += 1

    dataset = argv[i]
    subset_size = int(argv[i + 1])
    if i + 2 < argc:
        subset_file = open(argv[i + 2], 'w')
    if i + 3 < argc:
        rest_file = open(argv[i + 3], 'w')

    return dataset, subset_size, method, subset_file, rest_file
|
||||
|
||||
def random_selection(dataset, subset_size):
    """Pick subset_size distinct line numbers uniformly at random.

    dataset : path to the data file (only its line count is used).
    Returns a sorted list of 0-based line indices.

    Fixes vs. original: the file handle is closed deterministically
    (was left to the GC), and the built-in range is used directly
    instead of relying on the module-level py2 `xrange` shim.
    """
    with open(dataset, 'r') as f:
        total = sum(1 for line in f)
    return sorted(random.sample(range(total), subset_size))
|
||||
|
||||
def stratified_selection(dataset, subset_size):
    """Pick subset_size line numbers, stratified by class label.

    dataset : path to a LIBSVM-format file (first token = label).
    Returns a sorted list of 0-based line indices, with each class
    represented proportionally (at least one instance per class).
    Exits with status -1 if per-class sampling is impossible
    (e.g. regression data or a subset smaller than the class count).

    Fixes vs. original: the file handle is closed deterministically
    (was left to the GC), and the built-in range is used directly
    instead of relying on the module-level py2 `xrange` shim.
    """
    with open(dataset) as f:
        labels = [line.split(None, 1)[0] for line in f]
    label_linenums = defaultdict(list)
    for i, label in enumerate(labels):
        label_linenums[label] += [i]

    l = len(labels)
    remaining = subset_size
    ret = []

    # classes with fewer data are sampled first; otherwise
    # some rare classes may not be selected
    for label in sorted(label_linenums, key=lambda x: len(label_linenums[x])):
        linenums = label_linenums[label]
        label_size = len(linenums)
        # at least one instance per class
        s = int(min(remaining, max(1, math.ceil(label_size*(float(subset_size)/l)))))
        if s == 0:
            sys.stderr.write('''\
Error: failed to have at least one instance per class
1. You may have regression data.
2. Your classification data is unbalanced or too small.
Please use -s 1.
''')
            sys.exit(-1)
        remaining -= s
        ret += [linenums[i] for i in random.sample(range(label_size), s)]
    return sorted(ret)
|
||||
|
||||
def main(argv=sys.argv):
    """Select a subset of a dataset and write it out.

    Parses argv, chooses line numbers via stratified (-s 0, default) or
    random (-s 1) selection, then copies the selected lines to
    subset_file and, if requested, the remaining lines to rest_file.
    Note: subset_file is closed unconditionally — including when it is
    sys.stdout (no output path given).
    """
    dataset, subset_size, method, subset_file, rest_file = process_options(argv)
    #uncomment the following line to fix the random seed
    #random.seed(0)
    selected_lines = []

    if method == 0:
        selected_lines = stratified_selection(dataset, subset_size)
    elif method == 1:
        selected_lines = random_selection(dataset, subset_size)

    #select instances based on selected_lines
    # selected_lines is sorted, so a single sequential pass suffices:
    # skip (and optionally save) the unselected lines before each
    # selected one, then emit the selected line itself.
    dataset = open(dataset,'r')
    prev_selected_linenum = -1
    for i in xrange(len(selected_lines)):
        for cnt in xrange(selected_lines[i]-prev_selected_linenum-1):
            line = dataset.readline()
            if rest_file:
                rest_file.write(line)
        subset_file.write(dataset.readline())
        prev_selected_linenum = selected_lines[i]
    subset_file.close()

    # Anything after the last selected line belongs to the "rest" file.
    if rest_file:
        for line in dataset:
            rest_file.write(line)
        rest_file.close()
    dataset.close()
|
||||
|
||||
if __name__ == '__main__':
    # Delegate to main() so the module can be imported without side effects.
    main(sys.argv)
|
||||
|
BIN
libsvm-3.36/windows/libsvmread.mexw64
Executable file
BIN
libsvm-3.36/windows/libsvmread.mexw64
Executable file
Binary file not shown.
BIN
libsvm-3.36/windows/libsvmwrite.mexw64
Executable file
BIN
libsvm-3.36/windows/libsvmwrite.mexw64
Executable file
Binary file not shown.
BIN
libsvm-3.36/windows/svmpredict.mexw64
Executable file
BIN
libsvm-3.36/windows/svmpredict.mexw64
Executable file
Binary file not shown.
BIN
libsvm-3.36/windows/svmtrain.mexw64
Executable file
BIN
libsvm-3.36/windows/svmtrain.mexw64
Executable file
Binary file not shown.
Reference in New Issue
Block a user