Use of smile.math.kernel.GaussianKernel in the project smile by haifengl.
From the class SVMDemo, method learn.
@Override
public double[][] learn(double[] x, double[] y) {
    // Parse and validate the Gaussian kernel width γ; must be strictly positive.
    // (Fixed mis-encoded "ˠ" in the dialog text to the intended "γ".)
    try {
        gamma = Double.parseDouble(gammaField.getText().trim());
        if (gamma <= 0) {
            JOptionPane.showMessageDialog(this, "Invalid γ: " + gamma, "Error", JOptionPane.ERROR_MESSAGE);
            return null;
        }
    } catch (Exception ex) {
        JOptionPane.showMessageDialog(this, "Invalid γ: " + gammaField.getText(), "Error", JOptionPane.ERROR_MESSAGE);
        return null;
    }

    // Parse and validate the soft-margin penalty C; must be non-negative.
    try {
        C = Double.parseDouble(cField.getText().trim());
        if (C < 0) {
            JOptionPane.showMessageDialog(this, "Invalid C: " + C, "Error", JOptionPane.ERROR_MESSAGE);
            return null;
        }
    } catch (Exception ex) {
        JOptionPane.showMessageDialog(this, "Invalid C: " + cField.getText(), "Error", JOptionPane.ERROR_MESSAGE);
        return null;
    }

    double[][] data = dataset[datasetIndex].toArray(new double[dataset[datasetIndex].size()][]);
    int[] label = dataset[datasetIndex].toArray(new int[dataset[datasetIndex].size()]);

    // Train a Gaussian-kernel SVM on the currently selected dataset.
    SVM<double[]> svm = new SVM<>(new GaussianKernel(gamma), C);
    svm.learn(data, label);
    svm.finish();

    // BUG FIX: predictions must be stored separately from the ground truth.
    // The previous code overwrote label[i] with svm.predict(data[i]) and then
    // evaluated error(label, label), comparing the predictions against
    // themselves, so the reported training error was always 0.
    int[] pred = new int[label.length];
    for (int i = 0; i < label.length; i++) {
        pred[i] = svm.predict(data[i]);
    }
    double trainError = error(pred, label);
    System.out.format("training error = %.2f%%\n", 100 * trainError);

    // Evaluate the decision function on the (x, y) grid for contour plotting.
    double[][] z = new double[y.length][x.length];
    for (int i = 0; i < y.length; i++) {
        for (int j = 0; j < x.length; j++) {
            double[] p = { x[j], y[i] };
            z[i][j] = svm.predict(p);
        }
    }
    return z;
}
Use of smile.math.kernel.GaussianKernel in the project smile by haifengl.
From the class KPCATest, method testKPCAK.
/**
 * Test of learn method, of class KPCA.
 */
@Test
public void testKPCAK() {
    System.out.println("learn k");
    ArffParser arffParser = new ArffParser();
    arffParser.setResponseIndex(4);
    try {
        AttributeDataset iris = arffParser.parse(smile.data.parser.IOUtils.getTestDataFile("weka/iris.arff"));
        double[][] x = iris.toArray(new double[iris.size()][]);
        // Kernel PCA on iris with a Gaussian kernel, keeping 29 principal components.
        // Use the diamond operator instead of the raw KPCA type (fixes an
        // unchecked-conversion warning without changing behavior).
        KPCA<double[]> kpca = new KPCA<>(x, new GaussianKernel(Math.sqrt(2.5)), 29);
        assertTrue(Math.equals(latent, kpca.getVariances(), 1E-3));
        // Projecting the training data must reproduce the stored coordinates.
        double[][] points = kpca.project(x);
        points[0] = kpca.project(x[0]);
        assertTrue(Math.equals(points, kpca.getCoordinates(), 1E-7));
        /*
        for (int j = 0; j < points[0].length; j++) {
        double sign = Math.signum(points[0][j] / scores[0][j]);
        for (int i = 0; i < points.length; i++) {
        points[i][j] *= sign;
        }
        }
        assertTrue(Math.equals(scores, points, 1E-1));
        */
    } catch (Exception ex) {
        // BUG FIX: the exception was previously only printed, so any parsing or
        // KPCA failure let the test pass silently. Fail the test instead.
        throw new AssertionError("testKPCAK failed unexpectedly", ex);
    }
}
Use of smile.math.kernel.GaussianKernel in the project smile by haifengl.
From the class GaussianProcessRegressionTest, method testKin8nm.
/**
 * Test of learn method, of class GaussianProcessRegression.
 * Runs 10-fold cross validation on a 4000-sample subset of kin8nm and
 * compares full GP regression against a 30-center sparse approximation.
 */
@Test
public void testKin8nm() {
    System.out.println("kin8nm");
    ArffParser parser = new ArffParser();
    parser.setResponseIndex(8);
    try {
        AttributeDataset data = parser.parse(smile.data.parser.IOUtils.getTestDataFile("weka/regression/kin8nm.arff"));
        double[] y = data.toArray(new double[data.size()]);
        double[][] x = data.toArray(new double[data.size()][]);

        // Randomly subsample 4000 instances to keep the test fast.
        int[] perm = Math.permutate(x.length);
        double[][] datax = new double[4000][];
        double[] datay = new double[datax.length];
        for (int i = 0; i < datax.length; i++) {
            datax[i] = x[perm[i]];
            datay[i] = y[perm[i]];
        }

        int n = datax.length;
        int k = 10;
        CrossValidation cv = new CrossValidation(n, k);
        double rss = 0.0;
        double sparseRSS30 = 0.0;
        for (int i = 0; i < k; i++) {
            double[][] trainx = Math.slice(datax, cv.train[i]);
            double[] trainy = Math.slice(datay, cv.train[i]);
            double[][] testx = Math.slice(datax, cv.test[i]);
            double[] testy = Math.slice(datay, cv.test[i]);

            // Full GP regression with a fixed kernel width.
            GaussianProcessRegression<double[]> rkhs = new GaussianProcessRegression<>(trainx, trainy, new GaussianKernel(34.97), 0.1);

            // Sparse GP: use 30 k-means centroids as inducing points, with the
            // kernel width set to the mean pairwise centroid distance heuristic.
            KMeans kmeans = new KMeans(trainx, 30, 10);
            double[][] centers = kmeans.centroids();
            double r0 = 0.0;
            for (int l = 0; l < centers.length; l++) {
                for (int j = 0; j < l; j++) {
                    r0 += Math.distance(centers[l], centers[j]);
                }
            }
            r0 /= (2 * centers.length);
            System.out.println("Kernel width = " + r0);
            GaussianProcessRegression<double[]> sparse30 = new GaussianProcessRegression<>(trainx, trainy, centers, new GaussianKernel(r0), 0.1);

            // Accumulate squared residuals on the held-out fold; the folds
            // partition all n samples, so rss / n below is the CV MSE.
            for (int j = 0; j < testx.length; j++) {
                double r = testy[j] - rkhs.predict(testx[j]);
                rss += r * r;
                r = testy[j] - sparse30.predict(testx[j]);
                sparseRSS30 += r * r;
            }
        }
        System.out.println("Regular 10-CV MSE = " + rss / n);
        System.out.println("Sparse (30) 10-CV MSE = " + sparseRSS30 / n);
    } catch (Exception ex) {
        // BUG FIX: the exception was previously only printed, letting the test
        // pass silently on any failure. Fail the test instead.
        throw new AssertionError("testKin8nm failed unexpectedly", ex);
    }
}
Use of smile.math.kernel.GaussianKernel in the project smile by haifengl.
From the class GaussianProcessRegressionTest, method testCPU.
/**
 * Test of learn method, of class GaussianProcessRegression.
 * Runs 10-fold cross validation on the CPU dataset and compares full GP
 * regression against 30-center sparse and Nystrom approximations.
 */
@Test
public void testCPU() {
    System.out.println("CPU");
    ArffParser parser = new ArffParser();
    parser.setResponseIndex(6);
    try {
        AttributeDataset data = parser.parse(smile.data.parser.IOUtils.getTestDataFile("weka/cpu.arff"));
        double[] datay = data.toArray(new double[data.size()]);
        double[][] datax = data.toArray(new double[data.size()][]);
        // Standardize features in place so the fixed kernel width is meaningful.
        Math.standardize(datax);

        int n = datax.length;
        int k = 10;
        CrossValidation cv = new CrossValidation(n, k);
        double rss = 0.0;
        double sparseRSS30 = 0.0;
        double nystromRSS30 = 0.0;
        for (int i = 0; i < k; i++) {
            double[][] trainx = Math.slice(datax, cv.train[i]);
            double[] trainy = Math.slice(datay, cv.train[i]);
            double[][] testx = Math.slice(datax, cv.test[i]);
            double[] testy = Math.slice(datay, cv.test[i]);

            // Full GP regression with a fixed kernel width.
            GaussianProcessRegression<double[]> rkhs = new GaussianProcessRegression<>(trainx, trainy, new GaussianKernel(47.02), 0.1);

            // 30 k-means centroids serve as inducing points; the kernel width
            // comes from the mean pairwise centroid distance heuristic.
            KMeans kmeans = new KMeans(trainx, 30, 10);
            double[][] centers = kmeans.centroids();
            double r0 = 0.0;
            for (int l = 0; l < centers.length; l++) {
                for (int j = 0; j < l; j++) {
                    r0 += Math.distance(centers[l], centers[j]);
                }
            }
            r0 /= (2 * centers.length);
            System.out.println("Kernel width = " + r0);
            GaussianProcessRegression<double[]> sparse30 = new GaussianProcessRegression<>(trainx, trainy, centers, new GaussianKernel(r0), 0.1);
            GaussianProcessRegression<double[]> nystrom30 = new GaussianProcessRegression<>(trainx, trainy, centers, new GaussianKernel(r0), 0.1, true);

            // Accumulate squared residuals on the held-out fold for all three models.
            for (int j = 0; j < testx.length; j++) {
                double r = testy[j] - rkhs.predict(testx[j]);
                rss += r * r;
                r = testy[j] - sparse30.predict(testx[j]);
                sparseRSS30 += r * r;
                r = testy[j] - nystrom30.predict(testx[j]);
                nystromRSS30 += r * r;
            }
        }
        System.out.println("Regular 10-CV MSE = " + rss / n);
        System.out.println("Sparse (30) 10-CV MSE = " + sparseRSS30 / n);
        System.out.println("Nystrom (30) 10-CV MSE = " + nystromRSS30 / n);
    } catch (Exception ex) {
        // BUG FIX: the exception was previously only printed, letting the test
        // pass silently on any failure. Fail the test instead.
        throw new AssertionError("testCPU failed unexpectedly", ex);
    }
}
Use of smile.math.kernel.GaussianKernel in the project smile by haifengl.
From the class GaussianProcessRegressionTest, method testLearn.
/**
 * Test of learn method, of class RKHSRegression.
 * Leave-one-out cross validation of Gaussian-kernel GP regression on the
 * classic Longley macroeconomic dataset; prints the resulting MSE.
 */
@Test
public void testLearn() {
    System.out.println("learn");
    double[][] longley = {
        { 234.289, 235.6, 159.0, 107.608, 1947, 60.323 },
        { 259.426, 232.5, 145.6, 108.632, 1948, 61.122 },
        { 258.054, 368.2, 161.6, 109.773, 1949, 60.171 },
        { 284.599, 335.1, 165.0, 110.929, 1950, 61.187 },
        { 328.975, 209.9, 309.9, 112.075, 1951, 63.221 },
        { 346.999, 193.2, 359.4, 113.270, 1952, 63.639 },
        { 365.385, 187.0, 354.7, 115.094, 1953, 64.989 },
        { 363.112, 357.8, 335.0, 116.219, 1954, 63.761 },
        { 397.469, 290.4, 304.8, 117.388, 1955, 66.019 },
        { 419.180, 282.2, 285.7, 118.734, 1956, 67.857 },
        { 442.769, 293.6, 279.8, 120.445, 1957, 68.169 },
        { 444.546, 468.1, 263.7, 121.950, 1958, 66.513 },
        { 482.704, 381.3, 255.2, 123.366, 1959, 68.655 },
        { 502.601, 393.1, 251.4, 125.368, 1960, 69.564 },
        { 518.173, 480.6, 257.2, 127.852, 1961, 69.331 },
        { 554.894, 400.7, 282.7, 130.081, 1962, 70.551 }
    };
    double[] y = { 83.0, 88.5, 88.2, 89.5, 96.2, 98.1, 99.0, 100.0, 101.2, 104.6, 108.4, 110.8, 112.6, 114.2, 115.7, 116.9 };

    // Standardize features in place so the fixed kernel width applies uniformly.
    Math.standardize(longley);

    int n = longley.length;
    LOOCV loocv = new LOOCV(n);
    double sumSquaredError = 0.0;
    for (int i = 0; i < n; i++) {
        // Fit on all samples except the held-out one.
        double[][] trainx = Math.slice(longley, loocv.train[i]);
        double[] trainy = Math.slice(y, loocv.train[i]);
        GaussianProcessRegression<double[]> model = new GaussianProcessRegression<>(trainx, trainy, new GaussianKernel(8.0), 0.2);

        // Accumulate the squared residual on the held-out sample.
        int heldOut = loocv.test[i];
        double residual = y[heldOut] - model.predict(longley[heldOut]);
        sumSquaredError += residual * residual;
    }
    System.out.println("MSE = " + sumSquaredError / n);
}
Aggregations