Example usage of smile.validation.LOOCV in the smile project by haifengl: class RegressionTreeTest, method testLongley.
/**
 * Leave-one-out cross validation of {@link RegressionTree#predict} on the
 * classic Longley macroeconomic data set (16 observations, 6 predictors).
 * Asserts that the LOOCV mean squared error matches the known reference value.
 */
@Test
public void testLongley() {
    System.out.println("longley");
    double[][] longley = { { 234.289, 235.6, 159.0, 107.608, 1947, 60.323 }, { 259.426, 232.5, 145.6, 108.632, 1948, 61.122 }, { 258.054, 368.2, 161.6, 109.773, 1949, 60.171 }, { 284.599, 335.1, 165.0, 110.929, 1950, 61.187 }, { 328.975, 209.9, 309.9, 112.075, 1951, 63.221 }, { 346.999, 193.2, 359.4, 113.270, 1952, 63.639 }, { 365.385, 187.0, 354.7, 115.094, 1953, 64.989 }, { 363.112, 357.8, 335.0, 116.219, 1954, 63.761 }, { 397.469, 290.4, 304.8, 117.388, 1955, 66.019 }, { 419.180, 282.2, 285.7, 118.734, 1956, 67.857 }, { 442.769, 293.6, 279.8, 120.445, 1957, 68.169 }, { 444.546, 468.1, 263.7, 121.950, 1958, 66.513 }, { 482.704, 381.3, 255.2, 123.366, 1959, 68.655 }, { 502.601, 393.1, 251.4, 125.368, 1960, 69.564 }, { 518.173, 480.6, 257.2, 127.852, 1961, 69.331 }, { 554.894, 400.7, 282.7, 130.081, 1962, 70.551 } };
    double[] y = { 83.0, 88.5, 88.2, 89.5, 96.2, 98.1, 99.0, 100.0, 101.2, 104.6, 108.4, 110.8, 112.6, 114.2, 115.7, 116.9 };
    int n = longley.length;
    LOOCV loocv = new LOOCV(n);
    double rss = 0.0;
    for (int fold = 0; fold < n; fold++) {
        // Fit on every row except the held-out one for this fold.
        double[][] foldx = Math.slice(longley, loocv.train[fold]);
        double[] foldy = Math.slice(y, loocv.train[fold]);
        // 3 appears to be the tree size limit — TODO confirm against the RegressionTree constructor.
        RegressionTree tree = new RegressionTree(foldx, foldy, 3);
        int holdout = loocv.test[fold];
        double residual = y[holdout] - tree.predict(longley[holdout]);
        rss += residual * residual;
    }
    System.out.println("MSE = " + rss / n);
    assertEquals(41.933087445771115, rss / n, 1E-4);
}
Example usage of smile.validation.LOOCV in the smile project by haifengl: class AdaBoostTest, method testIris.
/**
 * Leave-one-out cross validation of {@link AdaBoost} on the iris data set,
 * collapsed to a binary problem (class 0 versus all other classes).
 * Expects zero misclassifications over the full LOOCV sweep.
 */
@Test
public void testIris() {
    System.out.println("Iris");
    ArffParser arffParser = new ArffParser();
    arffParser.setResponseIndex(4);  // column 4 of iris.arff is the class label
    try {
        AttributeDataset iris = arffParser.parse(smile.data.parser.IOUtils.getTestDataFile("weka/iris.arff"));
        double[][] x = iris.toArray(new double[iris.size()][]);
        int[] y = iris.toArray(new int[iris.size()]);
        // Collapse the 3-class labels to binary: class 0 stays 0, everything else becomes 1.
        for (int i = 0; i < y.length; i++) {
            if (y[i] != 0)
                y[i] = 1;
        }
        int n = x.length;
        LOOCV loocv = new LOOCV(n);
        int error = 0;
        for (int i = 0; i < n; i++) {
            double[][] trainx = Math.slice(x, loocv.train[i]);
            int[] trainy = Math.slice(y, loocv.train[i]);
            // 200 presumably is the ensemble size — TODO confirm against the AdaBoost constructor.
            AdaBoost forest = new AdaBoost(iris.attributes(), trainx, trainy, 200);
            if (y[loocv.test[i]] != forest.predict(x[loocv.test[i]]))
                error++;
        }
        System.out.println("AdaBoost error = " + error);
        assertEquals(0, error);
    } catch (Exception ex) {
        // Previously the exception was only printed to stderr, letting the test
        // pass silently when the data file was missing or unparsable. Fail loudly;
        // AssertionError(Object) adopts a Throwable argument as its cause.
        throw new AssertionError(ex);
    }
}
Example usage of smile.validation.LOOCV in the smile project by haifengl: class GaussianProcessRegressionTest, method testLearn.
/**
 * Leave-one-out cross validation of {@link GaussianProcessRegression} with a
 * Gaussian kernel (bandwidth 8.0, noise 0.2) on the standardized Longley data
 * set. Prints the resulting mean squared error; no assertion is made on it.
 * (The original Javadoc named "RKHSRegression", which does not match the
 * class under test.)
 */
@Test
public void testLearn() {
    System.out.println("learn");
    double[][] longley = { { 234.289, 235.6, 159.0, 107.608, 1947, 60.323 }, { 259.426, 232.5, 145.6, 108.632, 1948, 61.122 }, { 258.054, 368.2, 161.6, 109.773, 1949, 60.171 }, { 284.599, 335.1, 165.0, 110.929, 1950, 61.187 }, { 328.975, 209.9, 309.9, 112.075, 1951, 63.221 }, { 346.999, 193.2, 359.4, 113.270, 1952, 63.639 }, { 365.385, 187.0, 354.7, 115.094, 1953, 64.989 }, { 363.112, 357.8, 335.0, 116.219, 1954, 63.761 }, { 397.469, 290.4, 304.8, 117.388, 1955, 66.019 }, { 419.180, 282.2, 285.7, 118.734, 1956, 67.857 }, { 442.769, 293.6, 279.8, 120.445, 1957, 68.169 }, { 444.546, 468.1, 263.7, 121.950, 1958, 66.513 }, { 482.704, 381.3, 255.2, 123.366, 1959, 68.655 }, { 502.601, 393.1, 251.4, 125.368, 1960, 69.564 }, { 518.173, 480.6, 257.2, 127.852, 1961, 69.331 }, { 554.894, 400.7, 282.7, 130.081, 1962, 70.551 } };
    double[] y = { 83.0, 88.5, 88.2, 89.5, 96.2, 98.1, 99.0, 100.0, 101.2, 104.6, 108.4, 110.8, 112.6, 114.2, 115.7, 116.9 };
    // Standardize the predictors in place before fitting.
    Math.standardize(longley);
    int n = longley.length;
    LOOCV loocv = new LOOCV(n);
    double rss = 0.0;
    for (int fold = 0; fold < n; fold++) {
        int holdout = loocv.test[fold];
        GaussianProcessRegression<double[]> gpr = new GaussianProcessRegression<>(
                Math.slice(longley, loocv.train[fold]),
                Math.slice(y, loocv.train[fold]),
                new GaussianKernel(8.0), 0.2);
        double residual = y[holdout] - gpr.predict(longley[holdout]);
        rss += residual * residual;
    }
    System.out.println("MSE = " + rss / n);
}
Example usage of smile.validation.LOOCV in the smile project by haifengl: class QDATest, method testLearn.
/**
 * Leave-one-out cross validation of {@link QDA} on the iris data set.
 * Expects exactly 4 misclassified samples over the full LOOCV sweep.
 */
@Test
public void testLearn() {
    System.out.println("learn");
    ArffParser arffParser = new ArffParser();
    arffParser.setResponseIndex(4);  // column 4 of iris.arff is the class label
    try {
        AttributeDataset iris = arffParser.parse(smile.data.parser.IOUtils.getTestDataFile("weka/iris.arff"));
        double[][] x = iris.toArray(new double[iris.size()][]);
        int[] y = iris.toArray(new int[iris.size()]);
        int n = x.length;
        LOOCV loocv = new LOOCV(n);
        int error = 0;
        double[] posteriori = new double[3];  // filled by predict(); retained for the debug printout below
        for (int i = 0; i < n; i++) {
            double[][] trainx = Math.slice(x, loocv.train[i]);
            int[] trainy = Math.slice(y, loocv.train[i]);
            QDA qda = new QDA(trainx, trainy);
            if (y[loocv.test[i]] != qda.predict(x[loocv.test[i]], posteriori))
                error++;
            //System.out.println(posteriori[0]+"\t"+posteriori[1]+"\t"+posteriori[2]);
        }
        System.out.println("QDA error = " + error);
        assertEquals(4, error);
    } catch (Exception ex) {
        // Previously the exception was only printed to stderr, letting the test
        // pass silently when the data file was missing or unparsable. Fail loudly;
        // AssertionError(Object) adopts a Throwable argument as its cause.
        throw new AssertionError(ex);
    }
}
Example usage of smile.validation.LOOCV in the smile project by haifengl: class RDATest, method testLearn.
/**
 * Leave-one-out cross validation of {@link RDA} on the iris data set, sweeping
 * the regularization factor alpha over 0.0, 0.1, ..., 1.0 and asserting the
 * known LOOCV error count for each value.
 * <p>
 * The original code repeated the identical LOOCV loop eleven times; it is now
 * data-driven via {@link #loocvError}. (The alpha = 0.8 copy additionally
 * requested a posteriori array whose only use was commented-out debug output;
 * that variant is dropped — the predicted label is unchanged.)
 */
@Test
public void testLearn() {
    System.out.println("learn");
    ArffParser arffParser = new ArffParser();
    arffParser.setResponseIndex(4);  // column 4 of iris.arff is the class label
    try {
        AttributeDataset iris = arffParser.parse(smile.data.parser.IOUtils.getTestDataFile("weka/iris.arff"));
        double[][] x = iris.toArray(new double[iris.size()][]);
        int[] y = iris.toArray(new int[iris.size()]);
        LOOCV loocv = new LOOCV(x.length);
        // Regularization factors and the LOOCV error count expected for each.
        double[] alphas =  { 0.0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.0 };
        int[] expected =   {  22,  24,  20,  19,  16,  12,  11,   9,   6,   3,   4 };
        for (int k = 0; k < alphas.length; k++) {
            int error = loocvError(x, y, loocv, alphas[k]);
            System.out.println("RDA (" + alphas[k] + ") error = " + error);
            assertEquals(expected[k], error);
        }
    } catch (Exception ex) {
        // Previously the exception was only printed to stderr, letting the test
        // pass silently when the data file was missing or unparsable. Fail loudly;
        // AssertionError(Object) adopts a Throwable argument as its cause.
        throw new AssertionError(ex);
    }
}

/**
 * Counts leave-one-out misclassifications of RDA at the given regularization factor.
 *
 * @param x     the full predictor matrix.
 * @param y     the full label vector.
 * @param loocv the precomputed leave-one-out train/test index splits.
 * @param alpha the RDA regularization factor.
 * @return the number of held-out samples misclassified over all folds.
 */
private int loocvError(double[][] x, int[] y, LOOCV loocv, double alpha) {
    int error = 0;
    for (int i = 0; i < x.length; i++) {
        double[][] trainx = Math.slice(x, loocv.train[i]);
        int[] trainy = Math.slice(y, loocv.train[i]);
        RDA rda = new RDA(trainx, trainy, alpha);
        if (y[loocv.test[i]] != rda.predict(x[loocv.test[i]]))
            error++;
    }
    return error;
}
Aggregations