Use of smile.validation.Accuracy in project smile by haifengl.
The class GAFeatureSelectionTest, method testLearn.
/**
 * Test of learn method, of class GAFeatureSelection.
 */
@Test
public void testLearn() {
    System.out.println("learn");
    int size = 100;
    int generation = 20;
    ClassifierTrainer<double[]> trainer = new LDA.Trainer();
    ClassificationMeasure measure = new Accuracy();
    DelimitedTextParser parser = new DelimitedTextParser();
    parser.setResponseIndex(new NominalAttribute("class"), 0);
    try {
        AttributeDataset train = parser.parse("USPS Train", smile.data.parser.IOUtils.getTestDataFile("usps/zip.train"));
        AttributeDataset test = parser.parse("USPS Test", smile.data.parser.IOUtils.getTestDataFile("usps/zip.test"));
        double[][] x = train.toArray(new double[train.size()][]);
        int[] y = train.toArray(new int[train.size()]);
        double[][] testx = test.toArray(new double[test.size()][]);
        int[] testy = test.toArray(new int[test.size()]);

        GAFeatureSelection instance = new GAFeatureSelection();
        BitString[] result = instance.learn(size, generation, trainer, measure, x, y, testx, testy);

        for (BitString bits : result) {
            System.out.format("%.2f%% %d ", 100 * bits.fitness(), Math.sum(bits.bits()));
            for (int i = 0; i < x[0].length; i++) {
                System.out.print(bits.bits()[i] + " ");
            }
            System.out.println();
        }

        assertTrue(result[result.length - 1].fitness() > 0.88);
    } catch (Exception ex) {
        System.err.println(ex);
    }
}
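For context, here is a minimal sketch (not part of the smile sources) of how the fittest bit string returned above might be used to keep only the selected columns before training a classifier; the subset helper and the variable names are hypothetical.

// Hypothetical helper: keep only the columns whose bit is set in a GA result.
static double[][] subset(double[][] x, int[] bits) {
    int p = 0;
    for (int b : bits) {
        p += b;    // number of selected features
    }
    double[][] xx = new double[x.length][p];
    for (int i = 0; i < x.length; i++) {
        for (int j = 0, l = 0; j < bits.length; j++) {
            if (bits[j] == 1) {
                xx[i][l++] = x[i][j];
            }
        }
    }
    return xx;
}

// Usage: train LDA on the features selected by the fittest individual.
BitString best = result[result.length - 1];
LDA lda = new LDA(subset(x, best.bits()), y);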
Use of smile.validation.Accuracy in project smile by haifengl.
The class SumSquaresRatioTest, method testLearn.
/**
 * Test of learn method, of class SumSquaresRatio.
 */
@Test
public void testLearn() {
    System.out.println("USPS");
    try {
        DelimitedTextParser parser = new DelimitedTextParser();
        parser.setResponseIndex(new NominalAttribute("class"), 0);
        AttributeDataset train = parser.parse("USPS Train", smile.data.parser.IOUtils.getTestDataFile("usps/zip.train"));
        AttributeDataset test = parser.parse("USPS Test", smile.data.parser.IOUtils.getTestDataFile("usps/zip.test"));
        double[][] x = train.toArray(new double[train.size()][]);
        int[] y = train.toArray(new int[train.size()]);
        double[][] testx = test.toArray(new double[test.size()][]);
        int[] testy = test.toArray(new int[test.size()]);

        // Rank the 256 USPS pixel features by their sum squares ratio.
        SumSquaresRatio ssr = new SumSquaresRatio();
        double[] score = ssr.rank(x, y);
        int[] index = QuickSort.sort(score);

        // Keep the p highest-scoring features. QuickSort.sort is ascending,
        // so the best features sit at the end of the index array.
        int p = 135;
        int n = x.length;
        double[][] xx = new double[n][p];
        for (int j = 0; j < p; j++) {
            for (int i = 0; i < n; i++) {
                xx[i][j] = x[i][index[255 - j]];
            }
        }

        int testn = testx.length;
        double[][] testxx = new double[testn][p];
        for (int j = 0; j < p; j++) {
            for (int i = 0; i < testn; i++) {
                testxx[i][j] = testx[i][index[255 - j]];
            }
        }

        // Train LDA on the reduced feature set and measure test accuracy.
        LDA lda = new LDA(xx, y);
        int[] prediction = new int[testn];
        for (int i = 0; i < testn; i++) {
            prediction[i] = lda.predict(testxx[i]);
        }

        double accuracy = new Accuracy().measure(testy, prediction);
        System.out.format("SSR %.2f%%%n", 100 * accuracy);
    } catch (Exception ex) {
        System.err.println(ex);
    }
}
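As a reminder of what the measure itself computes, here is a self-contained sketch (class name and data are hypothetical) of calling smile.validation.Accuracy directly: accuracy is simply the fraction of positions where truth and prediction agree.

import smile.validation.Accuracy;

public class AccuracyExample {
    public static void main(String[] args) {
        // Hypothetical ground truth and predictions.
        int[] truth      = {0, 1, 1, 2, 2, 2};
        int[] prediction = {0, 1, 2, 2, 2, 1};

        // Four of the six labels agree, so this prints 0.67.
        double accuracy = new Accuracy().measure(truth, prediction);
        System.out.format("accuracy = %.2f%n", accuracy);
    }
}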
Use of smile.validation.Accuracy in project smile by haifengl.
The class AdaBoost, method test.
/**
 * Test the model on a validation dataset.
 *
 * @param x the test data set.
 * @param y the test data response values.
 * @return accuracies with the first 1, 2, ..., T decision trees.
 */
public double[] test(double[][] x, int[] y) {
    int T = trees.length;
    double[] accuracy = new double[T];
    int n = x.length;
    int[] label = new int[n];
    Accuracy measure = new Accuracy();

    if (k == 2) {
        // Binary classification: accumulate the alpha-weighted tree outputs
        // and threshold the running score at zero.
        double[] prediction = new double[n];
        for (int i = 0; i < T; i++) {
            for (int j = 0; j < n; j++) {
                prediction[j] += alpha[i] * trees[i].predict(x[j]);
                label[j] = prediction[j] > 0 ? 1 : 0;
            }
            accuracy[i] = measure.measure(y, label);
        }
    } else {
        // Multi-class: accumulate alpha-weighted votes per class and take the argmax.
        double[][] prediction = new double[n][k];
        for (int i = 0; i < T; i++) {
            for (int j = 0; j < n; j++) {
                prediction[j][trees[i].predict(x[j])] += alpha[i];
                label[j] = Math.whichMax(prediction[j]);
            }
            accuracy[i] = measure.measure(y, label);
        }
    }

    return accuracy;
}
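Assuming model is an AdaBoost that has already been trained and testx/testy is a held-out set (all three names hypothetical), the per-size accuracies returned by test might be inspected like this to choose an ensemble size:

// accuracy[i] is the accuracy of the ensemble truncated to the first i + 1 trees.
double[] accuracy = model.test(testx, testy);
for (int i = 0; i < accuracy.length; i++) {
    System.out.format("%3d trees: %.2f%%%n", i + 1, 100 * accuracy[i]);
}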
Use of smile.validation.Accuracy in project smile by haifengl.
The class GradientTreeBoost, method test.
/**
 * Test the model on a validation dataset.
 *
 * @param x the test data set.
 * @param y the test data response values.
 * @return accuracies with the first 1, 2, ..., ntrees decision trees.
 */
public double[] test(double[][] x, int[] y) {
    double[] accuracy = new double[ntrees];
    int n = x.length;
    int[] label = new int[n];
    Accuracy measure = new Accuracy();

    if (k == 2) {
        // Binary classification: start from the intercept b, add the shrunken
        // output of each tree in turn, and threshold the running score at zero.
        double[] prediction = new double[n];
        Arrays.fill(prediction, b);
        for (int i = 0; i < ntrees; i++) {
            for (int j = 0; j < n; j++) {
                prediction[j] += shrinkage * trees[i].predict(x[j]);
                label[j] = prediction[j] > 0 ? 1 : 0;
            }
            accuracy[i] = measure.measure(y, label);
        }
    } else {
        // Multi-class: maintain one boosted score per class (one forest per class)
        // and predict the class with the largest score.
        double[][] prediction = new double[n][k];
        for (int i = 0; i < ntrees; i++) {
            for (int j = 0; j < n; j++) {
                for (int l = 0; l < k; l++) {
                    prediction[j][l] += shrinkage * forest[l][i].predict(x[j]);
                }
                label[j] = Math.whichMax(prediction[j]);
            }
            accuracy[i] = measure.measure(y, label);
        }
    }

    return accuracy;
}
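In the same spirit, a sketch (model, testx, and testy are hypothetical) of using the curve returned by GradientTreeBoost.test to pick the smallest ensemble whose accuracy is within half a percentage point of the best observed:

double[] accuracy = model.test(testx, testy);

// Find the best accuracy along the curve.
double best = 0.0;
for (double a : accuracy) {
    best = java.lang.Math.max(best, a);
}

// Smallest number of trees whose accuracy is within 0.005 of the best.
int ntrees = accuracy.length;
for (int i = 0; i < accuracy.length; i++) {
    if (accuracy[i] >= best - 0.005) {
        ntrees = i + 1;
        break;
    }
}
System.out.format("best %.2f%%, reached within 0.5%% by %d trees%n", 100 * best, ntrees);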
Use of smile.validation.Accuracy in project smile by haifengl.
The class RandomForest, method test.
/**
 * Test the model on a validation dataset.
 *
 * @param x the test data set.
 * @param y the test data response values.
 * @return accuracies with the first 1, 2, ..., T decision trees.
 */
public double[] test(double[][] x, int[] y) {
    int T = trees.size();
    double[] accuracy = new double[T];
    int n = x.length;
    int[] label = new int[n];
    int[][] prediction = new int[n][k];
    Accuracy measure = new Accuracy();

    // Add the trees one at a time, counting each tree's vote per class,
    // and record the ensemble accuracy after every addition.
    for (int i = 0; i < T; i++) {
        for (int j = 0; j < n; j++) {
            prediction[j][trees.get(i).tree.predict(x[j])]++;
            label[j] = Math.whichMax(prediction[j]);
        }
        accuracy[i] = measure.measure(y, label);
    }

    return accuracy;
}
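The last entry of the array returned by test uses all of the trees, so it should closely match a direct evaluation of the forest with Accuracy; a sketch with hypothetical forest, testx, and testy:

double[] curve = forest.test(testx, testy);

// Predict with the full forest and score it directly; barring tie-breaking
// differences, this should agree with the last point of the curve.
int[] prediction = new int[testx.length];
for (int i = 0; i < testx.length; i++) {
    prediction[i] = forest.predict(testx[i]);
}
double full = new Accuracy().measure(testy, prediction);
System.out.format("curve[last] = %.4f, direct = %.4f%n", curve[curve.length - 1], full);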