Use of smile.data.AttributeDataset in project smile by haifengl: class NeuralNetworkTest, method testSegmentLMS.
/**
 * Test of learn method, of class NeuralNetwork.
 */
@Test
public void testSegmentLMS() {
    System.out.println("Segment LMS");
    ArffParser parser = new ArffParser();
    parser.setResponseIndex(19);
    try {
        AttributeDataset train = parser.parse(smile.data.parser.IOUtils.getTestDataFile("weka/segment-challenge.arff"));
        AttributeDataset test = parser.parse(smile.data.parser.IOUtils.getTestDataFile("weka/segment-test.arff"));
        double[][] x = train.toArray(new double[0][]);
        int[] y = train.toArray(new int[0]);
        double[][] testx = test.toArray(new double[0][]);
        int[] testy = test.toArray(new int[0]);
        int p = x[0].length;
        double[] mu = Math.colMin(x);
        double[] sd = Math.colMax(x);
        for (int i = 0; i < x.length; i++) {
            for (int j = 0; j < p; j++) {
                x[i][j] = (x[i][j] - mu[j]) / sd[j];
            }
        }
        for (int i = 0; i < testx.length; i++) {
            for (int j = 0; j < p; j++) {
                testx[i][j] = (testx[i][j] - mu[j]) / sd[j];
            }
        }
        NeuralNetwork net = new NeuralNetwork(NeuralNetwork.ErrorFunction.LEAST_MEAN_SQUARES, NeuralNetwork.ActivationFunction.LOGISTIC_SIGMOID, x[0].length, 30, Math.max(y) + 1);
        for (int j = 0; j < 30; j++) {
            net.learn(x, y);
        }
        int error = 0;
        for (int i = 0; i < testx.length; i++) {
            if (net.predict(testx[i]) != testy[i]) {
                error++;
            }
        }
        System.out.format("Segment error rate = %.2f%%%n", 100.0 * error / testx.length);
    } catch (Exception ex) {
        System.err.println(ex);
    }
}
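The key preprocessing step above is the column-wise scaling of the features, applied to the test set with statistics computed on the training set. Below is a minimal plain-Java sketch of that same shift-and-divide scaling, written without smile's Math.colMin/colMax helpers; the class and method names (ScalingSketch, scaleColumns) are illustrative only and not part of smile.

import java.util.Arrays;

public class ScalingSketch {
    /**
     * Applies the same scaling as the test above: subtract each column's
     * training minimum and divide by that column's training maximum.
     * Hypothetical helper, shown for illustration only.
     */
    static void scaleColumns(double[][] train, double[][] test) {
        int p = train[0].length;
        double[] min = new double[p];
        double[] max = new double[p];
        Arrays.fill(min, Double.POSITIVE_INFINITY);
        Arrays.fill(max, Double.NEGATIVE_INFINITY);
        for (double[] row : train) {
            for (int j = 0; j < p; j++) {
                min[j] = Math.min(min[j], row[j]);
                max[j] = Math.max(max[j], row[j]);
            }
        }
        // The held-out data goes through the same transformation,
        // using only statistics of the training data.
        for (double[][] data : new double[][][] {train, test}) {
            for (double[] row : data) {
                for (int j = 0; j < p; j++) {
                    row[j] = (row[j] - min[j]) / max[j];
                }
            }
        }
    }
}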
Use of smile.data.AttributeDataset in project smile by haifengl: class DecisionTreeTest, method testIris.
/**
 * Test of learn method, of class DecisionTree.
 */
@Test
public void testIris() {
    System.out.println("Iris");
    ArffParser arffParser = new ArffParser();
    arffParser.setResponseIndex(4);
    try {
        AttributeDataset iris = arffParser.parse(smile.data.parser.IOUtils.getTestDataFile("weka/iris.arff"));
        double[][] x = iris.toArray(new double[iris.size()][]);
        int[] y = iris.toArray(new int[iris.size()]);
        int n = x.length;
        LOOCV loocv = new LOOCV(n);
        int error = 0;
        for (int i = 0; i < n; i++) {
            double[][] trainx = Math.slice(x, loocv.train[i]);
            int[] trainy = Math.slice(y, loocv.train[i]);
            DecisionTree tree = new DecisionTree(iris.attributes(), trainx, trainy, 4);
            if (y[loocv.test[i]] != tree.predict(x[loocv.test[i]]))
                error++;
        }
        System.out.println("Decision Tree error = " + error);
        assertEquals(7, error);
    } catch (Exception ex) {
        System.err.println(ex);
    }
}
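The loop above relies on smile's LOOCV class to supply leave-one-out train and test indices. As a minimal sketch of what such fold construction looks like, assuming fold i holds out sample i and trains on the remaining n - 1 samples, the plain-Java class below (LoocvSketch, a hypothetical name) builds the same kind of index arrays.

/** Minimal sketch of leave-one-out fold construction (illustrative only). */
public class LoocvSketch {
    final int[][] train;  // train[i] = indices of all samples except i
    final int[] test;     // test[i] = i, the single held-out sample

    LoocvSketch(int n) {
        train = new int[n][n - 1];
        test = new int[n];
        for (int i = 0; i < n; i++) {
            test[i] = i;
            int k = 0;
            for (int j = 0; j < n; j++) {
                if (j != i) {
                    train[i][k++] = j;
                }
            }
        }
    }
}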
Use of smile.data.AttributeDataset in project smile by haifengl: class DecisionTreeTest, method testUSPS.
/**
 * Test of learn method, of class DecisionTree.
 */
@Test
public void testUSPS() {
    System.out.println("USPS");
    DelimitedTextParser parser = new DelimitedTextParser();
    parser.setResponseIndex(new NominalAttribute("class"), 0);
    try {
        AttributeDataset train = parser.parse("USPS Train", smile.data.parser.IOUtils.getTestDataFile("usps/zip.train"));
        AttributeDataset test = parser.parse("USPS Test", smile.data.parser.IOUtils.getTestDataFile("usps/zip.test"));
        double[][] x = train.toArray(new double[train.size()][]);
        int[] y = train.toArray(new int[train.size()]);
        double[][] testx = test.toArray(new double[test.size()][]);
        int[] testy = test.toArray(new int[test.size()]);
        DecisionTree tree = new DecisionTree(x, y, 350, DecisionTree.SplitRule.ENTROPY);
        int error = 0;
        for (int i = 0; i < testx.length; i++) {
            if (tree.predict(testx[i]) != testy[i]) {
                error++;
            }
        }
        System.out.format("USPS error rate = %.2f%%%n", 100.0 * error / testx.length);
        assertEquals(328, error);
    } catch (Exception ex) {
        System.err.println(ex);
    }
}
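The tree above is grown with DecisionTree.SplitRule.ENTROPY, i.e. splits are scored by information gain. As a reminder of the quantity being reduced at each split, the sketch below computes the Shannon entropy of a label array in plain Java; it is an illustrative standalone helper, not smile's internal implementation.

public class EntropySketch {
    /** Shannon entropy (in nats) of a nominal label array with values 0..k-1. */
    static double entropy(int[] labels, int k) {
        double[] count = new double[k];
        for (int label : labels) {
            count[label]++;
        }
        double h = 0.0;
        for (double c : count) {
            if (c > 0) {
                double p = c / labels.length;
                h -= p * Math.log(p);
            }
        }
        return h;
    }

    public static void main(String[] args) {
        // A pure node has entropy 0; a 50/50 node has entropy ln 2.
        System.out.println(entropy(new int[] {0, 0, 0, 0}, 2));
        System.out.println(entropy(new int[] {0, 0, 1, 1}, 2));
    }
}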
Use of smile.data.AttributeDataset in project smile by haifengl: class DecisionTreeTest, method testUSPSNominal.
/**
 * Test of learn method, of class DecisionTree.
 */
@Test
public void testUSPSNominal() {
    System.out.println("USPS nominal");
    DelimitedTextParser parser = new DelimitedTextParser();
    parser.setResponseIndex(new NominalAttribute("class"), 0);
    try {
        AttributeDataset train = parser.parse("USPS Train", smile.data.parser.IOUtils.getTestDataFile("usps/zip.train"));
        AttributeDataset test = parser.parse("USPS Test", smile.data.parser.IOUtils.getTestDataFile("usps/zip.test"));
        double[][] x = train.toArray(new double[train.size()][]);
        int[] y = train.toArray(new int[train.size()]);
        double[][] testx = test.toArray(new double[test.size()][]);
        int[] testy = test.toArray(new int[test.size()]);
        for (double[] xi : x) {
            for (int i = 0; i < xi.length; i++) {
                xi[i] = Math.round(255 * (xi[i] + 1) / 2);
            }
        }
        for (double[] xi : testx) {
            for (int i = 0; i < xi.length; i++) {
                xi[i] = Math.round(127 + 127 * xi[i]);
            }
        }
        Attribute[] attributes = new Attribute[256];
        String[] values = new String[attributes.length];
        for (int i = 0; i < attributes.length; i++) {
            values[i] = String.valueOf(i);
        }
        for (int i = 0; i < attributes.length; i++) {
            attributes[i] = new NominalAttribute("V" + i, values);
        }
        DecisionTree tree = new DecisionTree(attributes, x, y, 350, 2, DecisionTree.SplitRule.ENTROPY);
        int error = 0;
        for (int i = 0; i < testx.length; i++) {
            if (tree.predict(testx[i]) != testy[i]) {
                error++;
            }
        }
        System.out.format("USPS error rate = %.2f%%%n", 100.0 * error / testx.length);
        double[] importance = tree.importance();
        int[] index = QuickSort.sort(importance);
        for (int i = importance.length; i-- > 0; ) {
            System.out.format("%s importance is %.4f%n", train.attributes()[index[i]], importance[i]);
        }
        assertEquals(324, error);
    } catch (Exception ex) {
        System.err.println(ex);
    }
}
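The QuickSort.sort call above returns the permutation of feature indices that orders the importance values, which the loop then walks from most to least important. Below is a plain-Java sketch of the same index-tracked ranking; unlike the call above it leaves the importance array untouched, and the names ImportanceSketch and sortByImportance are illustrative only.

import java.util.Comparator;
import java.util.stream.IntStream;

public class ImportanceSketch {
    /** Returns feature indices ordered by ascending importance (illustrative only). */
    static int[] sortByImportance(double[] importance) {
        return IntStream.range(0, importance.length)
                .boxed()
                .sorted(Comparator.comparingDouble(i -> importance[i]))
                .mapToInt(Integer::intValue)
                .toArray();
    }

    public static void main(String[] args) {
        double[] importance = {0.2, 0.7, 0.1};
        int[] index = sortByImportance(importance);
        // Walk the ranking from the end so the most important feature prints first.
        for (int i = index.length; i-- > 0; ) {
            System.out.format("feature V%d importance %.4f%n", index[i], importance[index[i]]);
        }
    }
}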
Use of smile.data.AttributeDataset in project smile by haifengl: class FLDTest, method testPredict.
/**
 * Test of predict method, of class FLD.
 */
@Test
public void testPredict() {
    System.out.println("IRIS");
    ArffParser arffParser = new ArffParser();
    arffParser.setResponseIndex(4);
    try {
        AttributeDataset iris = arffParser.parse(smile.data.parser.IOUtils.getTestDataFile("weka/iris.arff"));
        double[][] x = iris.toArray(new double[iris.size()][]);
        int[] y = iris.toArray(new int[iris.size()]);
        int n = x.length;
        LOOCV loocv = new LOOCV(n);
        int error = 0;
        for (int i = 0; i < n; i++) {
            double[][] trainx = Math.slice(x, loocv.train[i]);
            int[] trainy = Math.slice(y, loocv.train[i]);
            FLD fisher = new FLD(trainx, trainy);
            if (y[loocv.test[i]] != fisher.predict(x[loocv.test[i]]))
                error++;
        }
        System.out.println("FLD error = " + error);
        assertEquals(5, error);
    } catch (Exception ex) {
        System.err.println(ex);
    }
}
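Both this test and the iris decision-tree test use Math.slice to pull out the training rows and labels selected by each leave-one-out fold. The plain-Java sketch below shows the same kind of row selection; the helpers sliceRows and sliceLabels are hypothetical names, and whether smile copies the row arrays or shares references is not addressed here.

public class SliceSketch {
    /** Selects the rows of x given by index, in order (shares row arrays; illustrative only). */
    static double[][] sliceRows(double[][] x, int[] index) {
        double[][] out = new double[index.length][];
        for (int i = 0; i < index.length; i++) {
            out[i] = x[index[i]];
        }
        return out;
    }

    /** Same selection for a label vector. */
    static int[] sliceLabels(int[] y, int[] index) {
        int[] out = new int[index.length];
        for (int i = 0; i < index.length; i++) {
            out[i] = y[index[i]];
        }
        return out;
    }
}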