Use of org.apache.ignite.ml.nn.architecture.MLPArchitecture in project ignite by apache: the class MLPLocalTrainerExample, method main.
/**
 * Executes example.
 *
 * @param args Command line arguments, none required.
 */
public static void main(String[] args) {
    // IMPL NOTE based on MLPLocalTrainerTest#testXORRProp
    System.out.println(">>> Local multilayer perceptron example started.");
    Matrix xorInputs = new DenseLocalOnHeapMatrix(
        new double[][] { { 0.0, 0.0 }, { 0.0, 1.0 }, { 1.0, 0.0 }, { 1.0, 1.0 } },
        StorageConstants.ROW_STORAGE_MODE).transpose();
    System.out.println("\n>>> Input data:");
    Tracer.showAscii(xorInputs);
    Matrix xorOutputs = new DenseLocalOnHeapMatrix(
        new double[][] { { 0.0 }, { 1.0 }, { 1.0 }, { 0.0 } },
        StorageConstants.ROW_STORAGE_MODE).transpose();
    MLPArchitecture conf = new MLPArchitecture(2)
        .withAddedLayer(10, true, Activators.RELU)
        .withAddedLayer(1, false, Activators.SIGMOID);
    SimpleMLPLocalBatchTrainerInput trainerInput =
        new SimpleMLPLocalBatchTrainerInput(conf, new Random(1234L), xorInputs, xorOutputs, 4);
    System.out.println("\n>>> Perform training.");
    MultilayerPerceptron mlp = new MLPLocalBatchTrainer<>(LossFunctions.MSE,
        RPropUpdateCalculator::new, 0.0001, 16000).train(trainerInput);
    System.out.println("\n>>> Apply model.");
    Matrix predict = mlp.apply(xorInputs);
    System.out.println("\n>>> Predicted data:");
    Tracer.showAscii(predict);
    System.out.println("\n>>> Reference expected data:");
    Tracer.showAscii(xorOutputs);
    System.out.println("\n>>> Difference estimate: " +
        xorOutputs.getRow(0).minus(predict.getRow(0)).kNorm(2));
    System.out.println("\n>>> Local multilayer perceptron example completed.");
}
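A note on the transpose() calls above: both matrices are built row-per-sample and then flipped so that each column holds one sample, matching the input size of 2 declared by new MLPArchitecture(2). A minimal sketch of that layout, reusing only types already shown in the example:

// Each row is one XOR sample before the transpose...
Matrix rowPerSample = new DenseLocalOnHeapMatrix(
    new double[][] { { 0.0, 0.0 }, { 0.0, 1.0 }, { 1.0, 0.0 }, { 1.0, 1.0 } },
    StorageConstants.ROW_STORAGE_MODE);
// ...and each column is one sample after it: 2 rows (features) by 4 columns (samples).
Matrix colPerSample = rowPerSample.transpose();
Tracer.showAscii(colPerSample);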
Use of org.apache.ignite.ml.nn.architecture.MLPArchitecture in project ignite by apache: the class MLPLocalTrainerTest, method xorTest.
/**
 * Common method for testing 'XOR' with various updaters.
 * @param updaterSupplier Updater supplier.
 * @param <P> Updater parameters type.
 */
private <P> void xorTest(
    IgniteSupplier<ParameterUpdateCalculator<? super MultilayerPerceptron, P>> updaterSupplier) {
    Matrix xorInputs = new DenseLocalOnHeapMatrix(
        new double[][] { { 0.0, 0.0 }, { 0.0, 1.0 }, { 1.0, 0.0 }, { 1.0, 1.0 } },
        StorageConstants.ROW_STORAGE_MODE).transpose();
    Matrix xorOutputs = new DenseLocalOnHeapMatrix(
        new double[][] { { 0.0 }, { 1.0 }, { 1.0 }, { 0.0 } },
        StorageConstants.ROW_STORAGE_MODE).transpose();
    MLPArchitecture conf = new MLPArchitecture(2)
        .withAddedLayer(10, true, Activators.RELU)
        .withAddedLayer(1, false, Activators.SIGMOID);
    SimpleMLPLocalBatchTrainerInput trainerInput =
        new SimpleMLPLocalBatchTrainerInput(conf, new Random(123L), xorInputs, xorOutputs, 4);
    MultilayerPerceptron mlp = new MLPLocalBatchTrainer<>(LossFunctions.MSE,
        updaterSupplier, 0.0001, 16000).train(trainerInput);
    Matrix predict = mlp.apply(xorInputs);
    Tracer.showAscii(predict);
    X.println(xorOutputs.getRow(0).minus(predict.getRow(0)).kNorm(2) + "");
    TestUtils.checkIsInEpsilonNeighbourhood(xorOutputs.getRow(0), predict.getRow(0), 1E-1);
}
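For context, the IMPL NOTE in the example above references MLPLocalTrainerTest#testXORRProp; a concrete test presumably invokes this helper with a supplier for the updater under test, along these lines (a sketch, not the verbatim test body):

/** Tests 'XOR' training with the RProp updater. */
@Test
public void testXORRProp() {
    xorTest(RPropUpdateCalculator::new);
}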
Use of org.apache.ignite.ml.nn.architecture.MLPArchitecture in project ignite by apache: the class MLPTest, method setParamsFlattening.
/**
 * Test methods related to parameters flattening.
 */
@Test
public void setParamsFlattening() {
    int inputSize = 3;
    int firstLayerNeuronsCnt = 2;
    int secondLayerNeurons = 1;
    DenseLocalOnHeapVector paramsVector = new DenseLocalOnHeapVector(new double[] {
        1.0, 2.0, 3.0, 4.0, 5.0, 6.0, // First layer weight matrix.
        7.0, 8.0, // Second layer weight matrix.
        9.0 // Second layer biases.
    });
    DenseLocalOnHeapMatrix firstLayerWeights =
        new DenseLocalOnHeapMatrix(new double[][] { { 1.0, 2.0, 3.0 }, { 4.0, 5.0, 6.0 } });
    DenseLocalOnHeapMatrix secondLayerWeights =
        new DenseLocalOnHeapMatrix(new double[][] { { 7.0, 8.0 } });
    DenseLocalOnHeapVector secondLayerBiases = new DenseLocalOnHeapVector(new double[] { 9.0 });
    MLPArchitecture conf = new MLPArchitecture(inputSize)
        .withAddedLayer(firstLayerNeuronsCnt, false, Activators.SIGMOID)
        .withAddedLayer(secondLayerNeurons, true, Activators.SIGMOID);
    MultilayerPerceptron mlp = new MultilayerPerceptron(conf, new MLPConstInitializer(100, 200));
    mlp.setParameters(paramsVector);
    Assert.assertEquals(paramsVector, mlp.parameters());
    Assert.assertEquals(mlp.weights(1), firstLayerWeights);
    Assert.assertEquals(mlp.weights(2), secondLayerWeights);
    Assert.assertEquals(mlp.biases(2), secondLayerBiases);
}
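The expected layout can be counted straight from the architecture: the first layer is added with its bias flag set to false, so it contributes only a 2x3 weight matrix, while the second layer (bias flag true) contributes a 1x2 weight matrix plus one bias. A back-of-the-envelope check using only the sizes from the test:

// Why the flattened vector holds exactly 9 entries.
int firstLayerWeightCnt = 2 * 3;  // firstLayerNeuronsCnt x inputSize: entries 1.0 .. 6.0
int secondLayerWeightCnt = 1 * 2; // secondLayerNeurons x firstLayerNeuronsCnt: entries 7.0, 8.0
int secondLayerBiasCnt = 1;       // one bias per second-layer neuron: entry 9.0
assert firstLayerWeightCnt + secondLayerWeightCnt + secondLayerBiasCnt == 9;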
Use of org.apache.ignite.ml.nn.architecture.MLPArchitecture in project ignite by apache: the class MLPTest, method testXOR.
/**
 * Test that an MLP with parameters that should produce a function close to 'XOR' is indeed close to 'XOR' on the 'XOR' domain.
 */
@Test
public void testXOR() {
    MLPArchitecture conf = new MLPArchitecture(2)
        .withAddedLayer(2, true, Activators.SIGMOID)
        .withAddedLayer(1, true, Activators.SIGMOID);
    MultilayerPerceptron mlp = new MultilayerPerceptron(conf, new MLPConstInitializer(1, 2));
    mlp.setWeights(1, new DenseLocalOnHeapMatrix(new double[][] { { 20.0, 20.0 }, { -20.0, -20.0 } }));
    mlp.setBiases(1, new DenseLocalOnHeapVector(new double[] { -10.0, 30.0 }));
    mlp.setWeights(2, new DenseLocalOnHeapMatrix(new double[][] { { 20.0, 20.0 } }));
    mlp.setBiases(2, new DenseLocalOnHeapVector(new double[] { -30.0 }));
    Matrix input = new DenseLocalOnHeapMatrix(
        new double[][] { { 0.0, 0.0 }, { 0.0, 1.0 }, { 1.0, 0.0 }, { 1.0, 1.0 } }).transpose();
    Matrix predict = mlp.apply(input);
    Vector truth = new DenseLocalOnHeapVector(new double[] { 0.0, 1.0, 1.0, 0.0 });
    TestUtils.checkIsInEpsilonNeighbourhood(predict.getRow(0), truth, 1E-4);
}
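The hand-set parameters encode XOR as AND(OR, NAND): the first hidden neuron saturates toward OR of the inputs, the second toward NAND, and the output neuron toward their AND. A self-contained hand-check, assuming Activators.SIGMOID is the standard logistic function 1 / (1 + e^(-x)) (which the 1E-4 tolerance implies):

// Standalone sketch mirroring the weights set in the test above.
static double sigmoid(double x) {
    return 1.0 / (1.0 + Math.exp(-x));
}

static double xorByHand(double a, double b) {
    double h1 = sigmoid(20.0 * a + 20.0 * b - 10.0);  // saturates to OR(a, b)
    double h2 = sigmoid(-20.0 * a - 20.0 * b + 30.0); // saturates to NAND(a, b)
    return sigmoid(20.0 * h1 + 20.0 * h2 - 30.0);     // AND(OR, NAND) == XOR
}
// E.g. xorByHand(1.0, 1.0) is approximately 0.0 and xorByHand(1.0, 0.0) approximately 1.0.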
Use of org.apache.ignite.ml.nn.architecture.MLPArchitecture in project ignite by apache: the class MLPTest, method testStackedMLP.
/**
 * Test that a two-layer MLP is equivalent to its subparts stacked on each other.
 */
@Test
public void testStackedMLP() {
    int firstLayerNeuronsCnt = 3;
    int secondLayerNeuronsCnt = 2;
    MLPConstInitializer initer = new MLPConstInitializer(1, 2);
    MLPArchitecture conf = new MLPArchitecture(4)
        .withAddedLayer(firstLayerNeuronsCnt, false, Activators.SIGMOID)
        .withAddedLayer(secondLayerNeuronsCnt, false, Activators.SIGMOID);
    MultilayerPerceptron mlp = new MultilayerPerceptron(conf, initer);
    MLPArchitecture mlpLayer1Conf = new MLPArchitecture(4)
        .withAddedLayer(firstLayerNeuronsCnt, false, Activators.SIGMOID);
    MLPArchitecture mlpLayer2Conf = new MLPArchitecture(firstLayerNeuronsCnt)
        .withAddedLayer(secondLayerNeuronsCnt, false, Activators.SIGMOID);
    MultilayerPerceptron mlp1 = new MultilayerPerceptron(mlpLayer1Conf, initer);
    MultilayerPerceptron mlp2 = new MultilayerPerceptron(mlpLayer2Conf, initer);
    MultilayerPerceptron stackedMLP = mlp1.add(mlp2);
    Matrix predict = mlp.apply(
        new DenseLocalOnHeapMatrix(new double[][] { { 1, 2, 3, 4 } }).transpose());
    Matrix stackedPredict = stackedMLP.apply(
        new DenseLocalOnHeapMatrix(new double[][] { { 1, 2, 3, 4 } }).transpose());
    Assert.assertEquals(predict, stackedPredict);
}
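Since stacking is function composition and all three networks share the same constant initializer, the assertion should be equivalent to applying the subparts in sequence. A sketch of the same check restated in composition terms (reusing the objects built in the test above):

// Feeding mlp1's output into mlp2 should match the stacked network.
Matrix input = new DenseLocalOnHeapMatrix(new double[][] { { 1, 2, 3, 4 } }).transpose();
Assert.assertEquals(mlp2.apply(mlp1.apply(input)), stackedMLP.apply(input));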