Use of org.apache.ignite.ml.math.primitives.matrix.Matrix in the Apache Ignite project.
The class MultilayerPerceptron, method differentiateNonlinearity.
/**
 * Differentiate the given non-linearity at each entry of a layer's linear output.
 *
 * @param linearOut Linear output of current layer.
 * @param nonlinearity Non-linearity of current layer.
 * @return Gradients matrix.
 */
private Matrix differentiateNonlinearity(Matrix linearOut, IgniteDifferentiableDoubleToDoubleFunction nonlinearity) {
    // Work on a copy so the caller's linear output is left untouched.
    Matrix grads = linearOut.copy();
    grads.map(nonlinearity::differential);
    return grads;
}
Use of org.apache.ignite.ml.math.primitives.matrix.Matrix in the Apache Ignite project.
The class ReplicatedVectorMatrix, method assignRow.
/**
 * {@inheritDoc}
 */
@Override
public Matrix assignRow(int row, Vector vec) {
    // Orientation decides which dimension comes from the replicated vector
    // and which from the replication count.
    int rowCnt = asCol ? vector.size() : replicationCnt;
    int colCnt = asCol ? replicationCnt : vector.size();
    // Number of replicated copies laid down along the replication axis.
    int copies = asCol ? colCnt : rowCnt;

    Matrix res = new DenseMatrix(rowCnt, colCnt);

    // Column orientation replicates column-by-column; row orientation row-by-row.
    IgniteBiConsumer<Integer, Vector> replicantAssigner = asCol ? res::assignColumn : res::assignRow;
    // The incoming vector always overwrites a row, regardless of orientation.
    IgniteBiConsumer<Integer, Vector> assigner = res::assignRow;

    assign(replicantAssigner, assigner, vector, vec, copies, row);

    return res;
}
Use of org.apache.ignite.ml.math.primitives.matrix.Matrix in the Apache Ignite project.
The class MultilayerPerceptron, method forwardPass.
/**
 * Perform forward pass and if needed write state of outputs of each layer.
 *
 * @param val Value to perform computation on.
 * @param state State object to write state into. NOTE(review): linear outputs are added to
 *      {@code state} unconditionally, even when {@code writeState} is {@code false} — confirm
 *      callers always pass a non-null state.
 * @param writeState Flag indicating need to write state.
 * @return Output of the last layer (activations after the final transformation).
 */
public Matrix forwardPass(Matrix val, MLPState state, boolean writeState) {
Matrix res = val;
// Delegate to the stacked lower perceptron first, if this MLP is stacked on one.
if (below != null)
res = below.forwardPass(val, state, writeState);
// Layer 0 is the input layer; transformation layers start at index 1.
for (int i = 1; i < architecture.layersCount(); i++) {
MLPLayer curLayer = layers.get(i - 1);
// Linear part: weights applied to the previous layer's output.
res = curLayer.weights.times(res);
TransformationLayerArchitecture layerCfg = this.architecture.transformationLayerArchitecture(i);
if (layerCfg.hasBias()) {
// Biases are replicated across all columns (one copy per input sample).
ReplicatedVectorMatrix biasesMatrix = new ReplicatedVectorMatrix(biases(i), res.columnSize(), true);
res = res.plus(biasesMatrix);
}
state.linearOutput.add(res);
// If we do not write state, we can overwrite result.
// (When writing state, copy first so the activation map below — which presumably
// mutates in place — does not clobber the stored linear output. TODO confirm
// Matrix.map mutates the receiver.)
if (writeState)
res = res.copy();
res = res.map(layerCfg.activationFunction());
state.activatorsOutput.add(res);
}
return res;
}
Use of org.apache.ignite.ml.math.primitives.matrix.Matrix in the Apache Ignite project.
The class MLPTest, method testXOR.
/**
 * Test that MLP with parameters that should produce function close to 'XOR' is close to 'XOR' on 'XOR' domain.
 */
@Test
public void testXOR() {
    // 2 inputs -> 2 hidden sigmoid neurons -> 1 sigmoid output, all with biases.
    MLPArchitecture conf = new MLPArchitecture(2).withAddedLayer(2, true, Activators.SIGMOID).withAddedLayer(1, true, Activators.SIGMOID);
    MultilayerPerceptron mlp1 = new MultilayerPerceptron(conf, new MLPConstInitializer(1, 2));
    // Hand-picked saturating weights/biases so the sigmoids approximate XOR almost exactly.
    mlp1.setWeights(1, new DenseMatrix(new double[][] { { 20.0, 20.0 }, { -20.0, -20.0 } }));
    mlp1.setBiases(1, new DenseVector(new double[] { -10.0, 30.0 }));
    MultilayerPerceptron mlp2 = mlp1.setWeights(2, new DenseMatrix(new double[][] { { 20.0, 20.0 } }));
    MultilayerPerceptron mlp = mlp2.setBiases(2, new DenseVector(new double[] { -30.0 }));
    // Full XOR truth table as input rows.
    Matrix input = new DenseMatrix(new double[][] { { 0.0, 0.0 }, { 0.0, 1.0 }, { 1.0, 0.0 }, { 1.0, 1.0 } });
    Matrix predict = mlp.predict(input);
    Matrix truth = new DenseMatrix(new double[][] { { 0.0 }, { 1.0 }, { 1.0 }, { 0.0 } });
    // Fix: the original asserted only row 0, leaving the other three XOR points unchecked.
    for (int i = 0; i < truth.rowSize(); i++)
        TestUtils.checkIsInEpsilonNeighbourhood(predict.getRow(i), truth.getRow(i), 1E-4);
}
Use of org.apache.ignite.ml.math.primitives.matrix.Matrix in the Apache Ignite project.
The class MLPTest, method testStackedMLP.
/**
 * Test that two layer MLP is equivalent to it's subparts stacked on each other.
 */
@Test
public void testStackedMLP() {
    int layer1Neurons = 3;
    int layer2Neurons = 2;
    MLPConstInitializer initer = new MLPConstInitializer(1, 2);

    // One monolithic two-layer network...
    MLPArchitecture conf = new MLPArchitecture(4).withAddedLayer(layer1Neurons, false, Activators.SIGMOID).withAddedLayer(layer2Neurons, false, Activators.SIGMOID);
    MultilayerPerceptron mlp = new MultilayerPerceptron(conf, initer);

    // ...versus the same network assembled from two single-layer parts.
    MLPArchitecture mlpLayer1Conf = new MLPArchitecture(4).withAddedLayer(layer1Neurons, false, Activators.SIGMOID);
    MLPArchitecture mlpLayer2Conf = new MLPArchitecture(layer1Neurons).withAddedLayer(layer2Neurons, false, Activators.SIGMOID);
    MultilayerPerceptron bottomPart = new MultilayerPerceptron(mlpLayer1Conf, initer);
    MultilayerPerceptron topPart = new MultilayerPerceptron(mlpLayer2Conf, initer);
    MultilayerPerceptron stackedMLP = bottomPart.add(topPart);

    // Same 4-feature sample fed to both; predictions must match exactly.
    Matrix predict = mlp.predict(new DenseMatrix(new double[][] { { 1 }, { 2 }, { 3 }, { 4 } }).transpose());
    Matrix stackedPredict = stackedMLP.predict(new DenseMatrix(new double[][] { { 1 }, { 2 }, { 3 }, { 4 } }).transpose());
    Assert.assertEquals(predict, stackedPredict);
}
Aggregations