Use of org.apache.ignite.ml.math.primitives.vector.impl.DenseVector in project ignite by apache.
The class LossFunctionsTest, method testL2.
/**
 * Tests the {@code L2} loss function.
 */
@Test
public void testL2() {
    IgniteDifferentiableVectorToDoubleFunction f = LossFunctions.L2.apply(new DenseVector(new double[] { 2.0, 1.0 }));

    assertNotNull(f);

    test(new double[] { 1.0, 3.0 }, f);
}
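For context, a minimal sketch (not from the Ignite test suite) of how the returned function might be used directly; it assumes IgniteDifferentiableVectorToDoubleFunction exposes apply(Vector) for the scalar loss value and differential(Vector) for its gradient, with the ground truth fixed by the vector passed to LossFunctions.L2:

    // Hedged sketch: same ground truth (2.0, 1.0) and prediction point (1.0, 3.0) as the test above.
    DenseVector truth = new DenseVector(new double[] { 2.0, 1.0 });
    DenseVector prediction = new DenseVector(new double[] { 1.0, 3.0 });

    IgniteDifferentiableVectorToDoubleFunction l2 = LossFunctions.L2.apply(truth);

    // Loss value at the prediction point (assumes apply(Vector) returns the scalar loss).
    double lossVal = l2.apply(prediction);

    // Gradient of the loss at the prediction point (assumes differential(Vector) is the gradient accessor).
    Vector grad = l2.differential(prediction);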
Use of org.apache.ignite.ml.math.primitives.vector.impl.DenseVector in project ignite by apache.
The class MLPTest, method testXOR.
/**
 * Test that an MLP with parameters that should produce a function close to 'XOR' is indeed close to 'XOR' on the 'XOR' domain.
 */
@Test
public void testXOR() {
    MLPArchitecture conf = new MLPArchitecture(2).
        withAddedLayer(2, true, Activators.SIGMOID).
        withAddedLayer(1, true, Activators.SIGMOID);

    MultilayerPerceptron mlp1 = new MultilayerPerceptron(conf, new MLPConstInitializer(1, 2));

    mlp1.setWeights(1, new DenseMatrix(new double[][] { { 20.0, 20.0 }, { -20.0, -20.0 } }));
    mlp1.setBiases(1, new DenseVector(new double[] { -10.0, 30.0 }));

    MultilayerPerceptron mlp2 = mlp1.setWeights(2, new DenseMatrix(new double[][] { { 20.0, 20.0 } }));
    MultilayerPerceptron mlp = mlp2.setBiases(2, new DenseVector(new double[] { -30.0 }));

    Matrix input = new DenseMatrix(new double[][] { { 0.0, 0.0 }, { 0.0, 1.0 }, { 1.0, 0.0 }, { 1.0, 1.0 } });
    Matrix predict = mlp.predict(input);
    Matrix truth = new DenseMatrix(new double[][] { { 0.0 }, { 1.0 }, { 1.0 }, { 0.0 } });

    TestUtils.checkIsInEpsilonNeighbourhood(predict.getRow(0), truth.getRow(0), 1E-4);
}
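The hand-picked weights act as saturated logic gates on the {0, 1} inputs: the first hidden neuron (weights 20, 20, bias -10) approximates OR, the second (weights -20, -20, bias 30) approximates NAND, and the output neuron (weights 20, 20, bias -30) approximates their AND, which is XOR. The test above only compares row 0; a minimal follow-up sketch (not part of the original test) that checks every row of the truth table, using only methods already shown plus Matrix.rowSize():

    // Hedged sketch: compare each predicted row against the corresponding 'XOR' truth row.
    for (int row = 0; row < truth.rowSize(); row++)
        TestUtils.checkIsInEpsilonNeighbourhood(predict.getRow(row), truth.getRow(row), 1E-4);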
Use of org.apache.ignite.ml.math.primitives.vector.impl.DenseVector in project ignite by apache.
The class MLPTest, method setParamsFlattening.
/**
 * Test methods related to parameters flattening.
 */
@Test
public void setParamsFlattening() {
    int inputSize = 3;
    int firstLayerNeuronsCnt = 2;
    int secondLayerNeurons = 1;

    DenseVector paramsVector = new DenseVector(new double[] {
        1.0, 2.0, 3.0, 4.0, 5.0, 6.0, // First layer weight matrix.
        7.0, 8.0, // Second layer weight matrix.
        9.0 // Second layer biases.
    });

    DenseMatrix firstLayerWeights = new DenseMatrix(new double[][] { { 1.0, 2.0, 3.0 }, { 4.0, 5.0, 6.0 } });
    DenseMatrix secondLayerWeights = new DenseMatrix(new double[][] { { 7.0, 8.0 } });
    DenseVector secondLayerBiases = new DenseVector(new double[] { 9.0 });

    MLPArchitecture conf = new MLPArchitecture(inputSize).
        withAddedLayer(firstLayerNeuronsCnt, false, Activators.SIGMOID).
        withAddedLayer(secondLayerNeurons, true, Activators.SIGMOID);

    MultilayerPerceptron mlp = new MultilayerPerceptron(conf, new MLPConstInitializer(100, 200));

    mlp.setParameters(paramsVector);
    Assert.assertEquals(paramsVector, mlp.parameters());
    Assert.assertEquals(mlp.weights(1), firstLayerWeights);
    Assert.assertEquals(mlp.weights(2), secondLayerWeights);
    Assert.assertEquals(mlp.biases(2), secondLayerBiases);
}
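The flattened layout can be sanity-checked by counting parameters: the first layer contributes 3 x 2 weights and no biases (its bias flag is false), the second layer contributes 2 x 1 weights plus 1 bias, for 9 values in total, matching the length of paramsVector. A minimal sketch of that check, reusing only names and methods from the test above:

    // Hedged sketch: expected number of flattened MLP parameters for the architecture above.
    int expectedParamsCnt =
        inputSize * firstLayerNeuronsCnt // First layer weights (no biases: bias flag is false).
        + firstLayerNeuronsCnt * secondLayerNeurons // Second layer weights.
        + secondLayerNeurons; // Second layer biases.

    Assert.assertEquals(expectedParamsCnt, paramsVector.size());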
Use of org.apache.ignite.ml.math.primitives.vector.impl.DenseVector in project ignite by apache.
The class StringEncoderPreprocessorTest, method testApply.
/**
 * Tests {@code apply()} method.
 */
@Test
public void testApply() {
    Vector[] data = new Vector[] {
        new DenseVector(new Serializable[] { "1", "Moscow", "A" }),
        new DenseVector(new Serializable[] { "2", "Moscow", "B" }),
        new DenseVector(new Serializable[] { "2", "Moscow", "B" })
    };

    Vectorizer<Integer, Vector, Integer, Double> vectorizer = new DummyVectorizer<>(0, 1, 2);

    StringEncoderPreprocessor<Integer, Vector> preprocessor = new StringEncoderPreprocessor<Integer, Vector>(
        new HashMap[] {
            new HashMap() {
                {
                    put("1", 1);
                    put("2", 0);
                }
            },
            new HashMap() {
                {
                    put("Moscow", 0);
                }
            },
            new HashMap() {
                {
                    put("A", 1);
                    put("B", 0);
                }
            }
        },
        vectorizer,
        new HashSet() {
            {
                add(0);
                add(1);
                add(2);
            }
        });

    double[][] postProcessedData = new double[][] {
        { 1.0, 0.0, 1.0 },
        { 0.0, 0.0, 0.0 },
        { 0.0, 0.0, 0.0 }
    };

    for (int i = 0; i < data.length; i++)
        assertArrayEquals(postProcessedData[i], preprocessor.apply(i, data[i]).features().asArray(), 1e-8);
}
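As a minimal follow-up sketch (not in the original test), the same preprocessor can encode a further raw row through the same apply(...).features().asArray() path; with the hard-coded dictionaries above, "2" maps to 0.0, "Moscow" to 0.0, and "A" to 1.0:

    // Hedged sketch: encode one more row with the preprocessor built above (key 3 is arbitrary here).
    Vector extraRow = new DenseVector(new Serializable[] { "2", "Moscow", "A" });
    assertArrayEquals(new double[] { 0.0, 0.0, 1.0 },
        preprocessor.apply(3, extraRow).features().asArray(), 1e-8);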
Use of org.apache.ignite.ml.math.primitives.vector.impl.DenseVector in project ignite by apache.
The class TargetEncoderPreprocessorTest, method testApply.
/**
 * Tests {@code apply()} method.
 */
@Test
public void testApply() {
    Vector[] data = new Vector[] {
        new DenseVector(new Serializable[] { "1", "Moscow", "A" }),
        new DenseVector(new Serializable[] { "2", "Moscow", "B" }),
        new DenseVector(new Serializable[] { "3", "Moscow", "B" })
    };

    Vectorizer<Integer, Vector, Integer, Double> vectorizer = new DummyVectorizer<>(0, 1, 2);

    TargetEncoderPreprocessor<Integer, Vector> preprocessor = new TargetEncoderPreprocessor<>(
        new TargetEncodingMeta[] {
            // Feature 0.
            new TargetEncodingMeta().withGlobalMean(0.5).withCategoryMean(new HashMap<String, Double>() {
                {
                    // Category "1" mean = 1.0.
                    put("1", 1.0);
                    // Category "2" mean = 0.0.
                    put("2", 0.0);
                }
            }),
            // Feature 1 (no category means, global mean only).
            new TargetEncodingMeta().withGlobalMean(0.1).withCategoryMean(new HashMap<String, Double>()),
            // Feature 2.
            new TargetEncodingMeta().withGlobalMean(0.1).withCategoryMean(new HashMap<String, Double>() {
                {
                    // Category "A" mean = 1.0.
                    put("A", 1.0);
                    // Category "B" mean = 2.0.
                    put("B", 2.0);
                }
            })
        },
        vectorizer,
        new HashSet<Integer>() {
            {
                add(0);
                add(1);
                add(2);
            }
        });

    double[][] postProcessedData = new double[][] {
        // "1" is in dict => category mean 1.0; "Moscow" not in dict => global mean 0.1; "A" is in dict => category mean 1.0.
        { 1.0, 0.1, 1.0 },
        // "2" is in dict => category mean 0.0; "Moscow" not in dict => global mean 0.1; "B" is in dict => category mean 2.0.
        { 0.0, 0.1, 2.0 },
        // "3" not in dict => global mean 0.5; "Moscow" not in dict => global mean 0.1; "B" is in dict => category mean 2.0.
        { 0.5, 0.1, 2.0 }
    };

    for (int i = 0; i < data.length; i++)
        assertArrayEquals(postProcessedData[i], preprocessor.apply(i, data[i]).features().asArray(), 1e-8);
}
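As with the string encoder, a minimal follow-up sketch (not in the original test) shows the fallback behavior described in the comments above: categories missing from a feature's dictionary fall back to that feature's global mean (0.5 for feature 0, 0.1 for features 1 and 2), while known categories keep their category mean:

    // Hedged sketch: a row with unseen categories "4" and "Omsk" but known category "A".
    Vector extraRow = new DenseVector(new Serializable[] { "4", "Omsk", "A" });
    assertArrayEquals(new double[] { 0.5, 0.1, 1.0 },
        preprocessor.apply(3, extraRow).features().asArray(), 1e-8);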