Example 6 with DataNormalization

Use of org.nd4j.linalg.dataset.api.preprocessor.DataNormalization in project deeplearning4j by deeplearning4j.

From the class GradientCheckTests, method testGradientMLP2LayerIrisL1L2Simple.

@Test
public void testGradientMLP2LayerIrisL1L2Simple() {
    //As above (testGradientMLP2LayerIrisSimple()) but with L2, L1, and both L2/L1 applied
    //Need to run gradient through updater, so that L2 can be applied
    String[] activFns = { "sigmoid", "tanh" };
    //If true: run some backprop steps first
    boolean[] characteristic = { false, true };
    LossFunction[] lossFunctions = { LossFunction.MCXENT, LossFunction.MSE };
    //i.e., lossFunctions[i] used with outputActivations[i] here
    String[] outputActivations = { "softmax", "tanh" };
    DataNormalization scaler = new NormalizerMinMaxScaler();
    DataSetIterator iter = new IrisDataSetIterator(150, 150);
    scaler.fit(iter);
    iter.setPreProcessor(scaler);
    DataSet ds = iter.next();
    INDArray input = ds.getFeatureMatrix();
    INDArray labels = ds.getLabels();
    //use l2vals[i] with l1vals[i]
    double[] l2vals = { 0.4, 0.0, 0.4, 0.4 };
    double[] l1vals = { 0.0, 0.0, 0.5, 0.0 };
    double[] biasL2 = { 0.0, 0.0, 0.0, 0.2 };
    double[] biasL1 = { 0.0, 0.0, 0.6, 0.0 };
    for (String afn : activFns) {
        for (boolean doLearningFirst : characteristic) {
            for (int i = 0; i < lossFunctions.length; i++) {
                for (int k = 0; k < l2vals.length; k++) {
                    LossFunction lf = lossFunctions[i];
                    String outputActivation = outputActivations[i];
                    double l2 = l2vals[k];
                    double l1 = l1vals[k];
                    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                                    .regularization(true).l2(l2).l1(l1)
                                    .l2Bias(biasL2[k]).l1Bias(biasL1[k])
                                    .optimizationAlgo(OptimizationAlgorithm.CONJUGATE_GRADIENT)
                                    .seed(12345L).list()
                                    .layer(0, new DenseLayer.Builder().nIn(4).nOut(3)
                                                    .weightInit(WeightInit.DISTRIBUTION)
                                                    .dist(new NormalDistribution(0, 1))
                                                    .updater(Updater.NONE).activation(afn).build())
                                    .layer(1, new OutputLayer.Builder(lf).nIn(3).nOut(3)
                                                    .weightInit(WeightInit.DISTRIBUTION)
                                                    .dist(new NormalDistribution(0, 1))
                                                    .updater(Updater.NONE).activation(outputActivation).build())
                                    .pretrain(false).backprop(true).build();
                    MultiLayerNetwork mln = new MultiLayerNetwork(conf);
                    mln.init();
                    if (doLearningFirst) {
                        //Run a number of iterations of learning
                        mln.setInput(ds.getFeatures());
                        mln.setLabels(ds.getLabels());
                        mln.computeGradientAndScore();
                        double scoreBefore = mln.score();
                        for (int j = 0; j < 10; j++) mln.fit(ds);
                        mln.computeGradientAndScore();
                        double scoreAfter = mln.score();
                        //Can't test in 'characteristic mode of operation' if not learning
                        String msg = "testGradMLP2LayerIrisSimple() - score did not (sufficiently) decrease during learning - activationFn=" + afn + ", lossFn=" + lf + ", outputActivation=" + outputActivation + ", doLearningFirst=" + doLearningFirst + ", l2=" + l2 + ", l1=" + l1 + " (before=" + scoreBefore + ", scoreAfter=" + scoreAfter + ")";
                        assertTrue(msg, scoreAfter < 0.8 * scoreBefore);
                    }
                    if (PRINT_RESULTS) {
                        System.out.println("testGradientMLP2LayerIrisSimpleRandom() - activationFn=" + afn + ", lossFn=" + lf + ", outputActivation=" + outputActivation + ", doLearningFirst=" + doLearningFirst + ", l2=" + l2 + ", l1=" + l1);
                        for (int j = 0; j < mln.getnLayers(); j++) System.out.println("Layer " + j + " # params: " + mln.getLayer(j).numParams());
                    }
                    boolean gradOK = GradientCheckUtil.checkGradients(mln, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR, DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, input, labels);
                    String msg = "testGradMLP2LayerIrisSimple() - activationFn=" + afn + ", lossFn=" + lf + ", outputActivation=" + outputActivation + ", doLearningFirst=" + doLearningFirst + ", l2=" + l2 + ", l1=" + l1;
                    assertTrue(msg, gradOK);
                }
            }
        }
    }
}
Also used: NormalizerMinMaxScaler (org.nd4j.linalg.dataset.api.preprocessor.NormalizerMinMaxScaler), IrisDataSetIterator (org.deeplearning4j.datasets.iterator.impl.IrisDataSetIterator), DataSet (org.nd4j.linalg.dataset.DataSet), NeuralNetConfiguration (org.deeplearning4j.nn.conf.NeuralNetConfiguration), DataNormalization (org.nd4j.linalg.dataset.api.preprocessor.DataNormalization), MultiLayerConfiguration (org.deeplearning4j.nn.conf.MultiLayerConfiguration), INDArray (org.nd4j.linalg.api.ndarray.INDArray), NormalDistribution (org.deeplearning4j.nn.conf.distribution.NormalDistribution), LossFunction (org.nd4j.linalg.lossfunctions.LossFunctions.LossFunction), MultiLayerNetwork (org.deeplearning4j.nn.multilayer.MultiLayerNetwork), DataSetIterator (org.nd4j.linalg.dataset.api.iterator.DataSetIterator), Test (org.junit.Test)
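
The interesting part of this test's setup is the normalizer wiring: the scaler is fitted on the iterator in a first pass, then installed as a preprocessor so every batch the iterator emits comes out scaled. A minimal standalone sketch of that pattern follows (the class name is ours; [0, 1] is NormalizerMinMaxScaler's default target range):

import org.deeplearning4j.datasets.iterator.impl.IrisDataSetIterator;
import org.nd4j.linalg.dataset.DataSet;
import org.nd4j.linalg.dataset.api.iterator.DataSetIterator;
import org.nd4j.linalg.dataset.api.preprocessor.DataNormalization;
import org.nd4j.linalg.dataset.api.preprocessor.NormalizerMinMaxScaler;

public class MinMaxPreprocessorSketch {
    public static void main(String[] args) {
        // Full iris set in a single batch of 150 examples
        DataSetIterator iter = new IrisDataSetIterator(150, 150);

        // First pass: collect per-feature min/max statistics
        DataNormalization scaler = new NormalizerMinMaxScaler();
        scaler.fit(iter);

        // Installed as a preprocessor, the scaler rescales every
        // batch the iterator emits to the default [0, 1] range
        iter.setPreProcessor(scaler);
        iter.reset();
        DataSet scaled = iter.next();
        System.out.println(scaled.getFeatures().minNumber()); // ~0.0
        System.out.println(scaled.getFeatures().maxNumber()); // ~1.0
    }
}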

Example 7 with DataNormalization

Use of org.nd4j.linalg.dataset.api.preprocessor.DataNormalization in project deeplearning4j by deeplearning4j.

From the class RBMTests, method testIrisRectifiedHidden.

@Test
public void testIrisRectifiedHidden() {
    IrisDataFetcher fetcher = new IrisDataFetcher();
    fetcher.fetch(150);
    DataNormalization norm = new NormalizerStandardize();
    DataSet d = fetcher.next();
    norm.fit(d);
    norm.transform(d);
    INDArray params = Nd4j.create(1, 4 * 3 + 4 + 3);
    RBM rbm = getRBMLayer(4, 3, HiddenUnit.RECTIFIED, VisibleUnit.LINEAR, params, true, false, 1, LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD);
    rbm.fit(d.getFeatureMatrix());
}
Also used: DataNormalization (org.nd4j.linalg.dataset.api.preprocessor.DataNormalization), INDArray (org.nd4j.linalg.api.ndarray.INDArray), DataSet (org.nd4j.linalg.dataset.DataSet), IrisDataFetcher (org.deeplearning4j.datasets.fetchers.IrisDataFetcher), NormalizerStandardize (org.nd4j.linalg.dataset.api.preprocessor.NormalizerStandardize), Test (org.junit.Test)
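
Here the normalizer is applied to a single DataSet in place rather than through an iterator: fit collects per-feature statistics, transform applies them. A hedged sketch of the same round trip, extended with revert to undo the scaling (the class name and printed values are illustrative):

import org.deeplearning4j.datasets.fetchers.IrisDataFetcher;
import org.nd4j.linalg.dataset.DataSet;
import org.nd4j.linalg.dataset.api.preprocessor.DataNormalization;
import org.nd4j.linalg.dataset.api.preprocessor.NormalizerStandardize;

public class StandardizeRoundTripSketch {
    public static void main(String[] args) {
        IrisDataFetcher fetcher = new IrisDataFetcher();
        fetcher.fetch(150);
        DataSet d = fetcher.next();

        DataNormalization norm = new NormalizerStandardize();
        norm.fit(d);        // per-feature mean and std from this DataSet
        norm.transform(d);  // in place: features now have mean ~0, std ~1
        System.out.println(d.getFeatures().meanNumber()); // ~0.0

        norm.revert(d);     // undo the standardization, restoring raw values
    }
}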

Example 8 with DataNormalization

Use of org.nd4j.linalg.dataset.api.preprocessor.DataNormalization in project deeplearning4j by deeplearning4j.

From the class RBMTests, method testIrisGaussianHidden.

@Test
public void testIrisGaussianHidden() {
    IrisDataFetcher fetcher = new IrisDataFetcher();
    fetcher.fetch(150);
    DataNormalization norm = new NormalizerStandardize();
    DataSet d = fetcher.next();
    norm.fit(d);
    norm.transform(d);
    INDArray params = Nd4j.create(1, 4 * 3 + 4 + 3);
    RBM rbm = getRBMLayer(4, 3, HiddenUnit.GAUSSIAN, VisibleUnit.GAUSSIAN, params, true, false, 1, LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD);
    rbm.fit(d.getFeatureMatrix());
}
Also used: DataNormalization (org.nd4j.linalg.dataset.api.preprocessor.DataNormalization), INDArray (org.nd4j.linalg.api.ndarray.INDArray), DataSet (org.nd4j.linalg.dataset.DataSet), IrisDataFetcher (org.deeplearning4j.datasets.fetchers.IrisDataFetcher), NormalizerStandardize (org.nd4j.linalg.dataset.api.preprocessor.NormalizerStandardize), Test (org.junit.Test)
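
The Gaussian-unit variant above uses identical normalization; only the RBM's unit types change. One related knob worth noting: by default NormalizerStandardize touches features only, and fitLabel(true) makes it track and transform label statistics too, which matters for regression targets (it would be meaningless for iris's one-hot labels). A hedged sketch with synthetic regression data (all names and numbers here are illustrative, not from the source):

import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.dataset.DataSet;
import org.nd4j.linalg.dataset.api.preprocessor.NormalizerStandardize;
import org.nd4j.linalg.factory.Nd4j;

public class LabelNormalizationSketch {
    public static void main(String[] args) {
        // Synthetic regression data: 100 rows, 4 features, 1 continuous target
        INDArray features = Nd4j.rand(100, 4).muli(50).addi(10);
        INDArray labels = Nd4j.rand(100, 1).muli(1000);
        DataSet data = new DataSet(features, labels);

        NormalizerStandardize norm = new NormalizerStandardize();
        norm.fitLabel(true);   // also track label mean/std (off by default)
        norm.fit(data);
        norm.transform(data);  // features and labels standardized in place
    }
}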

Example 9 with DataNormalization

Use of org.nd4j.linalg.dataset.api.preprocessor.DataNormalization in project deeplearning4j by deeplearning4j.

From the class ModelSerializer, method restoreNormalizerFromFileDeprecated.

/**
     * @deprecated
     *
     * Restores a normalizer from a model file that was persisted with Java object serialization.
     *
     * @param file the persisted model file (a zip archive) to read the normalizer from
     * @return the restored DataNormalization, or null if the file contains no normalizer entry
     */
private static DataNormalization restoreNormalizerFromFileDeprecated(File file) {
    try (ZipFile zipFile = new ZipFile(file)) {
        ZipEntry norm = zipFile.getEntry(NORMALIZER_BIN);
        // No normalizer entry in the archive: nothing to restore
        if (norm == null)
            return null;
        // try-with-resources closes both streams, even on failure
        try (InputStream stream = zipFile.getInputStream(norm);
             ObjectInputStream ois = new ObjectInputStream(stream)) {
            return (DataNormalization) ois.readObject();
        } catch (ClassNotFoundException e) {
            throw new RuntimeException(e);
        }
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
}
Also used: DataNormalization (org.nd4j.linalg.dataset.api.preprocessor.DataNormalization), ZipFile (java.util.zip.ZipFile), ZipEntry (java.util.zip.ZipEntry)
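
This is the deprecated fallback that reads a normalizer persisted with plain Java serialization. The forward path in ModelSerializer pairs addNormalizerToModel with restoreNormalizer to bundle the fitted normalizer into the same zip as the network. A hedged sketch of that round trip (the file path is a placeholder, the network and normalizer are assumed already trained and fitted, and you should check the exact restoreNormalizer signature for your DL4J version):

import java.io.File;

import org.deeplearning4j.nn.multilayer.MultiLayerNetwork;
import org.deeplearning4j.util.ModelSerializer;
import org.nd4j.linalg.dataset.api.preprocessor.DataNormalization;

public class NormalizerPersistenceSketch {
    // 'network' and 'normalizer' are assumed already trained/fitted
    public static void saveAndRestore(MultiLayerNetwork network,
                                      DataNormalization normalizer) throws Exception {
        File modelFile = new File("model.zip"); // placeholder path
        ModelSerializer.writeModel(network, modelFile, true); // true: keep updater state
        // Bundles the normalizer into the same zip, next to the network
        ModelSerializer.addNormalizerToModel(modelFile, normalizer);

        // Later: restore both, so inference applies the same scaling
        MultiLayerNetwork restored = ModelSerializer.restoreMultiLayerNetwork(modelFile);
        DataNormalization restoredNorm = ModelSerializer.restoreNormalizer(modelFile);
    }
}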

Aggregations

DataNormalization (org.nd4j.linalg.dataset.api.preprocessor.DataNormalization): 9 uses
Test (org.junit.Test): 8 uses
INDArray (org.nd4j.linalg.api.ndarray.INDArray): 8 uses
DataSet (org.nd4j.linalg.dataset.DataSet): 8 uses
IrisDataSetIterator (org.deeplearning4j.datasets.iterator.impl.IrisDataSetIterator): 6 uses
MultiLayerConfiguration (org.deeplearning4j.nn.conf.MultiLayerConfiguration): 6 uses
MultiLayerNetwork (org.deeplearning4j.nn.multilayer.MultiLayerNetwork): 6 uses
DataSetIterator (org.nd4j.linalg.dataset.api.iterator.DataSetIterator): 6 uses
NormalizerMinMaxScaler (org.nd4j.linalg.dataset.api.preprocessor.NormalizerMinMaxScaler): 6 uses
NormalDistribution (org.deeplearning4j.nn.conf.distribution.NormalDistribution): 5 uses
NeuralNetConfiguration (org.deeplearning4j.nn.conf.NeuralNetConfiguration): 4 uses
LossFunction (org.nd4j.linalg.lossfunctions.LossFunctions.LossFunction): 4 uses
NormalizerStandardize (org.nd4j.linalg.dataset.api.preprocessor.NormalizerStandardize): 3 uses
IrisDataFetcher (org.deeplearning4j.datasets.fetchers.IrisDataFetcher): 2 uses
ZipEntry (java.util.zip.ZipEntry): 1 use
ZipFile (java.util.zip.ZipFile): 1 use