Example 1 with MultiLayerPerceptron

Use of org.knime.base.data.neural.MultiLayerPerceptron in project knime-core by knime.

From the class PMMLNeuralNetworkTranslator, method addInputLayer.

/**
 * Writes the PMML input layer of the MLP.
 *
 * @param nnModel
 *            the Neural Network model.
 * @param mlp
 *            the underlying {@link MultiLayerPerceptron}.
 */
protected void addInputLayer(final NeuralNetwork nnModel, final MultiLayerPerceptron mlp) {
    Layer inputlayer = mlp.getLayer(0);
    Perceptron[] inputperceptrons = inputlayer.getPerceptrons();
    HashMap<String, Integer> inputmap = mlp.getInputMapping();
    NeuralInputs neuralInputs = nnModel.addNewNeuralInputs();
    neuralInputs.setNumberOfInputs(BigInteger.valueOf(inputperceptrons.length));
    for (int i = 0; i < inputperceptrons.length; i++) {
        NeuralInput neuralInput = neuralInputs.addNewNeuralInput();
        neuralInput.setId("0," + i); // the input layer is layer 0
        // reverse lookup: find the column mapped to input neuron i
        String colname = "";
        for (Entry<String, Integer> e : inputmap.entrySet()) {
            if (e.getValue().equals(i)) {
                colname = e.getKey();
            }
        }
        DerivedField df = neuralInput.addNewDerivedField();
        df.setOptype(OPTYPE.CONTINUOUS);
        df.setDataType(DATATYPE.DOUBLE);
        FieldRef fieldRef = df.addNewFieldRef();
        fieldRef.setField(m_nameMapper.getDerivedFieldName(colname));
    }
}
Also used: BigInteger (java.math.BigInteger), SigmoidPerceptron (org.knime.base.data.neural.SigmoidPerceptron), MultiLayerPerceptron (org.knime.base.data.neural.MultiLayerPerceptron), Perceptron (org.knime.base.data.neural.Perceptron), InputPerceptron (org.knime.base.data.neural.InputPerceptron), FieldRef (org.dmg.pmml.FieldRefDocument.FieldRef), NeuralInputs (org.dmg.pmml.NeuralInputsDocument.NeuralInputs), NeuralLayer (org.dmg.pmml.NeuralLayerDocument.NeuralLayer), Layer (org.knime.base.data.neural.Layer), InputLayer (org.knime.base.data.neural.InputLayer), HiddenLayer (org.knime.base.data.neural.HiddenLayer), DerivedField (org.dmg.pmml.DerivedFieldDocument.DerivedField), NeuralInput (org.dmg.pmml.NeuralInputDocument.NeuralInput)
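
The lookup loop above scans the whole input mapping once per neuron, which is quadratic in the number of inputs. Below is a minimal sketch of the same reverse lookup with the map inverted once up front; the helper name and the assumption that the mapping is one-to-one are illustrative, not from knime-core.

import java.util.HashMap;
import java.util.Map;

public final class InputMapUtil {

    // Hypothetical helper (not part of knime-core): inverts the
    // {column name -> neuron index} map once, so each neuron index
    // resolves to its column name in O(1) instead of a full map scan.
    static String[] columnsByNeuronIndex(final Map<String, Integer> inputMap, final int nrInputs) {
        String[] columns = new String[nrInputs];
        for (Map.Entry<String, Integer> e : inputMap.entrySet()) {
            columns[e.getValue()] = e.getKey();
        }
        return columns;
    }

    public static void main(final String[] args) {
        Map<String, Integer> inputMap = new HashMap<>();
        inputMap.put("sepal length", 0);
        inputMap.put("sepal width", 1);
        String[] columns = columnsByNeuronIndex(inputMap, 2);
        System.out.println(columns[0] + ", " + columns[1]); // sepal length, sepal width
    }
}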

Example 2 with MultiLayerPerceptron

Use of org.knime.base.data.neural.MultiLayerPerceptron in project knime-core by knime.

From the class PMMLNeuralNetworkTranslator, method initializeFrom.

/**
 * {@inheritDoc}
 */
@Override
public void initializeFrom(final PMMLDocument pmmlDoc) {
    m_nameMapper = new DerivedFieldMapper(pmmlDoc);
    NeuralNetwork[] models = pmmlDoc.getPMML().getNeuralNetworkArray();
    if (models.length == 0) {
        throw new IllegalArgumentException("No neural network model provided.");
    } else if (models.length > 1) {
        LOGGER.warn("Multiple neural network models found. Only the first model is considered.");
    }
    NeuralNetwork nnModel = models[0];
    // ------------------------------
    // initialize the neural inputs
    initInputLayer(nnModel);
    // -------------------------------
    // initialize the hidden layers
    initiateHiddenLayers(nnModel);
    // -------------------------------
    // initialize the final layer
    initiateFinalLayer(nnModel);
    // --------------------------------
    // initialize the neural outputs
    initiateNeuralOutputs(nnModel);
    // --------------------------------
    // initialize the neural network properties
    ACTIVATIONFUNCTION.Enum actFunc = nnModel.getActivationFunction();
    NNNORMALIZATIONMETHOD.Enum normMethod = nnModel.getNormalizationMethod();
    if (ACTIVATIONFUNCTION.LOGISTIC != actFunc) {
        LOGGER.error("Only the logistic activation function is supported in the KNIME MLP.");
    }
    if (NNNORMALIZATIONMETHOD.NONE != normMethod) {
        LOGGER.error("Normalization methods are not supported in the KNIME MLP.");
    }
    MININGFUNCTION.Enum functionName = nnModel.getFunctionName();
    if (MININGFUNCTION.CLASSIFICATION == functionName) {
        m_mlpMethod = MultiLayerPerceptron.CLASSIFICATION_MODE;
    } else if (MININGFUNCTION.REGRESSION == functionName) {
        m_mlpMethod = MultiLayerPerceptron.REGRESSION_MODE;
    }
    if (m_allLayers.size() < 3) {
        throw new IllegalArgumentException("Only neural networks with at least 3 layers are supported in the KNIME MLP.");
    }
    Layer[] allLayers = m_allLayers.toArray(new Layer[m_allLayers.size()]);
    m_mlp = new MultiLayerPerceptron(allLayers);
    // arguments: number of inputs, number of hidden layers, neurons per hidden layer, number of outputs
    Architecture myarch = new Architecture(allLayers[0].getPerceptrons().length, allLayers.length - 2, allLayers[1].getPerceptrons().length, allLayers[allLayers.length - 1].getPerceptrons().length);
    m_mlp.setArchitecture(myarch);
    m_mlp.setClassMapping(m_classmap);
    m_mlp.setInputMapping(m_inputmap);
    m_mlp.setMode(m_mlpMethod);
}
Also used: ACTIVATIONFUNCTION (org.dmg.pmml.ACTIVATIONFUNCTION), Architecture (org.knime.base.data.neural.Architecture), NeuralNetwork (org.dmg.pmml.NeuralNetworkDocument.NeuralNetwork), NeuralLayer (org.dmg.pmml.NeuralLayerDocument.NeuralLayer), Layer (org.knime.base.data.neural.Layer), InputLayer (org.knime.base.data.neural.InputLayer), HiddenLayer (org.knime.base.data.neural.HiddenLayer), MultiLayerPerceptron (org.knime.base.data.neural.MultiLayerPerceptron), DerivedFieldMapper (org.knime.core.node.port.pmml.preproc.DerivedFieldMapper), MININGFUNCTION (org.dmg.pmml.MININGFUNCTION), NNNORMALIZATIONMETHOD (org.dmg.pmml.NNNORMALIZATIONMETHOD)
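
A caller-side sketch of initializeFrom: parse a PMML file with the XMLBeans factory and hand the document to the translator. The file name, the translator's package, and the commented-out getMLP() accessor are assumptions for illustration; the excerpts only show that the resulting network is stored in the m_mlp field.

import java.io.File;

import org.dmg.pmml.PMMLDocument;
import org.knime.base.node.mine.neural.mlp.PMMLNeuralNetworkTranslator; // package path assumed

public class TranslateExample {

    public static void main(final String[] args) throws Exception {
        // PMMLDocument.Factory.parse is the standard XMLBeans entry point
        PMMLDocument doc = PMMLDocument.Factory.parse(new File("mlp-model.pmml"));
        PMMLNeuralNetworkTranslator translator = new PMMLNeuralNetworkTranslator();
        // builds the MultiLayerPerceptron from the first NeuralNetwork model
        translator.initializeFrom(doc);
        // an accessor such as getMLP() is assumed and not shown in the excerpts:
        // MultiLayerPerceptron mlp = translator.getMLP();
    }
}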

Example 3 with MultiLayerPerceptron

Use of org.knime.base.data.neural.MultiLayerPerceptron in project knime-core by knime.

From the class PMMLNeuralNetworkTranslator, method addLayer.

/**
 * Writes a layer of the MLP.
 *
 * @param nnModel
 *            the NeuralNetwork model.
 * @param mlp
 *            the underlying {@link MultiLayerPerceptron}.
 * @param layer
 *            the index of the current layer.
 */
protected void addLayer(final NeuralNetwork nnModel, final MultiLayerPerceptron mlp, final int layer) {
    Layer curLayer = mlp.getLayer(layer);
    Perceptron[] perceptrons = curLayer.getPerceptrons();
    // note: this SAX attribute list is built but never attached to the
    // document; the number of neurons is implied by the Neuron children
    AttributesImpl atts = new AttributesImpl();
    atts.addAttribute(null, null, "numberOfNeurons", CDATA, "" + perceptrons.length);
    NeuralLayer neuralLayer = nnModel.addNewNeuralLayer();
    for (int i = 0; i < perceptrons.length; i++) {
        Neuron neuron = neuralLayer.addNewNeuron();
        neuron.setId(layer + "," + i);
        // PMML stores a bias, which is the negated KNIME threshold
        neuron.setBias(-1 * perceptrons[i].getThreshold());
        double[] weights = perceptrons[i].getWeights();
        int predLayerLength = weights.length;
        for (int j = 0; j < predLayerLength; j++) {
            Con con = neuron.addNewCon();
            con.setFrom((layer - 1) + "," + j);
            con.setWeight(weights[j]);
        }
    }
}
Also used: SigmoidPerceptron (org.knime.base.data.neural.SigmoidPerceptron), MultiLayerPerceptron (org.knime.base.data.neural.MultiLayerPerceptron), Perceptron (org.knime.base.data.neural.Perceptron), InputPerceptron (org.knime.base.data.neural.InputPerceptron), AttributesImpl (org.xml.sax.helpers.AttributesImpl), Neuron (org.dmg.pmml.NeuronDocument.Neuron), Con (org.dmg.pmml.ConDocument.Con), NeuralLayer (org.dmg.pmml.NeuralLayerDocument.NeuralLayer), Layer (org.knime.base.data.neural.Layer), InputLayer (org.knime.base.data.neural.InputLayer), HiddenLayer (org.knime.base.data.neural.HiddenLayer)
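
addLayer encodes each neuron id as "<layer>,<index>" and each connection's from attribute as the id of a neuron in the previous layer, so the PMML graph mirrors the MLP's array indices. A standalone illustration of that id scheme (plain Java, no PMML types involved):

public class NeuronIds {

    // the "<layer>,<index>" convention used by addLayer and addInputLayer
    static String neuronId(final int layer, final int index) {
        return layer + "," + index;
    }

    public static void main(final String[] args) {
        int layer = 2; // current layer
        int neuron = 0; // neuron index within the layer
        int predCount = 3; // number of neurons in the previous layer
        System.out.println("id = " + neuronId(layer, neuron)); // id = 2,0
        for (int j = 0; j < predCount; j++) {
            // each Con/from points at a neuron in layer - 1
            System.out.println("from = " + neuronId(layer - 1, j));
        }
    }
}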

Example 4 with MultiLayerPerceptron

Use of org.knime.base.data.neural.MultiLayerPerceptron in project knime-core by knime.

From the class RProp, method train.

/**
 * Trains the neural network for one epoch: a single pass over all
 * samples, followed by one RPROP update of the weights and thresholds.
 *
 * @param nn the neural network to train
 * @param samples the training samples
 * @param outputs the desired outputs for these samples
 */
public void train(final MultiLayerPerceptron nn, final Double[][] samples, final Double[][] outputs) {
    m_nn = nn;
    m_samples = samples;
    m_outputs = outputs;
    init();
    double sum = 0.0;
    double y = 0.0;
    double errDer;
    double thrErrDer;
    double oldErrDer;
    double oldThrErrDer;
    /*
     * For all samples
     */
    for (int s = 0; s < samples.length; s++) {
        // pair the input vector with its desired output vector
        Double[][] sample = new Double[][] {m_samples[s], m_outputs[s]};
        /*
         * Forward wave
         */
        for (int i = 0; i < m_output.length; i++) {
            for (int j = 0; j < m_output[i].length; j++) {
                if (i == 0) {
                    // input neuron
                    m_output[i][j] = sample[0][j];
                } else {
                    // non-input neuron
                    Perceptron p = nn.getLayer(i).getPerceptron(j);
                    sum = 0.0;
                    for (int k = 0; k < m_output[i - 1].length; k++) {
                        sum += m_output[i - 1][k] * p.getWeight(k);
                    }
                    m_output[i][j] = p.activationFunction(sum - p.getThreshold());
                }
            }
        }
        /*
         * Backward wave
         */
        for (int i = m_delta.length - 1; i >= 0; i--) {
            for (int j = 0; j < m_delta[i].length; j++) {
                y = m_output[i][j];
                if (i == m_delta.length - 1) {
                    // output neuron
                    m_delta[i][j] = (sample[1][j] - y) * y * (1 - y);
                } else {
                    // non-output neuron
                    sum = 0.0;
                    for (int k = 0; k < m_delta[i + 1].length; k++) {
                        sum += m_delta[i + 1][k] * nn.getLayer(i + 1).getPerceptron(k).getWeight(j);
                    }
                    m_delta[i][j] = y * (1 - y) * sum;
                }
            }
        }
        // accumulate the error derivatives (batch gradient)
        for (int i = 0; i < m_errDers.length; i++) {
            for (int j = 0; j < m_errDers[i].length; j++) {
                for (int k = 0; k < m_errDers[i][j].length; k++) {
                    m_errDers[i][j][k] += m_output[i][k] * -m_delta[i + 1][j];
                }
            }
        }
        for (int i = 0; i < m_thrErrDers.length; i++) {
            for (int j = 0; j < m_thrErrDers[i].length; j++) {
                m_thrErrDers[i][j] += m_delta[i + 1][j];
            }
        }
    }
    // STEP 2: RPROP update of all weights (sign-based step-size adaptation)
    for (int i = 1; i < nn.getLayers().length; i++) {
        for (int j = 0; j < nn.getLayer(i).getPerceptrons().length; j++) {
            for (int k = 0; k < nn.getLayer(i - 1).getPerceptrons().length; k++) {
                // current error derivative
                errDer = m_errDers[i - 1][j][k];
                // error derivative from the previous epoch
                oldErrDer = m_oldErrDers[i - 1][j][k];
                if ((errDer * oldErrDer) > 0.0) {
                    m_etaIJ[i - 1][j][k] = Math.min(m_etaIJ[i - 1][j][k] * getEtaPlus(), DELTA_MAX);
                    double deltaW = -sgn(errDer) * m_etaIJ[i - 1][j][k];
                    nn.getLayer(i).getPerceptron(j).setWeight(k, nn.getLayer(i).getPerceptron(j).getWeight(k) + deltaW);
                    m_oldErrDers[i - 1][j][k] = errDer;
                } else if ((errDer * oldErrDer) < 0.0) {
                    m_etaIJ[i - 1][j][k] = Math.max(m_etaIJ[i - 1][j][k] * getEtaMinus(), DELTA_MIN);
                    m_oldErrDers[i - 1][j][k] = 0;
                } else if ((errDer * oldErrDer) == 0) {
                    double deltaW = -sgn(errDer) * m_etaIJ[i - 1][j][k];
                    nn.getLayer(i).getPerceptron(j).setWeight(k, nn.getLayer(i).getPerceptron(j).getWeight(k) + deltaW);
                    m_oldErrDers[i - 1][j][k] = errDer;
                }
            }
        }
    }
    // STEP 3: RPROP update of all thresholds
    for (int i = 1; i < nn.getLayers().length; i++) {
        for (int j = 0; j < nn.getLayer(i).getPerceptrons().length; j++) {
            // current error derivative of the threshold
            thrErrDer = m_thrErrDers[i - 1][j];
            // error derivative from the previous epoch
            oldThrErrDer = m_oldThrErrDers[i - 1][j];
            if ((thrErrDer * oldThrErrDer) > 0.0) {
                m_thrEtaIJ[i - 1][j] = Math.min(m_thrEtaIJ[i - 1][j] * getEtaPlus(), DELTA_MAX);
                double deltaThr = -sgn(thrErrDer) * m_thrEtaIJ[i - 1][j];
                nn.getLayer(i).getPerceptron(j).setThreshold(nn.getLayer(i).getPerceptron(j).getThreshold() + deltaThr);
                m_oldThrErrDers[i - 1][j] = thrErrDer;
            } else if ((thrErrDer * oldThrErrDer) < 0.0) {
                m_thrEtaIJ[i - 1][j] = Math.max(m_thrEtaIJ[i - 1][j] * getEtaMinus(), DELTA_MIN);
                m_oldThrErrDers[i - 1][j] = 0;
            } else if ((thrErrDer * oldThrErrDer) == 0.0) {
                double deltaThr = -sgn(thrErrDer) * m_thrEtaIJ[i - 1][j];
                nn.getLayer(i).getPerceptron(j).setThreshold(nn.getLayer(i).getPerceptron(j).getThreshold() + deltaThr);
                m_oldThrErrDers[i - 1][j] = thrErrDer;
            }
        }
    }
}
Also used: Perceptron (org.knime.base.data.neural.Perceptron), MultiLayerPerceptron (org.knime.base.data.neural.MultiLayerPerceptron)
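
Because train performs exactly one epoch, callers drive the training loop themselves. A minimal sketch of such a loop, assuming an already constructed MultiLayerPerceptron; the RProp import path, its no-argument constructor, and the fixed epoch count are assumptions, not taken from the source:

import org.knime.base.data.neural.MultiLayerPerceptron;
import org.knime.base.data.neural.methods.RProp; // package path assumed

public class TrainExample {

    static void trainFor(final MultiLayerPerceptron mlp, final Double[][] samples,
            final Double[][] targets, final int maxEpochs) {
        RProp rprop = new RProp(); // no-argument constructor assumed
        for (int epoch = 0; epoch < maxEpochs; epoch++) {
            // one call = one full pass over all samples plus one RPROP update
            rprop.train(mlp, samples, targets);
        }
    }
}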

Aggregations

MultiLayerPerceptron (org.knime.base.data.neural.MultiLayerPerceptron): 10
NeuralLayer (org.dmg.pmml.NeuralLayerDocument.NeuralLayer): 8
HiddenLayer (org.knime.base.data.neural.HiddenLayer): 8
InputLayer (org.knime.base.data.neural.InputLayer): 8
Layer (org.knime.base.data.neural.Layer): 8
Perceptron (org.knime.base.data.neural.Perceptron): 7
InputPerceptron (org.knime.base.data.neural.InputPerceptron): 6
SigmoidPerceptron (org.knime.base.data.neural.SigmoidPerceptron): 6
BigInteger (java.math.BigInteger): 4
DerivedField (org.dmg.pmml.DerivedFieldDocument.DerivedField): 4
FieldRef (org.dmg.pmml.FieldRefDocument.FieldRef): 4
DataCell (org.knime.core.data.DataCell): 3
ACTIVATIONFUNCTION (org.dmg.pmml.ACTIVATIONFUNCTION): 2
Con (org.dmg.pmml.ConDocument.Con): 2
MININGFUNCTION (org.dmg.pmml.MININGFUNCTION): 2
NNNORMALIZATIONMETHOD (org.dmg.pmml.NNNORMALIZATIONMETHOD): 2
NeuralInput (org.dmg.pmml.NeuralInputDocument.NeuralInput): 2
NeuralInputs (org.dmg.pmml.NeuralInputsDocument.NeuralInputs): 2
NeuralNetwork (org.dmg.pmml.NeuralNetworkDocument.NeuralNetwork): 2
NeuralOutput (org.dmg.pmml.NeuralOutputDocument.NeuralOutput): 2
MultiLayerPerceptron (org.knime.base.data.neural.MultiLayerPerceptron)10 NeuralLayer (org.dmg.pmml.NeuralLayerDocument.NeuralLayer)8 HiddenLayer (org.knime.base.data.neural.HiddenLayer)8 InputLayer (org.knime.base.data.neural.InputLayer)8 Layer (org.knime.base.data.neural.Layer)8 Perceptron (org.knime.base.data.neural.Perceptron)7 InputPerceptron (org.knime.base.data.neural.InputPerceptron)6 SigmoidPerceptron (org.knime.base.data.neural.SigmoidPerceptron)6 BigInteger (java.math.BigInteger)4 DerivedField (org.dmg.pmml.DerivedFieldDocument.DerivedField)4 FieldRef (org.dmg.pmml.FieldRefDocument.FieldRef)4 DataCell (org.knime.core.data.DataCell)3 ACTIVATIONFUNCTION (org.dmg.pmml.ACTIVATIONFUNCTION)2 Con (org.dmg.pmml.ConDocument.Con)2 MININGFUNCTION (org.dmg.pmml.MININGFUNCTION)2 NNNORMALIZATIONMETHOD (org.dmg.pmml.NNNORMALIZATIONMETHOD)2 NeuralInput (org.dmg.pmml.NeuralInputDocument.NeuralInput)2 NeuralInputs (org.dmg.pmml.NeuralInputsDocument.NeuralInputs)2 NeuralNetwork (org.dmg.pmml.NeuralNetworkDocument.NeuralNetwork)2 NeuralOutput (org.dmg.pmml.NeuralOutputDocument.NeuralOutput)2