Search in sources :

Example 6 with MultiLayerPerceptron

Use of org.knime.base.data.neural.MultiLayerPerceptron in the knime-core project by KNIME.

From the class PMMLNeuralNetworkTranslator, method addOutputLayer.

/**
 * Writes the PMML output layer of the MLP.
 *
 * @param nnModel
 *            the neural network model.
 * @param mlp
 *            the underlying {@link MultiLayerPerceptron}.
 * @param spec
 *            the port object spec
 */
protected void addOutputLayer(final NeuralNetwork nnModel, final MultiLayerPerceptron mlp, final PMMLPortObjectSpec spec) {
    int lastlayer = mlp.getNrLayers() - 1;
    String targetCol = spec.getTargetFields().iterator().next();
    Layer outputlayer = mlp.getLayer(lastlayer);
    Perceptron[] outputperceptrons = outputlayer.getPerceptrons();
    HashMap<DataCell, Integer> outputmap = mlp.getClassMapping();
    NeuralOutputs neuralOuts = nnModel.addNewNeuralOutputs();
    neuralOuts.setNumberOfOutputs(BigInteger.valueOf(outputperceptrons.length));
    for (int i = 0; i < outputperceptrons.length; i++) {
        NeuralOutput neuralOutput = neuralOuts.addNewNeuralOutput();
        // neuron ids follow the "<layer>,<index>" convention used throughout this translator
        neuralOutput.setOutputNeuron(lastlayer + "," + i);
        // Reverse-lookup of the class value mapped to output neuron i;
        // the mapping is one-to-one, so stop at the first match.
        String colname = "";
        for (Entry<DataCell, Integer> e : outputmap.entrySet()) {
            if (e.getValue().equals(i)) {
                colname = ((StringValue) e.getKey()).getStringValue();
                break;
            }
        }
        DerivedField df = neuralOutput.addNewDerivedField();
        // Default to categorical/string; overwritten below for regression.
        df.setOptype(OPTYPE.CATEGORICAL);
        df.setDataType(DATATYPE.STRING);
        if (mlp.getMode() == MultiLayerPerceptron.CLASSIFICATION_MODE) {
            // One indicator (NormDiscrete) per class value of the target column.
            NormDiscrete normDiscrete = df.addNewNormDiscrete();
            normDiscrete.setField(targetCol);
            normDiscrete.setValue(colname);
        } else if (mlp.getMode() == MultiLayerPerceptron.REGRESSION_MODE) {
            df.setOptype(OPTYPE.CONTINUOUS);
            df.setDataType(DATATYPE.DOUBLE);
            // Regression output refers to the target column directly.
            FieldRef fieldRef = df.addNewFieldRef();
            fieldRef.setField(targetCol);
        }
    }
}
Also used : NeuralOutputs(org.dmg.pmml.NeuralOutputsDocument.NeuralOutputs) FieldRef(org.dmg.pmml.FieldRefDocument.FieldRef) NeuralLayer(org.dmg.pmml.NeuralLayerDocument.NeuralLayer) Layer(org.knime.base.data.neural.Layer) InputLayer(org.knime.base.data.neural.InputLayer) HiddenLayer(org.knime.base.data.neural.HiddenLayer) NeuralOutput(org.dmg.pmml.NeuralOutputDocument.NeuralOutput) BigInteger(java.math.BigInteger) NormDiscrete(org.dmg.pmml.NormDiscreteDocument.NormDiscrete) SigmoidPerceptron(org.knime.base.data.neural.SigmoidPerceptron) MultiLayerPerceptron(org.knime.base.data.neural.MultiLayerPerceptron) Perceptron(org.knime.base.data.neural.Perceptron) InputPerceptron(org.knime.base.data.neural.InputPerceptron) DataCell(org.knime.core.data.DataCell) DerivedField(org.dmg.pmml.DerivedFieldDocument.DerivedField)

Example 7 with MultiLayerPerceptron

use of org.knime.base.data.neural.MultiLayerPerceptron in project knime-core by knime.

the class PMMLNeuralNetworkTranslator method initializeFrom.

/**
 * {@inheritDoc}
 *
 * Reconstructs the KNIME {@link MultiLayerPerceptron} ({@code m_mlp}) from
 * the first neural network model of the given PMML document. Unsupported
 * activation/normalization settings are logged as errors but do not abort
 * the translation; a missing model or fewer than three layers throws.
 */
@Override
public void initializeFrom(final PMMLDocument pmmlDoc) {
    m_nameMapper = new DerivedFieldMapper(pmmlDoc);
    NeuralNetwork[] models = pmmlDoc.getPMML().getNeuralNetworkArray();
    if (models.length == 0) {
        throw new IllegalArgumentException("No neural network model" + " provided.");
    } else if (models.length > 1) {
        // Additional models are ignored after the warning.
        LOGGER.warn("Multiple neural network models found. " + "Only the first model is considered.");
    }
    NeuralNetwork nnModel = models[0];
    // ------------------------------
    // initiate Neural Input
    // NOTE(review): the init* calls below populate member state
    // (presumably m_allLayers, m_classmap, m_inputmap) in layer order;
    // their call order must not change — confirm against the field docs.
    initInputLayer(nnModel);
    // -------------------------------
    // initiate Hidden Layer
    initiateHiddenLayers(nnModel);
    // -------------------------------
    // initiate Final Layer
    initiateFinalLayer(nnModel);
    // --------------------------------
    // initiate Neural Outputs
    initiateNeuralOutputs(nnModel);
    // --------------------------------
    // initiate Neural Network properties
    ACTIVATIONFUNCTION.Enum actFunc = nnModel.getActivationFunction();
    NNNORMALIZATIONMETHOD.Enum normMethod = nnModel.getNormalizationMethod();
    // KNIME MLPs support only the logistic activation function and no
    // normalization; mismatches are logged, not rejected.
    if (ACTIVATIONFUNCTION.LOGISTIC != actFunc) {
        LOGGER.error("Only logistic activation function is " + "supported in KNIME MLP.");
    }
    if (NNNORMALIZATIONMETHOD.NONE != normMethod) {
        LOGGER.error("No normalization method is " + "supported in KNIME MLP.");
    }
    MININGFUNCTION.Enum functionName = nnModel.getFunctionName();
    if (MININGFUNCTION.CLASSIFICATION == functionName) {
        m_mlpMethod = MultiLayerPerceptron.CLASSIFICATION_MODE;
    } else if (MININGFUNCTION.REGRESSION == functionName) {
        m_mlpMethod = MultiLayerPerceptron.REGRESSION_MODE;
    }
    // At least input, one hidden and output layer are required.
    if (m_allLayers.size() < 3) {
        throw new IllegalArgumentException("Only neural networks with 3 Layers supported in KNIME MLP.");
    }
    Layer[] allLayers = new Layer[m_allLayers.size()];
    allLayers = m_allLayers.toArray(allLayers);
    m_mlp = new MultiLayerPerceptron(allLayers);
    // Architecture: #inputs, #hidden layers (all but first/last),
    // #neurons per hidden layer (taken from the first hidden layer), #outputs.
    Architecture myarch = new Architecture(allLayers[0].getPerceptrons().length, allLayers.length - 2, allLayers[1].getPerceptrons().length, allLayers[allLayers.length - 1].getPerceptrons().length);
    m_mlp.setArchitecture(myarch);
    m_mlp.setClassMapping(m_classmap);
    m_mlp.setInputMapping(m_inputmap);
    m_mlp.setMode(m_mlpMethod);
}
Also used : ACTIVATIONFUNCTION(org.dmg.pmml.ACTIVATIONFUNCTION) Architecture(org.knime.base.data.neural.Architecture) NeuralNetwork(org.dmg.pmml.NeuralNetworkDocument.NeuralNetwork) NeuralLayer(org.dmg.pmml.NeuralLayerDocument.NeuralLayer) Layer(org.knime.base.data.neural.Layer) InputLayer(org.knime.base.data.neural.InputLayer) HiddenLayer(org.knime.base.data.neural.HiddenLayer) MultiLayerPerceptron(org.knime.base.data.neural.MultiLayerPerceptron) DerivedFieldMapper(org.knime.core.node.port.pmml.preproc.DerivedFieldMapper) MININGFUNCTION(org.dmg.pmml.MININGFUNCTION) NNNORMALIZATIONMETHOD(org.dmg.pmml.NNNORMALIZATIONMETHOD)

Example 8 with MultiLayerPerceptron

use of org.knime.base.data.neural.MultiLayerPerceptron in project knime-core by knime.

the class PMMLNeuralNetworkTranslator method addLayer.

/**
 * Writes a layer of the MLP.
 *
 * @param nnModel
 *            the NeuralNetwork model.
 * @param mlp
 *            the underlying {@link MultiLayerPerceptron}.
 * @param layer
 *            the number of the current layer.
 */
protected void addLayer(final NeuralNetwork nnModel, final MultiLayerPerceptron mlp, final int layer) {
    Layer curLayer = mlp.getLayer(layer);
    Perceptron[] perceptrons = curLayer.getPerceptrons();
    // (Removed dead SAX AttributesImpl code: the attribute object was built
    // but never attached to the XMLBeans output.)
    NeuralLayer neuralLayer = nnModel.addNewNeuralLayer();
    for (int i = 0; i < perceptrons.length; i++) {
        Neuron neuron = neuralLayer.addNewNeuron();
        // neuron ids follow the "<layer>,<index>" convention
        neuron.setId(layer + "," + i);
        // PMML bias is the negated KNIME threshold
        neuron.setBias(-1 * perceptrons[i].getThreshold());
        double[] weights = perceptrons[i].getWeights();
        int predLayerLength = weights.length;
        // One connection per neuron of the preceding layer.
        for (int j = 0; j < predLayerLength; j++) {
            Con con = neuron.addNewCon();
            con.setFrom((layer - 1) + "," + j);
            con.setWeight(weights[j]);
        }
    }
}
Also used : SigmoidPerceptron(org.knime.base.data.neural.SigmoidPerceptron) MultiLayerPerceptron(org.knime.base.data.neural.MultiLayerPerceptron) Perceptron(org.knime.base.data.neural.Perceptron) InputPerceptron(org.knime.base.data.neural.InputPerceptron) AttributesImpl(org.xml.sax.helpers.AttributesImpl) Neuron(org.dmg.pmml.NeuronDocument.Neuron) Con(org.dmg.pmml.ConDocument.Con) NeuralLayer(org.dmg.pmml.NeuralLayerDocument.NeuralLayer) NeuralLayer(org.dmg.pmml.NeuralLayerDocument.NeuralLayer) Layer(org.knime.base.data.neural.Layer) InputLayer(org.knime.base.data.neural.InputLayer) HiddenLayer(org.knime.base.data.neural.HiddenLayer)

Example 9 with MultiLayerPerceptron

use of org.knime.base.data.neural.MultiLayerPerceptron in project knime-core by knime.

the class RPropNodeModel method execute.

/**
 * The execution consists of three steps:
 * <ol>
 * <li>A neural network is built with the inputs and outputs according to
 * the input datatable, number of hidden layers as specified.</li>
 * <li>Input DataTables are converted into double-arrays so they can be
 * attached to the neural net.</li>
 * <li>The neural net is trained.</li>
 * </ol>
 *
 * {@inheritDoc}
 */
@Override
protected PortObject[] execute(final PortObject[] inData, final ExecutionContext exec) throws Exception {
    // If class column is not set, it is the last column.
    DataTableSpec posSpec = (DataTableSpec) inData[INDATA].getSpec();
    if (m_classcol.getStringValue() == null) {
        m_classcol.setStringValue(posSpec.getColumnSpec(posSpec.getNumColumns() - 1).getName());
    }
    List<String> learningCols = new LinkedList<String>();
    List<String> targetCols = new LinkedList<String>();
    // Determine the number of inputs and the number of outputs. Make also
    // sure that the inputs are double values.
    int nrInputs = 0;
    int nrOutputs = 0;
    HashMap<String, Integer> inputmap = new HashMap<String, Integer>();
    HashMap<DataCell, Integer> classMap = new HashMap<DataCell, Integer>();
    for (DataColumnSpec colspec : posSpec) {
        // check for class column
        if (colspec.getName().compareTo(m_classcol.getStringValue()) == 0) {
            targetCols.add(colspec.getName());
            if (colspec.getType().isCompatible(DoubleValue.class)) {
                // numeric target -> regression; check if the values are in range [0,1]
                DataColumnDomain domain = colspec.getDomain();
                if (domain.hasBounds()) {
                    double lower = ((DoubleValue) domain.getLowerBound()).getDoubleValue();
                    double upper = ((DoubleValue) domain.getUpperBound()).getDoubleValue();
                    if (lower < 0 || upper > 1) {
                        throw new InvalidSettingsException("Domain range for regression in column " + colspec.getName() + " not in range [0,1]");
                    }
                }
                nrOutputs = 1;
                classMap = new HashMap<DataCell, Integer>();
                classMap.put(new StringCell(colspec.getName()), 0);
                m_regression = true;
            } else {
                // nominal target -> classification; one output neuron per class value
                m_regression = false;
                DataColumnDomain domain = colspec.getDomain();
                if (domain.hasValues()) {
                    Set<DataCell> allvalues = domain.getValues();
                    int outputneuron = 0;
                    classMap = new HashMap<DataCell, Integer>();
                    for (DataCell value : allvalues) {
                        classMap.put(value, outputneuron);
                        outputneuron++;
                    }
                    nrOutputs = allvalues.size();
                } else {
                    // FIX: original message concatenated to "...values innominal column"
                    throw new Exception("Could not find domain values in" + " nominal column " + colspec.getName());
                }
            }
        } else {
            if (!colspec.getType().isCompatible(DoubleValue.class)) {
                throw new Exception("Only double columns for input");
            }
            inputmap.put(colspec.getName(), nrInputs);
            learningCols.add(colspec.getName());
            nrInputs++;
        }
    }
    assert targetCols.size() == 1 : "Only one class column allowed.";
    m_architecture.setNrInputNeurons(nrInputs);
    m_architecture.setNrHiddenLayers(m_nrHiddenLayers.getIntValue());
    m_architecture.setNrHiddenNeurons(m_nrHiddenNeuronsperLayer.getIntValue());
    m_architecture.setNrOutputNeurons(nrOutputs);
    Random random = new Random();
    if (m_useRandomSeed.getBooleanValue()) {
        random.setSeed(m_randomSeed.getIntValue());
    }
    m_mlp = new MultiLayerPerceptron(m_architecture, random);
    if (m_regression) {
        m_mlp.setMode(MultiLayerPerceptron.REGRESSION_MODE);
    } else {
        m_mlp.setMode(MultiLayerPerceptron.CLASSIFICATION_MODE);
    }
    // Convert inputs to double arrays. Values from the class column are
    // encoded as bitvectors (one-hot) in classification mode.
    int classColNr = posSpec.findColumnIndex(m_classcol.getStringValue());
    List<Double[]> samples = new ArrayList<Double[]>();
    List<Double[]> outputs = new ArrayList<Double[]>();
    Double[] sample = new Double[nrInputs];
    Double[] output = new Double[nrOutputs];
    final RowIterator rowIt = ((BufferedDataTable) inData[INDATA]).iterator();
    int rowcounter = 0;
    while (rowIt.hasNext()) {
        boolean add = true;
        output = new Double[nrOutputs];
        sample = new Double[nrInputs];
        DataRow row = rowIt.next();
        int nrCells = row.getNumCells();
        int index = 0;
        for (int i = 0; i < nrCells; i++) {
            if (i != classColNr) {
                if (!row.getCell(i).isMissing()) {
                    DoubleValue dc = (DoubleValue) row.getCell(i);
                    sample[index] = dc.getDoubleValue();
                    index++;
                } else {
                    // missing input value: skip row or fail, depending on setting
                    if (m_ignoreMV.getBooleanValue()) {
                        add = false;
                        break;
                    } else {
                        throw new Exception("Missing values in input" + " datatable");
                    }
                }
            } else {
                if (row.getCell(i).isMissing()) {
                    add = false;
                    if (!m_ignoreMV.getBooleanValue()) {
                        throw new Exception("Missing value in class" + " column");
                    }
                    break;
                }
                if (m_regression) {
                    DoubleValue dc = (DoubleValue) row.getCell(i);
                    output[0] = dc.getDoubleValue();
                } else {
                    // one-hot encode the class value; classMap covers all
                    // domain values collected above
                    for (int j = 0; j < nrOutputs; j++) {
                        if (classMap.get(row.getCell(i)) == j) {
                            output[j] = Double.valueOf(1.0);
                        } else {
                            output[j] = Double.valueOf(0.0);
                        }
                    }
                }
            }
        }
        if (add) {
            samples.add(sample);
            outputs.add(output);
            rowcounter++;
        }
    }
    Double[][] samplesarr = new Double[rowcounter][nrInputs];
    // FIX: second dimension was wrongly nrInputs in the original; the rows
    // are replaced below anyway, but the size must match nrOutputs.
    Double[][] outputsarr = new Double[rowcounter][nrOutputs];
    for (int i = 0; i < samplesarr.length; i++) {
        samplesarr[i] = samples.get(i);
        outputsarr[i] = outputs.get(i);
    }
    // Now finally train the network.
    m_mlp.setClassMapping(classMap);
    m_mlp.setInputMapping(inputmap);
    RProp myrprop = new RProp();
    m_errors = new double[m_nrIterations.getIntValue()];
    for (int iteration = 0; iteration < m_nrIterations.getIntValue(); iteration++) {
        exec.setProgress((double) iteration / (double) m_nrIterations.getIntValue(), "Iteration " + iteration);
        myrprop.train(m_mlp, samplesarr, outputsarr);
        // accumulate the squared error over all samples for this iteration
        double error = 0;
        for (int j = 0; j < outputsarr.length; j++) {
            double[] myoutput = m_mlp.output(samplesarr[j]);
            for (int o = 0; o < outputsarr[0].length; o++) {
                error += (myoutput[o] - outputsarr[j][o]) * (myoutput[o] - outputsarr[j][o]);
            }
        }
        m_errors[iteration] = error;
        exec.checkCanceled();
    }
    // handle the optional PMML input
    PMMLPortObject inPMMLPort = m_pmmlInEnabled ? (PMMLPortObject) inData[INMODEL] : null;
    PMMLPortObjectSpec inPMMLSpec = null;
    if (inPMMLPort != null) {
        inPMMLSpec = inPMMLPort.getSpec();
    }
    PMMLPortObjectSpec outPortSpec = createPMMLPortObjectSpec(inPMMLSpec, posSpec, learningCols, targetCols);
    PMMLPortObject outPMMLPort = new PMMLPortObject(outPortSpec, inPMMLPort, posSpec);
    outPMMLPort.addModelTranslater(new PMMLNeuralNetworkTranslator(m_mlp));
    return new PortObject[] { outPMMLPort };
}
Also used : DataTableSpec(org.knime.core.data.DataTableSpec) PMMLPortObjectSpec(org.knime.core.node.port.pmml.PMMLPortObjectSpec) HashMap(java.util.HashMap) PMMLNeuralNetworkTranslator(org.knime.base.node.mine.neural.mlp2.PMMLNeuralNetworkTranslator) ArrayList(java.util.ArrayList) SettingsModelString(org.knime.core.node.defaultnodesettings.SettingsModelString) DataRow(org.knime.core.data.DataRow) DataColumnSpec(org.knime.core.data.DataColumnSpec) Random(java.util.Random) BufferedDataTable(org.knime.core.node.BufferedDataTable) PMMLPortObject(org.knime.core.node.port.pmml.PMMLPortObject) PortObject(org.knime.core.node.port.PortObject) LinkedList(java.util.LinkedList) InvalidSettingsException(org.knime.core.node.InvalidSettingsException) IOException(java.io.IOException) MultiLayerPerceptron(org.knime.base.data.neural.MultiLayerPerceptron) SettingsModelInteger(org.knime.core.node.defaultnodesettings.SettingsModelInteger) DataColumnDomain(org.knime.core.data.DataColumnDomain) DoubleValue(org.knime.core.data.DoubleValue) InvalidSettingsException(org.knime.core.node.InvalidSettingsException) StringCell(org.knime.core.data.def.StringCell) PMMLPortObject(org.knime.core.node.port.pmml.PMMLPortObject) RowIterator(org.knime.core.data.RowIterator) DataCell(org.knime.core.data.DataCell) RProp(org.knime.base.data.neural.methods.RProp)

Example 10 with MultiLayerPerceptron

use of org.knime.base.data.neural.MultiLayerPerceptron in project knime-core by knime.

the class PMMLNeuralNetworkTranslator method addOutputLayer.

/**
 * Writes the PMML output layer of the MLP.
 *
 * @param nnModel
 *            the neural network model.
 * @param mlp
 *            the underlying {@link MultiLayerPerceptron}.
 * @param spec
 *            the port object spec
 */
protected void addOutputLayer(final NeuralNetwork nnModel, final MultiLayerPerceptron mlp, final PMMLPortObjectSpec spec) {
    int lastlayer = mlp.getNrLayers() - 1;
    String targetCol = spec.getTargetFields().iterator().next();
    Layer outputlayer = mlp.getLayer(lastlayer);
    Perceptron[] outputperceptrons = outputlayer.getPerceptrons();
    HashMap<DataCell, Integer> outputmap = mlp.getClassMapping();
    NeuralOutputs neuralOuts = nnModel.addNewNeuralOutputs();
    neuralOuts.setNumberOfOutputs(BigInteger.valueOf(outputperceptrons.length));
    for (int i = 0; i < outputperceptrons.length; i++) {
        NeuralOutput neuralOutput = neuralOuts.addNewNeuralOutput();
        // neuron ids follow the "<layer>,<index>" convention used throughout this translator
        neuralOutput.setOutputNeuron(lastlayer + "," + i);
        // Reverse-lookup of the class value mapped to output neuron i;
        // the mapping is one-to-one, so stop at the first match.
        String colname = "";
        for (Entry<DataCell, Integer> e : outputmap.entrySet()) {
            if (e.getValue().equals(i)) {
                colname = ((StringValue) e.getKey()).getStringValue();
                break;
            }
        }
        DerivedField df = neuralOutput.addNewDerivedField();
        // Default to categorical/string; overwritten below for regression.
        df.setOptype(OPTYPE.CATEGORICAL);
        df.setDataType(DATATYPE.STRING);
        if (mlp.getMode() == MultiLayerPerceptron.CLASSIFICATION_MODE) {
            // One indicator (NormDiscrete) per class value of the target column.
            NormDiscrete normDiscrete = df.addNewNormDiscrete();
            normDiscrete.setField(targetCol);
            normDiscrete.setValue(colname);
        } else if (mlp.getMode() == MultiLayerPerceptron.REGRESSION_MODE) {
            df.setOptype(OPTYPE.CONTINUOUS);
            df.setDataType(DATATYPE.DOUBLE);
            // Regression output refers to the target column directly.
            FieldRef fieldRef = df.addNewFieldRef();
            fieldRef.setField(targetCol);
        }
    }
}
Also used : NeuralOutputs(org.dmg.pmml.NeuralOutputsDocument.NeuralOutputs) FieldRef(org.dmg.pmml.FieldRefDocument.FieldRef) NeuralLayer(org.dmg.pmml.NeuralLayerDocument.NeuralLayer) Layer(org.knime.base.data.neural.Layer) InputLayer(org.knime.base.data.neural.InputLayer) HiddenLayer(org.knime.base.data.neural.HiddenLayer) NeuralOutput(org.dmg.pmml.NeuralOutputDocument.NeuralOutput) BigInteger(java.math.BigInteger) NormDiscrete(org.dmg.pmml.NormDiscreteDocument.NormDiscrete) SigmoidPerceptron(org.knime.base.data.neural.SigmoidPerceptron) MultiLayerPerceptron(org.knime.base.data.neural.MultiLayerPerceptron) Perceptron(org.knime.base.data.neural.Perceptron) InputPerceptron(org.knime.base.data.neural.InputPerceptron) DataCell(org.knime.core.data.DataCell) DerivedField(org.dmg.pmml.DerivedFieldDocument.DerivedField)

Aggregations

MultiLayerPerceptron (org.knime.base.data.neural.MultiLayerPerceptron)10 NeuralLayer (org.dmg.pmml.NeuralLayerDocument.NeuralLayer)8 HiddenLayer (org.knime.base.data.neural.HiddenLayer)8 InputLayer (org.knime.base.data.neural.InputLayer)8 Layer (org.knime.base.data.neural.Layer)8 Perceptron (org.knime.base.data.neural.Perceptron)7 InputPerceptron (org.knime.base.data.neural.InputPerceptron)6 SigmoidPerceptron (org.knime.base.data.neural.SigmoidPerceptron)6 BigInteger (java.math.BigInteger)4 DerivedField (org.dmg.pmml.DerivedFieldDocument.DerivedField)4 FieldRef (org.dmg.pmml.FieldRefDocument.FieldRef)4 DataCell (org.knime.core.data.DataCell)3 ACTIVATIONFUNCTION (org.dmg.pmml.ACTIVATIONFUNCTION)2 Con (org.dmg.pmml.ConDocument.Con)2 MININGFUNCTION (org.dmg.pmml.MININGFUNCTION)2 NNNORMALIZATIONMETHOD (org.dmg.pmml.NNNORMALIZATIONMETHOD)2 NeuralInput (org.dmg.pmml.NeuralInputDocument.NeuralInput)2 NeuralInputs (org.dmg.pmml.NeuralInputsDocument.NeuralInputs)2 NeuralNetwork (org.dmg.pmml.NeuralNetworkDocument.NeuralNetwork)2 NeuralOutput (org.dmg.pmml.NeuralOutputDocument.NeuralOutput)2