use of org.knime.base.data.neural.MultiLayerPerceptron in project knime-core by knime.
the class PMMLNeuralNetworkTranslator method addOutputLayer.
/**
 * Writes the PMML output layer of the MLP.
 *
 * @param nnModel the neural network model
 * @param mlp the underlying {@link MultiLayerPerceptron}
 * @param spec the port object spec
 */
protected void addOutputLayer(final NeuralNetwork nnModel, final MultiLayerPerceptron mlp,
        final PMMLPortObjectSpec spec) {
    int lastlayer = mlp.getNrLayers() - 1;
    String targetCol = spec.getTargetFields().iterator().next();
    Layer outputlayer = mlp.getLayer(lastlayer);
    Perceptron[] outputperceptrons = outputlayer.getPerceptrons();
    HashMap<DataCell, Integer> outputmap = mlp.getClassMapping();
    NeuralOutputs neuralOuts = nnModel.addNewNeuralOutputs();
    neuralOuts.setNumberOfOutputs(BigInteger.valueOf(outputperceptrons.length));
    for (int i = 0; i < outputperceptrons.length; i++) {
        NeuralOutput neuralOutput = neuralOuts.addNewNeuralOutput();
        neuralOutput.setOutputNeuron(lastlayer + "," + i);
        // find the class value that is mapped to output neuron i
        String classValue = "";
        for (Entry<DataCell, Integer> e : outputmap.entrySet()) {
            if (e.getValue().equals(i)) {
                classValue = ((StringValue) e.getKey()).getStringValue();
            }
        }
        DerivedField df = neuralOutput.addNewDerivedField();
        if (mlp.getMode() == MultiLayerPerceptron.CLASSIFICATION_MODE) {
            df.setOptype(OPTYPE.CATEGORICAL);
            df.setDataType(DATATYPE.STRING);
            NormDiscrete normDiscrete = df.addNewNormDiscrete();
            normDiscrete.setField(targetCol);
            normDiscrete.setValue(classValue);
        } else if (mlp.getMode() == MultiLayerPerceptron.REGRESSION_MODE) {
            df.setOptype(OPTYPE.CONTINUOUS);
            df.setDataType(DATATYPE.DOUBLE);
            FieldRef fieldRef = df.addNewFieldRef();
            fieldRef.setField(targetCol);
        }
    }
}
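
For context, the write side of this translator is driven from RPropNodeModel.execute (shown further below on this page): the trained MLP is wrapped in a translator and registered with the outgoing PMML port. A minimal sketch reusing the names from that snippet (outPortSpec, inPMMLPort, posSpec, m_mlp):

// Write-side usage, as it appears in RPropNodeModel.execute below.
PMMLPortObject outPMMLPort = new PMMLPortObject(outPortSpec, inPMMLPort, posSpec);
outPMMLPort.addModelTranslater(new PMMLNeuralNetworkTranslator(m_mlp));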
use of org.knime.base.data.neural.MultiLayerPerceptron in project knime-core by knime.
the class PMMLNeuralNetworkTranslator method initializeFrom.
/**
 * {@inheritDoc}
 */
@Override
public void initializeFrom(final PMMLDocument pmmlDoc) {
    m_nameMapper = new DerivedFieldMapper(pmmlDoc);
    NeuralNetwork[] models = pmmlDoc.getPMML().getNeuralNetworkArray();
    if (models.length == 0) {
        throw new IllegalArgumentException("No neural network model provided.");
    } else if (models.length > 1) {
        LOGGER.warn("Multiple neural network models found. Only the first model is considered.");
    }
    NeuralNetwork nnModel = models[0];
    // initialize neural input
    initInputLayer(nnModel);
    // initialize hidden layers
    initiateHiddenLayers(nnModel);
    // initialize final layer
    initiateFinalLayer(nnModel);
    // initialize neural outputs
    initiateNeuralOutputs(nnModel);
    // initialize neural network properties
    ACTIVATIONFUNCTION.Enum actFunc = nnModel.getActivationFunction();
    NNNORMALIZATIONMETHOD.Enum normMethod = nnModel.getNormalizationMethod();
    if (ACTIVATIONFUNCTION.LOGISTIC != actFunc) {
        LOGGER.error("Only the logistic activation function is supported in KNIME MLP.");
    }
    if (NNNORMALIZATIONMETHOD.NONE != normMethod) {
        LOGGER.error("No normalization method is supported in KNIME MLP.");
    }
    MININGFUNCTION.Enum functionName = nnModel.getFunctionName();
    if (MININGFUNCTION.CLASSIFICATION == functionName) {
        m_mlpMethod = MultiLayerPerceptron.CLASSIFICATION_MODE;
    } else if (MININGFUNCTION.REGRESSION == functionName) {
        m_mlpMethod = MultiLayerPerceptron.REGRESSION_MODE;
    }
    if (m_allLayers.size() < 3) {
        throw new IllegalArgumentException("Only neural networks with at least 3 layers are supported in KNIME MLP.");
    }
    Layer[] allLayers = m_allLayers.toArray(new Layer[m_allLayers.size()]);
    m_mlp = new MultiLayerPerceptron(allLayers);
    Architecture myarch = new Architecture(allLayers[0].getPerceptrons().length,
            allLayers.length - 2, allLayers[1].getPerceptrons().length,
            allLayers[allLayers.length - 1].getPerceptrons().length);
    m_mlp.setArchitecture(myarch);
    m_mlp.setClassMapping(m_classmap);
    m_mlp.setInputMapping(m_inputmap);
    m_mlp.setMode(m_mlpMethod);
}
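
Going the other way, a read-side sketch of rebuilding a KNIME MLP from a PMML file. The no-argument translator constructor and the getMLP() accessor are assumptions for illustration; neither is shown in the snippets on this page:

// Read-side sketch: rebuild an MLP from a parsed PMML document.
// NOTE: the no-arg constructor and getMLP() are hypothetical here.
PMMLDocument pmmlDoc = PMMLDocument.Factory.parse(new File("network.pmml"));
PMMLNeuralNetworkTranslator translator = new PMMLNeuralNetworkTranslator();
translator.initializeFrom(pmmlDoc);
MultiLayerPerceptron mlp = translator.getMLP(); // hypothetical accessor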
use of org.knime.base.data.neural.MultiLayerPerceptron in project knime-core by knime.
the class PMMLNeuralNetworkTranslator method addLayer.
/**
 * Writes a layer of the MLP.
 *
 * @param nnModel the NeuralNetwork model
 * @param mlp the underlying {@link MultiLayerPerceptron}
 * @param layer the number of the current layer
 */
protected void addLayer(final NeuralNetwork nnModel, final MultiLayerPerceptron mlp, final int layer) {
    Layer curLayer = mlp.getLayer(layer);
    Perceptron[] perceptrons = curLayer.getPerceptrons();
    NeuralLayer neuralLayer = nnModel.addNewNeuralLayer();
    for (int i = 0; i < perceptrons.length; i++) {
        Neuron neuron = neuralLayer.addNewNeuron();
        // neuron ids follow the "<layer>,<index>" convention
        neuron.setId(layer + "," + i);
        // PMML stores the bias, which is the negated KNIME threshold
        neuron.setBias(-1 * perceptrons[i].getThreshold());
        double[] weights = perceptrons[i].getWeights();
        int predLayerLength = weights.length;
        // one connection per neuron of the preceding layer
        for (int j = 0; j < predLayerLength; j++) {
            Con con = neuron.addNewCon();
            con.setFrom((layer - 1) + "," + j);
            con.setWeight(weights[j]);
        }
    }
}
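
To make the serialized convention concrete, here is a standalone sketch (plain Java, not KNIME API) of how a neuron written by addLayer would be evaluated: the bias stored in PMML is the negated KNIME threshold, and the activation is logistic, the only function the KNIME MLP supports (see initializeFrom above).

// Evaluating one PMML neuron as written by addLayer: weights[j] is the
// Con weight from neuron "(layer-1),j" of the preceding layer, and bias
// is the value passed to setBias (i.e. -threshold in KNIME terms).
static double evaluateNeuron(final double[] weights, final double bias, final double[] prevLayerOutputs) {
    double sum = bias;
    for (int j = 0; j < weights.length; j++) {
        sum += weights[j] * prevLayerOutputs[j];
    }
    return 1.0 / (1.0 + Math.exp(-sum)); // logistic activation
}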
use of org.knime.base.data.neural.MultiLayerPerceptron in project knime-core by knime.
the class RPropNodeModel method execute.
/**
 * The execution consists of three steps:
 * <ol>
 * <li>A neural network is built with inputs and outputs according to the
 * input data table and the specified number of hidden layers.</li>
 * <li>The input DataTables are converted into double arrays so they can
 * be attached to the neural net.</li>
 * <li>The neural net is trained.</li>
 * </ol>
 *
 * {@inheritDoc}
 */
@Override
protected PortObject[] execute(final PortObject[] inData, final ExecutionContext exec) throws Exception {
    // If the class column is not set, use the last column.
    DataTableSpec posSpec = (DataTableSpec) inData[INDATA].getSpec();
    if (m_classcol.getStringValue() == null) {
        m_classcol.setStringValue(posSpec.getColumnSpec(posSpec.getNumColumns() - 1).getName());
    }
    List<String> learningCols = new LinkedList<String>();
    List<String> targetCols = new LinkedList<String>();
    // Determine the number of inputs and outputs, and make sure that the
    // inputs are double values.
    int nrInputs = 0;
    int nrOutputs = 0;
    HashMap<String, Integer> inputmap = new HashMap<String, Integer>();
    HashMap<DataCell, Integer> classMap = new HashMap<DataCell, Integer>();
    for (DataColumnSpec colspec : posSpec) {
        // check for class column
        if (colspec.getName().equals(m_classcol.getStringValue())) {
            targetCols.add(colspec.getName());
            if (colspec.getType().isCompatible(DoubleValue.class)) {
                // check if the values are in the range [0,1]
                DataColumnDomain domain = colspec.getDomain();
                if (domain.hasBounds()) {
                    double lower = ((DoubleValue) domain.getLowerBound()).getDoubleValue();
                    double upper = ((DoubleValue) domain.getUpperBound()).getDoubleValue();
                    if (lower < 0 || upper > 1) {
                        throw new InvalidSettingsException("Domain range for regression in column "
                                + colspec.getName() + " not in range [0,1]");
                    }
                }
                nrOutputs = 1;
                classMap = new HashMap<DataCell, Integer>();
                classMap.put(new StringCell(colspec.getName()), 0);
                m_regression = true;
            } else {
                m_regression = false;
                DataColumnDomain domain = colspec.getDomain();
                if (domain.hasValues()) {
                    Set<DataCell> allvalues = domain.getValues();
                    int outputneuron = 0;
                    classMap = new HashMap<DataCell, Integer>();
                    for (DataCell value : allvalues) {
                        classMap.put(value, outputneuron);
                        outputneuron++;
                    }
                    nrOutputs = allvalues.size();
                } else {
                    throw new Exception("Could not find domain values in nominal column "
                            + colspec.getName());
                }
            }
        } else {
            if (!colspec.getType().isCompatible(DoubleValue.class)) {
                throw new Exception("Only double columns are supported as inputs");
            }
            inputmap.put(colspec.getName(), nrInputs);
            learningCols.add(colspec.getName());
            nrInputs++;
        }
    }
    assert targetCols.size() == 1 : "Only one class column allowed.";
    m_architecture.setNrInputNeurons(nrInputs);
    m_architecture.setNrHiddenLayers(m_nrHiddenLayers.getIntValue());
    m_architecture.setNrHiddenNeurons(m_nrHiddenNeuronsperLayer.getIntValue());
    m_architecture.setNrOutputNeurons(nrOutputs);
    Random random = new Random();
    if (m_useRandomSeed.getBooleanValue()) {
        random.setSeed(m_randomSeed.getIntValue());
    }
    m_mlp = new MultiLayerPerceptron(m_architecture, random);
    if (m_regression) {
        m_mlp.setMode(MultiLayerPerceptron.REGRESSION_MODE);
    } else {
        m_mlp.setMode(MultiLayerPerceptron.CLASSIFICATION_MODE);
    }
    // Convert inputs to double arrays. Values from the class column are
    // encoded as bit vectors.
    int classColNr = posSpec.findColumnIndex(m_classcol.getStringValue());
    List<Double[]> samples = new ArrayList<Double[]>();
    List<Double[]> outputs = new ArrayList<Double[]>();
    Double[] sample = new Double[nrInputs];
    Double[] output = new Double[nrOutputs];
    final RowIterator rowIt = ((BufferedDataTable) inData[INDATA]).iterator();
    int rowcounter = 0;
    while (rowIt.hasNext()) {
        boolean add = true;
        output = new Double[nrOutputs];
        sample = new Double[nrInputs];
        DataRow row = rowIt.next();
        int nrCells = row.getNumCells();
        int index = 0;
        for (int i = 0; i < nrCells; i++) {
            if (i != classColNr) {
                if (!row.getCell(i).isMissing()) {
                    DoubleValue dc = (DoubleValue) row.getCell(i);
                    sample[index] = dc.getDoubleValue();
                    index++;
                } else {
                    if (m_ignoreMV.getBooleanValue()) {
                        add = false;
                        break;
                    } else {
                        throw new Exception("Missing values in input datatable");
                    }
                }
            } else {
                if (row.getCell(i).isMissing()) {
                    add = false;
                    if (!m_ignoreMV.getBooleanValue()) {
                        throw new Exception("Missing value in class column");
                    }
                    break;
                }
                if (m_regression) {
                    DoubleValue dc = (DoubleValue) row.getCell(i);
                    output[0] = dc.getDoubleValue();
                } else {
                    // one-hot encoding: 1.0 for the neuron mapped to this class
                    for (int j = 0; j < nrOutputs; j++) {
                        if (classMap.get(row.getCell(i)) == j) {
                            output[j] = 1.0;
                        } else {
                            output[j] = 0.0;
                        }
                    }
                }
            }
        }
        if (add) {
            samples.add(sample);
            outputs.add(output);
            rowcounter++;
        }
    }
    Double[][] samplesarr = new Double[rowcounter][nrInputs];
    Double[][] outputsarr = new Double[rowcounter][nrOutputs];
    for (int i = 0; i < samplesarr.length; i++) {
        samplesarr[i] = samples.get(i);
        outputsarr[i] = outputs.get(i);
    }
    // Now finally train the network.
    m_mlp.setClassMapping(classMap);
    m_mlp.setInputMapping(inputmap);
    RProp myrprop = new RProp();
    m_errors = new double[m_nrIterations.getIntValue()];
    for (int iteration = 0; iteration < m_nrIterations.getIntValue(); iteration++) {
        exec.setProgress((double) iteration / (double) m_nrIterations.getIntValue(), "Iteration " + iteration);
        myrprop.train(m_mlp, samplesarr, outputsarr);
        // track the summed squared error over all training samples
        double error = 0;
        for (int j = 0; j < outputsarr.length; j++) {
            double[] myoutput = m_mlp.output(samplesarr[j]);
            for (int o = 0; o < outputsarr[0].length; o++) {
                error += (myoutput[o] - outputsarr[j][o]) * (myoutput[o] - outputsarr[j][o]);
            }
        }
        m_errors[iteration] = error;
        exec.checkCanceled();
    }
    // handle the optional PMML input
    PMMLPortObject inPMMLPort = m_pmmlInEnabled ? (PMMLPortObject) inData[INMODEL] : null;
    PMMLPortObjectSpec inPMMLSpec = null;
    if (inPMMLPort != null) {
        inPMMLSpec = inPMMLPort.getSpec();
    }
    PMMLPortObjectSpec outPortSpec = createPMMLPortObjectSpec(inPMMLSpec, posSpec, learningCols, targetCols);
    PMMLPortObject outPMMLPort = new PMMLPortObject(outPortSpec, inPMMLPort, posSpec);
    outPMMLPort.addModelTranslater(new PMMLNeuralNetworkTranslator(m_mlp));
    return new PortObject[] { outPMMLPort };
}
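
Stripped of the spec handling and missing-value logic, the core of this method reduces to a handful of calls on the org.knime.base.data.neural classes. A minimal, self-contained sketch using only calls that appear in the snippets on this page; the Architecture constructor argument order (inputs, hidden layers, hidden neurons per layer, outputs) is inferred from initializeFrom above, and the toy data stands in for the converted table rows:

// Toy samples with one-hot encoded targets, as built in execute().
Double[][] samplesarr = { { 0.0, 0.0 }, { 1.0, 1.0 } };
Double[][] outputsarr = { { 1.0, 0.0 }, { 0.0, 1.0 } };
Architecture arch = new Architecture(2, 1, 5, 2); // inferred argument order
MultiLayerPerceptron mlp = new MultiLayerPerceptron(arch, new Random(42));
mlp.setMode(MultiLayerPerceptron.CLASSIFICATION_MODE);
RProp rprop = new RProp();
for (int iteration = 0; iteration < 100; iteration++) {
    rprop.train(mlp, samplesarr, outputsarr); // one RProp pass over all samples
}
double[] prediction = mlp.output(samplesarr[0]); // output-layer activations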