Usage of org.knime.base.data.neural.Layer in project knime-core (by KNIME): class PMMLNeuralNetworkTranslator, method addInputLayer.
/**
 * Writes the PMML input layer of the MLP.
 *
 * @param nnModel
 * the Neural Network model.
 * @param mlp
 * the underlying {@link MultiLayerPerceptron}.
 */
protected void addInputLayer(final NeuralNetwork nnModel, final MultiLayerPerceptron mlp) {
    Layer inputlayer = mlp.getLayer(0);
    Perceptron[] inputperceptrons = inputlayer.getPerceptrons();
    HashMap<String, Integer> inputmap = mlp.getInputMapping();
    // Invert the column-name -> index mapping once instead of scanning the
    // whole map for every perceptron (was O(n^2)). Iteration order means a
    // later duplicate index overwrites an earlier one, matching the original
    // "last match wins" behavior.
    HashMap<Integer, String> indexToColumn = new HashMap<Integer, String>();
    for (Entry<String, Integer> e : inputmap.entrySet()) {
        indexToColumn.put(e.getValue(), e.getKey());
    }
    NeuralInputs neuralInputs = nnModel.addNewNeuralInputs();
    neuralInputs.setNumberOfInputs(BigInteger.valueOf(inputperceptrons.length));
    for (int i = 0; i < inputperceptrons.length; i++) {
        NeuralInput neuralInput = neuralInputs.addNewNeuralInput();
        // PMML neuron ids use the "<layer>,<index>" convention; the input
        // layer is layer 0.
        neuralInput.setId("0," + i);
        // Look up the input column feeding this perceptron; an unmapped index
        // yields an empty column name, as before.
        String colname = indexToColumn.get(i);
        if (colname == null) {
            colname = "";
        }
        DerivedField df = neuralInput.addNewDerivedField();
        df.setOptype(OPTYPE.CONTINUOUS);
        df.setDataType(DATATYPE.DOUBLE);
        FieldRef fieldRef = df.addNewFieldRef();
        fieldRef.setField(m_nameMapper.getDerivedFieldName(colname));
    }
}
Usage of org.knime.base.data.neural.Layer in project knime-core (by KNIME): class PMMLNeuralNetworkTranslator, method initializeFrom.
/**
 * {@inheritDoc}
 */
@Override
public void initializeFrom(final PMMLDocument pmmlDoc) {
    m_nameMapper = new DerivedFieldMapper(pmmlDoc);
    NeuralNetwork[] models = pmmlDoc.getPMML().getNeuralNetworkArray();
    if (models.length == 0) {
        throw new IllegalArgumentException("No neural network model provided.");
    } else if (models.length > 1) {
        LOGGER.warn("Multiple neural network models found. Only the first model is considered.");
    }
    NeuralNetwork nnModel = models[0];
    // Parse the PMML model layer by layer; each init* call is expected to
    // append to m_allLayers and fill the class/input mappings.
    initInputLayer(nnModel);
    initiateHiddenLayers(nnModel);
    initiateFinalLayer(nnModel);
    initiateNeuralOutputs(nnModel);
    // Validate global network properties against what the KNIME MLP supports.
    ACTIVATIONFUNCTION.Enum actFunc = nnModel.getActivationFunction();
    NNNORMALIZATIONMETHOD.Enum normMethod = nnModel.getNormalizationMethod();
    if (ACTIVATIONFUNCTION.LOGISTIC != actFunc) {
        LOGGER.error("Only logistic activation function is supported in KNIME MLP.");
    }
    if (NNNORMALIZATIONMETHOD.NONE != normMethod) {
        LOGGER.error("No normalization method is supported in KNIME MLP.");
    }
    MININGFUNCTION.Enum functionName = nnModel.getFunctionName();
    if (MININGFUNCTION.CLASSIFICATION == functionName) {
        m_mlpMethod = MultiLayerPerceptron.CLASSIFICATION_MODE;
    } else if (MININGFUNCTION.REGRESSION == functionName) {
        m_mlpMethod = MultiLayerPerceptron.REGRESSION_MODE;
    }
    // A KNIME MLP needs input, at least one hidden, and output layers; the
    // architecture below treats every layer between the first and last as
    // hidden, so more than three layers are accepted as well.
    if (m_allLayers.size() < 3) {
        throw new IllegalArgumentException("Only neural networks with at least 3 layers are supported in KNIME MLP.");
    }
    Layer[] allLayers = new Layer[m_allLayers.size()];
    allLayers = m_allLayers.toArray(allLayers);
    m_mlp = new MultiLayerPerceptron(allLayers);
    Architecture myarch = new Architecture(allLayers[0].getPerceptrons().length, allLayers.length - 2, allLayers[1].getPerceptrons().length, allLayers[allLayers.length - 1].getPerceptrons().length);
    m_mlp.setArchitecture(myarch);
    m_mlp.setClassMapping(m_classmap);
    m_mlp.setInputMapping(m_inputmap);
    m_mlp.setMode(m_mlpMethod);
}
Usage of org.knime.base.data.neural.Layer in project knime-core (by KNIME): class PMMLNeuralNetworkTranslator, method addLayer.
/**
 * Writes a layer of the MLP.
 *
 * @param nnModel
 * the NeuralNetwork model.
 * @param mlp
 * the underlying {@link MultiLayerPerceptron}.
 * @param layer
 * the number of the current layer.
 */
protected void addLayer(final NeuralNetwork nnModel, final MultiLayerPerceptron mlp, final int layer) {
    Layer curLayer = mlp.getLayer(layer);
    Perceptron[] perceptrons = curLayer.getPerceptrons();
    // Note: a SAX AttributesImpl with "numberOfNeurons" used to be built here
    // but was never attached to the document — dead code, removed.
    NeuralLayer neuralLayer = nnModel.addNewNeuralLayer();
    for (int i = 0; i < perceptrons.length; i++) {
        Neuron neuron = neuralLayer.addNewNeuron();
        // Neuron ids follow the "<layer>,<index>" convention.
        neuron.setId(layer + "," + i);
        // PMML bias is the negated KNIME threshold.
        neuron.setBias(-perceptrons[i].getThreshold());
        double[] weights = perceptrons[i].getWeights();
        int predLayerLength = weights.length;
        // One connection per neuron of the preceding layer.
        for (int j = 0; j < predLayerLength; j++) {
            Con con = neuron.addNewCon();
            con.setFrom((layer - 1) + "," + j);
            con.setWeight(weights[j]);
        }
    }
}
Usage of org.knime.base.data.neural.Layer in project knime-core (by KNIME): class PMMLNeuralNetworkTranslator, method addInputLayer (duplicate listing).
/**
 * Writes the PMML input layer of the MLP.
 *
 * @param nnModel
 * the Neural Network model.
 * @param mlp
 * the underlying {@link MultiLayerPerceptron}.
 */
protected void addInputLayer(final NeuralNetwork nnModel, final MultiLayerPerceptron mlp) {
    Layer inputlayer = mlp.getLayer(0);
    Perceptron[] inputperceptrons = inputlayer.getPerceptrons();
    HashMap<String, Integer> inputmap = mlp.getInputMapping();
    // Invert the column-name -> index mapping once instead of scanning the
    // whole map for every perceptron (was O(n^2)). Iteration order means a
    // later duplicate index overwrites an earlier one, matching the original
    // "last match wins" behavior.
    HashMap<Integer, String> indexToColumn = new HashMap<Integer, String>();
    for (Entry<String, Integer> e : inputmap.entrySet()) {
        indexToColumn.put(e.getValue(), e.getKey());
    }
    NeuralInputs neuralInputs = nnModel.addNewNeuralInputs();
    neuralInputs.setNumberOfInputs(BigInteger.valueOf(inputperceptrons.length));
    for (int i = 0; i < inputperceptrons.length; i++) {
        NeuralInput neuralInput = neuralInputs.addNewNeuralInput();
        // PMML neuron ids use the "<layer>,<index>" convention; the input
        // layer is layer 0.
        neuralInput.setId("0," + i);
        // Look up the input column feeding this perceptron; an unmapped index
        // yields an empty column name, as before.
        String colname = indexToColumn.get(i);
        if (colname == null) {
            colname = "";
        }
        DerivedField df = neuralInput.addNewDerivedField();
        df.setOptype(OPTYPE.CONTINUOUS);
        df.setDataType(DATATYPE.DOUBLE);
        FieldRef fieldRef = df.addNewFieldRef();
        fieldRef.setField(m_nameMapper.getDerivedFieldName(colname));
    }
}
Usage of org.knime.base.data.neural.Layer in project knime-core (by KNIME): class PMMLNeuralNetworkTranslator, method addOutputLayer.
/**
 * Writes the PMML output layer of the MLP.
 *
 * @param nnModel
 * the neural network model.
 * @param mlp
 * the underlying {@link MultiLayerPerceptron}.
 * @param spec
 * the port object spec
 */
protected void addOutputLayer(final NeuralNetwork nnModel, final MultiLayerPerceptron mlp, final PMMLPortObjectSpec spec) {
    int lastlayer = mlp.getNrLayers() - 1;
    String targetCol = spec.getTargetFields().iterator().next();
    Layer outputlayer = mlp.getLayer(lastlayer);
    Perceptron[] outputperceptrons = outputlayer.getPerceptrons();
    HashMap<DataCell, Integer> outputmap = mlp.getClassMapping();
    // Invert the class -> index mapping once instead of scanning the whole
    // map per output neuron (was O(n^2)); duplicate indices keep the last
    // entry, matching the original behavior.
    HashMap<Integer, DataCell> indexToClass = new HashMap<Integer, DataCell>();
    for (Entry<DataCell, Integer> e : outputmap.entrySet()) {
        indexToClass.put(e.getValue(), e.getKey());
    }
    NeuralOutputs neuralOuts = nnModel.addNewNeuralOutputs();
    neuralOuts.setNumberOfOutputs(BigInteger.valueOf(outputperceptrons.length));
    for (int i = 0; i < outputperceptrons.length; i++) {
        NeuralOutput neuralOutput = neuralOuts.addNewNeuralOutput();
        neuralOutput.setOutputNeuron(lastlayer + "," + i);
        // Class value mapped to this output neuron; empty if unmapped.
        DataCell classCell = indexToClass.get(i);
        String colname = classCell == null ? "" : ((StringValue) classCell).getStringValue();
        DerivedField df = neuralOutput.addNewDerivedField();
        // Single mode dispatch (the original set CATEGORICAL/STRING
        // unconditionally and then overwrote it in a second, duplicated
        // if/else chain — final document state is identical).
        if (mlp.getMode() == MultiLayerPerceptron.CLASSIFICATION_MODE) {
            df.setOptype(OPTYPE.CATEGORICAL);
            df.setDataType(DATATYPE.STRING);
            NormDiscrete normDiscrete = df.addNewNormDiscrete();
            normDiscrete.setField(targetCol);
            normDiscrete.setValue(colname);
        } else if (mlp.getMode() == MultiLayerPerceptron.REGRESSION_MODE) {
            df.setOptype(OPTYPE.CONTINUOUS);
            df.setDataType(DATATYPE.DOUBLE);
            FieldRef fieldRef = df.addNewFieldRef();
            fieldRef.setField(targetCol);
        } else {
            // Unknown mode: keep the original unconditional defaults.
            df.setOptype(OPTYPE.CATEGORICAL);
            df.setDataType(DATATYPE.STRING);
        }
    }
}
Aggregations