Use of org.knime.base.data.neural.Perceptron in project knime-core by KNIME.
From the class PMMLNeuralNetworkTranslator, method initiateFinalLayer.
/**
 * Builds the final (output) layer of the MLP from the last PMML neural layer
 * and appends it to the running layer list.
 *
 * @param nnModel the PMML neural network model
 */
private void initiateFinalLayer(final NeuralNetwork nnModel) {
    final int lastIndex = nnModel.getNeuralLayerArray().length - 1;
    final NeuralLayer finalLayer = nnModel.getNeuralLayerArray(lastIndex);
    m_counter = 0;
    m_idPosMap = new HashMap<String, Integer>();
    m_curLayer++;
    m_curPerceptrons = new Vector<Perceptron>();
    for (final Neuron neuron : finalLayer.getNeuronArray()) {
        m_weights = new double[m_predPerceptrons.length];
        m_curPercpetronID = neuron.getId();
        // PMML stores a bias; the perceptron API expects a threshold (negated bias).
        m_curThreshold = -1 * neuron.getBias();
        for (final Con connection : neuron.getConArray()) {
            // Route each incoming weight to the slot of its source neuron
            // in the predecessor layer.
            final int sourcePos = m_predidPosMap.get(connection.getFrom());
            m_weights[sourcePos] = connection.getWeight();
        }
        final Perceptron perceptron = new SigmoidPerceptron(m_weights, m_predPerceptrons);
        perceptron.setThreshold(m_curThreshold);
        m_curPerceptrons.add(perceptron);
        m_idPosMap.put(m_curPercpetronID, m_counter);
        m_counter++;
    }
    final Perceptron[] curPerceptrons =
        m_curPerceptrons.toArray(new Perceptron[m_curPerceptrons.size()]);
    m_allLayers.add(m_curLayer, new HiddenLayer(m_predLayer, curPerceptrons));
    // This layer becomes the predecessor for any subsequent construction step.
    m_predLayer = m_allLayers.get(m_curLayer);
    m_predPerceptrons = curPerceptrons;
    m_predidPosMap = new HashMap<String, Integer>(m_idPosMap);
}
Use of org.knime.base.data.neural.Perceptron in project knime-core by KNIME.
From the class PMMLNeuralNetworkTranslator, method initiateHiddenLayers.
/**
 * Builds every hidden layer of the MLP from the PMML neural layers
 * (all layers except the last one, which is handled separately).
 *
 * @param nnModel the PMML neural network model
 */
private void initiateHiddenLayers(final NeuralNetwork nnModel) {
    final int hiddenLayerCount = nnModel.getNeuralLayerArray().length - 1;
    for (int layerIdx = 0; layerIdx < hiddenLayerCount; layerIdx++) {
        final NeuralLayer pmmlLayer = nnModel.getNeuralLayerArray(layerIdx);
        m_counter = 0;
        m_idPosMap = new HashMap<String, Integer>();
        m_curLayer++;
        m_curPerceptrons = new Vector<Perceptron>();
        for (final Neuron neuron : pmmlLayer.getNeuronArray()) {
            m_weights = new double[m_predPerceptrons.length];
            m_curPercpetronID = neuron.getId();
            // PMML stores a bias; the perceptron API expects a threshold (negated bias).
            m_curThreshold = -1 * neuron.getBias();
            for (final Con incoming : neuron.getConArray()) {
                // Place the weight at the index of its source neuron
                // within the predecessor layer.
                m_weights[m_predidPosMap.get(incoming.getFrom())] = incoming.getWeight();
            }
            final Perceptron perceptron = new SigmoidPerceptron(m_weights, m_predPerceptrons);
            perceptron.setThreshold(m_curThreshold);
            m_curPerceptrons.add(perceptron);
            m_idPosMap.put(m_curPercpetronID, m_counter);
            m_counter++;
        }
        final Perceptron[] builtPerceptrons =
            m_curPerceptrons.toArray(new Perceptron[m_curPerceptrons.size()]);
        m_allLayers.add(m_curLayer, new HiddenLayer(m_predLayer, builtPerceptrons));
        // The freshly built layer becomes the predecessor for the next iteration.
        m_predLayer = m_allLayers.get(m_curLayer);
        m_predPerceptrons = builtPerceptrons;
        m_predidPosMap = new HashMap<String, Integer>(m_idPosMap);
    }
}
Use of org.knime.base.data.neural.Perceptron in project knime-core by KNIME.
From the class PMMLNeuralNetworkTranslator, method addOutputLayer.
/**
 * Writes the PMML output layer of the MLP.
 *
 * @param nnModel the neural network model
 * @param mlp the underlying {@link MultiLayerPerceptron}
 * @param spec the port object spec
 */
protected void addOutputLayer(final NeuralNetwork nnModel, final MultiLayerPerceptron mlp,
        final PMMLPortObjectSpec spec) {
    final int lastlayer = mlp.getNrLayers() - 1;
    final String targetCol = spec.getTargetFields().iterator().next();
    final Layer outputlayer = mlp.getLayer(lastlayer);
    final Perceptron[] outputperceptrons = outputlayer.getPerceptrons();
    final HashMap<DataCell, Integer> outputmap = mlp.getClassMapping();
    final NeuralOutputs neuralOuts = nnModel.addNewNeuralOutputs();
    neuralOuts.setNumberOfOutputs(BigInteger.valueOf(outputperceptrons.length));
    for (int i = 0; i < outputperceptrons.length; i++) {
        final NeuralOutput neuralOutput = neuralOuts.addNewNeuralOutput();
        // Output neurons are identified as "<layerIndex>,<neuronIndex>".
        neuralOutput.setOutputNeuron(lastlayer + "," + i);
        // Reverse-lookup the class value mapped to this output neuron;
        // stop at the first hit (was: scanned the whole map on every neuron).
        String colname = "";
        for (Entry<DataCell, Integer> e : outputmap.entrySet()) {
            if (e.getValue().equals(i)) {
                colname = ((StringValue)e.getKey()).getStringValue();
                break;
            }
        }
        final DerivedField df = neuralOutput.addNewDerivedField();
        // Default optype/datatype (also used for classification and any
        // unrecognized mode); regression overrides them below. The original
        // code re-set these same values in a second, duplicated mode check.
        df.setOptype(OPTYPE.CATEGORICAL);
        df.setDataType(DATATYPE.STRING);
        if (mlp.getMode() == MultiLayerPerceptron.CLASSIFICATION_MODE) {
            // Classification: map the target column to this neuron's class value.
            final NormDiscrete normDiscrete = df.addNewNormDiscrete();
            normDiscrete.setField(targetCol);
            normDiscrete.setValue(colname);
        } else if (mlp.getMode() == MultiLayerPerceptron.REGRESSION_MODE) {
            // Regression: continuous output referencing the target field directly.
            df.setOptype(OPTYPE.CONTINUOUS);
            df.setDataType(DATATYPE.DOUBLE);
            final FieldRef fieldRef = df.addNewFieldRef();
            fieldRef.setField(targetCol);
        }
    }
}
Use of org.knime.base.data.neural.Perceptron in project knime-core by KNIME.
From the class RProp, method train.
/**
 * Train the neural network once.
 *
 * Performs one full pass of resilient backpropagation (RProp): for every
 * sample it runs a forward pass, backpropagates the error deltas, and
 * accumulates error derivatives; afterwards all weights and thresholds are
 * adjusted using per-connection step sizes that grow/shrink depending on
 * whether the derivative kept or changed its sign.
 *
 * @param nn neural net to train
 * @param samples the samples
 * @param outputs the desired outputs for these samples
 */
public void train(final MultiLayerPerceptron nn, final Double[][] samples, final Double[][] outputs) {
m_nn = nn;
m_samples = samples;
m_outputs = outputs;
// (Re)initialize the per-epoch accumulators (m_output, m_delta, m_errDers, ...).
init();
double sum = 0.0;
double y = 0.0;
double errDer;
double thrErrDer;
double oldErrDer;
double oldThrErrDer;
/*
 * For all samples
 */
for (int s = 0; s < samples.length; s++) {
// sample[0] = input vector, sample[1] = desired output vector.
Double[][] sample = new Double[2][samples[0].length];
sample[0] = m_samples[s];
sample[1] = m_outputs[s];
/*
 * Forward wave
 */
for (int i = 0; i < m_output.length; i++) {
for (int j = 0; j < m_output[i].length; j++) {
if (i == 0) {
// input neuron
m_output[i][j] = sample[0][j];
} else {
// non-input neuron
Perceptron p = nn.getLayer(i).getPerceptron(j);
sum = 0.0;
// Weighted sum over all outputs of the previous layer.
for (int k = 0; k < m_output[i - 1].length; k++) {
sum += m_output[i - 1][k] * p.getWeight(k);
}
// Threshold is subtracted before applying the activation function.
m_output[i][j] = p.activationFunction(sum - p.getThreshold());
}
}
}
/*
 * Backward wave
 */
for (int i = m_delta.length - 1; i >= 0; i--) {
for (int j = 0; j < m_delta[i].length; j++) {
y = m_output[i][j];
if (i == m_delta.length - 1) {
// output neuron
// NOTE(review): the y * (1 - y) factor is the derivative of the
// logistic sigmoid — assumes sigmoid activation throughout; confirm.
m_delta[i][j] = (sample[1][j] - y) * y * (1 - y);
} else {
// non-output neuron
sum = 0.0;
// Propagate deltas back through the weights of the next layer.
for (int k = 0; k < m_delta[i + 1].length; k++) {
sum += m_delta[i + 1][k] * nn.getLayer(i + 1).getPerceptron(k).getWeight(j);
}
m_delta[i][j] = y * (1 - y) * sum;
}
}
}
// Now compute error derivations
// Accumulated over all samples (batch learning): dE/dw[i][j][k].
for (int i = 0; i < m_errDers.length; i++) {
for (int j = 0; j < m_errDers[i].length; j++) {
for (int k = 0; k < m_errDers[i][j].length; k++) {
m_errDers[i][j][k] += m_output[i][k] * -m_delta[i + 1][j];
}
}
}
// Same accumulation for the threshold derivatives (sign differs because
// the threshold enters the net input negatively).
for (int i = 0; i < m_thrErrDers.length; i++) {
for (int j = 0; j < m_thrErrDers[i].length; j++) {
m_thrErrDers[i][j] += m_delta[i + 1][j];
}
}
}
// STEP 2: for all weights set delta_w
for (int i = 1; i < nn.getLayers().length; i++) {
for (int j = 0; j < nn.getLayer(i).getPerceptrons().length; j++) {
for (int k = 0; k < nn.getLayer(i - 1).getPerceptrons().length; k++) {
// Compute error derivation
errDer = m_errDers[i - 1][j][k];
// Also get old error derivation
oldErrDer = m_oldErrDers[i - 1][j][k];
if ((errDer * oldErrDer) > 0.0) {
// Same sign as last epoch: accelerate (grow step size, capped).
m_etaIJ[i - 1][j][k] = Math.min(m_etaIJ[i - 1][j][k] * getEtaPlus(), DELTA_MAX);
double deltaW = -sgn(errDer) * m_etaIJ[i - 1][j][k];
nn.getLayer(i).getPerceptron(j).setWeight(k, nn.getLayer(i).getPerceptron(j).getWeight(k) + deltaW);
m_oldErrDers[i - 1][j][k] = errDer;
} else if ((errDer * oldErrDer) < 0.0) {
// Sign flip: overshot a minimum — shrink step size, skip the
// update, and zero the stored derivative so the next epoch
// takes the ==0 branch.
m_etaIJ[i - 1][j][k] = Math.max(m_etaIJ[i - 1][j][k] * getEtaMinus(), DELTA_MIN);
m_oldErrDers[i - 1][j][k] = 0;
} else if ((errDer * oldErrDer) == 0) {
// First epoch or right after a sign flip: step without resizing.
double deltaW = -sgn(errDer) * m_etaIJ[i - 1][j][k];
nn.getLayer(i).getPerceptron(j).setWeight(k, nn.getLayer(i).getPerceptron(j).getWeight(k) + deltaW);
m_oldErrDers[i - 1][j][k] = errDer;
}
}
}
}
// Thresholds
// Same RProp scheme applied to each perceptron's threshold.
for (int i = 1; i < nn.getLayers().length; i++) {
for (int j = 0; j < nn.getLayer(i).getPerceptrons().length; j++) {
// Compute error derivation
thrErrDer = m_thrErrDers[i - 1][j];
// Also get old error derivation
oldThrErrDer = m_oldThrErrDers[i - 1][j];
if ((thrErrDer * oldThrErrDer) > 0.0) {
m_thrEtaIJ[i - 1][j] = Math.min(m_thrEtaIJ[i - 1][j] * getEtaPlus(), DELTA_MAX);
double deltaThr = -sgn(thrErrDer) * m_thrEtaIJ[i - 1][j];
nn.getLayer(i).getPerceptron(j).setThreshold(nn.getLayer(i).getPerceptron(j).getThreshold() + deltaThr);
m_oldThrErrDers[i - 1][j] = thrErrDer;
} else if ((thrErrDer * oldThrErrDer) < 0.0) {
m_thrEtaIJ[i - 1][j] = Math.max(m_thrEtaIJ[i - 1][j] * getEtaMinus(), DELTA_MIN);
m_oldThrErrDers[i - 1][j] = 0;
} else if ((thrErrDer * oldThrErrDer) == 0.0) {
double deltaThr = -sgn(thrErrDer) * m_thrEtaIJ[i - 1][j];
nn.getLayer(i).getPerceptron(j).setThreshold(nn.getLayer(i).getPerceptron(j).getThreshold() + deltaThr);
m_oldThrErrDers[i - 1][j] = thrErrDer;
}
}
}
}
Use of org.knime.base.data.neural.Perceptron in project knime-core by KNIME.
From the class PMMLNeuralNetworkTranslator, method initiateHiddenLayers (duplicate listing).
/**
 * Builds every hidden layer of the MLP from the PMML neural layers (all
 * layers except the last one, which is the output layer).
 *
 * @param nnModel the PMML neural network model
 */
private void initiateHiddenLayers(final NeuralNetwork nnModel) {
// The last PMML layer is skipped here; it is handled as the final layer.
for (int i = 0; i < nnModel.getNeuralLayerArray().length - 1; i++) {
NeuralLayer hiddenLayer = nnModel.getNeuralLayerArray(i);
// Reset the per-layer bookkeeping (neuron counter and id->position map).
m_counter = 0;
m_idPosMap = new HashMap<String, Integer>();
m_curLayer++;
m_curPerceptrons = new Vector<Perceptron>();
for (Neuron neuron : hiddenLayer.getNeuronArray()) {
m_weights = new double[m_predPerceptrons.length];
m_curPercpetronID = neuron.getId();
// PMML stores a bias; the perceptron API expects a threshold (negated bias).
m_curThreshold = -1 * neuron.getBias();
for (Con con : neuron.getConArray()) {
String fromID = con.getFrom();
double weight = con.getWeight();
// Position of the source neuron within the predecessor layer.
int pos = m_predidPosMap.get(fromID);
m_weights[pos] = weight;
}
Perceptron p = new SigmoidPerceptron(m_weights, m_predPerceptrons);
p.setThreshold(m_curThreshold);
m_curPerceptrons.add(p);
m_idPosMap.put(m_curPercpetronID, m_counter);
m_counter++;
}
Perceptron[] curPerceptrons = new Perceptron[m_curPerceptrons.size()];
curPerceptrons = m_curPerceptrons.toArray(curPerceptrons);
m_allLayers.add(m_curLayer, new HiddenLayer(m_predLayer, curPerceptrons));
// The freshly built layer becomes the predecessor for the next iteration.
m_predLayer = m_allLayers.get(m_curLayer);
m_predPerceptrons = curPerceptrons;
m_predidPosMap = new HashMap<String, Integer>(m_idPosMap);
}
}
Aggregations