Use of org.dmg.pmml.NeuralNetworkDocument.NeuralNetwork in project knime-core by knime.
Class PMMLNeuralNetworkTranslator, method initializeFrom.
/**
 * {@inheritDoc}
 */
@Override
public void initializeFrom(final PMMLDocument pmmlDoc) {
    m_nameMapper = new DerivedFieldMapper(pmmlDoc);
    NeuralNetwork[] models = pmmlDoc.getPMML().getNeuralNetworkArray();
    if (models.length == 0) {
        throw new IllegalArgumentException("No neural network model provided.");
    } else if (models.length > 1) {
        LOGGER.warn("Multiple neural network models found. Only the first model is considered.");
    }
    NeuralNetwork nnModel = models[0];
    // initialize the neural inputs
    initInputLayer(nnModel);
    // initialize the hidden layers
    initiateHiddenLayers(nnModel);
    // initialize the final layer
    initiateFinalLayer(nnModel);
    // initialize the neural outputs
    initiateNeuralOutputs(nnModel);
    // initialize the neural network properties
    ACTIVATIONFUNCTION.Enum actFunc = nnModel.getActivationFunction();
    NNNORMALIZATIONMETHOD.Enum normMethod = nnModel.getNormalizationMethod();
    if (ACTIVATIONFUNCTION.LOGISTIC != actFunc) {
        LOGGER.error("Only the logistic activation function is supported in KNIME MLP.");
    }
    if (NNNORMALIZATIONMETHOD.NONE != normMethod) {
        LOGGER.error("No normalization method is supported in KNIME MLP.");
    }
    MININGFUNCTION.Enum functionName = nnModel.getFunctionName();
    if (MININGFUNCTION.CLASSIFICATION == functionName) {
        m_mlpMethod = MultiLayerPerceptron.CLASSIFICATION_MODE;
    } else if (MININGFUNCTION.REGRESSION == functionName) {
        m_mlpMethod = MultiLayerPerceptron.REGRESSION_MODE;
    }
    if (m_allLayers.size() < 3) {
        throw new IllegalArgumentException("Only neural networks with 3 layers are supported in KNIME MLP.");
    }
    Layer[] allLayers = new Layer[m_allLayers.size()];
    allLayers = m_allLayers.toArray(allLayers);
    m_mlp = new MultiLayerPerceptron(allLayers);
    Architecture myarch = new Architecture(allLayers[0].getPerceptrons().length,
        allLayers.length - 2, allLayers[1].getPerceptrons().length,
        allLayers[allLayers.length - 1].getPerceptrons().length);
    m_mlp.setArchitecture(myarch);
    m_mlp.setClassMapping(m_classmap);
    m_mlp.setInputMapping(m_inputmap);
    m_mlp.setMode(m_mlpMethod);
}
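For orientation, a minimal usage sketch of the method above: a PMML file is parsed with XMLBeans and handed to the translator, which rebuilds the KNIME MultiLayerPerceptron from the first NeuralNetwork model. The no-argument translator constructor and the getMLP() accessor mentioned in the final comment are assumptions made only for illustration.

// Hedged sketch: parse a PMML file and let the translator reconstruct the KNIME MLP.
// The no-argument constructor and the getMLP() accessor are assumptions for illustration.
import java.io.File;
import java.io.IOException;
import org.apache.xmlbeans.XmlException;
import org.dmg.pmml.PMMLDocument;

public final class InitializeFromSketch {
    public static void main(final String[] args) throws XmlException, IOException {
        PMMLDocument pmmlDoc = PMMLDocument.Factory.parse(new File("neural-network.pmml"));
        PMMLNeuralNetworkTranslator translator = new PMMLNeuralNetworkTranslator();
        translator.initializeFrom(pmmlDoc);
        // The translator now wraps a MultiLayerPerceptron reconstructed from the first
        // NeuralNetwork model in the document, e.g. exposed via an accessor such as getMLP().
    }
}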
Use of org.dmg.pmml.NeuralNetworkDocument.NeuralNetwork in project knime-core by knime.
Class PMMLNeuralNetworkTranslator, method exportTo.
/**
 * {@inheritDoc}
 */
@Override
public SchemaType exportTo(final PMMLDocument pmmlDoc, final PMMLPortObjectSpec spec) {
    m_nameMapper = new DerivedFieldMapper(pmmlDoc);
    NeuralNetwork nnModel = pmmlDoc.getPMML().addNewNeuralNetwork();
    PMMLMiningSchemaTranslator.writeMiningSchema(spec, nnModel);
    if (m_mlp.getMode() == MultiLayerPerceptron.CLASSIFICATION_MODE) {
        nnModel.setFunctionName(MININGFUNCTION.CLASSIFICATION);
    } else if (m_mlp.getMode() == MultiLayerPerceptron.REGRESSION_MODE) {
        nnModel.setFunctionName(MININGFUNCTION.REGRESSION);
    }
    nnModel.setAlgorithmName("RProp");
    nnModel.setActivationFunction(ACTIVATIONFUNCTION.LOGISTIC);
    nnModel.setNormalizationMethod(NNNORMALIZATIONMETHOD.NONE);
    nnModel.setWidth(0.0);
    nnModel.setNumberOfLayers(BigInteger.valueOf(m_mlp.getNrLayers() - 1));
    // add input layer
    addInputLayer(nnModel, m_mlp);
    // add hidden & final layers
    for (int i = 1; i < m_mlp.getNrLayers(); i++) {
        addLayer(nnModel, m_mlp, i);
    }
    // add output layer
    addOutputLayer(nnModel, m_mlp, spec);
    return NeuralNetwork.type;
}
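And the reverse direction, hedged: a sketch that writes a trained perceptron into a fresh PMML document via the method above. The translator constructor that takes a MultiLayerPerceptron is an assumption, and the PMML version string is illustrative; imports for the KNIME MLP and translator classes are assumed to be on the classpath.

// Hedged sketch: export a trained KNIME MLP into a new PMML document.
// The MultiLayerPerceptron-based translator constructor is an assumption for illustration.
import org.apache.xmlbeans.SchemaType;
import org.dmg.pmml.PMMLDocument;
import org.knime.core.node.port.pmml.PMMLPortObjectSpec;

public final class ExportSketch {
    static PMMLDocument exportMlp(final MultiLayerPerceptron trainedMlp, final PMMLPortObjectSpec spec) {
        PMMLDocument pmmlDoc = PMMLDocument.Factory.newInstance();
        pmmlDoc.addNewPMML().setVersion("4.2"); // version string is illustrative
        PMMLNeuralNetworkTranslator translator = new PMMLNeuralNetworkTranslator(trainedMlp);
        // exportTo appends a NeuralNetwork element and returns the schema type of the created model
        SchemaType createdType = translator.exportTo(pmmlDoc, spec);
        return pmmlDoc;
    }
}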
Use of org.dmg.pmml.NeuralNetworkDocument.NeuralNetwork in project knime-core by knime.
Class PMMLModelWrapper, method getSegmentContent.
/**
 * Returns the content of a segment as a model wrapper.
 *
 * @param s the segment
 * @return a wrapper around the model contained in the segment, or {@code null} if the
 *         segment holds no supported model type
 */
public static PMMLModelWrapper getSegmentContent(final Segment s) {
    TreeModel treemodel = s.getTreeModel();
    if (treemodel != null) {
        return new PMMLTreeModelWrapper(treemodel);
    }
    RegressionModel regrmodel = s.getRegressionModel();
    if (regrmodel != null) {
        return new PMMLRegressionModelWrapper(regrmodel);
    }
    GeneralRegressionModel genregrmodel = s.getGeneralRegressionModel();
    if (genregrmodel != null) {
        return new PMMLGeneralRegressionModelWrapper(genregrmodel);
    }
    ClusteringModel clustmodel = s.getClusteringModel();
    if (clustmodel != null) {
        return new PMMLClusteringModelWrapper(clustmodel);
    }
    NaiveBayesModel nbmodel = s.getNaiveBayesModel();
    if (nbmodel != null) {
        return new PMMLNaiveBayesModelWrapper(nbmodel);
    }
    NeuralNetwork nn = s.getNeuralNetwork();
    if (nn != null) {
        return new PMMLNeuralNetworkWrapper(nn);
    }
    RuleSetModel rsmodel = s.getRuleSetModel();
    if (rsmodel != null) {
        return new PMMLRuleSetModelWrapper(rsmodel);
    }
    SupportVectorMachineModel svmmodel = s.getSupportVectorMachineModel();
    if (svmmodel != null) {
        return new PMMLSupportVectorMachineModelWrapper(svmmodel);
    }
    return null;
}
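A hedged sketch of how this lookup is typically used: walking the segments of an ensemble (MiningModel) and collecting a wrapper for each supported segment model. The XMLBeans accessors for MiningModel and Segmentation are assumed to follow the same Document.Inner naming pattern as NeuralNetworkDocument.NeuralNetwork; the helper class name is illustrative.

// Hedged sketch: wrap every supported segment model of each MiningModel in a document.
import java.util.ArrayList;
import java.util.List;
import org.dmg.pmml.MiningModelDocument.MiningModel;
import org.dmg.pmml.PMMLDocument;
import org.dmg.pmml.SegmentDocument.Segment;

public final class SegmentWrapperSketch {
    static List<PMMLModelWrapper> wrapSegments(final PMMLDocument pmmlDoc) {
        List<PMMLModelWrapper> wrappers = new ArrayList<>();
        for (MiningModel miningModel : pmmlDoc.getPMML().getMiningModelArray()) {
            for (Segment segment : miningModel.getSegmentation().getSegmentArray()) {
                PMMLModelWrapper wrapper = PMMLModelWrapper.getSegmentContent(segment);
                if (wrapper != null) { // null: the segment holds an unsupported model type
                    wrappers.add(wrapper);
                }
            }
        }
        return wrappers;
    }
}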
Use of org.dmg.pmml.NeuralNetworkDocument.NeuralNetwork in project knime-core by knime.
Class PMMLPortObject, method moveGlobalTransformationsToModel.
/**
 * Moves the content of the transformation dictionary to local
 * transformations of the model if a model exists.
 */
public void moveGlobalTransformationsToModel() {
    PMML pmml = m_pmmlDoc.getPMML();
    TransformationDictionary transDict = pmml.getTransformationDictionary();
    if (transDict == null || transDict.getDerivedFieldArray() == null
            || transDict.getDerivedFieldArray().length == 0) {
        // nothing to be moved
        return;
    }
    DerivedField[] globalDerivedFields = transDict.getDerivedFieldArray();
    LocalTransformations localTrans = null;
    if (pmml.getTreeModelArray().length > 0) {
        TreeModel model = pmml.getTreeModelArray(0);
        localTrans = model.getLocalTransformations();
        if (localTrans == null) {
            localTrans = model.addNewLocalTransformations();
        }
    } else if (pmml.getClusteringModelArray().length > 0) {
        ClusteringModel model = pmml.getClusteringModelArray(0);
        localTrans = model.getLocalTransformations();
        if (localTrans == null) {
            localTrans = model.addNewLocalTransformations();
        }
    } else if (pmml.getNeuralNetworkArray().length > 0) {
        NeuralNetwork model = pmml.getNeuralNetworkArray(0);
        localTrans = model.getLocalTransformations();
        if (localTrans == null) {
            localTrans = model.addNewLocalTransformations();
        }
    } else if (pmml.getSupportVectorMachineModelArray().length > 0) {
        SupportVectorMachineModel model = pmml.getSupportVectorMachineModelArray(0);
        localTrans = model.getLocalTransformations();
        if (localTrans == null) {
            localTrans = model.addNewLocalTransformations();
        }
    } else if (pmml.getRegressionModelArray().length > 0) {
        RegressionModel model = pmml.getRegressionModelArray(0);
        localTrans = model.getLocalTransformations();
        if (localTrans == null) {
            localTrans = model.addNewLocalTransformations();
        }
    } else if (pmml.getGeneralRegressionModelArray().length > 0) {
        GeneralRegressionModel model = pmml.getGeneralRegressionModelArray(0);
        localTrans = model.getLocalTransformations();
        if (localTrans == null) {
            localTrans = model.addNewLocalTransformations();
        }
    } else if (pmml.sizeOfRuleSetModelArray() > 0) {
        RuleSetModel model = pmml.getRuleSetModelArray(0);
        localTrans = model.getLocalTransformations();
        if (localTrans == null) {
            localTrans = model.addNewLocalTransformations();
        }
    }
    if (localTrans != null) {
        DerivedField[] derivedFields =
            appendDerivedFields(localTrans.getDerivedFieldArray(), globalDerivedFields);
        localTrans.setDerivedFieldArray(derivedFields);
        // remove derived fields from the TransformationDictionary
        transDict.setDerivedFieldArray(new DerivedField[0]);
    }
    // else do nothing, as no model exists yet
}
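For context, a hedged sketch of where this method fits when assembling a port object: a model is written into the PMMLPortObject and the global derived fields are then relocated into that model's LocalTransformations. The addModelTranslater call and the translator constructor are assumptions made for illustration, not taken from the snippet above.

// Hedged sketch: build a port object, add a model, then relocate global derived fields.
// addModelTranslater and the translator constructor are assumptions for illustration.
import org.knime.core.node.port.pmml.PMMLPortObject;
import org.knime.core.node.port.pmml.PMMLPortObjectSpec;

public final class RelocateTransformationsSketch {
    static PMMLPortObject buildAndRelocate(final PMMLPortObjectSpec spec, final MultiLayerPerceptron mlp) {
        PMMLPortObject port = new PMMLPortObject(spec);
        port.addModelTranslater(new PMMLNeuralNetworkTranslator(mlp)); // writes a NeuralNetwork model
        port.moveGlobalTransformationsToModel(); // global DerivedFields now live in LocalTransformations
        return port;
    }
}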