Use of org.knime.core.node.ModelContentRO in project knime-core by knime.
The class SotaPortObjectSpec, method load.
/**
 * {@inheritDoc}
 */
@Override
protected void load(final ModelContentRO model) throws InvalidSettingsException {
    ModelContentRO subContent = model.getModelContent(SotaPortObject.CFG_KEY_SPEC);
    m_spec = DataTableSpec.load(subContent);
    m_indexOfClassCol = model.getInt(SotaPortObject.CFG_KEY_CLASSCOL_INDEX);
}
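For orientation, a minimal sketch of the symmetric write side follows. It assumes a ModelContentWO argument with the usual addModelContent/addInt writers and that DataTableSpec.save accepts the nested content; the actual save method in SotaPortObjectSpec may look different.

// Hypothetical sketch of the matching save method (not the actual knime-core code):
// it writes the same keys that load(ModelContentRO) reads above.
protected void save(final ModelContentWO model) {
    // nested sub-content for the table spec, under the key read by load
    ModelContentWO subContent = model.addModelContent(SotaPortObject.CFG_KEY_SPEC);
    m_spec.save(subContent);
    // plain int entry for the class column index
    model.addInt(SotaPortObject.CFG_KEY_CLASSCOL_INDEX, m_indexOfClassCol);
}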
Use of org.knime.core.node.ModelContentRO in project knime-core by knime.
The class NaiveBayesLearnerNodeModel2, method loadInternals.
/**
 * {@inheritDoc}
 */
@Override
protected void loadInternals(final File nodeInternDir, final ExecutionMonitor exec) throws IOException {
    final File modelFile = new File(nodeInternDir, CFG_DATA);
    final FileInputStream modelIn = new FileInputStream(modelFile);
    // loadFromXML already returns the content of the root tag,
    // so we don't need to ask for the root tag's content ourselves
    final ModelContentRO myModel = ModelContent.loadFromXML(modelIn);
    try {
        m_model = new NaiveBayesModel(myModel);
    } catch (final Exception e) {
        // preserve the original exception as the cause
        throw new IOException(e.getMessage(), e);
    }
}
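A minimal sketch of the corresponding saveInternals is shown below. It assumes NaiveBayesModel exposes a writer method (named savePredictorParams here purely for illustration) mirroring the ModelContentRO constructor used above; the real implementation in knime-core may differ.

// Hypothetical sketch of the matching saveInternals (the model's writer method name is an assumption):
protected void saveInternals(final File nodeInternDir, final ExecutionMonitor exec) throws IOException {
    final ModelContent myModel = new ModelContent(CFG_DATA);
    m_model.savePredictorParams(myModel); // assumed counterpart of new NaiveBayesModel(ModelContentRO)
    final File modelFile = new File(nodeInternDir, CFG_DATA);
    try (final FileOutputStream modelOut = new FileOutputStream(modelFile)) {
        // saveToXML writes the content under a single root tag, which loadFromXML unwraps again
        myModel.saveToXML(modelOut);
    }
}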
Use of org.knime.core.node.ModelContentRO in project knime-core by knime.
The class BasisFunctionLearnerNodeView, method modelChanged.
/**
 * Called when the model changed.
 *
 * {@inheritDoc}
 */
@Override
public void modelChanged() {
    BasisFunctionLearnerNodeModel model = getNodeModel();
    ModelContentRO pp = model.getModelInfo();
    if (pp == null) {
        m_content.setText("");
    } else {
        StringBuilder buf = new StringBuilder();
        buf.append("<html>\n");
        buf.append("<body>\n");
        buf.append("<h2>Learner Statistics</h2>");
        getNextValue(pp, buf);
        buf.append("</body>\n");
        buf.append("</html>\n");
        m_content.setText(buf.toString());
    }
}
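The helper getNextValue is not part of this snippet. The sketch below shows one way such a traversal could look, recursing into nested model contents and rendering leaf entries as HTML list items; it is an illustration under that assumption, not the actual knime-core helper.

// Hypothetical traversal helper (the real getNextValue may format its output differently):
private static void appendContentAsHtml(final ModelContentRO content, final StringBuilder buf) {
    buf.append("<ul>\n");
    for (String key : content.keySet()) {
        try {
            // nested model content: recurse one level deeper
            ModelContentRO child = content.getModelContent(key);
            buf.append("<li>").append(key).append("</li>\n");
            appendContentAsHtml(child, buf);
        } catch (InvalidSettingsException ise) {
            // leaf entry: fall back to its string representation (default "?" if not a string)
            buf.append("<li>").append(key).append(": ")
                .append(content.getString(key, "?")).append("</li>\n");
        }
    }
    buf.append("</ul>\n");
}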
Use of org.knime.core.node.ModelContentRO in project knime-core by knime.
The class CAIMDiscretizationNodeModel, method loadInternals.
/**
 * {@inheritDoc}
 */
@Override
protected void loadInternals(final File nodeInternDir, final ExecutionMonitor exec) throws IOException, CanceledExecutionException {
    File internalsFile = new File(nodeInternDir, SAVE_INTERNALS_FILE_NAME);
    if (!internalsFile.exists()) {
        // the file to load the internals from is not available
        throw new IOException("Internal model could not be loaded, file \"" + internalsFile.getAbsoluteFile() + "\" does not exist.");
    }
    BufferedInputStream in = new BufferedInputStream(new GZIPInputStream(new FileInputStream(internalsFile)));
    ModelContentRO binModel = ModelContent.loadFromXML(in);
    try {
        // first load the table spec of the included columns
        ModelContentRO sub = binModel.getModelContent(CONFIG_KEY_COLUMN_NANES);
        DataTableSpec inclCols = DataTableSpec.load(sub);
        // now load/create the model
        m_discretizationModel = new DiscretizationModel(binModel, inclCols);
    } catch (InvalidSettingsException ise) {
        throw new IOException("Internal model could not be loaded.", ise);
    }
}
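A sketch of the write side is shown below; it mirrors the keys and the GZIP/XML layering that loadInternals undoes. The accessor and writer methods on DiscretizationModel are assumptions for illustration only.

// Hypothetical sketch of the matching saveInternals (DiscretizationModel method names are assumptions):
protected void saveInternals(final File nodeInternDir, final ExecutionMonitor exec)
        throws IOException, CanceledExecutionException {
    ModelContent binModel = new ModelContent(SAVE_INTERNALS_FILE_NAME);
    // first store the table spec of the included columns under the key read back above
    ModelContentWO sub = binModel.addModelContent(CONFIG_KEY_COLUMN_NANES);
    m_discretizationModel.getIncludedColumnSpec().save(sub); // assumed accessor returning a DataTableSpec
    // then let the model write its bins into the same content
    m_discretizationModel.saveToModelContent(binModel); // assumed writer
    File internalsFile = new File(nodeInternDir, SAVE_INTERNALS_FILE_NAME);
    try (BufferedOutputStream out = new BufferedOutputStream(
            new GZIPOutputStream(new FileOutputStream(internalsFile)))) {
        binModel.saveToXML(out);
    }
}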
Use of org.knime.core.node.ModelContentRO in project knime-core by knime.
The class MultiLayerPerceptron, method loadPredictorParams.
/**
 * @param predParams the ConfigObject containing the model of the mlp
 * @return a new MultiLayerPerceptron based on the config
 * @throws InvalidSettingsException if settings are incorrect
 */
public static MultiLayerPerceptron loadPredictorParams(final ModelContentRO predParams) throws InvalidSettingsException {
    MultiLayerPerceptron mlp;
    Layer predecessorLayer = null;
    Layer actLayer;
    ModelContentRO alllayers = predParams.getModelContent(ALLLAYERS_KEY);
    Layer[] allLayers = new Layer[alllayers.keySet().size()];
    HashMap<DataCell, Integer> myclassmap = new HashMap<DataCell, Integer>();
    HashMap<String, Integer> myinputmap = new HashMap<String, Integer>();
    int l = 0;
    for (String layerKey : alllayers.keySet()) {
        ModelContentRO neuronsconf = alllayers.getModelContent(layerKey);
        if (l == 0) {
            // input layer
            InputPerceptron[] inputs = new InputPerceptron[neuronsconf.keySet().size()];
            int n = 0;
            for (String neuron : neuronsconf.keySet()) {
                ModelContentRO inpneurconf = neuronsconf.getModelContent(neuron);
                inputs[n] = new InputPerceptron(inpneurconf.getDouble(INPUT_KEY));
                if (inpneurconf.containsKey(CLASSVALUE_KEY)) {
                    inputs[n].setClassValue(inpneurconf.getString(CLASSVALUE_KEY));
                    myinputmap.put(inpneurconf.getString(CLASSVALUE_KEY), n);
                }
                n++;
            }
            actLayer = new InputLayer(inputs);
            allLayers[l] = actLayer;
            predecessorLayer = actLayer;
        } else {
            Perceptron[] neuronodes = new Perceptron[neuronsconf.keySet().size()];
            int n = 0;
            for (String neuron : neuronsconf.keySet()) {
                ModelContentRO neurconf = neuronsconf.getModelContent(neuron);
                // TODO: save the neuron type in the config and create the new neuron accordingly?
                neuronodes[n] = new SigmoidPerceptron(neurconf.getDoubleArray(WEIGHT_KEY), predecessorLayer.getPerceptrons());
                neuronodes[n].setThreshold(neurconf.getDouble(THRESHOLD_KEY));
                if (neurconf.containsKey(CLASSVALUE_KEY)) {
                    neuronodes[n].setClassValue(neurconf.getString(CLASSVALUE_KEY));
                    myclassmap.put(new StringCell(neurconf.getString(CLASSVALUE_KEY)), n);
                }
                n++;
            }
            actLayer = new HiddenLayer(predecessorLayer, neuronodes);
            allLayers[l] = actLayer;
            predecessorLayer = actLayer;
        }
        l++;
    }
    int mode = predParams.getInt(MODE_KEY);
    mlp = new MultiLayerPerceptron(allLayers);
    Architecture myarch = new Architecture(allLayers[0].getPerceptrons().length, allLayers.length - 1,
            allLayers[allLayers.length - 2].getPerceptrons().length, allLayers[allLayers.length - 1].getPerceptrons().length);
    mlp.setArchitecture(myarch);
    mlp.setClassMapping(myclassmap);
    mlp.setInputMapping(myinputmap);
    mlp.setMode(mode);
    return mlp;
}
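For completeness, a condensed sketch of the inverse direction follows: it writes the nested layer/neuron structure that loadPredictorParams reads above. The accessors on MultiLayerPerceptron, Layer and Perceptron (getLayers, getMode, getInput, getWeights, getThreshold, getClassValue) are partly assumptions for illustration.

// Hypothetical sketch of a matching savePredictorParams (accessor names are partly assumptions):
public static void savePredictorParams(final MultiLayerPerceptron mlp, final ModelContentWO predParams) {
    predParams.addInt(MODE_KEY, mlp.getMode()); // assumed accessor
    ModelContentWO alllayers = predParams.addModelContent(ALLLAYERS_KEY);
    Layer[] layers = mlp.getLayers(); // assumed accessor
    for (int l = 0; l < layers.length; l++) {
        // one sub-content per layer; insertion order must match the iteration order used by load
        ModelContentWO layerConf = alllayers.addModelContent("layer" + l);
        Perceptron[] perceptrons = layers[l].getPerceptrons();
        for (int n = 0; n < perceptrons.length; n++) {
            ModelContentWO neurConf = layerConf.addModelContent("neuron" + n);
            if (l == 0) {
                // input layer: store the current input value
                neurConf.addDouble(INPUT_KEY, ((InputPerceptron)perceptrons[n]).getInput()); // assumed accessor
            } else {
                // hidden/output layer: store weights and threshold
                neurConf.addDoubleArray(WEIGHT_KEY, perceptrons[n].getWeights());
                neurConf.addDouble(THRESHOLD_KEY, perceptrons[n].getThreshold());
            }
            // the class value doubles as the mapped column name for input neurons (see loadPredictorParams)
            if (perceptrons[n].getClassValue() != null) {
                neurConf.addString(CLASSVALUE_KEY, perceptrons[n].getClassValue());
            }
        }
    }
}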