Use of org.knime.base.node.util.DefaultDataArray in project knime-core by knime:
class DefaultVisualizationNodeModel, method execute().
/**
 * Converts the input data at inport 0 into a
 * {@link org.knime.base.node.util.DataArray} holding at most the number of
 * rows configured in the {@link DefaultVisualizationNodeDialog}. Nominal
 * columns are ignored if their set of possible values is null or contains
 * more than 60 values.
 *
 * {@inheritDoc}
 */
@Override
protected BufferedDataTable[] execute(final BufferedDataTable[] inData, final ExecutionContext exec) throws Exception {
    final int maxRows = m_maxRows.getIntValue();
    // determine the columns to exclude; warning output is suppressed here
    findCompatibleColumns(inData[0].getDataTableSpec(), false);
    final DataTable filtered = new FilterColumnTable(inData[0], false, getExcludedColumns());
    m_input = new DefaultDataArray(filtered, 1, maxRows, exec);
    // inform the user when the view will not show the complete table
    if (inData[0].size() > maxRows) {
        setWarningMessage("Only the first " + maxRows + " rows are displayed.");
    }
    return new BufferedDataTable[0];
}
Use of org.knime.base.node.util.DefaultDataArray in project knime-core by knime:
class DefaultVisualizationNodeModel, method loadInternals().
/**
 * Restores the previously saved {@link org.knime.base.node.util.DataArray}
 * from the node internals directory.
 *
 * {@inheritDoc}
 */
@Override
protected void loadInternals(final File nodeInternDir, final ExecutionMonitor exec) throws IOException, CanceledExecutionException {
    final File internalsFile = new File(nodeInternDir, FILE_NAME);
    final ContainerTable restored = DataContainer.readFromZip(internalsFile);
    // re-wrap at most m_maxRows rows, same limit as used during execute()
    m_input = new DefaultDataArray(restored, 1, m_maxRows.getIntValue(), exec);
}
Use of org.knime.base.node.util.DefaultDataArray in project knime-core by knime:
class LiftChartNodeModel, method loadInternals().
/**
 * Restores the two internal data arrays from the files written during
 * execution, splitting the reported progress evenly between them.
 *
 * {@inheritDoc}
 */
@Override
protected void loadInternals(final File nodeInternDir, final ExecutionMonitor exec) throws IOException, CanceledExecutionException {
    m_dataArray = new DataArray[2];
    // internals were written as DATA_FILE + "1" and DATA_FILE + "2"
    for (int i = 0; i < m_dataArray.length; i++) {
        final File dataFile = new File(nodeInternDir, DATA_FILE + (i + 1));
        final ContainerTable table = DataContainer.readFromZip(dataFile);
        m_dataArray[i] = new DefaultDataArray(table, 1, (int)table.size(), exec.createSubProgress(0.5));
    }
}
Use of org.knime.base.node.util.DefaultDataArray in project knime-core by knime:
class LinReg2LearnerNodeModel, method loadInternals().
/**
 * Restores the learner's internal state: the saved model content (table
 * spec plus regression parameters) and the row sample kept for the views.
 *
 * {@inheritDoc}
 *
 * @throws IOException if the saved state cannot be read or is invalid
 */
@Override
protected void loadInternals(final File internDir, final ExecutionMonitor exec) throws IOException, CanceledExecutionException {
    final File inFile = new File(internDir, FILE_SAVE);
    final ModelContentRO c;
    // try-with-resources ensures the stream is closed even if loading fails
    try (BufferedInputStream in = new BufferedInputStream(new GZIPInputStream(new FileInputStream(inFile)))) {
        c = ModelContent.loadFromXML(in);
    }
    try {
        ModelContentRO specContent = c.getModelContent(CFG_SPEC);
        DataTableSpec spec = DataTableSpec.load(specContent);
        ModelContentRO parContent = c.getModelContent(CFG_LinReg2_CONTENT);
        m_content = LinearRegressionContent.load(parContent, spec);
    } catch (InvalidSettingsException ise) {
        // (message, cause) constructor preserves the full cause chain;
        // replaces the legacy create-then-initCause idiom
        throw new IOException("Unable to restore state: " + ise.getMessage(), ise);
    }
    File dataFile = new File(internDir, FILE_DATA);
    ContainerTable t = DataContainer.readFromZip(dataFile);
    int rowCount = t.getRowCount();
    m_rowContainer = new DefaultDataArray(t, 1, rowCount, exec);
}
Use of org.knime.base.node.util.DefaultDataArray in project knime-core by knime:
class MDSNodeModel, method execute().
/**
 * Runs the MDS projection: restricts the input to the selected columns and
 * the configured number of rows, trains the {@link MDSManager}, and appends
 * the mapped coordinates to the original input table.
 *
 * {@inheritDoc}
 */
@Override
protected BufferedDataTable[] execute(final BufferedDataTable[] inData, final ExecutionContext exec) throws Exception {
    // either all input rows or the user-configured maximum
    final int rowsToUse = m_useRowsModel.getBooleanValue()
            ? inData[0].getRowCount() : m_rowsModel.getIntValue();
    // warn the user when not all rows of the input will be processed
    if (inData[0].getRowCount() > rowsToUse) {
        setWarningMessage("Maximal number of rows to report is less than number of rows in input data table !");
    }
    final DataTableSpec inSpec = inData[0].getSpec();
    final ColumnRearranger keepFilter = new ColumnRearranger(inSpec);
    if (m_includeList != null) {
        keepFilter.keepOnly(m_includeList.toArray(new String[m_includeList.size()]));
    }
    BufferedDataTable workingTable = exec.createColumnRearrangeTable(inData[0], keepFilter, exec.createSilentSubProgress(0.0));
    // restrict to the first rowsToUse rows ...
    final DataTable rowLimited = new DefaultDataArray(workingTable, 1, rowsToUse);
    // ... and materialize them again as a BufferedDataTable, which is used
    // to generate the output table
    workingTable = exec.createBufferedDataTable(rowLimited, exec);
    // set up and train the MDS mapping
    m_manager = new MDSManager(m_outputDimModel.getIntValue(), m_distModel.getStringValue(), m_fuzzy, workingTable, exec);
    m_manager.init(m_seedModel.getIntValue());
    m_manager.train(m_epochsModel.getIntValue(), m_learnrateModel.getDoubleValue());
    // append the projected coordinates to the full input table
    final ColumnRearranger appender = createColumnRearranger(inSpec, new MDSCellFactory(m_manager.getDataPoints(), m_manager.getDimension()));
    return new BufferedDataTable[]{exec.createColumnRearrangeTable(inData[0], appender, exec.createSubProgress(0.1))};
}
Aggregations