Example use of org.knime.base.node.util.DefaultDataArray in project knime-core:
class HistogramNodeModel, method createHistogramModel.
/**
 * {@inheritDoc}
 *
 * Builds the interactive histogram model: caches the rows of {@code data} in a
 * {@link DefaultDataArray} and collects the distinct row colors (ordered by
 * HSB) for the color legend. Sets {@code m_model} to {@code null} for an empty
 * table.
 *
 * @throws CanceledExecutionException if the user cancels during row iteration
 */
@Override
protected void createHistogramModel(final ExecutionContext exec, final int noOfRows, final BufferedDataTable data) throws CanceledExecutionException {
    LOGGER.debug("Entering createHistogramModel(exec, dataTable) " + "of class HistogramNodeModel.");
    // Validate arguments first so callers get a clear error even when the
    // table is empty (previously the empty-table early return skipped these checks).
    if (exec == null) {
        throw new NullPointerException("exec must not be null");
    }
    if (data == null) {
        throw new IllegalArgumentException("Table shouldn't be null");
    }
    // Nothing to display for an empty table.
    if (noOfRows == 0) {
        m_model = null;
        return;
    }
    // First half of the progress budget: copy the rows into a DataArray.
    ExecutionMonitor subExec = exec.createSubProgress(0.5);
    exec.setMessage("Adding rows to histogram model...");
    final DataArray dataArray = new DefaultDataArray(data, 1, noOfRows, subExec);
    // Second half: collect the distinct row colors, sorted by hue/saturation/brightness.
    exec.setMessage("Adding row color to histogram...");
    final SortedSet<Color> colorSet = new TreeSet<Color>(HSBColorComparator.getInstance());
    subExec = exec.createSubProgress(0.5);
    final double progressPerRow = 1.0 / noOfRows;
    double progress = 0.0;
    final CloseableRowIterator rowIterator = data.iterator();
    try {
        for (int i = 0; i < noOfRows && rowIterator.hasNext(); i++) {
            final DataRow row = rowIterator.next();
            final Color color = data.getDataTableSpec().getRowColor(row).getColor(false, false);
            // TreeSet.add() is a no-op for duplicates; no contains() pre-check needed.
            colorSet.add(color);
            progress += progressPerRow;
            subExec.setProgress(progress, "Adding data rows to histogram...");
            subExec.checkCanceled();
        }
    } finally {
        // data.iterator() never returns null, so close unconditionally.
        rowIterator.close();
    }
    exec.setProgress(1.0, "Histogram finished.");
    m_model = new InteractiveHistogramDataModel(dataArray, new ArrayList<Color>(colorSet));
    LOGGER.debug("Exiting createHistogramModel(exec, dataTable) " + "of class HistogramNodeModel.");
}
Example use of org.knime.base.node.util.DefaultDataArray in project knime-core:
class LiftChartNodeModel, method execute.
/**
 * {@inheritDoc}
 *
 * Computes lift-chart data: sorts the input on the selected probability column
 * (order flag {@code false} — presumably descending; confirm against
 * SortedTable), counts positive responses per percentage interval, and builds
 * two internal tables ("Lift"/"Baseline"/"Cumulative Lift" and
 * "Actual"/"Baseline") cached in {@code m_dataArray} for the views. Returns
 * the sorted input table.
 */
@Override
protected BufferedDataTable[] execute(final BufferedDataTable[] inData, final ExecutionContext exec) throws Exception {
ConvenienceMethods.checkTableSize(inData[0]);
// Column holding the actual (true) response label.
int predColIndex = inData[0].getDataTableSpec().findColumnIndex(m_responseColumn.getStringValue());
List<String> inclList = new LinkedList<String>();
inclList.add(m_probabilityColumn.getStringValue());
boolean[] order = new boolean[] { false };
// Sort rows by predicted probability so intervals correspond to score deciles/percentiles.
SortedTable st = new SortedTable(inData[0], inclList, order, exec);
long totalResponses = 0;
// Interval width in percent; nrParts = number of chart intervals.
double partWidth = Double.parseDouble(m_intervalWidth.getStringValue());
int nrParts = (int) Math.ceil(100.0 / partWidth);
// Indices (within the sorted, non-missing rows) of positive responses.
List<Integer> positiveResponses = new LinkedList<Integer>();
int rowIndex = 0;
for (DataRow row : st) {
if (row.getCell(predColIndex).isMissing()) {
// Rows with a missing response are skipped and do NOT advance rowIndex.
setWarningMessage("There are missing values." + " Please check your data.");
continue;
}
String response = ((StringValue) row.getCell(predColIndex)).getStringValue().trim();
if (response.equalsIgnoreCase(m_responseLabel.getStringValue())) {
totalResponses++;
positiveResponses.add(rowIndex);
}
rowIndex++;
}
// NOTE(review): if every row has a missing response, rowIndex stays 0 and the
// divisions below (partWidthAbsolute, avgResponse) divide by zero — consider guarding.
int[] counter = new int[nrParts];
// Rows per interval, rounded up so the last interval may be smaller.
int partWidthAbsolute = (int) Math.ceil(rowIndex / (double) nrParts);
double avgResponse = (double) positiveResponses.size() / rowIndex;
for (int rIndex : positiveResponses) {
int index = rIndex / partWidthAbsolute;
counter[index]++;
}
// First output container: lift per interval plus cumulative lift.
DataColumnSpec[] colSpec = new DataColumnSpec[3];
colSpec[0] = new DataColumnSpecCreator("Lift", DoubleCell.TYPE).createSpec();
colSpec[1] = new DataColumnSpecCreator("Baseline", DoubleCell.TYPE).createSpec();
colSpec[2] = new DataColumnSpecCreator("Cumulative Lift", DoubleCell.TYPE).createSpec();
DataTableSpec tableSpec = new DataTableSpec(colSpec);
DataContainer cont = exec.createDataContainer(tableSpec);
// Second output container: cumulative gain ("Actual" vs. diagonal "Baseline").
colSpec = new DataColumnSpec[2];
colSpec[0] = new DataColumnSpecCreator("Actual", DoubleCell.TYPE).createSpec();
colSpec[1] = new DataColumnSpecCreator("Baseline", DoubleCell.TYPE).createSpec();
tableSpec = new DataTableSpec(colSpec);
DataContainer responseCont = exec.createDataContainer(tableSpec);
long cumulativeCounter = 0;
// Gain chart starts at the origin.
responseCont.addRowToTable(new DefaultRow(new RowKey("0"), 0.0, 0.0));
for (int i = 0; i < counter.length; i++) {
cumulativeCounter += counter[i];
double responseRate = (double) counter[i] / partWidthAbsolute;
double lift = responseRate / avgResponse;
double cumResponseRate = (double) cumulativeCounter / totalResponses;
// Number of rows covered so far; clamped because the last interval may be partial.
long number = partWidthAbsolute * (i + 1);
// well.. rounding problems
if (number > rowIndex) {
number = rowIndex;
}
double cumulativeLift = // (double)cumulativeCounter / (partWidthAbsolute * (i + 1));
(double) cumulativeCounter / number;
cumulativeLift /= avgResponse;
// cumulativeLift = lifts / (i+1);
// Row key is the cumulative percentage of rows, clamped to 100.
double rowKey = ((i + 1) * partWidth);
if (rowKey > 100) {
rowKey = 100;
}
cont.addRowToTable(new DefaultRow(new RowKey("" + rowKey), lift, 1.0, cumulativeLift));
double cumBaseline = (i + 1) * partWidth;
if (cumBaseline > 100) {
cumBaseline = 100;
}
responseCont.addRowToTable(new DefaultRow(new RowKey("" + rowKey), cumResponseRate * 100, cumBaseline));
}
cont.close();
responseCont.close();
// Cache both tables for the lift-chart view.
m_dataArray[0] = new DefaultDataArray(cont.getTable(), 1, (int) cont.size());
m_dataArray[1] = new DefaultDataArray(responseCont.getTable(), 1, (int) responseCont.size());
return new BufferedDataTable[] { st.getBufferedDataTable() };
}
Example use of org.knime.base.node.util.DefaultDataArray in project knime-core:
class SotaNodeModel, method loadInternals.
/**
 * {@inheritDoc}
 *
 * Restores the SOTA node's internals: reads the model settings and tree from
 * {@code TREE_FILE}, then the cached input and original data tables, and
 * pushes everything into {@code m_sota}.
 *
 * @throws IOException if a file cannot be read or the stored settings are invalid
 */
@Override
protected void loadInternals(final File internDir, final ExecutionMonitor exec) throws IOException {
    final File file = new File(internDir, TREE_FILE);
    // try-with-resources closes the stream on every path; the previous manual
    // close() calls leaked the stream when loadFromXML or a setter threw.
    try (FileInputStream fis = new FileInputStream(file)) {
        final ModelContentRO modelContent = ModelContent.loadFromXML(fis);
        // Load settings
        int inDataSize = 0;
        int origDataSize = 0;
        try {
            m_sota.setUseHierarchicalFuzzyData(modelContent.getBoolean(SotaPortObject.CFG_KEY_USE_FUZZY_HIERARCHY));
            m_sota.setMaxHierarchicalLevel(modelContent.getInt(SotaPortObject.CFG_KEY_MAX_FUZZY_LEVEL));
            inDataSize = modelContent.getInt(SotaPortObject.CFG_KEY_INDATA_SIZE);
            origDataSize = modelContent.getInt(SotaPortObject.CFG_KEY_ORIGDATA_SIZE);
        } catch (InvalidSettingsException e1) {
            // IOException(String, Throwable) preserves the cause directly (was initCause()).
            throw new IOException("Could not load settings, due to invalid settings in model content !", e1);
        }
        // Load in data
        DataTable table = DataContainer.readFromZip(new File(internDir, IN_DATA_FILE));
        final DataArray inData = new DefaultDataArray(table, 1, inDataSize);
        m_sota.setInData(inData);
        // Load orig data
        table = DataContainer.readFromZip(new File(internDir, ORIG_DATA_FILE));
        final DataArray origData = new DefaultDataArray(table, 1, origDataSize);
        m_sota.setOriginalData(origData);
        // Load tree
        final SotaTreeCell root = new SotaTreeCell(0, false);
        try {
            root.loadFrom(modelContent, 0, null, false);
        } catch (InvalidSettingsException e) {
            throw new IOException("Could not load tree cells, due to invalid settings in model content !", e);
        }
        m_sota.setRoot(root);
    }
}
Example use of org.knime.base.node.util.DefaultDataArray in project knime-core:
class LinReg2LearnerNodeModel, method execute.
/**
 * {@inheritDoc}
 *
 * Runs the linear-regression learner on the input table, caches a slice of the
 * rows for the scatter-plot view, and returns the resulting PMML model port
 * plus the coefficient table.
 */
@Override
protected PortObject[] execute(final PortObject[] inObjects, final ExecutionContext exec) throws Exception {
    final BufferedDataTable table = (BufferedDataTable) inObjects[0];
    // cache the entire table as otherwise the color information
    // may be lost (filtering out the "colored" column)
    m_rowContainer = new DefaultDataArray(table, m_settings.getScatterPlotFirstRow(), m_settings.getScatterPlotRowCount());
    final DataTableSpec dataSpec = table.getDataTableSpec();
    // handle the optional PMML input: take the supplied port, or synthesize an
    // empty one from the table spec when the optional port is disabled/absent
    PMMLPortObject pmmlPort = m_pmmlInEnabled ? (PMMLPortObject) inObjects[1] : null;
    final PMMLPortObjectSpec pmmlSpec;
    if (pmmlPort == null) {
        pmmlSpec = new PMMLPortObjectSpecCreator(dataSpec).createSpec();
        pmmlPort = new PMMLPortObject(pmmlSpec);
    } else {
        pmmlSpec = pmmlPort.getSpec();
    }
    final LinReg2Learner learner = new LinReg2Learner(new PortObjectSpec[] { dataSpec, pmmlSpec }, m_pmmlInEnabled, m_settings);
    m_content = learner.execute(new PortObject[] { table, pmmlPort }, exec);
    final String learnerWarning = learner.getWarningMessage();
    if (learnerWarning != null && !learnerWarning.isEmpty()) {
        setWarningMessage(learnerWarning);
    }
    // third argument is ignored since we provide a port
    final PMMLPortObject outPMMLPort = new PMMLPortObject((PMMLPortObjectSpec) learner.getOutputSpec()[0], pmmlPort, null);
    outPMMLPort.addModelTranslater(new PMMLGeneralRegressionTranslator(m_content.createGeneralRegressionContent()));
    final String contentWarning = m_content.getWarningMessage();
    if (contentWarning != null) {
        final String previous = getWarningMessage();
        setWarningMessage(previous == null ? contentWarning : (previous + "\n" + contentWarning));
    }
    return new PortObject[] { outPMMLPort, m_content.createTablePortObject(exec) };
}
Example use of org.knime.base.node.util.DefaultDataArray in project knime-core:
class PolyRegLearnerNodeModel, method loadInternals.
/**
 * {@inheritDoc}
 *
 * Restores the polynomial-regression view data: reads the cached row sample
 * from {@code data.zip} and the regression statistics from
 * {@code internals.xml}, then rebuilds {@code m_viewData}.
 *
 * @throws IOException if the internals are missing, old, or corrupt
 * @throws CanceledExecutionException if the user cancels while the rows are read
 */
@Override
protected void loadInternals(final File nodeInternDir, final ExecutionMonitor exec) throws IOException, CanceledExecutionException {
    File f = new File(nodeInternDir, "data.zip");
    // Guard clauses replace the inverted if/else blocks for readability.
    if (!f.exists()) {
        throw new FileNotFoundException("Internals do not exist");
    }
    final ContainerTable t = DataContainer.readFromZip(f);
    final int rowCount = t.getRowCount();
    final DataArray rowContainer = new DefaultDataArray(t, 1, rowCount, exec);
    f = new File(nodeInternDir, "internals.xml");
    if (!f.exists()) {
        throw new FileNotFoundException("Internals do not exist");
    }
    // try-with-resources: the previous code never closed this stream (leak).
    final NodeSettingsRO internals;
    try (BufferedInputStream in = new BufferedInputStream(new FileInputStream(f))) {
        internals = NodeSettings.loadFromXML(in);
    }
    try {
        double[] betas = internals.getDoubleArray("betas");
        String[] columnNames = internals.getStringArray("columnNames");
        double squaredError = internals.getDouble("squaredError");
        // Older internals lack this entry; NaN marks "unknown".
        double adjustedR2 = internals.getDouble("adjustedSquaredError", Double.NaN);
        double[] meanValues = internals.getDoubleArray("meanValues");
        // Older internals also lack the per-coefficient statistics; default to NaN arrays.
        double[] emptyArray = new double[betas.length];
        Arrays.fill(emptyArray, Double.NaN);
        double[] stdErrs = internals.getDoubleArray("stdErrors", emptyArray);
        double[] tValues = internals.getDoubleArray("tValues", emptyArray);
        double[] pValues = internals.getDoubleArray("pValues", emptyArray);
        m_viewData = new PolyRegViewData(meanValues, betas, stdErrs, tValues, pValues, squaredError, adjustedR2, columnNames, m_settings.getDegree(), m_settings.getTargetColumn(), rowContainer);
    } catch (InvalidSettingsException ex) {
        throw new IOException("Old or corrupt internals", ex);
    }
}
Aggregations