Use of org.knime.core.data.def.DefaultRow in project knime-core by knime: class OutputRow, method createDataRow.
/**
* Create a {@link DataRow} that stores the information of a right outer join.
*
* @param index The index of this row.
* @param leftIndex The index of the left row.
* @param rightIndex The index of the right row.
* @param settings The common settings object.
* @return New instance of {@link DataRow} populated with the given
* information.
*/
static DataRow createDataRow(final long index, final long leftIndex, final long rightIndex, final OutputRow.Settings settings) {
int[] survivors = settings.getSurvivors();
DataCell[] cells = new DataCell[survivors.length + 3];
int c = 0;
for (int i = 0; i < survivors.length; i++) {
cells[c] = DataType.getMissingCell();
c++;
}
cells[c] = DataType.getMissingCell();
c++;
cells[c] = new LongCell(leftIndex);
c++;
cells[c] = new LongCell(rightIndex);
RowKey rowID = new RowKey(Long.toString(index));
return new DefaultRow(rowID, cells);
}
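For orientation, a minimal sketch (not taken from the source) of the row this helper would build for a call such as createDataRow(7, -1, 42, settings), assuming a hypothetical settings object that reports two surviving columns:

// Hypothetical values for illustration only; mirrors the layout assembled above:
// one missing cell per surviving column, one further missing cell, then the two indices.
DataCell[] cells = new DataCell[] {
    DataType.getMissingCell(), // surviving column 0
    DataType.getMissingCell(), // surviving column 1
    DataType.getMissingCell(), // the extra missing cell appended after the survivors
    new LongCell(-1),          // left row index as passed in
    new LongCell(42)           // right row index as passed in
};
DataRow illustration = new DefaultRow(new RowKey("7"), cells);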
Use of org.knime.core.data.def.DefaultRow in project knime-core by knime: class UngroupOperation, method compute.
/**
* Performs the ungroup operation on the given row input and pushes the result to the row output.
*
* @param in the row input, will NOT be closed when finished
* @param out the row output, will NOT be closed when finished
* @param exec the execution context used to check for cancellation and (optionally) report progress
* @param rowCount the total row count for progress tracking, or <code>-1</code> to disable progress tracking
* @throws Exception if processing the input fails or the execution is canceled
* @since 3.2
*/
public void compute(final RowInput in, final RowOutput out, final ExecutionContext exec, final long rowCount) throws Exception {
final Map<RowKey, Set<RowKey>> hiliteMapping = new HashMap<RowKey, Set<RowKey>>();
@SuppressWarnings("unchecked") Iterator<DataCell>[] iterators = new Iterator[m_colIndices.length];
final DataCell[] missingCells = new DataCell[m_colIndices.length];
Arrays.fill(missingCells, DataType.getMissingCell());
long rowCounter = 0;
DataRow row = null;
while ((row = in.poll()) != null) {
rowCounter++;
exec.checkCanceled();
if (rowCount > 0) {
exec.setProgress(rowCounter / (double) rowCount, "Processing row " + rowCounter + " of " + rowCount);
}
boolean allMissing = true;
for (int i = 0, length = m_colIndices.length; i < length; i++) {
final DataCell cell = row.getCell(m_colIndices[i]);
final CollectionDataValue listCell;
final Iterator<DataCell> iterator;
if (cell instanceof CollectionDataValue) {
listCell = (CollectionDataValue) cell;
iterator = listCell.iterator();
allMissing = false;
} else {
iterator = null;
}
iterators[i] = iterator;
}
if (allMissing) {
// all collection column cells are missing: push the row with missing cells if the skip-missing-values option is disabled
if (!m_skipMissingValues) {
final DefaultRow newRow = createClone(row.getKey(), row, m_colIndices, m_removeCollectionCol, missingCells);
if (m_enableHilite) {
// create the hilite entry
final Set<RowKey> keys = new HashSet<RowKey>(1);
keys.add(row.getKey());
hiliteMapping.put(row.getKey(), keys);
}
out.push(newRow);
}
continue;
}
long counter = 1;
final Set<RowKey> keys;
if (m_enableHilite) {
keys = new HashSet<RowKey>();
} else {
keys = null;
}
boolean continueLoop = false;
boolean allEmpty = true;
do {
// reset the loop flag
allMissing = true;
continueLoop = false;
final DataCell[] newCells = new DataCell[iterators.length];
for (int i = 0, length = iterators.length; i < length; i++) {
Iterator<DataCell> iterator = iterators[i];
DataCell newCell;
if (iterator != null && iterator.hasNext()) {
allEmpty = false;
continueLoop = true;
newCell = iterator.next();
} else {
if (iterator == null) {
allEmpty = false;
}
newCell = DataType.getMissingCell();
}
if (!newCell.isMissing()) {
allMissing = false;
}
newCells[i] = newCell;
}
if (!allEmpty && !continueLoop) {
break;
}
if (!allEmpty && allMissing && m_skipMissingValues) {
continue;
}
final RowKey oldKey = row.getKey();
final RowKey newKey = new RowKey(oldKey.getString() + "_" + counter++);
final DefaultRow newRow = createClone(newKey, row, m_colIndices, m_removeCollectionCol, newCells);
out.push(newRow);
if (keys != null) {
keys.add(newKey);
}
} while (continueLoop);
if (keys != null && !keys.isEmpty()) {
hiliteMapping.put(row.getKey(), keys);
}
}
if (m_enableHilite) {
m_trans.setMapper(new DefaultHiLiteMapper(hiliteMapping));
}
}
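A minimal sketch of how compute(...) might be driven in the non-streaming case; ungroupOp, inTable and outSpec are assumed to exist in the calling node model, and DataTableRowInput / BufferedDataTableRowOutput are used here on the assumption that the row-stream adapters from org.knime.core.node.streamable are available:

// Sketch only: adapt a buffered table to the streaming interfaces and run the operation.
RowInput in = new DataTableRowInput(inTable);
BufferedDataTableRowOutput out = new BufferedDataTableRowOutput(exec.createDataContainer(outSpec));
ungroupOp.compute(in, out, exec, inTable.size());
// per the contract above, compute(...) closes neither stream, so the caller does
in.close();
out.close();
BufferedDataTable ungrouped = out.getDataTable();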
Use of org.knime.core.data.def.DefaultRow in project knime-core by knime: class Unpivot2NodeModel, method execute.
/**
* {@inheritDoc}
*/
@Override
protected BufferedDataTable[] execute(final BufferedDataTable[] inData, final ExecutionContext exec) throws Exception {
DataTableSpec inSpec = inData[0].getSpec();
String[] retainedColumns = m_retainedColumns.applyTo(inSpec).getIncludes();
String[] valueColumns = m_valueColumns.applyTo(inSpec).getIncludes();
int[] valueColumnIndices = new int[valueColumns.length];
for (int i = 0; i < valueColumnIndices.length; i++) {
valueColumnIndices[i] = inSpec.findColumnIndex(valueColumns[i]);
}
int[] orderColumnIdx = new int[retainedColumns.length];
for (int i = 0; i < orderColumnIdx.length; i++) {
orderColumnIdx[i] = inSpec.findColumnIndex(retainedColumns[i]);
}
final double newRowCnt = inData[0].size() * valueColumns.length;
final boolean enableHilite = m_enableHilite.getBooleanValue();
LinkedHashMap<RowKey, Set<RowKey>> map = new LinkedHashMap<RowKey, Set<RowKey>>();
DataTableSpec outSpec = createOutSpec(inSpec);
BufferedDataContainer buf = exec.createDataContainer(outSpec);
final boolean skipMissings = m_missingValues.getBooleanValue();
for (DataRow row : inData[0]) {
LinkedHashSet<RowKey> set = new LinkedHashSet<RowKey>();
FilterColumnRow crow = new FilterColumnRow(row, orderColumnIdx);
for (int i = 0; i < valueColumns.length; i++) {
String colName = valueColumns[i];
DataCell acell = row.getCell(valueColumnIndices[i]);
if (acell.isMissing() && skipMissings) {
// skip rows containing missing cells (in Value column(s))
continue;
}
RowKey rowKey = RowKey.createRowKey(buf.size());
if (enableHilite) {
set.add(rowKey);
}
DefaultRow drow = new DefaultRow(rowKey, new StringCell(row.getKey().getString()), new StringCell(colName), acell);
buf.addRowToTable(new AppendedColumnRow(rowKey, drow, crow));
exec.checkCanceled();
exec.setProgress(buf.size() / newRowCnt);
}
if (enableHilite) {
map.put(crow.getKey(), set);
}
}
buf.close();
if (enableHilite) {
m_trans.setMapper(new DefaultHiLiteMapper(map));
} else {
m_trans.setMapper(null);
}
return new BufferedDataTable[] { buf.getTable() };
}
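To make the row layout explicit, a hedged sketch of a single output row as it is built inside the loop above; the row key, column name and value are invented for illustration, and the real value cell keeps whatever type the input column has:

// Illustration only: one unpivoted row for input row "Row0" and value column "Sales".
RowKey rowKey = RowKey.createRowKey(buf.size());
DefaultRow pivoted = new DefaultRow(rowKey,
    new StringCell("Row0"),   // key of the original input row
    new StringCell("Sales"),  // name of the value column being unpivoted
    new DoubleCell(19.99));   // the cell taken from that column
buf.addRowToTable(new AppendedColumnRow(rowKey, pivoted, crow)); // retained columns appended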
Use of org.knime.core.data.def.DefaultRow in project knime-core by knime: class GlobalTimerinfoNodeModel, method execute.
/**
* {@inheritDoc}
*/
@Override
protected PortObject[] execute(final PortObject[] inData, final ExecutionContext exec) throws Exception {
BufferedDataTable result0 = NodeTimer.GLOBAL_TIMER.getGlobalStatsTable(exec);
BufferedDataContainer result1 = exec.createDataContainer(createSpecOut1());
int rowcount = 0;
for (IBundleGroupProvider provider : Platform.getBundleGroupProviders()) {
for (IBundleGroup feature : provider.getBundleGroups()) {
DataRow row = new DefaultRow(new RowKey("Row " + rowcount++), new StringCell(feature.getIdentifier()), new StringCell(feature.getVersion()));
result1.addRowToTable(row);
}
}
result1.close();
return new PortObject[] { result0, result1.getTable() };
}
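The second output simply lists the installed features; a plausible sketch of what createSpecOut1() could return, with column names that are assumptions rather than taken from the source:

// Assumed spec: two string columns matching the cells added in the loop above.
private DataTableSpec createSpecOut1() {
    return new DataTableSpec(
        new DataColumnSpecCreator("Feature ID", StringCell.TYPE).createSpec(),
        new DataColumnSpecCreator("Version", StringCell.TYPE).createSpec());
}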
Use of org.knime.core.data.def.DefaultRow in project knime-core by knime: class LiftCalculator, method calculateLiftTables.
/**
* Calculates the tables necessary for displaying a lift chart.
* @param table the data table
* @param exec the execution context to report progress to
* @return a warning message, or <code>null</code> if there is nothing to report
* @throws CanceledExecutionException when the user cancels the execution
*/
public String calculateLiftTables(final BufferedDataTable table, final ExecutionContext exec) throws CanceledExecutionException {
int predColIndex = table.getDataTableSpec().findColumnIndex(m_responseColumn);
String warning = null;
List<String> inclList = new LinkedList<String>();
inclList.add(m_probabilityColumn);
int probColInd = table.getDataTableSpec().findColumnIndex(m_probabilityColumn);
boolean[] order = new boolean[] { false };
m_sorted = new SortedTable(table, inclList, order, exec);
long totalResponses = 0;
double partWidth = m_intervalWidth;
int nrParts = (int) Math.ceil(100.0 / partWidth);
List<Integer> positiveResponses = new LinkedList<Integer>();
int rowIndex = 0;
for (DataRow row : m_sorted) {
if (row.getCell(predColIndex).isMissing() || row.getCell(probColInd).isMissing()) {
if (row.getCell(predColIndex).isMissing()) {
// missing values in the class column are always ignored
continue;
}
if (m_ignoreMissingValues) {
continue;
} else {
warning = "Table contains missing values.";
}
}
String response = ((StringValue) row.getCell(predColIndex)).getStringValue().trim();
if (response.equalsIgnoreCase(m_responseLabel)) {
totalResponses++;
positiveResponses.add(rowIndex);
}
rowIndex++;
}
int[] counter = new int[nrParts];
int partWidthAbsolute = (int) Math.ceil(rowIndex / (double) nrParts);
double avgResponse = (double) positiveResponses.size() / rowIndex;
for (int rIndex : positiveResponses) {
int index = rIndex / partWidthAbsolute;
counter[index]++;
}
DataColumnSpec[] colSpec = new DataColumnSpec[3];
colSpec[0] = new DataColumnSpecCreator("Lift", DoubleCell.TYPE).createSpec();
colSpec[1] = new DataColumnSpecCreator("Baseline", DoubleCell.TYPE).createSpec();
colSpec[2] = new DataColumnSpecCreator("Cumulative Lift", DoubleCell.TYPE).createSpec();
DataTableSpec tableSpec = new DataTableSpec(colSpec);
// new DataContainer(tableSpec);
DataContainer cont = exec.createDataContainer(tableSpec);
colSpec = new DataColumnSpec[2];
colSpec[0] = new DataColumnSpecCreator("Actual", DoubleCell.TYPE).createSpec();
colSpec[1] = new DataColumnSpecCreator("Baseline", DoubleCell.TYPE).createSpec();
tableSpec = new DataTableSpec(colSpec);
// new DataContainer(tableSpec);
DataContainer responseCont = exec.createDataContainer(tableSpec);
long cumulativeCounter = 0;
responseCont.addRowToTable(new DefaultRow(new RowKey("0"), 0.0, 0.0));
for (int i = 0; i < counter.length; i++) {
cumulativeCounter += counter[i];
double responseRate = (double) counter[i] / partWidthAbsolute;
double lift = responseRate / avgResponse;
double cumResponseRate = (double) cumulativeCounter / totalResponses;
long number = partWidthAbsolute * (i + 1);
// the last bucket may overshoot the actual row count because of rounding
if (number > rowIndex) {
number = rowIndex;
}
// alternative: (double) cumulativeCounter / (partWidthAbsolute * (i + 1));
double cumulativeLift = (double) cumulativeCounter / number;
cumulativeLift /= avgResponse;
// cumulativeLift = lifts / (i+1);
double rowKey = ((i + 1) * partWidth);
if (rowKey > 100) {
rowKey = 100;
}
cont.addRowToTable(new DefaultRow(new RowKey("" + rowKey), lift, 1.0, cumulativeLift));
double cumBaseline = (i + 1) * partWidth;
if (cumBaseline > 100) {
cumBaseline = 100;
}
responseCont.addRowToTable(new DefaultRow(new RowKey("" + rowKey), cumResponseRate * 100, cumBaseline));
}
cont.close();
responseCont.close();
m_lift = (BufferedDataTable) cont.getTable();
m_response = (BufferedDataTable) responseCont.getTable();
return warning;
}
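As a worked example of the bucket arithmetic (numbers invented): with 1,000 scored rows and an interval width of 10, nrParts is 10 and partWidthAbsolute is 100; a bucket holding 30 positive responses against an overall response rate avgResponse of 0.2 has a lift of (30 / 100) / 0.2 = 1.5. The same calculation as a small sketch:

// Sketch of the per-bucket lift formula used above, with invented numbers.
int bucketPositives = 30;     // counter[i]
int partWidthAbsolute = 100;  // rows per bucket
double avgResponse = 0.2;     // positive responses / total rows
double responseRate = (double) bucketPositives / partWidthAbsolute; // 0.3
double lift = responseRate / avgResponse;                           // 1.5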