Use of org.knime.core.node.streamable.BufferedDataTableRowOutput in project knime-core by knime.
Class RowKeyNodeModel2, method execute().
/**
 * {@inheritDoc}
 */
@Override
protected BufferedDataTable[] execute(final BufferedDataTable[] inData, final ExecutionContext exec)
        throws CanceledExecutionException, Exception {
    LOGGER.debug("Entering execute(inData, exec) of class RowKeyNodeModel");
    // check input data
    if (inData == null || inData.length != 1 || inData[DATA_IN_PORT] == null) {
        throw new IllegalArgumentException("No input data available.");
    }
    final BufferedDataTable data = inData[DATA_IN_PORT];
    BufferedDataTable outData = null;
    if (m_replaceKey.getBooleanValue()) {
        // create outspec
        DataTableSpec outSpec = configure(data.getDataTableSpec(), true);
        // create table
        final BufferedDataContainer newContainer = exec.createDataContainer(outSpec, true);
        RowInput rowInput = new DataTableRowInput(data);
        RowOutput rowOutput = new BufferedDataTableRowOutput(newContainer);
        replaceKey(rowInput, rowOutput, outSpec.getNumColumns(), data.getRowCount(), exec);
        newContainer.close();
        outData = newContainer.getTable();
    } else if (m_appendRowKey.getBooleanValue()) {
        LOGGER.debug("The user only wants to append a new column with " + "name " + m_newColumnName);
        // the user wants only a column with the given name which
        // contains the rowkey as value
        final DataTableSpec tableSpec = data.getDataTableSpec();
        final String newColumnName = m_newColumnName.getStringValue();
        final ColumnRearranger c = RowKeyUtil2.createColumnRearranger(tableSpec, newColumnName, StringCell.TYPE);
        outData = exec.createColumnRearrangeTable(data, c, exec);
        exec.setMessage("New column created");
        LOGGER.debug("Column appended successfully");
    } else {
        // the user doesn't want to do anything at all so we simply return
        // the given data
        outData = data;
        LOGGER.debug("The user hasn't selected a new row ID column" + " and hasn't entered a new column name.");
    }
    LOGGER.debug("Exiting execute(inData, exec) of class RowKeyNodeModel.");
    return new BufferedDataTable[] { outData };
}
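In this snippet the BufferedDataTableRowOutput merely wraps the freshly created BufferedDataContainer; the actual re-keying happens in replaceKey(rowInput, rowOutput, ...). A minimal sketch of that poll/push pattern is shown below; the helper name copyWithNewKeys and the "Row0", "Row1", ... key scheme are illustrative placeholders, not the real RowKeyUtil2 logic (types come from org.knime.core.data and org.knime.core.node.streamable).

// Hypothetical helper illustrating the poll/push pattern driven by replaceKey(...) above.
private static void copyWithNewKeys(final RowInput in, final RowOutput out, final ExecutionContext exec)
        throws Exception {
    long index = 0;
    DataRow row;
    while ((row = in.poll()) != null) { // poll() returns null once the input is exhausted
        exec.checkCanceled();
        // assumption: keys are simply regenerated as "Row0", "Row1", ... for illustration
        out.push(new DefaultRow(RowKey.createRowKey(index++), row));
    }
    in.close();
    out.close(); // signal that no more rows will be pushed
}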
Use of org.knime.core.node.streamable.BufferedDataTableRowOutput in project knime-core by knime.
Class AppendedRowsNodeModel, method execute().
/**
 * {@inheritDoc}
 */
@Override
protected BufferedDataTable[] execute(final BufferedDataTable[] rawInData, final ExecutionContext exec)
        throws Exception {
    // remove all null tables first (optional input data)
    BufferedDataTable[] noNullArray = noNullArray(rawInData);
    DataTableSpec[] noNullSpecs = new DataTableSpec[noNullArray.length];
    for (int i = 0; i < noNullArray.length; i++) {
        noNullSpecs[i] = noNullArray[i].getDataTableSpec();
    }
    // tables can only be wrapped if a suffix is to be appended or the node fails in case of duplicate row IDs
    if (m_isAppendSuffix || m_isFailOnDuplicate) {
        // just wrap the tables virtually instead of traversing them and copying the rows
        // virtually create the concatenated table (no traversal necessary)
        Optional<String> suffix = m_isAppendSuffix ? Optional.of(m_suffix) : Optional.empty();
        BufferedDataTable concatTable = exec.createConcatenateTable(exec, suffix, m_isFailOnDuplicate, noNullArray);
        if (m_isIntersection) {
            // wrap the table and filter the non-intersecting columns
            DataTableSpec actualOutSpec = getOutputSpec(noNullSpecs);
            DataTableSpec currentOutSpec = concatTable.getDataTableSpec();
            String[] intersectCols = getIntersection(actualOutSpec, currentOutSpec);
            ColumnRearranger cr = new ColumnRearranger(currentOutSpec);
            cr.keepOnly(intersectCols);
            concatTable = exec.createColumnRearrangeTable(concatTable, cr, exec);
        }
        if (m_enableHiliting) {
            AppendedRowsTable tmp = new AppendedRowsTable(DuplicatePolicy.Fail, null, noNullArray);
            Map<RowKey, Set<RowKey>> map =
                createHiliteTranslationMap(createDuplicateMap(tmp, exec, m_suffix == null ? "" : m_suffix));
            m_hiliteTranslator.setMapper(new DefaultHiLiteMapper(map));
        }
        return new BufferedDataTable[] { concatTable };
    } else {
        // traverse the tables and copy the rows
        long totalRowCount = 0L;
        RowInput[] inputs = new RowInput[noNullArray.length];
        for (int i = 0; i < noNullArray.length; i++) {
            totalRowCount += noNullArray[i].size();
            inputs[i] = new DataTableRowInput(noNullArray[i]);
        }
        DataTableSpec outputSpec = getOutputSpec(noNullSpecs);
        BufferedDataTableRowOutput output = new BufferedDataTableRowOutput(exec.createDataContainer(outputSpec));
        run(inputs, output, exec, totalRowCount);
        return new BufferedDataTable[] { output.getDataTable() };
    }
}
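In the non-wrapping branch the actual copying is delegated to run(inputs, output, exec, totalRowCount). The real implementation also resolves differing column orders and missing columns; a stripped-down sketch of such a concatenation loop, assuming all inputs already share the output spec and duplicate keys are not an issue, could look like this (the method name concatenate is a placeholder):

// Hypothetical sketch of concatenating several RowInputs into one RowOutput.
private static void concatenate(final RowInput[] inputs, final RowOutput output, final ExecutionContext exec,
        final long totalRowCount) throws Exception {
    long processed = 0;
    for (final RowInput input : inputs) {
        DataRow row;
        while ((row = input.poll()) != null) {
            exec.checkCanceled();
            exec.setProgress(++processed / (double)totalRowCount, "Copying row " + processed + " of " + totalRowCount);
            output.push(row);
        }
        input.close();
    }
    output.close(); // afterwards output.getDataTable() returns the concatenated BufferedDataTable
}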
Use of org.knime.core.node.streamable.BufferedDataTableRowOutput in project knime-core by knime.
Class ReadTableNodeModel, method execute().
/**
 * {@inheritDoc}
 */
@Override
protected BufferedDataTable[] execute(final BufferedDataTable[] inData, final ExecutionContext exec)
        throws Exception {
    exec.setMessage("Extracting temporary table");
    ContainerTable table = extractTable(exec.createSubExecutionContext(0.4));
    exec.setMessage("Reading into final format");
    BufferedDataTableRowOutput c =
        new BufferedDataTableRowOutput(exec.createDataContainer(table.getDataTableSpec(), true));
    execute(table, c, exec.createSubExecutionContext(0.6));
    return new BufferedDataTable[] { c.getDataTable() };
}
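Here the row output wraps a container created from the temporary table's spec, and the overloaded execute(table, c, subExec) helper streams the rows over. A rough sketch of such a copy step, assuming the helper simply iterates the ContainerTable (which is a DataTable and therefore iterable) and reports progress through the 0.6 sub-context; the method name copyInto is a placeholder:

// Hypothetical sketch of streaming a DataTable into a RowOutput with a sub execution context.
private static void copyInto(final DataTable table, final RowOutput out, final ExecutionContext subExec)
        throws Exception {
    long counter = 0;
    for (final DataRow row : table) {
        subExec.checkCanceled();
        subExec.setMessage("Copying row " + (++counter));
        out.push(row);
    }
    out.close(); // must happen before getDataTable() is called on the output
}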
Use of org.knime.core.node.streamable.BufferedDataTableRowOutput in project knime-core by knime.
Class DBReaderImpl, method loopTable().
/**
 * @since 3.2
 */
@SuppressWarnings("resource")
@Override
public BufferedDataTableRowOutput loopTable(final ExecutionContext exec, final CredentialsProvider cp,
        final RowInput data, final long rowCount, final boolean failIfException, final boolean appendInputColumns,
        final boolean includeEmptyResults, final boolean retainAllColumns, final String... columns) throws Exception {
    if (m_blobFactory == null) {
        m_blobFactory = new BinaryObjectCellFactory();
    }
    final DatabaseQueryConnectionSettings dbConn = getQueryConnection();
    return getQueryConnection().execute(cp, conn -> {
        /* Get the selected timezone */
        final TimeZone timezone = dbConn.getTimeZone();
        /* Get the input table spec */
        final DataTableSpec inSpec = data.getDataTableSpec();
        /* Create PreparedStatement */
        final String query = dbConn.getQuery();
        LOGGER.debug("Executing SQL preparedStatement as execute: " + query);
        /* Initialize the error table */
        final UniqueNameGenerator errorGenerator = new UniqueNameGenerator(inSpec);
        final DataColumnSpec errorColSpec = errorGenerator.newColumn(DEF_ERROR_COL_NAME, StringCell.TYPE);
        final DataTableSpec errorSpec = new DataTableSpec(inSpec, new DataTableSpec(errorColSpec));
        m_errorContainer = exec.createDataContainer(errorSpec);
        DataTableSpec dbSpec = new DataTableSpec();
        BufferedDataTableRowOutput output = null;
        exec.setMessage("Start reading rows from database...");
        try (final PreparedStatement stmt = conn.prepareStatement(query)) {
            long inDataCounter = 1;
            long rowIdCounter = 0;
            DataRow row;
            while ((row = data.poll()) != null) {
                exec.checkCanceled();
                if (rowCount > 0) {
                    exec.setProgress(1.0 * inDataCounter / rowCount, "Row " + "#" + inDataCounter + " of " + rowCount);
                } else {
                    exec.setProgress("Writing Row " + "#" + inDataCounter);
                }
                final DataCell[] inCells = new DataCell[columns.length];
                for (int i = 0; i < columns.length; i++) {
                    final int dbIdx = i + 1;
                    final int colIdx = inSpec.findColumnIndex(columns[i]);
                    final DataColumnSpec colSpec = inSpec.getColumnSpec(colIdx);
                    inCells[i] = row.getCell(colIdx);
                    fillStatement(stmt, dbIdx, colSpec, inCells[i], timezone, null);
                }
                try (final ResultSet result = stmt.executeQuery()) {
                    /* In the first iteration, create the out DataTableSpec and BufferedDataTableRowOutput */
                    if (output == null) {
                        dbSpec = createTableSpec(result.getMetaData());
                        if (appendInputColumns) {
                            // Create out DataTableSpec for input table
                            final DataTableSpec newInSpec;
                            if (retainAllColumns) {
                                newInSpec = inSpec;
                            } else {
                                final DataColumnSpec[] inColSpecs = new DataColumnSpec[columns.length];
                                for (int i = 0; i < inColSpecs.length; i++) {
                                    inColSpecs[i] = inSpec.getColumnSpec(columns[i]);
                                }
                                newInSpec = new DataTableSpec(inColSpecs);
                            }
                            // Create DataTableSpec for database columns, rename if necessary
                            final UniqueNameGenerator generator = new UniqueNameGenerator(newInSpec);
                            final DataColumnSpec[] dbColSpecs = new DataColumnSpec[dbSpec.getNumColumns()];
                            for (int i = 0; i < dbColSpecs.length; i++) {
                                final DataColumnSpec colSpec = dbSpec.getColumnSpec(i);
                                dbColSpecs[i] = generator.newColumn(colSpec.getName(), colSpec.getType());
                            }
                            dbSpec = new DataTableSpec(dbColSpecs);
                            m_spec = new DataTableSpec(newInSpec, dbSpec);
                        } else {
                            m_spec = dbSpec;
                        }
                        output = new BufferedDataTableRowOutput(exec.createDataContainer(m_spec));
                    }
                    /* Iterate over the result of the database query and put it into the output table */
                    final RowIterator dbRowIterator = createDBRowIterator(dbSpec, dbConn, m_blobFactory, false, result, rowIdCounter);
                    boolean hasDbRow = false;
                    while (dbRowIterator.hasNext()) {
                        hasDbRow = true;
                        final DataRow dbRow = dbRowIterator.next();
                        if (appendInputColumns) {
                            final DataRow inRow;
                            if (retainAllColumns) {
                                inRow = new DefaultRow(dbRow.getKey(), row);
                            } else {
                                inRow = new DefaultRow(dbRow.getKey(), inCells);
                            }
                            final JoinedRow joinedRow = new JoinedRow(inRow, dbRow);
                            output.push(joinedRow);
                        } else {
                            output.push(dbRow);
                        }
                        rowIdCounter++;
                    }
                    /* Append columns using MissingCell if no result is returned */
                    if (!hasDbRow && appendInputColumns && includeEmptyResults) {
                        final DataCell[] cells = new DataCell[dbSpec.getNumColumns()];
                        Arrays.fill(cells, DataType.getMissingCell());
                        final RowKey rowKey = RowKey.createRowKey(rowIdCounter);
                        final DataRow emptyDbRows = new DefaultRow(rowKey, cells);
                        final DataRow inRow;
                        if (retainAllColumns) {
                            inRow = new DefaultRow(rowKey, row);
                        } else {
                            inRow = new DefaultRow(rowKey, inCells);
                        }
                        final JoinedRow joinedRow = new JoinedRow(inRow, emptyDbRows);
                        output.push(joinedRow);
                        rowIdCounter++;
                    }
                    inDataCounter++;
                } catch (SQLException ex) {
                    LOGGER.debug("SQLException: " + ex.getMessage());
                    if (!failIfException) {
                        if (output == null) {
                            throw new SQLException(ex);
                        }
                        final AppendedColumnRow appendedRow = new AppendedColumnRow(row, new StringCell(ex.getMessage()));
                        m_errorContainer.addRowToTable(appendedRow);
                    } else {
                        throw new SQLException(ex);
                    }
                }
            }
        } finally {
            data.close();
            if (output == null) {
                output = new BufferedDataTableRowOutput(exec.createDataContainer(inSpec));
            }
            output.close();
            if (m_errorContainer != null) {
                m_errorContainer.close();
            }
        }
        return output;
    });
}
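The finally block guarantees that an output is always created (falling back to the plain input spec) and closed, so callers can rely on getDataTable() afterwards. A hedged caller-side sketch, in which reader, credentialsProvider, inputTable and the column names are placeholders:

// Hypothetical caller-side usage of loopTable(...); all variables are placeholders.
final DataTableRowInput rowInput = new DataTableRowInput(inputTable);
final BufferedDataTableRowOutput loopOutput = reader.loopTable(exec, credentialsProvider, rowInput,
    rowInput.getRowCount(), /*failIfException*/ true, /*appendInputColumns*/ true,
    /*includeEmptyResults*/ false, /*retainAllColumns*/ true, "col1", "col2");
final BufferedDataTable resultTable = loopOutput.getDataTable(); // already closed in loopTable's finally block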
Use of org.knime.core.node.streamable.BufferedDataTableRowOutput in project knime-core by knime.
Class FilterApplyRowSplitterNodeModel, method execute().
/**
 * {@inheritDoc}
 */
@Override
protected PortObject[] execute(final PortObject[] inObjects, final ExecutionContext exec) throws Exception {
    exec.setProgress(0);
    PortObject portObject = inObjects[1];
    DataTableSpec filterSpec = portObject == null ? ((BufferedDataTable) inObjects[0]).getDataTableSpec()
        : ((FilterDefinitionHandlerPortObject) portObject).getSpec();
    final BufferedDataTableRowOutput out1 =
        new BufferedDataTableRowOutput(exec.createDataContainer(((BufferedDataTable) inObjects[0]).getDataTableSpec()));
    final BufferedDataTableRowOutput out2 =
        new BufferedDataTableRowOutput(exec.createDataContainer(((BufferedDataTable) inObjects[0]).getDataTableSpec()));
    execute(new DataTableRowInput((BufferedDataTable) inObjects[0]), out1, out2, filterSpec, exec,
        ((BufferedDataTable) inObjects[0]).size());
    return new BufferedDataTable[] { out1.getDataTable(), out2.getDataTable() };
}
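The streamable execute(...) helper routes each incoming row to out1 or out2 depending on whether it matches the filter definition. A minimal sketch of such a split loop, with the filter evaluation abstracted into a java.util.function.Predicate (the real node evaluates the FilterDefinition taken from the spec):

// Hypothetical sketch of a two-way row splitter loop; the predicate stands in for the filter evaluation.
private static void split(final RowInput in, final RowOutput matching, final RowOutput nonMatching,
        final Predicate<DataRow> filter, final ExecutionContext exec, final long rowCount) throws Exception {
    long i = 0;
    DataRow row;
    while ((row = in.poll()) != null) {
        exec.checkCanceled();
        exec.setProgress(++i / (double)rowCount);
        if (filter.test(row)) {
            matching.push(row);
        } else {
            nonMatching.push(row);
        }
    }
    in.close();
    matching.close();
    nonMatching.close();
}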