Use of org.knime.core.data.container.DataContainer in project knime-core by knime.
In the class HierarchicalClusterNodeModel, method createResultTable.
/**
* Creates a standard table as the result table. The result table is
* constructed for the desired number of clusters.
*
* @param inputData
* the input data table, which holds the meta information such as
* column names, classes, and so on
*
* @param clusters
* the vector with the clusters
* @param exec
* to check for user cancellations
* @return the result data table which contains the data rows with the class
* information
* @throws CanceledExecutionException
* if the user canceled
*/
private DataTable createResultTable(final DataTable inputData, final List<ClusterNode> clusters, final ExecutionContext exec) throws CanceledExecutionException {
    DataTableSpec inputSpec = inputData.getDataTableSpec();
    DataTableSpec outputSpec = generateOutSpec(inputSpec);
    DataContainer resultTable = exec.createDataContainer(outputSpec);
    for (int i = 0; i < clusters.size(); i++) {
        DataRow[] memberRows = clusters.get(i).getAllDataRows();
        for (DataRow dataRow : memberRows) {
            DataCell[] cells = new DataCell[dataRow.getNumCells() + 1];
            for (int j = 0; j < dataRow.getNumCells(); j++) {
                cells[j] = dataRow.getCell(j);
            }
            // append the cluster id the row belongs to
            cells[cells.length - 1] = new StringCell("cluster_" + i);
            resultTable.addRowToTable(new DefaultRow(dataRow.getKey(), cells));
            exec.checkCanceled();
        }
    }
    resultTable.close();
    return resultTable.getTable();
}
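The helper generateOutSpec(inputSpec) is not shown in this snippet. A minimal sketch of what such a spec builder could look like, assuming the appended column is a plain string column; the class name, method name, and the column name "Cluster" are illustrative, not the actual KNIME implementation:

import org.knime.core.data.DataColumnSpec;
import org.knime.core.data.DataColumnSpecCreator;
import org.knime.core.data.DataTableSpec;
import org.knime.core.data.def.StringCell;

final class OutSpecSketch {

    private OutSpecSketch() {
    }

    /** Appends one string column for the cluster id to the input spec. */
    static DataTableSpec generateOutSpec(final DataTableSpec inputSpec) {
        // illustrative column name; a real implementation must avoid
        // a clash with existing input column names
        DataColumnSpec clusterColumn = new DataColumnSpecCreator("Cluster", StringCell.TYPE).createSpec();
        // the two-spec constructor concatenates the columns of both specs
        return new DataTableSpec(inputSpec, new DataTableSpec(clusterColumn));
    }
}

DataTableSpec requires unique column names, so a real spec builder would also rename or reject the appended column if the input already contains a column called "Cluster".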
Use of org.knime.core.data.container.DataContainer in project knime-core by knime.
In the class DBReaderImpl, method createTable.
/**
* Called from the database port to read the first n rows.
*
* @param useDbRowId <code>true</code> if the KNIME row id should be based on the db row id
* @param cachedNoRows number of rows cached for data preview
* @param cp {@link CredentialsProvider} providing user/password
* @return buffered data table read from database
* @throws SQLException if the connection could not be opened
*/
DataTable createTable(final boolean useDbRowId, final int cachedNoRows, final CredentialsProvider cp) throws SQLException {
    if (m_blobFactory == null) {
        m_blobFactory = new BinaryObjectCellFactory();
    }
    // retrieve connection
    // final Connection conn = initConnection(cp);
    final DatabaseQueryConnectionSettings dbConn = getQueryConnection();
    // synchronized (dbConn.syncConnection(conn)) {
    return dbConn.execute(cp, conn -> {
        // remember auto-commit flag
        final boolean autoCommit = conn.getAutoCommit();
        final Statement stmt = initStatement(cp, conn);
        try {
            final String[] oQueries = dbConn.getQuery().split(SQL_QUERY_SEPARATOR);
            if (cachedNoRows < 0) {
                int fetchsize = (DatabaseConnectionSettings.FETCH_SIZE != null) ? DatabaseConnectionSettings.FETCH_SIZE : -1;
                dbConn.getUtility().getStatementManipulator().setFetchSize(stmt, fetchsize);
            } else {
                final int hashAlias = System.identityHashCode(this);
                final int selectIdx = oQueries.length - 1;
                // replace last element in statement(s) with wrapped SQL
                oQueries[selectIdx] = "SELECT * FROM (" + oQueries[selectIdx] + ") table_" + hashAlias;
                try {
                    // bugfix 2925: may fail, e.g. on sqlite
                    stmt.setMaxRows(cachedNoRows);
                } catch (SQLException ex) {
                    Throwable cause = ExceptionUtils.getRootCause(ex);
                    if (cause == null) {
                        cause = ex;
                    }
                    LOGGER.warn("Can't set max rows on statement, reason: " + cause.getMessage(), ex);
                }
            }
            // execute all except the last query
            for (int i = 0; i < oQueries.length - 1; i++) {
                LOGGER.debug("Executing SQL statement as execute: " + oQueries[i]);
                stmt.execute(oQueries[i]);
            }
            final String lastQuery = oQueries[oQueries.length - 1];
            LOGGER.debug("Executing SQL statement as executeQuery: " + lastQuery);
            final ResultSet result = stmt.executeQuery(lastQuery);
            LOGGER.debug("Reading meta data from database ResultSet...");
            m_spec = createTableSpec(result.getMetaData());
            LOGGER.debug("Parsing database ResultSet...");
            // final DBRowIterator dbIt = createRowIterator(useDbRowId, result);
            final RowIterator it = createDBRowIterator(m_spec, dbConn, m_blobFactory, useDbRowId, result);
            DataContainer buf = new DataContainer(m_spec);
            while (it.hasNext()) {
                buf.addRowToTable(it.next());
            }
            buf.close();
            return buf.getTable();
        } finally {
            if (stmt != null) {
                if (!conn.getAutoCommit()) {
                    conn.commit();
                }
                DatabaseConnectionSettings.setAutoCommit(conn, autoCommit);
                stmt.close();
            }
        }
    });
}
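The per-row conversion from the JDBC ResultSet into KNIME rows happens inside createDBRowIterator, which is not shown here. A deliberately simplified sketch of that conversion, assuming every column is read as a string and SQL NULL becomes a missing cell; the class name and the row-key scheme are made up for illustration, while the real iterator maps each SQL type to a matching KNIME cell type and can reuse the database row id:

import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;

import org.knime.core.data.DataCell;
import org.knime.core.data.DataRow;
import org.knime.core.data.DataType;
import org.knime.core.data.RowKey;
import org.knime.core.data.def.DefaultRow;
import org.knime.core.data.def.StringCell;

final class StringOnlyRowConverter {

    private StringOnlyRowConverter() {
    }

    /** Converts the current ResultSet row into a DataRow, reading every column as a string. */
    static DataRow toDataRow(final ResultSet result, final long rowIndex) throws SQLException {
        ResultSetMetaData meta = result.getMetaData();
        DataCell[] cells = new DataCell[meta.getColumnCount()];
        for (int c = 0; c < cells.length; c++) {
            // JDBC column indices are 1-based; getString returns null for SQL NULL
            String value = result.getString(c + 1);
            cells[c] = (value == null) ? DataType.getMissingCell() : new StringCell(value);
        }
        return new DefaultRow(new RowKey("Row" + rowIndex), cells);
    }
}

Rows produced this way could then be pushed into the DataContainer exactly as in the while loop above.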
Use of org.knime.core.data.container.DataContainer in project knime-core by knime.
In the class MissingValueApplyNodeModel, method execute.
/**
* {@inheritDoc}
*/
@Override
protected PortObject[] execute(final PortObject[] inData, final ExecutionContext exec) throws Exception {
    BufferedDataTable inTable = (BufferedDataTable) inData[DATA_PORT_IDX];
    DataTableSpec inSpec = inTable.getDataTableSpec();
    PMMLPortObject pmmlIn = (PMMLPortObject) inData[PMML_PORT_IDX];
    MissingCellReplacingDataTable mvTable;
    try (LockedSupplier<Document> supplier = pmmlIn.getPMMLValue().getDocumentSupplier()) {
        mvTable = new MissingCellReplacingDataTable(inSpec, PMMLDocument.Factory.parse(supplier.get()));
    }
    // Calculate the statistics
    mvTable.init(inTable, exec.createSubExecutionContext(0.5));
    long rowCounter = 0;
    final long numOfRows = inTable.size();
    DataContainer container = exec.createDataContainer(mvTable.getDataTableSpec());
    for (DataRow row : mvTable) {
        exec.checkCanceled();
        if (row != null) {
            exec.setProgress(++rowCounter / (double) numOfRows, "Processed row " + rowCounter + "/" + numOfRows + " (\"" + row.getKey() + "\")");
            container.addRowToTable(row);
        } else {
            exec.setProgress(++rowCounter / (double) numOfRows, "Processed row " + rowCounter + "/" + numOfRows);
        }
    }
    container.close();
    // Collect warning messages
    String warnings = mvTable.finish();
    // Handle the warnings
    if (warnings.length() > 0) {
        setWarningMessage(warnings);
    }
    return new PortObject[] { (BufferedDataTable) container.getTable() };
}
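The cast in the return statement works because ExecutionContext.createDataContainer(...) actually returns a BufferedDataContainer, whose getTable() already yields a BufferedDataTable. A small sketch of the same copy loop written against that narrower type, so no cast is needed; the helper class and the null-row skipping are illustrative only:

import org.knime.core.data.DataRow;
import org.knime.core.data.DataTableSpec;
import org.knime.core.node.BufferedDataContainer;
import org.knime.core.node.BufferedDataTable;
import org.knime.core.node.CanceledExecutionException;
import org.knime.core.node.ExecutionContext;

final class ContainerCopySketch {

    private ContainerCopySketch() {
    }

    /** Copies all non-null rows into a new BufferedDataTable. */
    static BufferedDataTable copyRows(final Iterable<DataRow> rows, final DataTableSpec spec,
            final ExecutionContext exec) throws CanceledExecutionException {
        // createDataContainer returns a BufferedDataContainer, so getTable()
        // can be used below without a cast
        BufferedDataContainer container = exec.createDataContainer(spec);
        for (DataRow row : rows) {
            exec.checkCanceled();
            if (row != null) {
                container.addRowToTable(row);
            }
        }
        container.close();
        return container.getTable();
    }
}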
Use of org.knime.core.data.container.DataContainer in project knime-core by knime.
In the class InteractiveHiLiteCollectorNodeModel, method getHiLiteAnnotationsTable.
/**
* @return table with hilit rows first and then all rows with annotations
*/
DataTable getHiLiteAnnotationsTable() {
    // return null if not executed
    if (m_data == null) {
        return null;
    }
    DataContainer buf;
    if (m_annotationMap.isEmpty()) {
        buf = new DataContainer(new DataTableSpec());
    } else {
        buf = new DataContainer(new DataTableSpec(createSpecs(null)));
    }
    HiLiteHandler hdl = getInHiLiteHandler(0);
    if (hdl != null) {
        for (RowKey key : hdl.getHiLitKeys()) {
            DataCell[] cells = new DataCell[buf.getTableSpec().getNumColumns()];
            for (int i = 0; i < cells.length; i++) {
                Map<Integer, String> map = m_annotationMap.get(key);
                if (map == null) {
                    cells[i] = DataType.getMissingCell();
                } else {
                    String str = m_annotationMap.get(key).get(i);
                    if (str == null) {
                        cells[i] = DataType.getMissingCell();
                    } else {
                        cells[i] = new StringCell(str);
                    }
                }
            }
            buf.addRowToTable(new DefaultRow(key, cells));
        }
        for (RowKey key : m_annotationMap.keySet()) {
            if (!hdl.isHiLit(key)) {
                DataCell[] cells = new DataCell[buf.getTableSpec().getNumColumns()];
                for (int i = 0; i < cells.length; i++) {
                    String str = m_annotationMap.get(key).get(i);
                    if (str == null) {
                        cells[i] = DataType.getMissingCell();
                    } else {
                        cells[i] = new StringCell(str);
                    }
                }
                buf.addRowToTable(new DefaultRow(key, cells));
            }
        }
    }
    buf.close();
    return buf.getTable();
}
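Both loops above build the cell array with the same null-handling logic. A sketch of a helper that factors this out; the class and method names are illustrative and not part of the KNIME code base:

import java.util.Map;

import org.knime.core.data.DataCell;
import org.knime.core.data.DataType;
import org.knime.core.data.def.StringCell;

final class AnnotationCellsSketch {

    private AnnotationCellsSketch() {
    }

    /**
     * Builds a cell array of the given width from a (possibly null) map of
     * column index to annotation text; columns without text become missing cells.
     */
    static DataCell[] toCells(final Map<Integer, String> annotations, final int numColumns) {
        DataCell[] cells = new DataCell[numColumns];
        for (int i = 0; i < numColumns; i++) {
            String str = (annotations == null) ? null : annotations.get(i);
            cells[i] = (str == null) ? DataType.getMissingCell() : new StringCell(str);
        }
        return cells;
    }
}

With such a helper, each loop body would reduce to fetching the annotation map for the row key and calling toCells(map, buf.getTableSpec().getNumColumns()).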
Use of org.knime.core.data.container.DataContainer in project knime-core by knime.
In the class NominalValueRowFilterNodeModel, method execute.
/**
* {@inheritDoc}
*/
@SuppressWarnings("null")
@Override
protected BufferedDataTable[] execute(final BufferedDataTable[] inData, final ExecutionContext exec) throws Exception {
    // data container for the included rows (and, if splitting, the excluded rows)
    DataContainer positive = exec.createDataContainer(inData[0].getDataTableSpec());
    DataContainer negative = m_splitter ? exec.createDataContainer(inData[0].getDataTableSpec()) : null;
    long currentRow = 0;
    for (DataRow row : inData[0]) {
        // if the row matches, it goes into the included table
        if (matches(row)) {
            positive.addRowToTable(row);
        } else if (m_splitter) {
            negative.addRowToTable(row);
        }
        exec.setProgress(currentRow / (double) inData[0].size(), "filtering row # " + currentRow);
        currentRow++;
        exec.checkCanceled();
    }
    positive.close();
    BufferedDataTable positiveTable = exec.createBufferedDataTable(positive.getTable(), exec);
    if (positiveTable.size() <= 0) {
        String warning = "No rows matched!";
        if (m_splitter) {
            warning = warning + " Input mirrored at out-port 1 (excluded)";
        }
        setWarningMessage(warning);
    }
    if (m_splitter) {
        negative.close();
        BufferedDataTable negativeTable = exec.createBufferedDataTable(negative.getTable(), exec);
        if (negativeTable.size() <= 0) {
            setWarningMessage("All rows matched! Input mirrored at out-port 0 (included)");
        }
        return new BufferedDataTable[] { positiveTable, negativeTable };
    }
    return new BufferedDataTable[] { positiveTable };
}
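The row predicate matches(row) is defined elsewhere in the node and is not part of this snippet. A sketch of what a nominal-value check could look like, assuming the dialog stores the index of the filtered column and the set of accepted values; the field names m_selectedColIdx and m_includedValues are assumptions, not the node's actual members:

import java.util.Set;

import org.knime.core.data.DataCell;
import org.knime.core.data.DataRow;
import org.knime.core.data.StringValue;

final class NominalMatchSketch {

    private final int m_selectedColIdx;
    private final Set<String> m_includedValues;

    NominalMatchSketch(final int selectedColIdx, final Set<String> includedValues) {
        m_selectedColIdx = selectedColIdx;
        m_includedValues = includedValues;
    }

    /** Returns true if the cell in the filtered column holds one of the accepted nominal values. */
    boolean matches(final DataRow row) {
        DataCell cell = row.getCell(m_selectedColIdx);
        if (cell.isMissing() || !(cell instanceof StringValue)) {
            // missing or non-string cells are treated as non-matching in this sketch
            return false;
        }
        return m_includedValues.contains(((StringValue) cell).getStringValue());
    }
}

Treating missing cells as non-matching is a choice made for this sketch and may or may not mirror the node's actual behavior.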