Use of org.knime.core.data.DataTableSpec in project knime-core by knime.
The class VariableToTableNodeModel, method createOutSpec:
private DataTableSpec createOutSpec() throws InvalidSettingsException {
    List<Pair<String, FlowVariable.Type>> vars;
    if (m_settings.getIncludeAll()) {
        vars = getAllVariables();
    } else {
        vars = m_settings.getVariablesOfInterest();
    }
    if (vars.isEmpty()) {
        throw new InvalidSettingsException("No variables selected");
    }
    DataColumnSpec[] specs = new DataColumnSpec[vars.size()];
    for (int i = 0; i < vars.size(); i++) {
        Pair<String, FlowVariable.Type> c = vars.get(i);
        DataType type;
        switch (c.getSecond()) {
            case DOUBLE:
                type = DoubleCell.TYPE;
                break;
            case INTEGER:
                type = IntCell.TYPE;
                break;
            case STRING:
                type = StringCell.TYPE;
                break;
            default:
                throw new InvalidSettingsException("Unsupported variable type: " + c.getSecond());
        }
        specs[i] = new DataColumnSpecCreator(c.getFirst(), type).createSpec();
    }
    return new DataTableSpec(specs);
}
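The switch above maps each supported flow variable type onto its KNIME cell type and wraps the resulting column specs in a single DataTableSpec. A minimal standalone sketch of the same construction, with illustrative column names and types that are not taken from the node:

import org.knime.core.data.DataColumnSpec;
import org.knime.core.data.DataColumnSpecCreator;
import org.knime.core.data.DataTableSpec;
import org.knime.core.data.def.DoubleCell;
import org.knime.core.data.def.IntCell;
import org.knime.core.data.def.StringCell;

// one column per variable: the variable name plus the matching cell type
DataColumnSpec[] cols = new DataColumnSpec[] {
    new DataColumnSpecCreator("name", StringCell.TYPE).createSpec(),
    new DataColumnSpecCreator("count", IntCell.TYPE).createSpec(),
    new DataColumnSpecCreator("threshold", DoubleCell.TYPE).createSpec()
};
DataTableSpec spec = new DataTableSpec(cols);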
Use of org.knime.core.data.DataTableSpec in project knime-core by knime.
The class DBGroupByNodeDialog, method loadSettingsFrom:
/**
* {@inheritDoc}
*/
@Override
protected void loadSettingsFrom(final NodeSettingsRO settings, final PortObjectSpec[] specs) throws NotConfigurableException {
    if (specs == null || specs.length < 1 || specs[0] == null) {
        throw new NotConfigurableException("No input spec available");
    }
    final DatabasePortObjectSpec dbspec = (DatabasePortObjectSpec) specs[0];
    final DataTableSpec spec = dbspec.getDataTableSpec();
    try {
        m_columnNamePolicy.loadSettingsFrom(settings);
    } catch (final InvalidSettingsException e) {
        throw new NotConfigurableException(e.getMessage());
    }
    m_aggregationPanel.loadSettingsFrom(settings, dbspec, spec);
    m_groupCol.loadSettingsFrom(settings, new DataTableSpec[] { spec });
    columnsChanged();
}
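The cast to DatabasePortObjectSpec assumes the first port always carries a database connection. A hedged variant with an explicit guard (this check is an addition for illustration, not part of the original dialog):

if (!(specs[0] instanceof DatabasePortObjectSpec)) {
    throw new NotConfigurableException("No database connection available at the first input port");
}
final DatabasePortObjectSpec dbspec = (DatabasePortObjectSpec) specs[0];
final DataTableSpec spec = dbspec.getDataTableSpec();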
Use of org.knime.core.data.DataTableSpec in project knime-core by knime.
The class DBGroupByNodeModel, method createDbOutSpec:
/**
* @param inSpec Spec of the input database object
* @param checkRetrieveMetadata true if the retrieveMetadataInConfigure setting should be respected,
* <code>false</code> if the metadata should be retrieved in any case (for execute)
* @return Spec of the output database object
* @throws InvalidSettingsException If the current settings are invalid
*/
private DatabasePortObjectSpec createDbOutSpec(final DatabasePortObjectSpec inSpec, final boolean checkRetrieveMetadata) throws InvalidSettingsException {
    if (m_groupByCols.getIncludeList().isEmpty() && m_aggregatedColumns.length == 0) {
        throw new InvalidSettingsException("Please select at least one group or aggregation column");
    }
    DatabaseQueryConnectionSettings connection = inSpec.getConnectionSettings(getCredentialsProvider());
    String newQuery = createQuery(connection.getQuery(), connection.getUtility().getStatementManipulator());
    connection = createDBQueryConnection(inSpec, newQuery);
    if (checkRetrieveMetadata && !connection.getRetrieveMetadataInConfigure()) {
        return null;
    }
    DataTableSpec tableSpec = createOutSpec(inSpec.getDataTableSpec(), connection, newQuery, checkRetrieveMetadata);
    return new DatabasePortObjectSpec(tableSpec, connection.createConnectionModel());
}
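When checkRetrieveMetadata is true and the connection is configured not to retrieve metadata during configure, the method returns null, which the framework treats as an unknown output spec. A sketch of how a configure method might call it; the body follows the standard NodeModel pattern and is an assumption, not code copied from DBGroupByNodeModel:

@Override
protected PortObjectSpec[] configure(final PortObjectSpec[] inSpecs) throws InvalidSettingsException {
    final DatabasePortObjectSpec dbSpec = (DatabasePortObjectSpec) inSpecs[0];
    // a null element tells the framework that the output spec is not known yet
    return new PortObjectSpec[] { createDbOutSpec(dbSpec, true) };
}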
Use of org.knime.core.data.DataTableSpec in project knime-core by knime.
The class DBGroupByNodeModel, method createOutSpec:
/**
 * @param inSpec Spec of the input table
 * @param settings the database connection settings used to retrieve the metadata
 * @param query the SQL query that produces the output table
 * @param ignoreExceptions <code>true</code> to fall back to guessing the spec if it cannot be retrieved from the database
 * @return Spec of the output table
 * @throws InvalidSettingsException if settings do not match the input specification
 */
private DataTableSpec createOutSpec(final DataTableSpec inSpec, final DatabaseConnectionSettings settings, final String query, final boolean ignoreExceptions) throws InvalidSettingsException {
    // Try to get the spec directly from the database
    try {
        DatabaseQueryConnectionSettings querySettings = new DatabaseQueryConnectionSettings(settings, query);
        DatabaseReaderConnection conn = new DatabaseReaderConnection(querySettings);
        return conn.getDataTableSpec(getCredentialsProvider());
    } catch (SQLException e) {
        NodeLogger.getLogger(getClass()).info("Could not determine table spec from database, trying to guess now", e);
        if (!ignoreExceptions) {
            throw new InvalidSettingsException("Error in automatically built SQL statement: " + e.getMessage());
        }
        // Otherwise guess the spec
    }
    List<DataColumnSpec> colSpecs = new ArrayList<>();
    // Add all group-by columns
    for (String col : m_groupByCols.getIncludeList()) {
        colSpecs.add(inSpec.getColumnSpec(col));
    }
    // Add aggregated columns
    for (int i = 0; i < m_aggregatedColumns.length; i++) {
        String col = m_aggregatedColumns[i];
        String method = m_aggregationMethods[i];
        if (inSpec.getColumnSpec(col) == null) {
            throw new InvalidSettingsException("Column '" + col + "' in aggregation " + method + " does not exist");
        }
        final DatabaseUtility databaseUtility = settings.getUtility();
        final DBAggregationFunction function = databaseUtility.getAggregationFunction(method);
        // Get the type of the column after aggregation
        DataType type = function.getType(inSpec.getColumnSpec(col).getType());
        colSpecs.add(new DataColumnSpecCreator(generateColumnName(col, method), type).createSpec());
    }
    return new DataTableSpec(colSpecs.toArray(new DataColumnSpec[colSpecs.size()]));
}
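Both branches end in a plain DataTableSpec, so callers can inspect the result the same way regardless of whether it was retrieved from the database or guessed. A small sketch, assuming outSpec holds the returned spec (DataTableSpec is iterable over its column specs):

for (DataColumnSpec col : outSpec) {
    System.out.println(col.getName() + " -> " + col.getType());
}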
Use of org.knime.core.data.DataTableSpec in project knime-core by knime.
The class VariableFileReaderNodeModel, method execute:
/**
* {@inheritDoc}
*/
@Override
protected PortObject[] execute(final PortObject[] inData, final ExecutionContext exec) throws Exception {
    Map<String, FlowVariable> stack = createStack(m_frSettings.getVariableName());
    VariableFileReaderNodeSettings settings = m_frSettings.createSettingsFrom(stack);
    LOGGER.info("Preparing to read from '" + m_frSettings.getDataFileLocation().toString() + "'.");
    // Check the settings again - especially file existence (under Linux,
    // files could have been deleted/renamed since the last configure call).
    SettingsStatus status = settings.getStatusOfSettings(true, null);
    if (status.getNumOfErrors() > 0) {
        throw new InvalidSettingsException(status.getAllErrorMessages(10));
    }
    DataTableSpec tSpec = settings.createDataTableSpec();
    FileTable fTable = new FileTable(tSpec, settings, settings.getSkippedColumns(), exec);
    // Create a DataContainer and fill it with the rows read. It is faster
    // than reading the file every time (for each row iterator), and it
    // collects the domain for each column for us. Also, if things fail,
    // the error message is printed during file reader execution (where it
    // belongs) and not some time later when a node uses the row iterator
    // from the file table.
    BufferedDataContainer c = exec.createDataContainer(fTable.getDataTableSpec(), /* initDomain= */ true);
    int row = 0;
    FileRowIterator it = fTable.iterator();
    try {
        if (it.getZipEntryName() != null) {
            // seems we are reading a ZIP archive
            LOGGER.info("Reading entry '" + it.getZipEntryName() + "' from the specified ZIP archive.");
        }
        while (it.hasNext()) {
            row++;
            DataRow next = it.next();
            String message = "Caching row #" + row + " (\"" + next.getKey() + "\")";
            exec.setMessage(message);
            exec.checkCanceled();
            c.addRowToTable(next);
        }
        if (it.zippedSourceHasMoreEntries()) {
            // after reading to the end of the file this returns a valid result
            setWarningMessage("Source is a ZIP archive with multiple entries. Only reading first entry!");
        }
    } catch (DuplicateKeyException dke) {
        String msg = dke.getMessage();
        if (msg == null) {
            msg = "Duplicate row IDs";
        }
        msg += ". Consider making IDs unique in the advanced settings.";
        DuplicateKeyException newDKE = new DuplicateKeyException(msg);
        newDKE.initCause(dke);
        throw newDKE;
    } finally {
        c.close();
    }
    // user settings allow for truncating the table
    if (it.iteratorEndedEarly()) {
        setWarningMessage("Data was truncated due to user settings.");
    }
    BufferedDataTable out = c.getTable();
    // dispose() closes all sources
    fTable.dispose();
    return new BufferedDataTable[] { out };
}
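The container handling above follows the usual create/fill/close sequence: rows become available as a BufferedDataTable only after close(). A reduced sketch of just that shape, with an illustrative spec, row key, and cell value:

BufferedDataContainer c = exec.createDataContainer(spec);
try {
    c.addRowToTable(new DefaultRow(new RowKey("Row0"), new StringCell("example")));
} finally {
    c.close(); // the table is only valid after close()
}
BufferedDataTable out = c.getTable();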