Use of org.knime.core.node.ExecutionContext in project knime-core by knime:
the class DBDropTableNodeModel, method execute.
/**
 * {@inheritDoc}
 */
@Override
protected PortObject[] execute(final PortObject[] inObjects, final ExecutionContext exec) throws Exception {
    exec.setMessage("Dropping table");
    final DatabaseConnectionPortObject incomingConnection = (DatabaseConnectionPortObject) inObjects[0];
    final CredentialsProvider cp = getCredentialsProvider();
    final DatabaseConnectionSettings connSettings = incomingConnection.getConnectionSettings(cp);
    final DatabaseUtility dbUtility = connSettings.getUtility();
    final StatementManipulator manipulator = dbUtility.getStatementManipulator();
    final String table2Drop = m_tableName.getStringValue();
    try {
        // Skip the existence check when the node should fail on a missing table anyway.
        if (m_failIfNotExists.getBooleanValue()
            || connSettings.execute(cp, conn -> dbUtility.tableExists(conn, table2Drop))) {
            connSettings.execute(manipulator.dropTable(table2Drop, m_cascade.getBooleanValue()), cp);
            exec.setMessage("Table " + table2Drop + " successfully dropped");
        } else {
            exec.setMessage("Table " + table2Drop + " does not exist in db");
        }
    } catch (SQLException ex) {
        Throwable cause = ExceptionUtils.getRootCause(ex);
        if (cause == null) {
            cause = ex;
        }
        throw new InvalidSettingsException("Error while validating drop statement: " + cause.getMessage(), ex);
    }
    return inObjects;
}
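For readers outside KNIME, the check-then-drop pattern above boils down to plain JDBC. A minimal, self-contained sketch under that assumption; the URL and table name are hypothetical, and KNIME's DatabaseUtility and StatementManipulator normally hide the dialect differences that the raw SQL here ignores:

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

public class DropTableSketch {

    public static void main(final String[] args) throws SQLException {
        final String url = "jdbc:h2:mem:demo"; // hypothetical URL, needs a JDBC driver on the classpath
        final String table = "MY_TABLE";       // hypothetical table name
        try (Connection conn = DriverManager.getConnection(url)) {
            // Rough equivalent of dbUtility.tableExists(conn, table2Drop):
            // query the driver's metadata for a table with that name.
            final boolean exists;
            try (ResultSet rs = conn.getMetaData().getTables(null, null, table, null)) {
                exists = rs.next();
            }
            if (exists) {
                try (Statement stmt = conn.createStatement()) {
                    // StatementManipulator.dropTable(...) would render a dialect-specific
                    // statement (e.g. with CASCADE); plain SQL is used for the sketch.
                    stmt.execute("DROP TABLE " + table);
                }
            }
        }
    }
}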
Use of org.knime.core.node.ExecutionContext in project knime-core by knime:
the class LoopEndJoinNodeModel, method execute.
/**
 * {@inheritDoc}
 */
@Override
protected BufferedDataTable[] execute(final BufferedDataTable[] inData, final ExecutionContext exec) throws Exception {
    boolean hasSameRowsInEachIteration = m_configuration.hasSameRowsInEachIteration();
    LoopStartNode startNode = getLoopStartNode();
    if (!(startNode instanceof LoopStartNodeTerminator)) {
        throw new IllegalStateException("Loop end is not connected to matching/corresponding "
            + "loop start node. You are trying to create an infinite loop!");
    }
    boolean continueLoop = !((LoopStartNodeTerminator) startNode).terminateLoop();
    if (m_currentAppendTable == null) {
        m_currentAppendTable = copy(inData[0], false, exec);
    } else if (hasSameRowsInEachIteration) {
        boolean isCacheNew = m_iteration % 50 == 0;
        double amount = isCacheNew ? (1.0 / 3.0) : (1.0 / 2.0);
        ExecutionContext copyCtx = exec.createSubExecutionContext(amount);
        ExecutionContext joinCtx = exec.createSubExecutionContext(amount);
        exec.setProgress("Copying input");
        BufferedDataTable t = copy(inData[0], true, copyCtx);
        copyCtx.setProgress(1.0);
        exec.setProgress("Joining with previous input");
        m_currentAppendTable = exec.createJoinedTable(m_currentAppendTable, t, joinCtx);
        joinCtx.setProgress(1.0);
        if (isCacheNew) {
            exec.setProgress("Caching intermediate results (iteration " + m_iteration + ")");
            ExecutionContext ctx = exec.createSubExecutionContext(amount);
            // Copy the whole table every 50 iterations (avoids wrapping too many individual tables).
            // In this case the whole table is copied and column names DON'T need to be made unique (bugfix 6544).
            m_currentAppendTable = copy(m_currentAppendTable, m_appendIterSuffixForBackwardComp, ctx);
            ctx.setProgress(1.0);
        }
    } else {
        Joiner2Settings settings = new Joiner2Settings();
        settings.setCompositionMode(CompositionMode.MatchAll);
        settings.setDuplicateColumnSuffix(" (Iter #" + m_iteration + ")");
        settings.setDuplicateHandling(DuplicateHandling.AppendSuffix);
        settings.setEnableHiLite(false);
        // joining on RowIDs, this should not generate new row IDs but
        // only fill missing rows in either table
        settings.setJoinMode(JoinMode.FullOuterJoin);
        settings.setLeftIncludeAll(true);
        settings.setRightIncludeAll(true);
        // TODO to be replaced by Joiner2Settings.ROW_KEY_IDENTIFIER
        // once that is public
        settings.setLeftJoinColumns(new String[] { "$RowID$" });
        settings.setRightJoinColumns(new String[] { "$RowID$" });
        BufferedDataTable left = m_currentAppendTable;
        BufferedDataTable right = copy(inData[0], true, exec.createSubExecutionContext(0.1));
        Joiner joiner = new Joiner(left.getDataTableSpec(), right.getDataTableSpec(), settings);
        m_currentAppendTable = joiner.computeJoinTable(left, right, exec.createSubExecutionContext(0.9));
    }
    m_iteration += 1;
    if (continueLoop) {
        super.continueLoop();
        return null;
    } else {
        return new BufferedDataTable[] { m_currentAppendTable };
    }
}
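A noteworthy detail above is how the progress bar is divided: each phase gets its own sub-context owning a fixed fraction (amount, 0.1, 0.9) of the parent. The class below is a stripped-down sketch of that weighted-splitting idea; Progress and its methods are hypothetical stand-ins, not the KNIME ExecutionContext API:

public class Progress {
    private final Progress parent;
    private final double weight; // fraction of the parent this child owns
    private final double offset; // parent progress already consumed when the child was created
    private double current;

    public Progress() {
        this(null, 1.0, 0.0);
    }

    private Progress(final Progress parent, final double weight, final double offset) {
        this.parent = parent;
        this.weight = weight;
        this.offset = offset;
    }

    /** Child that maps its local [0, 1] onto [offset, offset + weight] of this progress. */
    public Progress sub(final double weight) {
        return new Progress(this, weight, current);
    }

    public void set(final double value) {
        current = value;
        if (parent != null) {
            parent.set(offset + value * weight);
        }
    }

    public double get() {
        return current;
    }

    public static void main(final String[] args) {
        Progress root = new Progress();
        Progress copy = root.sub(0.5);
        copy.set(1.0);                  // copy phase done: root at 0.5
        Progress join = root.sub(0.5);
        join.set(0.5);                  // join phase halfway
        System.out.println(root.get()); // prints 0.75
    }
}

The real ExecutionContext additionally carries the cancellation state down the same tree, which is why each phase can poll its own sub-context.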
Use of org.knime.core.node.ExecutionContext in project knime-core by knime:
the class StatisticCalculator, method evaluate.
/**
 * @param dataTable the data table to compute the statistics on
 * @param exec execution context
 * @return a potential warning message or <code>null</code>
 * @throws CanceledExecutionException if the user cancels the execution
 */
public String evaluate(final BufferedDataTable dataTable, final ExecutionContext exec) throws CanceledExecutionException {
    for (Statistic stat : m_statistics) {
        stat.beforeEvaluation(dataTable.size());
    }
    if (!m_colToSortOn.isEmpty()) {
        ColumnBufferedDataTableSorter columnDataTableSorter;
        try {
            columnDataTableSorter = new ColumnBufferedDataTableSorter(dataTable.getDataTableSpec(),
                dataTable.size(), m_colToSortOn.toArray(new String[m_colToSortOn.size()]));
        } catch (InvalidSettingsException e) {
            throw new RuntimeException("Error while initializing the sorting", e);
        }
        exec.setMessage("Sorting data.");
        final Iterator<DataRow> it = dataTable.iterator();
        final MutableLong count = new MutableLong();
        final ExecutionContext evalProgress = exec.createSubExecutionContext(0.3);
        final int[] specMapping = createSpecMapping(dataTable.getSpec(),
            m_colToSortOn.toArray(new String[m_colToSortOn.size()]));
        columnDataTableSorter.sort(dataTable, exec.createSubExecutionContext(0.7), new SortingConsumer() {
            @Override
            public void consume(final DataRow defaultRow) {
                DataRow next = it.next();
                evalProgress.setProgress(count.longValue() / (double) dataTable.size(),
                    "Processing row: " + next.getKey());
                count.increment();
                for (Statistic stat : m_statistics) {
                    stat.consumeRow(new OverwritingRow(next, defaultRow, specMapping));
                }
            }
        });
    } else {
        exec.setMessage("Evaluating statistics.");
        long count = 0;
        for (DataRow currRow : dataTable) {
            exec.setProgress(count++ / (double) dataTable.size(), "Processing row: " + currRow.getKey());
            for (Statistic stat : m_statistics) {
                stat.consumeRow(currRow);
            }
        }
    }
    StringBuilder warnings = new StringBuilder();
    for (Statistic stat : m_statistics) {
        String warningString = stat.finish();
        if (warningString != null) {
            warnings.append(warningString);
            warnings.append("\n");
        }
    }
    return warnings.length() > 0 ? warnings.toString() : null;
}
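The evaluate method only needs three hooks from each Statistic: an announcement of the table size, a per-row update, and a finishing step that may return a warning. The sketch below mirrors that contract with hypothetical types (the KNIME version consumes full DataRows; plain doubles keep the sketch dependency-free):

interface SimpleStatistic {
    void beforeEvaluation(long rowCount); // size hint, may be ignored
    void consumeRow(double value);
    String finish(); // warning message, or null if everything was fine
}

class MeanStatistic implements SimpleStatistic {
    private double m_sum;
    private long m_seen;
    private long m_missing;

    @Override
    public void beforeEvaluation(final long rowCount) {
        m_sum = 0.0;
        m_seen = 0;
        m_missing = 0;
    }

    @Override
    public void consumeRow(final double value) {
        if (Double.isNaN(value)) {
            m_missing++; // skip missing cells, but remember them for the warning
        } else {
            m_sum += value;
            m_seen++;
        }
    }

    @Override
    public String finish() {
        return m_missing > 0 ? m_missing + " missing value(s) ignored" : null;
    }

    public double getMean() {
        return m_seen == 0 ? Double.NaN : m_sum / m_seen;
    }
}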
Use of org.knime.core.node.ExecutionContext in project knime-core by knime:
the class Normalizer2, method doZScoreNorm.
/**
 * Does the Z-Score Normalization.
 *
 * @param exec an object to check for user cancellations; must not be <code>null</code>
 * @throws CanceledExecutionException if the user canceled
 * @return the normalized DataTable
 */
public AffineTransTable doZScoreNorm(final ExecutionContext exec) throws CanceledExecutionException {
    ExecutionContext statisticsExec = exec.createSubExecutionContext(.5);
    final Statistics3Table st = new Statistics3Table(m_table, false, 0, Collections.<String>emptyList(), statisticsExec);
    checkForMissVals(st);
    double[] mean = st.getMean();
    double[] stddev = st.getStandardDeviation();
    final double[] scales = new double[m_colindices.length];
    final double[] transforms = new double[m_colindices.length];
    final double[] mins = new double[m_colindices.length];
    final double[] maxs = new double[m_colindices.length];
    for (int i = 0; i < m_colindices.length; i++) {
        if (Double.isNaN(mean[m_colindices[i]])) {
            scales[i] = Double.NaN;
            transforms[i] = Double.NaN;
        } else {
            scales[i] = (stddev[m_colindices[i]] == 0.0 ? 1.0 : 1.0 / stddev[m_colindices[i]]);
            transforms[i] = -mean[m_colindices[i]] * scales[i];
        }
        mins[i] = Double.NaN;
        maxs[i] = Double.NaN;
    }
    String[] includes = getNames();
    String summary = "Z-Score (Gaussian) normalization on " + includes.length + " column(s)";
    AffineTransConfiguration configuration = new AffineTransConfiguration(includes, scales, transforms, mins, maxs, summary);
    return new AffineTransTable(m_table, configuration);
}
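The scale/transform pair computed above encodes the z-score as an affine map: with scale = 1/stddev and transform = -mean * scale, the expression scale * x + transform equals (x - mean) / stddev. A quick check with made-up numbers:

public class ZScoreCheck {
    public static void main(final String[] args) {
        double mean = 10.0, stddev = 2.0;        // illustrative values
        double scale = 1.0 / stddev;             // as in doZScoreNorm
        double transform = -mean * scale;
        double x = 14.0;
        System.out.println(scale * x + transform);   // 2.0
        System.out.println((x - mean) / stddev);     // 2.0, same value
    }
}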
Use of org.knime.core.node.ExecutionContext in project knime-core by knime:
the class Normalizer2, method doMinMaxNorm.
/**
 * Does the Min-Max Normalization.
 *
 * @param newmax the new maximum
 * @param newmin the new minimum
 * @param exec an object to check for user cancellations; must not be <code>null</code>
 * @throws CanceledExecutionException if the user canceled
 * @return the normalized DataTable
 */
public AffineTransTable doMinMaxNorm(final double newmax, final double newmin, final ExecutionContext exec) throws CanceledExecutionException {
    ExecutionContext statisticsExec = exec.createSilentSubExecutionContext(.5);
    Statistics3Table st = new Statistics3Table(m_table, false, 0, Collections.<String>emptyList(), statisticsExec);
    checkForMissVals(st);
    DataTableSpec spec = m_table.getDataTableSpec();
    double[] max = st.getMax();
    double[] min = st.getMin();
    final double[] scales = new double[m_colindices.length];
    final double[] transforms = new double[m_colindices.length];
    final double[] mins = new double[m_colindices.length];
    final double[] maxs = new double[m_colindices.length];
    for (int i = 0; i < transforms.length; i++) {
        DataColumnSpec cSpec = spec.getColumnSpec(m_colindices[i]);
        boolean isDouble = cSpec.getType().isCompatible(DoubleValue.class);
        if (!isDouble) {
            // non-numeric column: mark it as not normalized
            scales[i] = Double.NaN;
            transforms[i] = Double.NaN;
            mins[i] = Double.NaN;
            maxs[i] = Double.NaN;
        } else {
            // scale and translation to [0, 1]
            double maxI = max[m_colindices[i]];
            double minI = min[m_colindices[i]];
            scales[i] = (maxI == minI ? 1 : 1.0 / (maxI - minI));
            transforms[i] = -minI * scales[i];
            // scale and translation to [newmin, newmax]
            scales[i] *= (newmax - newmin);
            transforms[i] *= (newmax - newmin);
            transforms[i] += newmin;
            mins[i] = newmin;
            maxs[i] = newmax;
        }
    }
    String[] includes = getNames();
    String minS = DoubleFormat.formatDouble(newmin);
    String maxS = DoubleFormat.formatDouble(newmax);
    String summary = "Min/Max (" + minS + ", " + maxS + ") normalization on " + includes.length + " column(s)";
    AffineTransConfiguration configuration = new AffineTransConfiguration(includes, scales, transforms, mins, maxs, summary);
    return new AffineTransTable(m_table, configuration);
}
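Unrolled, the two-step computation above amounts to scale = (newmax - newmin) / (max - min) and transform = newmin - min * scale, which maps the observed minimum to newmin and the observed maximum to newmax. A quick check with made-up numbers:

public class MinMaxCheck {
    public static void main(final String[] args) {
        double min = 4.0, max = 8.0;          // observed column range, illustrative
        double newmin = 0.0, newmax = 1.0;    // target range
        double scale = (newmax - newmin) / (max - min);
        double transform = newmin - min * scale;
        System.out.println(scale * min + transform);  // 0.0, the new minimum
        System.out.println(scale * max + transform);  // 1.0, the new maximum
        System.out.println(scale * 6.0 + transform);  // 0.5, the midpoint stays the midpoint
    }
}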