Use of org.knime.core.node.workflow.FlowVariable in project knime-core by knime.
The class VariableFileReaderNodeDialog, method takeOverNewFileLocation.
/*
 * Reads the selected variable name from the combo box and stores the new
 * value in the settings object. Throws an exception if the entered URL is
 * invalid (and clears the URL in the settings object beforehand). Returns
 * true if the entered location (string) is different from the one
 * previously set.
 */
private boolean takeOverNewFileLocation() throws InvalidSettingsException {
    FlowVariable sel = (FlowVariable) m_urlCombo.getSelectedItem();
    String varName = null;
    if (sel != null) {
        varName = sel.getName();
    }
    if (getAvailableFlowVariables().get(varName) == null) {
        // oops.
        throw new InvalidSettingsException(
                "Selected variable not available anymore. Select a different one.");
    }
    m_frSettings.setVariableName(varName);
    URL oldUrl = m_frSettings.getDataFileLocation();
    try {
        m_frSettings = m_frSettings.createSettingsFrom(getAvailableFlowVariables());
    } catch (Exception e) {
        m_frSettings.setDataFileLocationAndUpdateTableName(null);
        throw new InvalidSettingsException(e.getMessage());
    }
    String oldString = "";
    if (oldUrl != null) {
        oldString = oldUrl.toString();
    }
    return !oldString.equals(m_frSettings.getDataFileLocation().toString());
}
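The final comparison relies on the settings object turning the selected variable's string value into a URL (via createSettingsFrom). A minimal, self-contained sketch of such a string-to-URL conversion — the class and helper names below are made up for illustration and are not the actual VariableFileReaderNodeSettings logic — could look like this:

import java.io.File;
import java.net.MalformedURLException;
import java.net.URL;

final class UrlConversionSketch {

    /**
     * Interprets a variable's string value as a URL, falling back to a
     * local file path if the string is not a well-formed URL.
     */
    static URL toUrl(final String value) throws MalformedURLException {
        try {
            return new URL(value);
        } catch (MalformedURLException mfe) {
            // not a URL - treat the value as a path on the local file system
            return new File(value).toURI().toURL();
        }
    }
}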
Use of org.knime.core.node.workflow.FlowVariable in project knime-core by knime.
The class VariableFileReaderNodeDialog, method loadSettingsFromInternal.
/**
 * We do the entire settings loading in the Event/GUI thread, as it
 * accesses a lot of GUI components.
 */
private void loadSettingsFromInternal(final NodeSettingsRO settings, final PortObjectSpec[] specs) {
    assert (settings != null && specs != null);
    // loading of the variable names would trigger an item changed event.
    m_urlCombo.removeItemListener(this);
    m_urlCombo.removeAllItems();
    Map<String, FlowVariable> stack = getAvailableFlowVariables();
    for (FlowVariable fv : stack.values()) {
        m_urlCombo.addItem(fv);
    }
    m_urlCombo.addItemListener(this);
    try {
        // this will fail if the settings are invalid (which will be the
        // case when they come from an uninitialized model). We create
        // an empty settings object in the catch block.
        m_frSettings = new VariableFileReaderNodeSettings(settings);
    } catch (InvalidSettingsException ice) {
        m_frSettings = new VariableFileReaderNodeSettings();
    }
    String loadedLocation = null;
    if (m_frSettings.getDataFileLocation() != null) {
        loadedLocation = m_frSettings.getDataFileLocation().toString();
    }
    // check the specified variable
    if (stack.get(m_frSettings.getVariableName()) == null) {
        // the variable is not on the stack anymore
        m_frSettings.setVariableName("");
        m_frSettings.setDataFileLocationAndUpdateTableName(null);
    } else {
        String varVal = stack.get(m_frSettings.getVariableName()).getStringValue();
        try {
            URL varURL = textToURL(varVal);
            if (!varURL.toString().equals(m_frSettings.getDataFileLocation().toString())) {
                // variable points to a different location
                m_frSettings.setDataFileLocationAndUpdateTableName(null);
            }
        } catch (Exception e) {
            // the variable is still there - but has an invalid location
            m_frSettings.setDataFileLocationAndUpdateTableName(null);
        }
    }
    // set the URL, if available, or clear an invalid variable name
    try {
        m_frSettings = m_frSettings.createSettingsFrom(stack);
    } catch (Exception e) {
        m_frSettings.setVariableName("");
        m_frSettings.setDataFileLocationAndUpdateTableName(null);
    }
    FlowVariable fv = getAvailableFlowVariables().get(m_frSettings.getVariableName());
    m_urlCombo.setSelectedItem(fv);
    // transfer settings from the structure in the dialog's components
    if ((m_frSettings.getDataFileLocation() != null)
            && m_frSettings.getDataFileLocation().toString().equals(loadedLocation)
            && (m_frSettings.getColumnProperties() != null)
            && (m_frSettings.getColumnProperties().size() > 0)) {
        // do not analyze the file if we got settings to use
        loadSettings(false);
    } else {
        // load settings and analyze the file
        loadSettings(true);
    }
    // after loading settings we can clear the analyze warning
    setAnalWarningText("");
    // reset the flag when the dialog opens
    m_preserveSettings.setSelected(false);
    updatePreview();
}
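The listener handling at the top of the method follows a common Swing pattern: detach the ItemListener before repopulating the combo box so that the addItem calls do not fire selection events, then reattach it afterwards. A standalone sketch of that pattern, using a generic JComboBox rather than the dialog's actual fields, might be:

import java.awt.event.ItemListener;
import java.util.List;
import javax.swing.JComboBox;

final class ComboRefreshSketch {

    /** Repopulates a combo box without firing events on the given listener. */
    static <T> void refresh(final JComboBox<T> combo, final ItemListener listener,
            final List<T> items) {
        combo.removeItemListener(listener);
        combo.removeAllItems();
        for (T item : items) {
            combo.addItem(item);
        }
        combo.addItemListener(listener);
    }
}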
Use of org.knime.core.node.workflow.FlowVariable in project knime-core by knime.
The class VariableFileReaderNodeModel, method execute.
/**
 * {@inheritDoc}
 */
@Override
protected PortObject[] execute(final PortObject[] inData, final ExecutionContext exec) throws Exception {
    Map<String, FlowVariable> stack = createStack(m_frSettings.getVariableName());
    VariableFileReaderNodeSettings settings = m_frSettings.createSettingsFrom(stack);
    LOGGER.info("Preparing to read from '" + m_frSettings.getDataFileLocation().toString() + "'.");
    // check the settings again - especially file existence (under Linux,
    // files could have been deleted/renamed since the last configure call)
    SettingsStatus status = settings.getStatusOfSettings(true, null);
    if (status.getNumOfErrors() > 0) {
        throw new InvalidSettingsException(status.getAllErrorMessages(10));
    }
    DataTableSpec tSpec = settings.createDataTableSpec();
    FileTable fTable = new FileTable(tSpec, settings, settings.getSkippedColumns(), exec);
    // Create a DataContainer and fill it with the rows read. It is faster
    // than reading the file every time (for each row iterator), and it
    // collects the domain for each column for us. Also, if things fail,
    // the error message is printed during file reader execution (where it
    // belongs) and not some time later when a node uses the row iterator
    // from the file table.
    BufferedDataContainer c = exec.createDataContainer(fTable.getDataTableSpec(), /* initDomain= */ true);
    int row = 0;
    FileRowIterator it = fTable.iterator();
    try {
        if (it.getZipEntryName() != null) {
            // seems we are reading a ZIP archive.
            LOGGER.info("Reading entry '" + it.getZipEntryName() + "' from the specified ZIP archive.");
        }
        while (it.hasNext()) {
            row++;
            DataRow next = it.next();
            String message = "Caching row #" + row + " (\"" + next.getKey() + "\")";
            exec.setMessage(message);
            exec.checkCanceled();
            c.addRowToTable(next);
        }
        if (it.zippedSourceHasMoreEntries()) {
            // after reading until the end of the file this returns a valid result
            setWarningMessage("Source is a ZIP archive with multiple entries. Only reading first entry!");
        }
    } catch (DuplicateKeyException dke) {
        String msg = dke.getMessage();
        if (msg == null) {
            msg = "Duplicate row IDs";
        }
        msg += ". Consider making IDs unique in the advanced settings.";
        DuplicateKeyException newDKE = new DuplicateKeyException(msg);
        newDKE.initCause(dke);
        throw newDKE;
    } finally {
        c.close();
    }
    // user settings allow for truncating the table
    if (it.iteratorEndedEarly()) {
        setWarningMessage("Data was truncated due to user settings.");
    }
    BufferedDataTable out = c.getTable();
    // closes all sources.
    fTable.dispose();
    return new BufferedDataTable[] { out };
}
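The catch block rewraps the DuplicateKeyException so that the hint about the advanced settings becomes part of the message while the original exception is kept as the cause. The same rewrapping step reduced to a small helper — assuming the DuplicateKeyException type used in the snippet (org.knime.core.util in knime-core; adjust the import if it differs in your version) — could be sketched as:

import org.knime.core.util.DuplicateKeyException;

final class RewrapSketch {

    /** Adds the advanced-settings hint to the message, keeping the original cause. */
    static DuplicateKeyException withHint(final DuplicateKeyException dke) {
        String msg = dke.getMessage();
        if (msg == null) {
            msg = "Duplicate row IDs";
        }
        msg += ". Consider making IDs unique in the advanced settings.";
        DuplicateKeyException wrapped = new DuplicateKeyException(msg);
        // preserve the original exception so its stack trace is not lost
        wrapped.initCause(dke);
        return wrapped;
    }
}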
Use of org.knime.core.node.workflow.FlowVariable in project knime-core by knime.
The class VariableFileReaderNodeModel, method createStack.
private final Map<String, FlowVariable> createStack(final String varName) {
    String loc = peekFlowVariableString(varName);
    FlowVariable scopeVar = new FlowVariable(varName, loc);
    Map<String, FlowVariable> stack = new HashMap<String, FlowVariable>();
    if (scopeVar != null) {
        stack.put(varName, scopeVar);
    }
    return stack;
}
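createStack wraps the value returned by peekFlowVariableString into a single-entry map keyed by the variable name, using the FlowVariable(String, String) constructor to create a string-typed variable (the null check on scopeVar is redundant, since the constructor never returns null). A standalone usage sketch of that pattern — the variable name and value below are made up for illustration — might be:

import java.util.HashMap;
import java.util.Map;
import org.knime.core.node.workflow.FlowVariable;

final class StackSketch {

    /** Builds a one-entry stack mapping the name to a string-typed FlowVariable. */
    static Map<String, FlowVariable> singleVariableStack(final String name, final String value) {
        Map<String, FlowVariable> stack = new HashMap<>();
        stack.put(name, new FlowVariable(name, value));
        return stack;
    }

    public static void main(final String[] args) {
        Map<String, FlowVariable> stack = singleVariableStack("fileUrl", "file:/tmp/data.csv");
        // prints "file:/tmp/data.csv"
        System.out.println(stack.get("fileUrl").getStringValue());
    }
}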
Use of org.knime.core.node.workflow.FlowVariable in project knime-core by knime.
The class Node, method createNodeExecutionResult.
/**
 * Creates an execution result containing all values calculated during an
 * execution. The returned value is suitable to be used in
 * {@link #loadDataAndInternals(
 * NodeContentPersistor, ExecutionMonitor, LoadResult)}.
 * If this node is not executed, it will assign null values to the fields
 * in the returned execution result.
 * @param exec For progress information.
 * @return A new execution result containing the values being calculated.
 * @throws CanceledExecutionException If canceled
 */
public NodeExecutionResult createNodeExecutionResult(final ExecutionMonitor exec) throws CanceledExecutionException {
    NodeExecutionResult result = new NodeExecutionResult();
    result.setWarningMessage(m_model.getWarningMessage());
    if (hasContent()) {
        File internTempDir;
        try {
            internTempDir = FileUtil.createTempDir("knime_node_internDir");
            exec.setMessage("Saving internals");
            saveInternals(internTempDir, exec.createSubProgress(0.0));
            result.setNodeInternDir(new ReferencedFile(internTempDir));
        } catch (IOException ioe) {
            LOGGER.error("Unable to save internals", ioe);
        }
    }
    if (m_internalHeldPortObjects != null) {
        PortObject[] internalHeldPortObjects =
            Arrays.copyOf(m_internalHeldPortObjects, m_internalHeldPortObjects.length);
        result.setInternalHeldPortObjects(internalHeldPortObjects);
    }
    PortObject[] pos = new PortObject[getNrOutPorts()];
    PortObjectSpec[] poSpecs = new PortObjectSpec[getNrOutPorts()];
    for (int i = 0; i < pos.length; i++) {
        PortObject po = getOutputObject(i);
        if (po != null) {
            pos[i] = po;
            poSpecs[i] = po.getSpec();
        }
    }
    result.setPortObjects(pos);
    result.setPortObjectSpecs(poSpecs);
    // add the outgoing flow variables to the execution result
    FlowObjectStack outgoingStack = m_model.getOutgoingFlowObjectStack();
    List<FlowVariable> nodeFlowVars = outgoingStack.getAvailableFlowVariables().values().stream()
        .filter(f -> f.getScope().equals(FlowVariable.Scope.Flow))
        .collect(Collectors.toList());
    // the bottom-most element should remain at the bottom of the stack
    Collections.reverse(nodeFlowVars);
    result.setFlowVariables(nodeFlowVars);
    return result;
}
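The last few lines isolate the flow-scoped variables from the outgoing stack and reverse them so the bottom-most element stays at the bottom when the execution result is restored. That filter-and-reverse step in isolation — the collection parameter below stands in for outgoingStack.getAvailableFlowVariables().values() — looks like this:

import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.stream.Collectors;
import org.knime.core.node.workflow.FlowVariable;

final class ScopeFilterSketch {

    /** Keeps only flow-scoped variables and reverses the list to bottom-up order. */
    static List<FlowVariable> flowScopedBottomUp(final Collection<FlowVariable> available) {
        List<FlowVariable> vars = available.stream()
            .filter(f -> f.getScope().equals(FlowVariable.Scope.Flow))
            .collect(Collectors.toList());
        // the bottom-most element should remain at the bottom of the stack
        Collections.reverse(vars);
        return vars;
    }
}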