
Example 71 with ExecutionMonitor

use of org.knime.core.node.ExecutionMonitor in project knime-core by knime.

In class SubgroupMinerModel, method execute.

/**
 * {@inheritDoc}
 */
@Override
protected BufferedDataTable[] execute(final BufferedDataTable[] inData, final ExecutionContext exec) throws Exception {
    DataTable input = (BufferedDataTable) inData[0];
    // split the node's progress: first half for preprocessing, second half for the apriori search
    ExecutionMonitor exec1 = exec.createSubProgress(0.5);
    ExecutionMonitor exec2 = exec.createSubProgress(0.5);
    List<BitVectorValue> transactions = preprocess(input, exec1);
    m_nameMapping = input.getDataTableSpec().getColumnSpec(m_bitVectorColumn.getStringValue()).getElementNames();
    m_apriori = AprioriAlgorithmFactory.getAprioriAlgorithm(AprioriAlgorithmFactory.AlgorithmDataStructure.valueOf(m_underlyingStruct.getStringValue()), m_maxBitsetLength, m_nrOfRows);
    LOGGER.debug("support: " + m_minSupport);
    LOGGER.debug(m_minSupport + " start apriori: " + new Date());
    m_apriori.findFrequentItemSets(transactions, m_minSupport.getDoubleValue(), m_maxItemSetLength.getIntValue(), FrequentItemSet.Type.valueOf(m_itemSetType.getStringValue()), exec2);
    LOGGER.debug("ended apriori: " + new Date());
    m_itemSetTable = createOutputTable(exec);
    return new BufferedDataTable[] { m_itemSetTable };
}
Also used : DataTable(org.knime.core.data.DataTable) BufferedDataTable(org.knime.core.node.BufferedDataTable) BufferedDataTable(org.knime.core.node.BufferedDataTable) ExecutionMonitor(org.knime.core.node.ExecutionMonitor) BitVectorValue(org.knime.core.data.vector.bitvector.BitVectorValue) Date(java.util.Date)
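
The execute method above splits the node's overall progress into two equal halves via createSubProgress: one for preprocessing the bit vectors, one for the apriori search. The following is a minimal, hedged sketch of that sub-progress pattern in isolation; the class and method names are hypothetical, only the ExecutionMonitor calls (createSubProgress, checkCanceled, setProgress) come from the KNIME API.

import org.knime.core.node.CanceledExecutionException;
import org.knime.core.node.ExecutionMonitor;

/** Hypothetical helper illustrating the 50/50 sub-progress split used above. */
final class SubProgressSketch {

    /** Runs two phases, each reporting into its own half of the parent monitor. */
    static void runTwoPhases(final ExecutionMonitor exec, final int workItems)
            throws CanceledExecutionException {
        ExecutionMonitor phase1 = exec.createSubProgress(0.5);
        ExecutionMonitor phase2 = exec.createSubProgress(0.5);
        for (int i = 0; i < workItems; i++) {
            // honor cancel requests and report progress within the first half
            phase1.checkCanceled();
            phase1.setProgress((i + 1) / (double) workItems, "Preprocessing item " + (i + 1));
        }
        for (int i = 0; i < workItems; i++) {
            // the second half of the parent progress bar
            phase2.checkCanceled();
            phase2.setProgress((i + 1) / (double) workItems, "Searching item sets, item " + (i + 1));
        }
    }
}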

Example 72 with ExecutionMonitor

use of org.knime.core.node.ExecutionMonitor in project knime-core by knime.

In class MissingValueHandlingTable, method createMissingValueHandlingTable.

// getColSetting(DataTableSpec, ColSetting[])
/**
 * Performs missing value handling on the argument table, given the column
 * settings in an array, and reports progress.
 *
 * @param table the table to do missing value handling on
 * @param colSettings the settings
 * @param exec for progress/cancel and to create the buffered data table
 * @param warningBuffer buffer to which potential warning messages are added
 * @return a cache table, cleaned up
 * @throws CanceledExecutionException if canceled
 */
public static BufferedDataTable createMissingValueHandlingTable(final DataTable table, final ColSetting[] colSettings, final ExecutionContext exec, final StringBuffer warningBuffer) throws CanceledExecutionException {
    ColSetting[] colSetting;
    try {
        colSetting = getColSetting(table.getDataTableSpec(), colSettings, false);
    } catch (InvalidSettingsException ise) {
        LOGGER.coding("getColSetting method is not supposed to throw " + "an exception, ignoring settings", ise);
        DataTableSpec s = table.getDataTableSpec();
        colSetting = new ColSetting[s.getNumColumns()];
        for (int i = 0; i < s.getNumColumns(); i++) {
            colSetting[i] = new ColSetting(s.getColumnSpec(i));
            colSetting[i].setMethod(ColSetting.METHOD_NO_HANDLING);
        }
    }
    boolean needStatistics = false;
    int mostFrequentColCount = 0;
    for (int i = 0; i < colSetting.length; i++) {
        ColSetting c = colSetting[i];
        switch(c.getMethod()) {
            case ColSetting.METHOD_MOST_FREQUENT:
                mostFrequentColCount++;
                // intentional fall-through: the most-frequent method also needs statistics
            case ColSetting.METHOD_MAX:
            case ColSetting.METHOD_MIN:
            case ColSetting.METHOD_MEAN:
                needStatistics = true;
                break;
            default:
        }
    }
    int[] mostFrequentCols = new int[mostFrequentColCount];
    if (mostFrequentColCount > 0) {
        int index = 0;
        for (int i = 0; i < colSetting.length; i++) {
            ColSetting c = colSetting[i];
            switch(c.getMethod()) {
                case ColSetting.METHOD_MOST_FREQUENT:
                    mostFrequentCols[index++] = i;
                    break;
                default:
            }
        }
    }
    DataTable t;
    ExecutionMonitor e;
    if (needStatistics && !(table instanceof StatisticsTable)) {
        // for creating statistics table
        ExecutionMonitor subExec = exec.createSubProgress(0.5);
        t = new MyStatisticsTable(table, subExec, mostFrequentCols);
        if (((MyStatisticsTable) t).m_warningMessage != null) {
            warningBuffer.append(((MyStatisticsTable) t).m_warningMessage);
        }
        // for the iterator
        e = exec.createSubProgress(0.5);
    } else {
        t = table;
        e = exec;
    }
    MissingValueHandlingTable mvht = new MissingValueHandlingTable(t, colSetting);
    BufferedDataContainer container = exec.createDataContainer(mvht.getDataTableSpec());
    e.setMessage("Adding rows...");
    int count = 0;
    try {
        MissingValueHandlingTableIterator it = new MissingValueHandlingTableIterator(mvht, e);
        while (it.hasNext()) {
            DataRow next;
            next = it.next();
            e.setMessage("Adding row " + (count + 1) + " (\"" + next.getKey() + "\")");
            container.addRowToTable(next);
            count++;
        }
    } catch (MissingValueHandlingTableIterator.RuntimeCanceledExecutionException rcee) {
        throw rcee.getCause();
    } finally {
        container.close();
    }
    return container.getTable();
}
Also used : DataTable(org.knime.core.data.DataTable) BufferedDataTable(org.knime.core.node.BufferedDataTable) DataTableSpec(org.knime.core.data.DataTableSpec) BufferedDataContainer(org.knime.core.node.BufferedDataContainer) StatisticsTable(org.knime.base.data.statistics.StatisticsTable) DataRow(org.knime.core.data.DataRow) InvalidSettingsException(org.knime.core.node.InvalidSettingsException) ExecutionMonitor(org.knime.core.node.ExecutionMonitor)
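
The row-copy loop above pairs a BufferedDataContainer with an ExecutionMonitor so that every added row updates the progress message and the container is closed even when execution is interrupted. Below is a condensed, hedged sketch of the same pattern; the helper class, its method, and the rowCount parameter are hypothetical, while the KNIME calls (createDataContainer, checkCanceled, setProgress, addRowToTable, close, getTable) match those used above.

import org.knime.core.data.DataRow;
import org.knime.core.data.DataTable;
import org.knime.core.node.BufferedDataContainer;
import org.knime.core.node.BufferedDataTable;
import org.knime.core.node.CanceledExecutionException;
import org.knime.core.node.ExecutionContext;
import org.knime.core.node.ExecutionMonitor;

/** Hypothetical sketch: copy rows into a container while reporting per-row progress. */
final class CopyWithProgressSketch {

    static BufferedDataTable copy(final DataTable in, final long rowCount,
            final ExecutionContext exec) throws CanceledExecutionException {
        BufferedDataContainer container = exec.createDataContainer(in.getDataTableSpec());
        ExecutionMonitor mon = exec.createSubProgress(1.0);
        long count = 0;
        try {
            for (DataRow row : in) {
                // abort promptly if the user canceled the node execution
                mon.checkCanceled();
                mon.setProgress(++count / (double) rowCount,
                    "Adding row " + count + " (\"" + row.getKey() + "\")");
                container.addRowToTable(row);
            }
        } finally {
            // always close the container, even when the copy is interrupted
            container.close();
        }
        return container.getTable();
    }
}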

Example 73 with ExecutionMonitor

use of org.knime.core.node.ExecutionMonitor in project knime-core by knime.

In class HiLiteCollectorNodeModel, method execute.

/**
 * {@inheritDoc}
 */
@Override
protected PortObject[] execute(final PortObject[] inData, final ExecutionContext exec) throws Exception {
    if (m_annotationMap.isEmpty()) {
        return inData;
    }
    DataTableSpec inSpec = (DataTableSpec) inData[0].getSpec();
    final DataColumnSpec[] cspecs = createSpecs(inSpec);
    ColumnRearranger cr = new ColumnRearranger(inSpec);
    cr.append(new CellFactory() {

        /**
         * {@inheritDoc}
         */
        @Override
        public DataCell[] getCells(final DataRow row) {
            if (m_annotationMap.isEmpty()) {
                return new DataCell[0];
            }
            DataCell[] cells = new DataCell[m_lastIndex + 1];
            for (int i = 0; i < cells.length; i++) {
                Map<Integer, String> map = m_annotationMap.get(row.getKey());
                if (map == null) {
                    cells[i] = DataType.getMissingCell();
                } else {
                    String str = map.get(i);
                    if (str == null) {
                        cells[i] = DataType.getMissingCell();
                    } else {
                        cells[i] = new StringCell(str);
                    }
                }
            }
            return cells;
        }

        @Override
        public DataColumnSpec[] getColumnSpecs() {
            return cspecs;
        }

        /**
         * {@inheritDoc}
         */
        @Override
        public void setProgress(final int curRowNr, final int rowCount, final RowKey lastKey, final ExecutionMonitor em) {
            em.setProgress((double) curRowNr / rowCount);
        }
    });
    return new BufferedDataTable[] { exec.createColumnRearrangeTable((BufferedDataTable) inData[0], cr, exec) };
}
Also used : DataTableSpec(org.knime.core.data.DataTableSpec) RowKey(org.knime.core.data.RowKey) DataRow(org.knime.core.data.DataRow) DataColumnSpec(org.knime.core.data.DataColumnSpec) ColumnRearranger(org.knime.core.data.container.ColumnRearranger) StringCell(org.knime.core.data.def.StringCell) BufferedDataTable(org.knime.core.node.BufferedDataTable) ExecutionMonitor(org.knime.core.node.ExecutionMonitor) CellFactory(org.knime.core.data.container.CellFactory) LinkedHashMap(java.util.LinkedHashMap) Map(java.util.Map)
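
In the anonymous CellFactory above, setProgress divides the current row number by the total row count. The hedged skeleton below shows the same callback with a guard for empty tables and a row-key message; the class name is hypothetical and only the CellFactory and ExecutionMonitor signatures visible in the snippet are assumed.

import org.knime.core.data.DataCell;
import org.knime.core.data.DataColumnSpec;
import org.knime.core.data.DataRow;
import org.knime.core.data.RowKey;
import org.knime.core.data.container.CellFactory;
import org.knime.core.node.ExecutionMonitor;

/** Hypothetical CellFactory skeleton focusing on the progress callback. */
abstract class ProgressReportingCellFactory implements CellFactory {

    @Override
    public abstract DataCell[] getCells(DataRow row);

    @Override
    public abstract DataColumnSpec[] getColumnSpecs();

    @Override
    public void setProgress(final int curRowNr, final int rowCount,
            final RowKey lastKey, final ExecutionMonitor em) {
        // guard against empty tables and include the current row key in the message
        double fraction = rowCount > 0 ? curRowNr / (double) rowCount : 0.0;
        em.setProgress(fraction, "Processed row \"" + lastKey + "\"");
    }
}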

Example 74 with ExecutionMonitor

use of org.knime.core.node.ExecutionMonitor in project knime-core by knime.

In class FileUtil, method zipDir.

/**
 * Packs all files and directories passed in the includeList into a zip
 * stream. Recursively adds all files contained in directories. Files in the
 * include list are placed in the root of the archive. Files and directories
 * in the include list must not have the same (simple) name - otherwise an
 * I/O Exception is thrown. The passed stream is not closed when the method
 * returns. The stream should have the appropriate compression level set.
 *
 * @param zout a zipped output stream. Zip entries for each file are added
 *            to the stream. The compression level is not changed by this
 *            method. The stream remains open after the method returns!
 * @param includeList list of files or directories to add to the zip
 *            archive. Directories will be added with their content
 *            (recursively). Files are placed in the root of the archive
 *            (i.e. their path is not preserved).
 * @param zipEntryPrefix an optional parameter to specify the parent entry of
 *            the added directory content. In most cases this parameter is
 *            "" or null but can also be, e.g. "subfolder1/subfolder2/" as
 *            parent hierarchy. Callers should then create the respective
 *            (empty) zip entries up-front and should include the '/'
 *            at the end of this string
 * @param filter each file (and directory) contained is only included in the
 *            zip archive if it is accepted by the filter. If a directory is
 *            not accepted, its entire content is excluded from the zip. Must
 *            not be null.
 * @param exec receives progress messages and is checked for cancel
 *            requests. Optional, can be null.
 *
 * @return <code>true</code> if all files and directories accepted by the filter
 *         are included, <code>false</code> if an error occurs while reading a
 *         file in a directory or if a directory is unreadable.
 * @throws CanceledExecutionException if the operation was canceled through
 *             the <code>exec</code>
 * @throws IOException if an I/O error occurs when writing the zip file, or
 *             if two files or directories in the include list have the same
 *             (simple) name, or an element in the include list doesn't
 *             exist.
 * @since 3.2
 */
public static boolean zipDir(final ZipOutputStream zout, final Collection<File> includeList, final String zipEntryPrefix, final ZipFileFilter filter, final ExecutionMonitor exec) throws IOException, CanceledExecutionException {
    ExecutionMonitor execMon = exec;
    if (execMon == null) {
        execMon = new ExecutionMonitor();
    }
    // traverse the source to get a good progress estimate
    long size = 0;
    if (exec != null) {
        for (File f : includeList) {
            size += getFileSizeRec(f);
        }
    } else {
        size = Long.MAX_VALUE;
    }
    ZipWrapper zipper = new ZipWrapper(zout, zipEntryPrefix);
    // the read buffer, re-used for each file
    final byte[] buff = new byte[BUFF_SIZE];
    // false if unable to look into a sub dir or an I/O error occurs
    boolean complete = true;
    for (File f : includeList) {
        if (!filter.include(f)) {
            continue;
        }
        if (f.isFile()) {
            complete &= addZipEntry(buff, zipper, f, f.getName(), execMon, size);
        } else if (f.isDirectory()) {
            complete &= addOneDir(zipper, f, filter, execMon, size, buff);
        } else {
            throw new IOException("File " + f.getAbsolutePath() + " not added to zip archive");
        }
    }
    return complete;
}
Also used : IOException(java.io.IOException) ExecutionMonitor(org.knime.core.node.ExecutionMonitor) File(java.io.File)
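
Going by the javadoc above, a caller hands zipDir an open ZipOutputStream, the files to include, an optional entry prefix, a filter, and an optional ExecutionMonitor. The sketch below is a hypothetical caller, not KNIME code: the paths are placeholders, the org.knime.core.util package for FileUtil is assumed, and the accept-all lambda assumes ZipFileFilter is the single-method interface whose include(File) is invoked in the loop above.

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.Arrays;
import java.util.zip.ZipOutputStream;

import org.knime.core.node.CanceledExecutionException;
import org.knime.core.node.ExecutionMonitor;
import org.knime.core.util.FileUtil;

/** Hypothetical caller of FileUtil.zipDir; all paths are placeholders. */
final class ZipDirUsageSketch {

    static void zipSomeFiles() throws IOException, CanceledExecutionException {
        File target = new File("/tmp/archive.zip");    // placeholder output location
        try (ZipOutputStream zout = new ZipOutputStream(new FileOutputStream(target))) {
            boolean complete = FileUtil.zipDir(zout,
                Arrays.asList(new File("/tmp/data"), new File("/tmp/readme.txt")),
                "",                                     // no parent entry prefix
                file -> true,                           // accept every file and directory
                new ExecutionMonitor());                // or null: progress reporting is optional
            if (!complete) {
                System.err.println("Some entries could not be read and were skipped.");
            }
        }
    }
}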

Example 75 with ExecutionMonitor

use of org.knime.core.node.ExecutionMonitor in project knime-core by knime.

In class BatchExecutor, method loadWorkflow.

/**
 * Loads a single workflow.
 *
 * @param config the workflow configuration
 * @return the workflow manager representing the loaded workflow
 * @throws IOException if an I/O error occurs while loading the workflow
 * @throws InvalidSettingsException if some node or workflow settings are invalid
 * @throws CanceledExecutionException if loading the workflow is canceled by the user (should not happen in batch
 *             mode)
 * @throws UnsupportedWorkflowVersionException if the workflow version is not supported
 * @throws LockFailedException if the workflow cannot be locked
 * @throws IllegalOptionException if a node option is invalid
 * @since 2.7
 */
protected WorkflowManager loadWorkflow(final WorkflowConfiguration config) throws IOException, InvalidSettingsException, CanceledExecutionException, UnsupportedWorkflowVersionException, LockFailedException, IllegalOptionException {
    if (config.inputWorkflow.isFile()) {
        File dir = FileUtil.createTempDir("BatchExecutorInput");
        FileUtil.unzip(config.inputWorkflow, dir);
        config.workflowLocation = dir;
    } else {
        config.workflowLocation = config.inputWorkflow;
    }
    // the workflow may be inside a sub-folder (e.g. when it was exported to a zip using the wizard)
    if (!new File(config.workflowLocation, WorkflowPersistor.WORKFLOW_FILE).exists()) {
        File[] children = config.workflowLocation.listFiles();
        if (children.length == 0) {
            throw new IOException("No workflow directory at " + config.workflowLocation);
        } else {
            config.workflowLocation = config.workflowLocation.listFiles()[0];
        }
    }
    BatchExecWorkflowLoadHelper batchLH = new BatchExecWorkflowLoadHelper(config.credentials, config.workflowLocation);
    WorkflowLoadResult loadResult = WorkflowManager.loadProject(config.workflowLocation, new ExecutionMonitor(), batchLH);
    WorkflowManager wfm = loadResult.getWorkflowManager();
    if (config.failOnLoadError && loadResult.hasErrors()) {
        if (wfm != null) {
            wfm.getParent().removeProject(wfm.getID());
        }
        LOGGER.error(loadResult.getFilteredError("", LoadResultEntryType.Error));
        throw new IOException("Error(s) during workflow loading. Check log file for details.");
    }
    BatchExecWorkflowTemplateLoadHelper batchTemplateLH = new BatchExecWorkflowTemplateLoadHelper(batchLH);
    if (config.updateMetanodeLinks) {
        LOGGER.debug("Checking for metanode link updates...");
        try {
            wfm.updateMetaNodeLinks(batchTemplateLH, config.failOnLoadError, new ExecutionMonitor());
        } catch (IOException ex) {
            wfm.getParent().removeProject(wfm.getID());
            throw ex;
        }
        LOGGER.debug("Checking for metanode link updates... done");
    }
    if (!config.flowVariables.isEmpty()) {
        applyWorkflowVariables(wfm, config.reset, config.flowVariables);
    }
    if (config.reset) {
        wfm.resetAndConfigureAll();
        LOGGER.debug("Workflow reset done.");
    }
    try {
        setNodeOptions(config.nodeOptions, wfm);
    } catch (IllegalOptionException ex) {
        wfm.getParent().removeProject(wfm.getID());
        throw ex;
    } catch (InvalidSettingsException ex) {
        wfm.getParent().removeProject(wfm.getID());
        throw ex;
    }
    return wfm;
}
Also used : InvalidSettingsException(org.knime.core.node.InvalidSettingsException) IOException(java.io.IOException) ExecutionMonitor(org.knime.core.node.ExecutionMonitor) File(java.io.File) WorkflowLoadResult(org.knime.core.node.workflow.WorkflowPersistor.WorkflowLoadResult)
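
loadWorkflow descends into the first child directory when the workflow file is not found at the top level of the unpacked archive. The sketch below is a hypothetical, null-safe variant of that lookup (File.listFiles can return null and is best called only once); the literal "workflow.knime" stands in for WorkflowPersistor.WORKFLOW_FILE and is an assumption of this sketch.

import java.io.File;
import java.io.IOException;

/** Hypothetical helper: locate the workflow directory inside an unpacked archive. */
final class WorkflowDirLocatorSketch {

    static File locate(final File unpackedDir) throws IOException {
        // assumed file name; the method above uses WorkflowPersistor.WORKFLOW_FILE
        if (new File(unpackedDir, "workflow.knime").exists()) {
            return unpackedDir;
        }
        File[] children = unpackedDir.listFiles();
        if (children == null || children.length == 0) {
            throw new IOException("No workflow directory at " + unpackedDir);
        }
        // reuse the single listing instead of calling listFiles() twice
        return children[0];
    }
}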

Aggregations

ExecutionMonitor (org.knime.core.node.ExecutionMonitor): 160
BufferedDataTable (org.knime.core.node.BufferedDataTable): 50
DataTableSpec (org.knime.core.data.DataTableSpec): 43
DataRow (org.knime.core.data.DataRow): 39
DataCell (org.knime.core.data.DataCell): 35
CanceledExecutionException (org.knime.core.node.CanceledExecutionException): 35
Test (org.junit.Test): 33
InvalidSettingsException (org.knime.core.node.InvalidSettingsException): 33
File (java.io.File): 29
IOException (java.io.IOException): 25
PortObject (org.knime.core.node.port.PortObject): 25
ColumnRearranger (org.knime.core.data.container.ColumnRearranger): 23
DataColumnSpec (org.knime.core.data.DataColumnSpec): 21
RowKey (org.knime.core.data.RowKey): 20
ArrayList (java.util.ArrayList): 19
WorkflowLoadResult (org.knime.core.node.workflow.WorkflowPersistor.WorkflowLoadResult): 17
BufferedDataContainer (org.knime.core.node.BufferedDataContainer): 16
ExecutionException (java.util.concurrent.ExecutionException): 14
ExecutionContext (org.knime.core.node.ExecutionContext): 13
FileOutputStream (java.io.FileOutputStream): 12