Search in sources :

Example 76 with Config

use of org.knime.core.node.config.Config in project knime-core by knime.

In the class SettingsModel, the method saveEnableStatusAndModelID:

/**
 * Writes this model's type id and its current enabled flag into a newly
 * created sub config of the given settings object.
 *
 * @param settings the settings object the sub config is added to.
 */
private void saveEnableStatusAndModelID(final NodeSettingsWO settings) {
    final Config internalCfg = settings.addConfig(getConfigName() + CFGKEY_INTERNAL);
    internalCfg.addString(CFGKEY_MODELID, getModelTypeID());
    internalCfg.addBoolean(CFGKEY_ENABLESTAT, m_enabled);
}
Also used : Config(org.knime.core.node.config.Config)

Example 77 with Config

use of org.knime.core.node.config.Config in project knime-core by knime.

In the class HistogramColumn, the method loadHistogramsPrivate:

/**
 * Loads the numeric histogram models from a gzipped XML settings file.
 *
 * @param histogramsGz the gzipped XML file the histograms were saved to.
 * @param numericKeys map that gets an (initially empty) row-key map added for each numeric column index.
 * @param strategy bin-number selection strategy; unused in this code path, kept for interface compatibility.
 * @param means per-column mean values, indexed by column index.
 * @return the histogram models keyed by column index.
 * @throws IOException when the file cannot be opened or read.
 * @throws InvalidSettingsException when an expected settings entry is missing.
 */
private static Map<Integer, HistogramNumericModel> loadHistogramsPrivate(final File histogramsGz, final Map<Integer, Map<Integer, Set<RowKey>>> numericKeys, final BinNumberSelectionStrategy strategy, final double[] means) throws IOException, InvalidSettingsException {
    final Map<Integer, HistogramNumericModel> histograms = new HashMap<Integer, HistogramNumericModel>();
    // Fix: the original never closed the streams, leaking them whenever
    // loadFromXML or the parsing below threw. Closing an already-closed
    // stream is a no-op, so this is safe even if loadFromXML closes it.
    try (FileInputStream is = new FileInputStream(histogramsGz);
            GZIPInputStream inData = new GZIPInputStream(is)) {
        final ConfigRO config = NodeSettings.loadFromXML(inData);
        // .getConfig(HISTOGRAMS);
        ConfigRO hs = config;
        int[] numColumnIndices = config.getIntArray(NUMERIC_COLUMNS);
        for (int colIdx : numColumnIndices) {
            Config h = hs.getConfig(HISTOGRAM + colIdx);
            double min = h.getDouble(MIN), max = h.getDouble(MAX), width = h.getDouble(WIDTH);
            int maxCount = h.getInt(MAX_COUNT);
            int rowCount = h.getInt(ROW_COUNT);
            String colName = h.getString(COL_NAME);
            double[] binMins = h.getDoubleArray(BIN_MINS), binMaxes = h.getDoubleArray(BIN_MAXES);
            int[] binCounts = h.getIntArray(BIN_COUNTS);
            double mean = means[colIdx];
            HistogramNumericModel histogramData = new HistogramNumericModel(min, max, binMins.length, colIdx, colName, min, max, mean);
            // rebuild the bins from the persisted per-bin bounds and counts
            for (int i = binMins.length; i-- > 0; ) {
                histogramData.getBins().set(i, histogramData.new NumericBin(binMins[i], binMaxes[i]));
                histogramData.getBins().get(i).setCount(binCounts[i]);
            }
            histogramData.setMaxCount(maxCount);
            histogramData.setRowCount(rowCount);
            // the persisted width is redundant; it is only sanity-checked
            // against the width the model derives from min/max/bin count
            assert Math.abs(histogramData.m_width - width) < 1e-9 : "histogram data width: " + histogramData.m_width + " width: " + width;
            histograms.put(colIdx, histogramData);
            numericKeys.put(colIdx, new HashMap<Integer, Set<RowKey>>());
        }
    }
    return histograms;
}
Also used : Set(java.util.Set) HashSet(java.util.HashSet) HashMap(java.util.HashMap) LinkedHashMap(java.util.LinkedHashMap) Config(org.knime.core.node.config.Config) FileInputStream(java.io.FileInputStream) GZIPInputStream(java.util.zip.GZIPInputStream) ConfigRO(org.knime.core.node.config.ConfigRO)

Example 78 with Config

use of org.knime.core.node.config.Config in project knime-core by knime.

In the class HistogramColumn, the method saveNominalHistogramData:

/**
 * Saves the nominal histogram models to a gzipped XML file.
 *
 * @param histograms The nominal histogram models associated to the column indices.
 * @param histogramsFile The output file.
 * @throws IOException File write problem.
 * @throws IllegalStateException when a map value is not a {@link HistogramNominalModel}.
 */
public static void saveNominalHistogramData(final Map<Integer, HistogramModel<?>> histograms, final File histogramsFile) throws IOException {
    Config histogramData = new NodeSettings(HISTOGRAMS);
    List<Integer> colIndices = new ArrayList<Integer>(histograms.keySet());
    Collections.sort(colIndices);
    int[] nominalColumnIndices = new int[colIndices.size()];
    for (int i = colIndices.size(); i-- > 0; ) {
        nominalColumnIndices[i] = colIndices.get(i).intValue();
    }
    histogramData.addIntArray(NOMINAL_COLUMNS, nominalColumnIndices);
    for (Integer colIdx : colIndices) {
        Object object = histograms.get(colIdx);
        if (object instanceof HistogramNominalModel) {
            HistogramNominalModel hd = (HistogramNominalModel) object;
            assert hd.getColIndex() == colIdx.intValue() : "colIdx: " + colIdx + ", but: " + hd.getColIndex();
            Config h = histogramData.addConfig(HISTOGRAM + colIdx);
            h.addInt(COL_INDEX, hd.getColIndex());
            h.addString(COL_NAME, hd.getColName());
            h.addInt(MAX_COUNT, hd.getMaxCount());
            h.addInt(ROW_COUNT, hd.getRowCount());
            int[] counts = new int[hd.getBins().size()];
            String[] values = new String[hd.getBins().size()];
            for (int c = 0; c < hd.getBins().size(); c++) {
                HistogramModel.Bin<DataValue> bin = hd.getBins().get(c);
                // non-String bin definitions are persisted as the missing marker
                if (bin.getDef() instanceof StringCell) {
                    values[c] = ((StringCell) bin.getDef()).getStringValue();
                } else {
                    values[c] = "?";
                }
                counts[c] = bin.getCount();
            }
            h.addStringArray(BIN_VALUES, values);
            h.addIntArray(BIN_COUNTS, counts);
        } else {
            throw new IllegalStateException("Illegal argument: " + colIdx + ": " + object.getClass() + "\n   " + object);
        }
    }
    // Fix: open the file only after the settings were assembled, and use
    // try-with-resources. The original opened the streams up front and never
    // closed them, leaking them (and leaving a truncated file behind) when
    // the IllegalStateException above fired.
    try (FileOutputStream os = new FileOutputStream(histogramsFile);
            GZIPOutputStream dataOS = new GZIPOutputStream(os)) {
        histogramData.saveToXML(dataOS);
    }
}
Also used : DataValue(org.knime.core.data.DataValue) Config(org.knime.core.node.config.Config) ArrayList(java.util.ArrayList) NodeSettings(org.knime.core.node.NodeSettings) GZIPOutputStream(java.util.zip.GZIPOutputStream) StringCell(org.knime.core.data.def.StringCell) FileOutputStream(java.io.FileOutputStream)

Example 79 with Config

use of org.knime.core.node.config.Config in project knime-core by knime.

In the class StringToDurationPeriodNodeModel, the method createStreamableOperator:

/**
 * {@inheritDoc}
 */
@Override
public StreamableOperator createStreamableOperator(final PartitionInfo partitionInfo, final PortObjectSpec[] inSpecs) throws InvalidSettingsException {
    return new StreamableOperator() {

        // per-partition scratch space, exchanged with the merge operator
        private SimpleStreamableOperatorInternals m_internals = new SimpleStreamableOperatorInternals();

        /**
         * {@inheritDoc}
         *
         * Polls the first row to detect each included column's target type,
         * then publishes the resulting output layout (column names, types,
         * "sizeRow") in the internals so partitions can be merged.
         */
        @Override
        public void runIntermediate(final PortInput[] inputs, final ExecutionContext exec) throws Exception {
            final RowInput rowInput = (RowInput) inputs[0];
            final DataRow row = rowInput.poll();
            if (row != null) {
                final DataTableSpec inSpec = rowInput.getDataTableSpec();
                final int[] includeIndexes = Arrays.stream(m_colSelect.applyTo(inSpec).getIncludes()).mapToInt(s -> inSpec.findColumnIndex(s)).toArray();
                final Config config = m_internals.getConfig();
                // detect types (OneRowAdditionalRowInput presumably re-injects
                // the already polled row — TODO confirm against that class)
                detectTypes(new OneRowAdditionalRowInput(rowInput, row));
                for (int i = 0; i < m_detectedTypes.length; i++) {
                    config.addDataType("detected_type" + i, m_detectedTypes[i]);
                }
                // write detected types and column names into config
                if (m_isReplaceOrAppend.getStringValue().equals(OPTION_REPLACE)) {
                    for (int i = 0; i < rowInput.getDataTableSpec().getNumColumns(); i++) {
                        final int searchIdx = Arrays.binarySearch(includeIndexes, i);
                        config.addString("colname" + i, inSpec.getColumnNames()[i]);
                        if (searchIdx < 0) {
                            // column not included: keep its original type
                            config.addDataType("type" + i, inSpec.getColumnSpec(i).getType());
                        } else {
                            // fix: original "x != null ? x : null" was a no-op ternary
                            config.addDataType("type" + i, m_detectedTypes[searchIdx]);
                        }
                    }
                    config.addInt("sizeRow", rowInput.getDataTableSpec().getNumColumns());
                } else {
                    // append mode: original columns first, then one new
                    // (uniquely named) column per included column
                    for (int i = 0; i < inSpec.getNumColumns(); i++) {
                        config.addString("colname" + i, inSpec.getColumnNames()[i]);
                        config.addDataType("type" + i, inSpec.getColumnSpec(i).getType());
                    }
                    for (int i = 0; i < m_detectedTypes.length; i++) {
                        config.addString("colname" + (i + inSpec.getNumColumns()), new UniqueNameGenerator(inSpec).newName(inSpec.getColumnSpec(includeIndexes[i]).getName() + m_suffix.getStringValue()));
                        config.addDataType("type" + (i + inSpec.getNumColumns()), m_detectedTypes[i]);
                    }
                    config.addInt("sizeRow", inSpec.getNumColumns() + m_detectedTypes.length);
                }
                config.addBoolean("needsIteration", false);
            } else {
                // empty partition: publish a zero-column layout
                m_internals.getConfig().addInt("sizeRow", 0);
            }
            rowInput.close();
        }

        /**
         * {@inheritDoc}
         */
        @Override
        public StreamableOperatorInternals saveInternals() {
            return m_internals;
        }

        /**
         * {@inheritDoc}
         */
        @Override
        public void loadInternals(final StreamableOperatorInternals internals) {
            m_internals = (SimpleStreamableOperatorInternals) internals;
        }

        /**
         * Converts every included column of the streamed rows using the types
         * read back from the (merged) internals, pushing each row downstream
         * with the converted cells either replacing the source columns or
         * appended as new ones.
         */
        @Override
        public void runFinal(final PortInput[] inputs, final PortOutput[] outputs, final ExecutionContext exec) throws Exception {
            final RowInput in = (RowInput) inputs[0];
            final RowOutput out = (RowOutput) outputs[0];
            final DataTableSpec inSpec = in.getDataTableSpec();
            final int[] includeIndexes = Arrays.stream(m_colSelect.applyTo(inSpec).getIncludes()).mapToInt(s -> inSpec.findColumnIndex(s)).toArray();
            // read detected types from config
            final DataType[] detectedTypes = new DataType[includeIndexes.length];
            final Config config = m_internals.getConfig();
            for (int i = 0; i < includeIndexes.length; i++) {
                detectedTypes[i] = config.getDataType("detected_type" + i, null);
            }
            // compute every row
            DataRow row;
            while ((row = in.poll()) != null) {
                exec.checkCanceled();
                DataCell[] datacells = new DataCell[includeIndexes.length];
                for (int i = 0; i < includeIndexes.length; i++) {
                    if (m_isReplaceOrAppend.getStringValue().equals(OPTION_REPLACE)) {
                        final StringToDurationPeriodCellFactory cellFac = new StringToDurationPeriodCellFactory(new DataColumnSpecCreator(inSpec.getColumnNames()[includeIndexes[i]], detectedTypes[i]).createSpec(), includeIndexes[i]);
                        datacells[i] = cellFac.getCells(row)[0];
                    } else {
                        final DataColumnSpec dataColSpec = new UniqueNameGenerator(inSpec).newColumn(inSpec.getColumnNames()[includeIndexes[i]] + m_suffix.getStringValue(), detectedTypes[i]);
                        final StringToDurationPeriodCellFactory cellFac = new StringToDurationPeriodCellFactory(dataColSpec, includeIndexes[i]);
                        datacells[i] = cellFac.getCells(row)[0];
                    }
                }
                if (m_isReplaceOrAppend.getStringValue().equals(OPTION_REPLACE)) {
                    out.push(new ReplacedColumnsDataRow(row, datacells, includeIndexes));
                } else {
                    out.push(new AppendedColumnRow(row, datacells));
                }
            }
            in.close();
            out.close();
        }
    };
}
Also used : Arrays(java.util.Arrays) NodeSettingsRO(org.knime.core.node.NodeSettingsRO) DataTableSpec(org.knime.core.data.DataTableSpec) InvalidSettingsException(org.knime.core.node.InvalidSettingsException) CanceledExecutionException(org.knime.core.node.CanceledExecutionException) UniqueNameGenerator(org.knime.core.util.UniqueNameGenerator) AppendedColumnRow(org.knime.core.data.append.AppendedColumnRow) ExecutionContext(org.knime.core.node.ExecutionContext) SingleCellFactory(org.knime.core.data.container.SingleCellFactory) DataTableRowInput(org.knime.core.node.streamable.DataTableRowInput) StreamableOperatorInternals(org.knime.core.node.streamable.StreamableOperatorInternals) DataColumnSpec(org.knime.core.data.DataColumnSpec) DataColumnSpecCreator(org.knime.core.data.DataColumnSpecCreator) LinkedList(java.util.LinkedList) DataCell(org.knime.core.data.DataCell) StringValue(org.knime.core.data.StringValue) InputFilter(org.knime.core.node.util.filter.InputFilter) PortInput(org.knime.core.node.streamable.PortInput) DurationPeriodFormatUtils(org.knime.time.util.DurationPeriodFormatUtils) ExecutionMonitor(org.knime.core.node.ExecutionMonitor) DurationCellFactory(org.knime.core.data.time.duration.DurationCellFactory) PartitionInfo(org.knime.core.node.streamable.PartitionInfo) RowInput(org.knime.core.node.streamable.RowInput) SettingsModelBoolean(org.knime.core.node.defaultnodesettings.SettingsModelBoolean) PortObjectSpec(org.knime.core.node.port.PortObjectSpec) IOException(java.io.IOException) SettingsModelColumnFilter2(org.knime.core.node.defaultnodesettings.SettingsModelColumnFilter2) OutputPortRole(org.knime.core.node.streamable.OutputPortRole) ReplacedColumnsDataRow(org.knime.base.data.replace.ReplacedColumnsDataRow) NodeModel(org.knime.core.node.NodeModel) File(java.io.File) DataRow(org.knime.core.data.DataRow) SettingsModelString(org.knime.core.node.defaultnodesettings.SettingsModelString) PortOutput(org.knime.core.node.streamable.PortOutput) 
NodeSettingsWO(org.knime.core.node.NodeSettingsWO) DateTimeParseException(java.time.format.DateTimeParseException) List(java.util.List) BufferedDataTable(org.knime.core.node.BufferedDataTable) InputPortRole(org.knime.core.node.streamable.InputPortRole) MissingCell(org.knime.core.data.MissingCell) ColumnRearranger(org.knime.core.data.container.ColumnRearranger) Config(org.knime.core.node.config.Config) SimpleStreamableOperatorInternals(org.knime.core.node.streamable.simple.SimpleStreamableOperatorInternals) DataType(org.knime.core.data.DataType) PeriodCellFactory(org.knime.core.data.time.period.PeriodCellFactory) MergeOperator(org.knime.core.node.streamable.MergeOperator) RowOutput(org.knime.core.node.streamable.RowOutput) StreamableOperator(org.knime.core.node.streamable.StreamableOperator) DataTableSpec(org.knime.core.data.DataTableSpec) SimpleStreamableOperatorInternals(org.knime.core.node.streamable.simple.SimpleStreamableOperatorInternals) DataColumnSpecCreator(org.knime.core.data.DataColumnSpecCreator) Config(org.knime.core.node.config.Config) StreamableOperator(org.knime.core.node.streamable.StreamableOperator) StreamableOperatorInternals(org.knime.core.node.streamable.StreamableOperatorInternals) SimpleStreamableOperatorInternals(org.knime.core.node.streamable.simple.SimpleStreamableOperatorInternals) DataTableRowInput(org.knime.core.node.streamable.DataTableRowInput) RowInput(org.knime.core.node.streamable.RowInput) ReplacedColumnsDataRow(org.knime.base.data.replace.ReplacedColumnsDataRow) DataRow(org.knime.core.data.DataRow) UniqueNameGenerator(org.knime.core.util.UniqueNameGenerator) RowOutput(org.knime.core.node.streamable.RowOutput) ExecutionContext(org.knime.core.node.ExecutionContext) DataColumnSpec(org.knime.core.data.DataColumnSpec) DataType(org.knime.core.data.DataType) DataCell(org.knime.core.data.DataCell) ReplacedColumnsDataRow(org.knime.base.data.replace.ReplacedColumnsDataRow) AppendedColumnRow(org.knime.core.data.append.AppendedColumnRow)

Example 80 with Config

use of org.knime.core.node.config.Config in project knime-core by knime.

In the class StringToDurationPeriodNodeModel, the method createMergeOperator:

@Override
public MergeOperator createMergeOperator() {
    // Merge operator combining the per-partition internals written by
    // runIntermediate() into a single output layout for runFinal().
    return new MergeOperator() {

        /**
         * Collects "sizeRow", the output column names/types and the detected
         * types from all partitions; any column no partition could detect a
         * type for falls back to the Period type.
         */
        @Override
        public StreamableOperatorInternals mergeIntermediate(final StreamableOperatorInternals[] operators) {
            final SimpleStreamableOperatorInternals internals = new SimpleStreamableOperatorInternals();
            final Config config = internals.getConfig();
            for (StreamableOperatorInternals operator : operators) {
                final Config configToMerge = ((SimpleStreamableOperatorInternals) operator).getConfig();
                final int sizeRow = configToMerge.getInt("sizeRow", -1);
                // NOTE(review): addInt overwrites on a duplicate key, so the
                // LAST operator's "sizeRow" wins here (an empty partition
                // writes 0 in runIntermediate) — confirm all non-empty
                // partitions agree on this value.
                config.addInt("sizeRow", sizeRow);
                for (int i = 0; i < sizeRow; i++) {
                    // first partition that knows a column's type/name wins
                    if (!config.containsKey("type" + i) && configToMerge.getDataType("type" + i, null) != null) {
                        config.addDataType("type" + i, configToMerge.getDataType("type" + i, null));
                        config.addString("colname" + i, configToMerge.getString("colname" + i, null));
                    }
                    if (!config.containsKey("detected_type" + i) && configToMerge.getDataType("detected_type" + i, null) != null) {
                        config.addDataType("detected_type" + i, configToMerge.getDataType("detected_type" + i, null));
                    }
                }
            }
            // if a column's type could not be detected, guess it to be a PeriodCell
            // NOTE(review): this fallback pass sizes itself from operators[0]
            // only — presumably representative; verify for empty partitions.
            final Config configToMerge = ((SimpleStreamableOperatorInternals) operators[0]).getConfig();
            for (int i = 0; i < configToMerge.getInt("sizeRow", -1); i++) {
                if (!config.containsKey("type" + i)) {
                    config.addDataType("type" + i, PeriodCellFactory.TYPE);
                    config.addString("colname" + i, configToMerge.getString("colname" + i, null));
                }
                if (!config.containsKey("detected_type" + i)) {
                    config.addDataType("detected_type" + i, PeriodCellFactory.TYPE);
                }
            }
            return internals;
        }

        /**
         * No post-runFinal merge is needed; returns {@code null}.
         */
        @Override
        public StreamableOperatorInternals mergeFinal(final StreamableOperatorInternals[] operators) {
            return null;
        }
    };
}
Also used : SimpleStreamableOperatorInternals(org.knime.core.node.streamable.simple.SimpleStreamableOperatorInternals) Config(org.knime.core.node.config.Config) MergeOperator(org.knime.core.node.streamable.MergeOperator) StreamableOperatorInternals(org.knime.core.node.streamable.StreamableOperatorInternals) SimpleStreamableOperatorInternals(org.knime.core.node.streamable.simple.SimpleStreamableOperatorInternals)

Aggregations

Config (org.knime.core.node.config.Config)84 InvalidSettingsException (org.knime.core.node.InvalidSettingsException)25 Color (java.awt.Color)10 File (java.io.File)10 FileOutputStream (java.io.FileOutputStream)8 NodeSettings (org.knime.core.node.NodeSettings)8 ArrayList (java.util.ArrayList)7 DataColumnSpec (org.knime.core.data.DataColumnSpec)7 ConfigRO (org.knime.core.node.config.ConfigRO)7 HashMap (java.util.HashMap)6 GZIPOutputStream (java.util.zip.GZIPOutputStream)6 LinkedHashMap (java.util.LinkedHashMap)5 DataColumnSpecCreator (org.knime.core.data.DataColumnSpecCreator)5 DataTableSpec (org.knime.core.data.DataTableSpec)5 SimpleStreamableOperatorInternals (org.knime.core.node.streamable.simple.SimpleStreamableOperatorInternals)5 FileInputStream (java.io.FileInputStream)4 IOException (java.io.IOException)4 HashSet (java.util.HashSet)4 GZIPInputStream (java.util.zip.GZIPInputStream)4 DataCell (org.knime.core.data.DataCell)4