Use of org.knime.core.node.config.Config in the knime-core project (by KNIME):
class SettingsModel, method saveEnableStatusAndModelID.
/**
 * Persists this model's type identifier and its current enabled flag.
 * Both values are written into a freshly created sub config of the
 * given settings object (named after this model's config key).
 *
 * @param settings the settings object to add the settings to.
 */
private void saveEnableStatusAndModelID(final NodeSettingsWO settings) {
    final Config internals = settings.addConfig(getConfigName() + CFGKEY_INTERNAL);
    internals.addString(CFGKEY_MODELID, getModelTypeID());
    internals.addBoolean(CFGKEY_ENABLESTAT, m_enabled);
}
Use of org.knime.core.node.config.Config in the knime-core project (by KNIME):
class HistogramColumn, method loadHistogramsPrivate.
/**
 * Reads the numeric histogram models back from a gzipped XML file.
 *
 * @param histogramsGz the gzipped XML file previously written by the save counterpart.
 * @param numericKeys out-parameter: an empty key map is registered for every loaded column index.
 * @param strategy bin-number selection strategy (currently unused here -- kept for interface
 *            compatibility; TODO confirm whether callers rely on it being accepted).
 * @param means per-column means, indexed by column index.
 * @return the loaded histogram models keyed by column index.
 * @throws IOException on read problems.
 * @throws InvalidSettingsException when the stored config is missing expected entries.
 */
private static Map<Integer, HistogramNumericModel> loadHistogramsPrivate(final File histogramsGz, final Map<Integer, Map<Integer, Set<RowKey>>> numericKeys, final BinNumberSelectionStrategy strategy, final double[] means) throws IOException, InvalidSettingsException {
    final ConfigRO config;
    // try-with-resources: the original leaked both streams (never closed,
    // also not closed when loadFromXML or GZIP decoding threw).
    try (final FileInputStream is = new FileInputStream(histogramsGz);
            final GZIPInputStream inData = new GZIPInputStream(is)) {
        config = NodeSettings.loadFromXML(inData);
    }
    final Map<Integer, HistogramNumericModel> histograms = new HashMap<Integer, HistogramNumericModel>();
    // .getConfig(HISTOGRAMS);
    final ConfigRO hs = config;
    final int[] numColumnIndices = config.getIntArray(NUMERIC_COLUMNS);
    for (int colIdx : numColumnIndices) {
        // each column's histogram lives in its own sub config, keyed by column index
        final Config h = hs.getConfig(HISTOGRAM + colIdx);
        final double min = h.getDouble(MIN), max = h.getDouble(MAX), width = h.getDouble(WIDTH);
        final int maxCount = h.getInt(MAX_COUNT);
        final int rowCount = h.getInt(ROW_COUNT);
        final String colName = h.getString(COL_NAME);
        final double[] binMins = h.getDoubleArray(BIN_MINS), binMaxes = h.getDoubleArray(BIN_MAXES);
        final int[] binCounts = h.getIntArray(BIN_COUNTS);
        final double mean = means[colIdx];
        final HistogramNumericModel histogramData = new HistogramNumericModel(min, max, binMins.length, colIdx, colName, min, max, mean);
        // rebuild the bins from the stored per-bin boundaries and counts
        for (int i = binMins.length; i-- > 0;) {
            histogramData.getBins().set(i, histogramData.new NumericBin(binMins[i], binMaxes[i]));
            histogramData.getBins().get(i).setCount(binCounts[i]);
        }
        histogramData.setMaxCount(maxCount);
        histogramData.setRowCount(rowCount);
        // sanity check: the bin width derived from min/max/#bins must match the stored width
        assert Math.abs(histogramData.m_width - width) < 1e-9 : "histogram data width: " + histogramData.m_width + " width: " + width;
        histograms.put(colIdx, histogramData);
        numericKeys.put(colIdx, new HashMap<Integer, Set<RowKey>>());
    }
    return histograms;
}
Use of org.knime.core.node.config.Config in the knime-core project (by KNIME):
class HistogramColumn, method saveNominalHistogramData.
/**
 * Writes the nominal histogram models to a gzipped XML file.
 *
 * @param histograms The nominal histogram models associated to the column indices.
 * @param histogramsFile The output file.
 * @throws IOException File write problem.
 * @throws IllegalStateException when a map value is not a {@link HistogramNominalModel}.
 */
public static void saveNominalHistogramData(final Map<Integer, HistogramModel<?>> histograms, final File histogramsFile) throws IOException {
    final Config histogramData = new NodeSettings(HISTOGRAMS);
    // deterministic on-disk layout: column indices in ascending order
    final List<Integer> colIndices = new ArrayList<Integer>(histograms.keySet());
    Collections.sort(colIndices);
    final int[] nominalColumnIndices = new int[colIndices.size()];
    for (int i = colIndices.size(); i-- > 0;) {
        nominalColumnIndices[i] = colIndices.get(i).intValue();
    }
    histogramData.addIntArray(NOMINAL_COLUMNS, nominalColumnIndices);
    for (Integer colIdx : colIndices) {
        final Object object = histograms.get(colIdx);
        if (!(object instanceof HistogramNominalModel)) {
            // guard against null as well -- the original called object.getClass()
            // here and would NPE instead of reporting the offending index
            throw new IllegalStateException("Illegal argument: " + colIdx + ": "
                + (object == null ? "null" : object.getClass() + "\n " + object));
        }
        final HistogramNominalModel hd = (HistogramNominalModel)object;
        assert hd.getColIndex() == colIdx.intValue() : "colIdx: " + colIdx + ", but: " + hd.getColIndex();
        final Config h = histogramData.addConfig(HISTOGRAM + colIdx);
        h.addInt(COL_INDEX, hd.getColIndex());
        h.addString(COL_NAME, hd.getColName());
        h.addInt(MAX_COUNT, hd.getMaxCount());
        h.addInt(ROW_COUNT, hd.getRowCount());
        final int[] counts = new int[hd.getBins().size()];
        final String[] values = new String[hd.getBins().size()];
        for (int c = 0; c < hd.getBins().size(); c++) {
            final HistogramModel.Bin<DataValue> bin = hd.getBins().get(c);
            // non-string bin definitions are stored as the missing-value marker
            if (bin.getDef() instanceof StringCell) {
                values[c] = ((StringCell)bin.getDef()).getStringValue();
            } else {
                values[c] = "?";
            }
            counts[c] = bin.getCount();
        }
        h.addStringArray(BIN_VALUES, values);
        h.addIntArray(BIN_COUNTS, counts);
    }
    // Open the file only after the config was fully assembled, so a failure
    // above neither leaks the streams nor leaves a truncated file behind.
    // try-with-resources also fixes the original's missing close(): without
    // closing the GZIPOutputStream the GZIP trailer is never written and the
    // resulting file is corrupt.
    try (final FileOutputStream os = new FileOutputStream(histogramsFile);
            final GZIPOutputStream dataOS = new GZIPOutputStream(os)) {
        histogramData.saveToXML(dataOS);
    }
}
Use of org.knime.core.node.config.Config in the knime-core project (by KNIME):
class StringToDurationPeriodNodeModel, method createStreamableOperator.
/**
 * {@inheritDoc}
 *
 * Streaming support: the intermediate pass inspects the first row of the
 * partition to auto-detect the Duration/Period type of every included
 * string column and records the detected types plus the full output
 * column layout ("colname"/"type"/"detected_type" entries and "sizeRow")
 * in the operator internals; the final pass converts all rows using the
 * (merged) detection result.
 */
@Override
public StreamableOperator createStreamableOperator(final PartitionInfo partitionInfo, final PortObjectSpec[] inSpecs) throws InvalidSettingsException {
return new StreamableOperator() {
// per-partition state; exchanged with the merge operator via load/saveInternals
private SimpleStreamableOperatorInternals m_internals = new SimpleStreamableOperatorInternals();
/**
 * {@inheritDoc}
 *
 * Polls a single row to drive type detection, then writes the detected
 * types and the resulting output table layout into the internals config.
 * An empty partition only records {@code sizeRow = 0}.
 */
@Override
public void runIntermediate(final PortInput[] inputs, final ExecutionContext exec) throws Exception {
final RowInput rowInput = (RowInput) inputs[0];
final DataRow row = rowInput.poll();
if (row != null) {
final DataTableSpec inSpec = rowInput.getDataTableSpec();
final int[] includeIndexes = Arrays.stream(m_colSelect.applyTo(inSpec).getIncludes()).mapToInt(s -> inSpec.findColumnIndex(s)).toArray();
final Config config = m_internals.getConfig();
// detect types (re-offers the polled row so it is not lost to detection)
detectTypes(new OneRowAdditionalRowInput(rowInput, row));
for (int i = 0; i < m_detectedTypes.length; i++) {
config.addDataType("detected_type" + i, m_detectedTypes[i]);
}
// write detected types and column names into config
if (m_isReplaceOrAppend.getStringValue().equals(OPTION_REPLACE)) {
// replace mode: layout mirrors the input; included columns get the detected type
for (int i = 0; i < rowInput.getDataTableSpec().getNumColumns(); i++) {
final int searchIdx = Arrays.binarySearch(includeIndexes, i);
config.addString("colname" + i, inSpec.getColumnNames()[i]);
if (searchIdx < 0) {
// column not included -> keep its original type
config.addDataType("type" + i, inSpec.getColumnSpec(i).getType());
} else {
// NOTE(review): the ternary is redundant -- both branches store
// m_detectedTypes[searchIdx] (possibly null; nulls are resolved by the merge operator)
config.addDataType("type" + i, m_detectedTypes[searchIdx] != null ? m_detectedTypes[searchIdx] : null);
}
}
config.addInt("sizeRow", rowInput.getDataTableSpec().getNumColumns());
} else {
// append mode: all original columns first, then one new column per included column
for (int i = 0; i < inSpec.getNumColumns(); i++) {
config.addString("colname" + i, inSpec.getColumnNames()[i]);
config.addDataType("type" + i, inSpec.getColumnSpec(i).getType());
}
for (int i = 0; i < m_detectedTypes.length; i++) {
// appended column name = source column name + configured suffix, made unique against the input spec
config.addString("colname" + (i + inSpec.getNumColumns()), new UniqueNameGenerator(inSpec).newName(inSpec.getColumnSpec(includeIndexes[i]).getName() + m_suffix.getStringValue()));
config.addDataType("type" + (i + inSpec.getNumColumns()), m_detectedTypes[i]);
}
config.addInt("sizeRow", inSpec.getNumColumns() + m_detectedTypes.length);
}
config.addBoolean("needsIteration", false);
} else {
// empty partition: record a zero row size so the merge can recognize it
m_internals.getConfig().addInt("sizeRow", 0);
}
rowInput.close();
}
/**
 * {@inheritDoc}
 */
@Override
public StreamableOperatorInternals saveInternals() {
return m_internals;
}
/**
 * {@inheritDoc}
 */
@Override
public void loadInternals(final StreamableOperatorInternals internals) {
// receives the merged internals produced by the MergeOperator
m_internals = (SimpleStreamableOperatorInternals) internals;
}
// Final pass: convert every row using the detected types from the merged internals.
@Override
public void runFinal(final PortInput[] inputs, final PortOutput[] outputs, final ExecutionContext exec) throws Exception {
final RowInput in = (RowInput) inputs[0];
final RowOutput out = (RowOutput) outputs[0];
final DataTableSpec inSpec = in.getDataTableSpec();
final int[] includeIndexes = Arrays.stream(m_colSelect.applyTo(inSpec).getIncludes()).mapToInt(s -> inSpec.findColumnIndex(s)).toArray();
// read detected types from config
final DataType[] detectedTypes = new DataType[includeIndexes.length];
final Config config = m_internals.getConfig();
for (int i = 0; i < includeIndexes.length; i++) {
detectedTypes[i] = config.getDataType("detected_type" + i, null);
}
// compute every row
DataRow row;
while ((row = in.poll()) != null) {
exec.checkCanceled();
DataCell[] datacells = new DataCell[includeIndexes.length];
for (int i = 0; i < includeIndexes.length; i++) {
if (m_isReplaceOrAppend.getStringValue().equals(OPTION_REPLACE)) {
// replace: converted cell keeps the original column's name
final StringToDurationPeriodCellFactory cellFac = new StringToDurationPeriodCellFactory(new DataColumnSpecCreator(inSpec.getColumnNames()[includeIndexes[i]], detectedTypes[i]).createSpec(), includeIndexes[i]);
datacells[i] = cellFac.getCells(row)[0];
} else {
// append: converted cell goes into a new, uniquely named column
final DataColumnSpec dataColSpec = new UniqueNameGenerator(inSpec).newColumn(inSpec.getColumnNames()[includeIndexes[i]] + m_suffix.getStringValue(), detectedTypes[i]);
final StringToDurationPeriodCellFactory cellFac = new StringToDurationPeriodCellFactory(dataColSpec, includeIndexes[i]);
datacells[i] = cellFac.getCells(row)[0];
}
}
if (m_isReplaceOrAppend.getStringValue().equals(OPTION_REPLACE)) {
out.push(new ReplacedColumnsDataRow(row, datacells, includeIndexes));
} else {
out.push(new AppendedColumnRow(row, datacells));
}
}
in.close();
out.close();
}
};
}
Use of org.knime.core.node.config.Config in the knime-core project (by KNIME):
class StringToDurationPeriodNodeModel, method createMergeOperator.
@Override
public MergeOperator createMergeOperator() {
    return new MergeOperator() {
        /**
         * Merges the per-partition type-detection results into a single
         * internals object: the first non-null "type"/"detected_type" entry
         * per column wins; columns no partition could detect fall back to
         * {@link PeriodCellFactory#TYPE}.
         */
        @Override
        public StreamableOperatorInternals mergeIntermediate(final StreamableOperatorInternals[] operators) {
            final SimpleStreamableOperatorInternals internals = new SimpleStreamableOperatorInternals();
            final Config config = internals.getConfig();
            // Empty partitions report sizeRow == 0, so take the maximum across
            // all partitions. (The original overwrote "sizeRow" on every loop
            // iteration -- last operator wins, so a trailing empty partition
            // clobbered the real size -- while the fall-back loop below read
            // it from operators[0]: two inconsistent sources.)
            int sizeRow = 0;
            for (StreamableOperatorInternals operator : operators) {
                final Config configToMerge = ((SimpleStreamableOperatorInternals)operator).getConfig();
                final int opSizeRow = configToMerge.getInt("sizeRow", -1);
                sizeRow = Math.max(sizeRow, opSizeRow);
                for (int i = 0; i < opSizeRow; i++) {
                    // first partition that detected a type for column i wins
                    if (!config.containsKey("type" + i) && configToMerge.getDataType("type" + i, null) != null) {
                        config.addDataType("type" + i, configToMerge.getDataType("type" + i, null));
                        config.addString("colname" + i, configToMerge.getString("colname" + i, null));
                    }
                    if (!config.containsKey("detected_type" + i) && configToMerge.getDataType("detected_type" + i, null) != null) {
                        config.addDataType("detected_type" + i, configToMerge.getDataType("detected_type" + i, null));
                    }
                }
            }
            config.addInt("sizeRow", sizeRow);
            // if a column's type could not be detected, guess it to be a PeriodCell
            for (int i = 0; i < sizeRow; i++) {
                if (!config.containsKey("type" + i)) {
                    config.addDataType("type" + i, PeriodCellFactory.TYPE);
                    if (!config.containsKey("colname" + i)) {
                        // recover the column name from any partition that saw rows
                        for (StreamableOperatorInternals operator : operators) {
                            final String colname = ((SimpleStreamableOperatorInternals)operator).getConfig().getString("colname" + i, null);
                            if (colname != null) {
                                config.addString("colname" + i, colname);
                                break;
                            }
                        }
                    }
                }
                if (!config.containsKey("detected_type" + i)) {
                    config.addDataType("detected_type" + i, PeriodCellFactory.TYPE);
                }
            }
            return internals;
        }

        /**
         * No final merge needed -- all merging happens in the intermediate step.
         */
        @Override
        public StreamableOperatorInternals mergeFinal(final StreamableOperatorInternals[] operators) {
            return null;
        }
    };
}
Aggregations