Use of org.knime.core.data.DoubleValue in project knime-core by knime.
The class FuzzyClusterNodeModel, method execute().
/**
 * Generates a new clustering based on the input data table and the specified
 * number of clusters. The output table contains the original data rows
 * together with supplementary information about their membership to each
 * cluster center.
 * OUTPORT = original data rows with cluster membership information
 *
 * {@inheritDoc}
 */
@Override
protected PortObject[] execute(final PortObject[] inData, final ExecutionContext exec) throws Exception {
    BufferedDataTable indata = (BufferedDataTable) inData[0];
    m_clusters = null;
    m_betweenClusterVariation = Double.NaN;
    m_withinClusterVariation = null;
    if (m_noise) {
        if (m_calculateDelta) {
            if (m_memory) {
                m_fcmAlgo = new FCMAlgorithmMemory(m_nrClusters, m_fuzzifier, m_calculateDelta, m_lambda);
            } else {
                m_fcmAlgo = new FCMAlgorithm(m_nrClusters, m_fuzzifier, m_calculateDelta, m_lambda);
            }
        } else {
            if (m_memory) {
                m_fcmAlgo = new FCMAlgorithmMemory(m_nrClusters, m_fuzzifier, m_calculateDelta, m_delta);
            } else {
                m_fcmAlgo = new FCMAlgorithm(m_nrClusters, m_fuzzifier, m_calculateDelta, m_delta);
            }
        }
    } else {
        if (m_memory) {
            m_fcmAlgo = new FCMAlgorithmMemory(m_nrClusters, m_fuzzifier);
        } else {
            m_fcmAlgo = new FCMAlgorithm(m_nrClusters, m_fuzzifier);
        }
    }
    int nrRows = indata.getRowCount();
    DataTableSpec spec = indata.getDataTableSpec();
    int nrCols = spec.getNumColumns();
    List<String> learningCols = new LinkedList<String>();
    List<String> ignoreCols = new LinkedList<String>();
    // counter for included columns
    int z = 0;
    final int[] columns = new int[m_list.size()];
    for (int i = 0; i < nrCols; i++) {
        // if include does contain current column name
        String colname = spec.getColumnSpec(i).getName();
        if (m_list.contains(colname)) {
            columns[z] = i;
            z++;
            learningCols.add(colname);
        } else {
            ignoreCols.add(colname);
        }
    }
    ColumnRearranger colre = new ColumnRearranger(spec);
    colre.keepOnly(columns);
    BufferedDataTable filteredtable = exec.createColumnRearrangeTable(indata, colre, exec);
    // get dimension of feature space
    int dimension = filteredtable.getDataTableSpec().getNumColumns();
    Random random = new Random();
    if (m_useRandomSeed) {
        random.setSeed(m_randomSeed);
    }
    m_fcmAlgo.init(nrRows, dimension, filteredtable, random);
    // main loop - until clusters stop changing or maxNrIterations reached
    int currentIteration = 0;
    double totalchange = Double.MAX_VALUE;
    while ((totalchange > 1e-7) && (currentIteration < m_maxNrIterations)) {
        exec.checkCanceled();
        exec.setProgress((double) currentIteration / (double) m_maxNrIterations,
            "Iteration " + currentIteration + " Total change of prototypes: " + totalchange);
        totalchange = m_fcmAlgo.doOneIteration(exec);
        currentIteration++;
    }
    if (m_measures) {
        double[][] data = null;
        if (m_fcmAlgo instanceof FCMAlgorithmMemory) {
            data = ((FCMAlgorithmMemory) m_fcmAlgo).getConvertedData();
        } else {
            data = new double[nrRows][m_fcmAlgo.getDimension()];
            int curRow = 0;
            for (DataRow dRow : filteredtable) {
                for (int j = 0; j < dRow.getNumCells(); j++) {
                    if (!(dRow.getCell(j).isMissing())) {
                        DoubleValue dv = (DoubleValue) dRow.getCell(j);
                        data[curRow][j] = dv.getDoubleValue();
                    } else {
                        data[curRow][j] = 0;
                    }
                }
                curRow++;
            }
        }
        m_fcmmeasures = new FCMQualityMeasures(m_fcmAlgo.getClusterCentres(),
            m_fcmAlgo.getweightMatrix(), data, m_fuzzifier);
    }
    ColumnRearranger colRearranger = new ColumnRearranger(spec);
    CellFactory membershipFac = new ClusterMembershipFactory(m_fcmAlgo);
    colRearranger.append(membershipFac);
    BufferedDataTable result = exec.createColumnRearrangeTable(indata, colRearranger, exec);
    // don't write out the noise cluster!
    double[][] clustercentres = m_fcmAlgo.getClusterCentres();
    if (m_noise) {
        double[][] cleaned = new double[clustercentres.length - 1][];
        for (int i = 0; i < cleaned.length; i++) {
            cleaned[i] = new double[clustercentres[i].length];
            System.arraycopy(clustercentres[i], 0, cleaned[i], 0, clustercentres[i].length);
        }
        clustercentres = cleaned;
    }
    exec.setMessage("Creating PMML cluster model...");
    // handle the optional PMML input
    PMMLPortObject inPMMLPort = m_enablePMMLInput ? (PMMLPortObject) inData[1] : null;
    PMMLPortObjectSpec inPMMLSpec = null;
    if (inPMMLPort != null) {
        inPMMLSpec = inPMMLPort.getSpec();
    }
    PMMLPortObjectSpec pmmlOutSpec = createPMMLPortObjectSpec(inPMMLSpec, spec, learningCols);
    PMMLPortObject outPMMLPort = new PMMLPortObject(pmmlOutSpec, inPMMLPort, spec);
    outPMMLPort.addModelTranslater(new PMMLClusterTranslator(ComparisonMeasure.squaredEuclidean,
        m_nrClusters, clustercentres, null,
        new LinkedHashSet<String>(pmmlOutSpec.getLearningFields())));
    return new PortObject[] { result, outPMMLPort };
}
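The loop above delegates the actual fuzzy c-means update to FCMAlgorithm.doOneIteration() and stops once the total prototype change falls below 1e-7 or m_maxNrIterations is reached. The following standalone sketch shows, under assumed array layouts (data[row][dim], prototypes[cluster][dim], weights[row][cluster]), roughly what one such iteration computes; it is an illustration, not the FCMAlgorithm implementation.

// Minimal sketch of one fuzzy c-means iteration; array layout and class name are
// illustrative assumptions, not the FCMAlgorithm implementation.
public final class FcmIterationSketch {

    /** Updates memberships and prototypes in place and returns the total prototype change. */
    public static double doOneIteration(final double[][] data, final double[][] prototypes,
            final double[][] weights, final double fuzzifier) {
        final int nrRows = data.length;
        final int nrClusters = prototypes.length;
        final int dim = prototypes[0].length;
        // 1) membership update: u_rc = 1 / sum_k (d_rc / d_rk)^(2/(m-1))
        for (int r = 0; r < nrRows; r++) {
            for (int c = 0; c < nrClusters; c++) {
                final double dc = Math.max(distance(data[r], prototypes[c]), 1e-12);
                double sum = 0.0;
                for (int k = 0; k < nrClusters; k++) {
                    final double dk = Math.max(distance(data[r], prototypes[k]), 1e-12);
                    sum += Math.pow(dc / dk, 2.0 / (fuzzifier - 1.0));
                }
                weights[r][c] = 1.0 / sum;
            }
        }
        // 2) prototype update: weighted mean of all rows, weighted by u^m
        double totalChange = 0.0;
        for (int c = 0; c < nrClusters; c++) {
            final double[] newProto = new double[dim];
            double norm = 0.0;
            for (int r = 0; r < nrRows; r++) {
                final double w = Math.pow(weights[r][c], fuzzifier);
                norm += w;
                for (int d = 0; d < dim; d++) {
                    newProto[d] += w * data[r][d];
                }
            }
            for (int d = 0; d < dim; d++) {
                newProto[d] /= Math.max(norm, 1e-12);
                totalChange += Math.abs(newProto[d] - prototypes[c][d]);
                prototypes[c][d] = newProto[d];
            }
        }
        return totalChange;
    }

    private static double distance(final double[] a, final double[] b) {
        double sum = 0.0;
        for (int i = 0; i < a.length; i++) {
            sum += (a[i] - b[i]) * (a[i] - b[i]);
        }
        return Math.sqrt(sum); // Euclidean distance
    }
}

The noise-cluster and delta/lambda options selected in execute() correspond to variants of these update rules; the sketch only covers the plain case.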
Use of org.knime.core.data.DoubleValue in project knime-core by knime.
The class MDSProjectionManager, method preprocFixedDataPoints().
/**
 * Initializes a point in the target space for each of the fixed data points.
 * The given array of indices specifies which columns of the data table
 * containing the fixed points have to be considered (matching the non-fixed
 * points).
 *
 * @param fixedDataMdsIndices The indices of the columns of the data table
 * containing the fixed data points that are to be considered.
 * @throws CanceledExecutionException If the process is canceled.
 */
protected void preprocFixedDataPoints(final int[] fixedDataMdsIndices) throws CanceledExecutionException {
    m_exec.setMessage("Preprocessing fixed data points");
    // sort indices
    Arrays.sort(fixedDataMdsIndices);
    RowIterator it = m_fixedDataPoints.iterator();
    while (it.hasNext()) {
        m_exec.checkCanceled();
        DataRow row = it.next();
        DataPoint p = new DataPoint(m_dimension);
        for (int i = 0; i < m_dimension; i++) {
            final DataCell cell = row.getCell(fixedDataMdsIndices[i]);
            if (!cell.isMissing()) {
                final Double d = ((DoubleValue) cell).getDoubleValue();
                p.setElementAt(i, d);
            }
        }
        m_fixedPoints.put(row.getKey(), p);
    }
}
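The same missing-value-aware extraction of DoubleValue cells recurs in several of these snippets. A small standalone helper capturing the pattern could look as follows; the class name, method name, and the default value used for missing cells are illustrative assumptions, not part of MDSProjectionManager.

import org.knime.core.data.DataCell;
import org.knime.core.data.DataRow;
import org.knime.core.data.DoubleValue;

// Hypothetical helper illustrating the extraction pattern above.
final class DoubleCellReader {

    private DoubleCellReader() {
    }

    /**
     * Reads the cells at the given column indices as doubles.
     * Missing cells are mapped to the supplied default value.
     */
    static double[] readDoubles(final DataRow row, final int[] columnIndices, final double missingDefault) {
        final double[] values = new double[columnIndices.length];
        for (int i = 0; i < columnIndices.length; i++) {
            final DataCell cell = row.getCell(columnIndices[i]);
            // the cast is only safe if the column type is compatible with DoubleValue
            values[i] = cell.isMissing() ? missingDefault : ((DoubleValue) cell).getDoubleValue();
        }
        return values;
    }
}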
Use of org.knime.core.data.DoubleValue in project knime-core by knime.
The class DecisionTreeNodeSplitContinuous, method addCoveredPattern().
/**
 * Adds a pattern, given as a row of values, if it falls within a specific
 * node. This node simply forwards the request to the appropriate child.
 *
 * @param cell the cell to be used for the split at this level
 * @param row input pattern
 * @param spec the corresponding table spec
 * @param weight the weight of the row (between 0.0 and 1.0)
 * @throws Exception if something went wrong (unknown attribute for example)
 */
@Override
public void addCoveredPattern(final DataCell cell, final DataRow row, final DataTableSpec spec, final double weight) throws Exception {
    double value = ((DoubleValue) cell).getDoubleValue();
    if (value <= m_threshold) {
        super.getChildNodeAt(0).addCoveredPattern(row, spec, weight);
    } else {
        super.getChildNodeAt(1).addCoveredPattern(row, spec, weight);
    }
    Color col = spec.getRowColor(row).getColor();
    addColorToMap(col, weight);
}
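Both this method and the addCoveredColor method shown next route a row to child 0 or child 1 by comparing the split attribute's double value against m_threshold. A hedged, standalone sketch of that routing decision (the names are illustrative, not the DecisionTreeNodeSplitContinuous API):

// Illustrative sketch of a "value <= threshold" split decision.
final class ContinuousSplitSketch {

    private final double m_threshold;

    ContinuousSplitSketch(final double threshold) {
        m_threshold = threshold;
    }

    /** Returns 0 for the "value <= threshold" branch and 1 for the "value > threshold" branch. */
    int childIndexFor(final double value) {
        return value <= m_threshold ? 0 : 1;
    }
}

Calling childIndexFor(((DoubleValue) cell).getDoubleValue()) would reproduce the branch chosen above.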
Use of org.knime.core.data.DoubleValue in project knime-core by knime.
The class DecisionTreeNodeSplitContinuous, method addCoveredColor().
/**
 * Adds the color for a pattern given as a row of values. The request is
 * forwarded to the appropriate child, and the row's color is also recorded
 * at this node.
 *
 * @param cell the cell to be used for the split at this level
 * @param row input pattern
 * @param spec the corresponding table spec
 * @param weight the weight of the row (between 0.0 and 1.0)
 * @throws Exception if something went wrong (unknown attribute for example)
 */
@Override
public void addCoveredColor(final DataCell cell, final DataRow row, final DataTableSpec spec, final double weight) throws Exception {
    double value = ((DoubleValue) cell).getDoubleValue();
    if (value <= m_threshold) {
        super.getChildNodeAt(0).addCoveredColor(row, spec, weight);
    } else {
        super.getChildNodeAt(1).addCoveredColor(row, spec, weight);
    }
    Color col = spec.getRowColor(row).getColor();
    addColorToMap(col, weight);
}
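Both methods also record the row's color together with its weight via addColorToMap. Assuming that call accumulates weights per color, a minimal sketch could look like this; the map layout is an assumption, not the actual implementation of addColorToMap.

import java.awt.Color;
import java.util.HashMap;
import java.util.Map;

// Assumed behaviour of a weight-accumulating color map; the real addColorToMap may differ.
final class ColorWeightMap {

    private final Map<Color, Double> m_coverage = new HashMap<Color, Double>();

    /** Adds the given weight to the total recorded for this color. */
    void addColorToMap(final Color col, final double weight) {
        final Double current = m_coverage.get(col);
        m_coverage.put(col, current == null ? weight : current + weight);
    }

    /** Returns the accumulated weight for this color, or 0 if it was never added. */
    double coverageOf(final Color col) {
        final Double value = m_coverage.get(col);
        return value == null ? 0.0 : value;
    }
}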
Use of org.knime.core.data.DoubleValue in project knime-core by knime.
The class RPropNodeModel, method configure().
/**
 * Validates the settings and creates the PMML output spec.
 *
 * {@inheritDoc}
 */
@Override
protected PortObjectSpec[] configure(final PortObjectSpec[] inSpecs) throws InvalidSettingsException {
    if (m_classcol.getStringValue() != null) {
        List<String> learningCols = new LinkedList<String>();
        List<String> targetCols = new LinkedList<String>();
        boolean classcolinspec = false;
        for (DataColumnSpec colspec : (DataTableSpec) inSpecs[INDATA]) {
            if (!(colspec.getName().toString().compareTo(m_classcol.getStringValue()) == 0)) {
                if (!colspec.getType().isCompatible(DoubleValue.class)) {
                    throw new InvalidSettingsException("Only double columns for input");
                } else {
                    learningCols.add(colspec.getName());
                    DataColumnDomain domain = colspec.getDomain();
                    if (domain.hasBounds()) {
                        double lower = ((DoubleValue) domain.getLowerBound()).getDoubleValue();
                        double upper = ((DoubleValue) domain.getUpperBound()).getDoubleValue();
                        if (lower < 0 || upper > 1) {
                            setWarningMessage("Input data not normalized."
                                + " Please consider using the " + "Normalizer Node first.");
                        }
                    }
                }
            } else {
                targetCols.add(colspec.getName());
                classcolinspec = true;
                // TODO: Check what happens to other values than double
                if (colspec.getType().isCompatible(DoubleValue.class)) {
                    // check if the values are in range [0,1]
                    DataColumnDomain domain = colspec.getDomain();
                    if (domain.hasBounds()) {
                        double lower = ((DoubleValue) domain.getLowerBound()).getDoubleValue();
                        double upper = ((DoubleValue) domain.getUpperBound()).getDoubleValue();
                        if (lower < 0 || upper > 1) {
                            throw new InvalidSettingsException("Domain range for regression in column "
                                + colspec.getName() + " not in range [0,1]");
                        }
                    }
                }
            }
        }
        if (!classcolinspec) {
            throw new InvalidSettingsException("Class column " + m_classcol.getStringValue()
                + " not found in DataTableSpec");
        }
        return new PortObjectSpec[] { createPMMLPortObjectSpec(
            m_pmmlInEnabled ? (PMMLPortObjectSpec) inSpecs[1] : null,
            (DataTableSpec) inSpecs[0], learningCols, targetCols) };
    } else {
        throw new InvalidSettingsException("Class column not set");
    }
}
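The configure method relies on DataType.isCompatible(DoubleValue.class) to test that a column is numeric and on the column's domain bounds to test whether the data look normalized. A condensed sketch of those two checks, pulled out into a hypothetical helper (the class and method names are illustrative, not part of RPropNodeModel):

import org.knime.core.data.DataColumnDomain;
import org.knime.core.data.DataColumnSpec;
import org.knime.core.data.DoubleValue;
import org.knime.core.node.InvalidSettingsException;

// Illustrative helper mirroring the checks above.
final class NumericColumnChecks {

    private NumericColumnChecks() {
    }

    /** Throws if the column is not numeric, i.e. not compatible with DoubleValue. */
    static void requireDoubleCompatible(final DataColumnSpec colspec) throws InvalidSettingsException {
        if (!colspec.getType().isCompatible(DoubleValue.class)) {
            throw new InvalidSettingsException("Column '" + colspec.getName() + "' is not a double column");
        }
    }

    /** Returns true if the column has domain bounds and they lie outside [0, 1]. */
    static boolean boundsOutsideUnitInterval(final DataColumnSpec colspec) {
        final DataColumnDomain domain = colspec.getDomain();
        if (!domain.hasBounds()) {
            return false;
        }
        final double lower = ((DoubleValue) domain.getLowerBound()).getDoubleValue();
        final double upper = ((DoubleValue) domain.getUpperBound()).getDoubleValue();
        return lower < 0 || upper > 1;
    }
}

In configure() above, out-of-range bounds only trigger a warning for learning columns but an InvalidSettingsException for the regression target column.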