Use of org.apache.commons.math.random.RandomData in project knime-core by knime.
From class TreeNumericColumnDataTest, method testCalcBestSplitClassification:
@Test
public void testCalcBestSplitClassification() throws Exception {
TreeEnsembleLearnerConfiguration config = createConfig();
/* Data from J. Fuernkranz, TU Darmstadt:
 * http://www.ke.tu-darmstadt.de/lehre/archiv/ws0809/mldm/dt.pdf */
final double[] data = asDataArray("60,70,75,85, 90, 95, 100,120,125,220");
final String[] target = asStringArray("No,No,No,Yes,Yes,Yes,No, No, No, No");
Pair<TreeOrdinaryNumericColumnData, TreeTargetNominalColumnData> exampleData = exampleData(config, data, target);
RandomData rd = config.createRandomData();
TreeNumericColumnData columnData = exampleData.getFirst();
TreeTargetNominalColumnData targetData = exampleData.getSecond();
assertEquals(SplitCriterion.Gini, config.getSplitCriterion());
double[] rowWeights = new double[data.length];
Arrays.fill(rowWeights, 1.0);
TreeData treeData = createTreeDataClassification(exampleData);
IDataIndexManager indexManager = new DefaultDataIndexManager(treeData);
DataMemberships dataMemberships = new RootDataMemberships(rowWeights, treeData, indexManager);
ClassificationPriors priors = targetData.getDistribution(rowWeights, config);
SplitCandidate splitCandidate = columnData.calcBestSplitClassification(dataMemberships, priors, targetData, rd);
assertNotNull(splitCandidate);
assertThat(splitCandidate, instanceOf(NumericSplitCandidate.class));
assertTrue(splitCandidate.canColumnBeSplitFurther());
// gain computed externally in LibreOffice Calc: 0.42 - 0.30 = 0.12
assertEquals(0.12, splitCandidate.getGainValue(), 0.00001);
NumericSplitCandidate numSplitCandidate = (NumericSplitCandidate) splitCandidate;
TreeNodeNumericCondition[] childConditions = numSplitCandidate.getChildConditions();
assertEquals(2, childConditions.length);
assertEquals((95.0 + 100.0) / 2.0, childConditions[0].getSplitValue(), 0.0);
assertEquals((95.0 + 100.0) / 2.0, childConditions[1].getSplitValue(), 0.0);
assertEquals(NumericOperator.LessThanOrEqual, childConditions[0].getNumericOperator());
assertEquals(NumericOperator.LargerThan, childConditions[1].getNumericOperator());
double[] childRowWeights = new double[data.length];
System.arraycopy(rowWeights, 0, childRowWeights, 0, rowWeights.length);
BitSet inChild = columnData.updateChildMemberships(childConditions[0], dataMemberships);
DataMemberships childMemberships = dataMemberships.createChildMemberships(inChild);
ClassificationPriors childTargetPriors = targetData.getDistribution(childMemberships, config);
SplitCandidate splitCandidateChild = columnData.calcBestSplitClassification(childMemberships, childTargetPriors, targetData, rd);
assertNotNull(splitCandidateChild);
assertThat(splitCandidateChild, instanceOf(NumericSplitCandidate.class));
// gain verified manually in LibreOffice Calc: 0.5 - 0.0 = 0.5
assertEquals(0.5, splitCandidateChild.getGainValue(), 0.00001);
TreeNodeNumericCondition[] childConditions2 = ((NumericSplitCandidate) splitCandidateChild).getChildConditions();
assertEquals(2, childConditions2.length);
assertEquals((75.0 + 85.0) / 2.0, childConditions2[0].getSplitValue(), 0.0);
System.arraycopy(rowWeights, 0, childRowWeights, 0, rowWeights.length);
inChild = columnData.updateChildMemberships(childConditions[1], dataMemberships);
childMemberships = dataMemberships.createChildMemberships(inChild);
childTargetPriors = targetData.getDistribution(childMemberships, config);
splitCandidateChild = columnData.calcBestSplitClassification(childMemberships, childTargetPriors, targetData, rd);
assertNull(splitCandidateChild);
}
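For reference, the asserted gain values can be reproduced without a spreadsheet. The following standalone sketch (plain Java, independent of the KNIME API; the class and helper names are made up for illustration) computes the Gini impurity of the parent and of the two children induced by the split at (95+100)/2 = 97.5, yielding the gain 0.42 - 0.30 = 0.12 asserted above.
import java.util.Arrays;

public class GiniGainSketch {

    // Gini impurity of a label sample: 1 minus the sum of squared class probabilities
    static double gini(String[] labels) {
        final double n = labels.length;
        return 1.0 - Arrays.stream(labels).distinct()
                .mapToDouble(c -> {
                    double p = Arrays.stream(labels).filter(c::equals).count() / n;
                    return p * p;
                }).sum();
    }

    public static void main(String[] args) {
        // targets for the sorted values 60,70,75,85,90,95,100,120,125,220
        String[] parent = {"No", "No", "No", "Yes", "Yes", "Yes", "No", "No", "No", "No"};
        String[] left = Arrays.copyOfRange(parent, 0, 6);   // rows with value <= 97.5
        String[] right = Arrays.copyOfRange(parent, 6, 10); // rows with value > 97.5
        double weighted = (left.length * gini(left) + right.length * gini(right)) / parent.length;
        // parent impurity 0.42, weighted child impurity 0.30 -> gain ~0.12
        System.out.println(gini(parent) - weighted);
    }
}
The second assertion (gain 0.5) follows the same way: the left child {No, No, No, Yes, Yes, Yes} has impurity 0.5, and the split at (75+85)/2 = 80 separates it into two pure halves, so the full impurity becomes gain.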
Use of org.apache.commons.math.random.RandomData in project knime-core by knime.
From class TreeNumericColumnDataTest, method testCalcBestSplitClassificationSplitAtEnd:
/**
 * Tests that a split at the last possible split position is found even if the target does not change at that
 * position; see the example data in the method body.
 * @throws Exception
 */
@Test
public void testCalcBestSplitClassificationSplitAtEnd() throws Exception {
// Index: 1 2 3 4 5 6 7 8
// Value: 1 1|2 2 2|3 3 3
// Target: A A|A A A|A A B
double[] data = asDataArray("1,1,2,2,2,3,3,3");
String[] target = asStringArray("A,A,A,A,A,A,A,B");
TreeEnsembleLearnerConfiguration config = createConfig();
RandomData rd = config.createRandomData();
Pair<TreeOrdinaryNumericColumnData, TreeTargetNominalColumnData> exampleData = exampleData(config, data, target);
TreeNumericColumnData columnData = exampleData.getFirst();
TreeTargetNominalColumnData targetData = exampleData.getSecond();
double[] rowWeights = new double[data.length];
Arrays.fill(rowWeights, 1.0);
TreeData treeData = createTreeDataClassification(exampleData);
IDataIndexManager indexManager = new DefaultDataIndexManager(treeData);
DataMemberships dataMemberships = new RootDataMemberships(rowWeights, treeData, indexManager);
ClassificationPriors priors = targetData.getDistribution(rowWeights, config);
SplitCandidate splitCandidate = columnData.calcBestSplitClassification(dataMemberships, priors, targetData, rd);
assertNotNull(splitCandidate);
assertThat(splitCandidate, instanceOf(NumericSplitCandidate.class));
assertTrue(splitCandidate.canColumnBeSplitFurther());
// gain computed manually: 0.21875 - 0.166666667 = 0.05208
assertEquals(0.05208, splitCandidate.getGainValue(), 0.001);
NumericSplitCandidate numSplitCandidate = (NumericSplitCandidate) splitCandidate;
TreeNodeNumericCondition[] childConditions = numSplitCandidate.getChildConditions();
assertEquals(2, childConditions.length);
assertEquals((2.0 + 3.0) / 2.0, childConditions[0].getSplitValue(), 0.0);
assertEquals(NumericOperator.LessThanOrEqual, childConditions[0].getNumericOperator());
double[] childRowWeights = new double[data.length];
System.arraycopy(rowWeights, 0, childRowWeights, 0, rowWeights.length);
BitSet inChild = columnData.updateChildMemberships(childConditions[0], dataMemberships);
DataMemberships childMemberships = dataMemberships.createChildMemberships(inChild);
ClassificationPriors childTargetPriors = targetData.getDistribution(childMemberships, config);
SplitCandidate splitCandidateChild = columnData.calcBestSplitClassification(childMemberships, childTargetPriors, targetData, rd);
assertNull(splitCandidateChild);
System.arraycopy(rowWeights, 0, childRowWeights, 0, rowWeights.length);
inChild = columnData.updateChildMemberships(childConditions[1], dataMemberships);
childMemberships = dataMemberships.createChildMemberships(inChild);
childTargetPriors = targetData.getDistribution(childMemberships, config);
splitCandidateChild = columnData.calcBestSplitClassification(childMemberships, childTargetPriors, targetData, null);
assertNull(splitCandidateChild);
}
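The asserted numbers follow the same Gini arithmetic as in the sketch above: the parent {7 A, 1 B} has impurity 1 - (7/8)^2 - (1/8)^2 = 0.21875; the split at (2+3)/2 = 2.5 yields a pure left child (five A rows) and a right child {2 A, 1 B} with impurity 4/9, weighted (3/8)*(4/9) = 0.16667, so the gain is 0.21875 - 0.16667 = 0.05208 even though the target value does not change at the split boundary itself.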
Use of org.apache.commons.math.random.RandomData in project knime-core by knime.
From class TreeNumericColumnDataTest, method testCalcBestSplitClassificationMissingValStrategy1:
/**
* This test is outdated and will likely be removed soon.
*
* @throws Exception
*/
// @Test
public void testCalcBestSplitClassificationMissingValStrategy1() throws Exception {
TreeEnsembleLearnerConfiguration config = createConfig();
final double[] data = asDataArray("1, 2, 3, 4, 5, 6, 7, NaN, NaN, NaN");
final String[] target = asStringArray("Y, Y, Y, Y, N, N, N, Y, Y, Y");
Pair<TreeOrdinaryNumericColumnData, TreeTargetNominalColumnData> exampleData = exampleData(config, data, target);
double[] rowWeights = new double[data.length];
Arrays.fill(rowWeights, 1.0);
RandomData rd = config.createRandomData();
TreeNumericColumnData columnData = exampleData.getFirst();
TreeTargetNominalColumnData targetData = exampleData.getSecond();
TreeData treeData = createTreeDataClassification(exampleData);
IDataIndexManager indexManager = new DefaultDataIndexManager(treeData);
DataMemberships dataMemberships = new RootDataMemberships(rowWeights, treeData, indexManager);
ClassificationPriors priors = targetData.getDistribution(rowWeights, config);
SplitCandidate splitCandidate = columnData.calcBestSplitClassification(dataMemberships, priors, targetData, rd);
assertNotNull(splitCandidate);
assertThat(splitCandidate, instanceOf(NumericMissingSplitCandidate.class));
assertTrue(splitCandidate.canColumnBeSplitFurther());
assertEquals(0.42, splitCandidate.getGainValue(), 0.0001);
TreeNodeNumericCondition[] childConditions = ((NumericMissingSplitCandidate) splitCandidate).getChildConditions();
assertEquals(2, childConditions.length);
assertEquals(NumericOperator.LessThanOrEqualOrMissing, childConditions[0].getNumericOperator());
assertEquals(NumericOperator.LargerThan, childConditions[1].getNumericOperator());
assertEquals(4.5, childConditions[0].getSplitValue(), 0.0);
}
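The asserted gain of 0.42 can be read off directly: the parent distribution {7 Y, 3 N} has Gini impurity 1 - 0.7^2 - 0.3^2 = 0.42, and the LessThanOrEqualOrMissing condition at 4.5 routes the rows with values 1 through 4 (all Y) together with the three NaN rows (all Y) into one child and the rows 5, 6, 7 (all N) into the other. Both children are pure, so the gain equals the entire parent impurity.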
Use of org.apache.commons.math.random.RandomData in project knime-core by knime.
From class TreeEnsembleLearner, method learnEnsemble:
public TreeEnsembleModel learnEnsemble(final ExecutionMonitor exec) throws CanceledExecutionException, ExecutionException {
final int nrModels = m_config.getNrModels();
final RandomData rd = m_config.createRandomData();
final ThreadPool tp = KNIMEConstants.GLOBAL_THREAD_POOL;
final AtomicReference<Throwable> learnThrowableRef = new AtomicReference<Throwable>();
@SuppressWarnings("unchecked") final Future<TreeLearnerResult>[] modelFutures = new Future[nrModels];
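// throttle submissions to roughly 1.5x the available processors' worth of concurrently learning trees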
final int procCount = 3 * Runtime.getRuntime().availableProcessors() / 2;
final Semaphore semaphore = new Semaphore(procCount);
Callable<TreeLearnerResult[]> learnCallable = new Callable<TreeLearnerResult[]>() {
@Override
public TreeLearnerResult[] call() throws Exception {
final TreeLearnerResult[] results = new TreeLearnerResult[nrModels];
for (int i = 0; i < nrModels; i++) {
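// block until a learner slot is free; each TreeLearnerCallable is handed the semaphore and releases its permit on completion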
semaphore.acquire();
finishedTree(i - procCount, exec);
checkThrowable(learnThrowableRef);
RandomData rdSingle = TreeEnsembleLearnerConfiguration.createRandomData(rd.nextLong(Long.MIN_VALUE, Long.MAX_VALUE));
ExecutionMonitor subExec = exec.createSubProgress(0.0);
modelFutures[i] = tp.enqueue(new TreeLearnerCallable(subExec, rdSingle, learnThrowableRef, semaphore));
}
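// re-acquire the remaining permits: each acquisition signals that one of the last in-flight learners has finished, so its progress can be reported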
for (int i = 0; i < procCount; i++) {
semaphore.acquire();
finishedTree(nrModels - 1 + i - procCount, exec);
}
for (int i = 0; i < nrModels; i++) {
try {
results[i] = modelFutures[i].get();
} catch (Exception e) {
learnThrowableRef.compareAndSet(null, e);
}
}
return results;
}
private void finishedTree(final int treeIndex, final ExecutionMonitor progMon) {
if (treeIndex > 0) {
progMon.setProgress(treeIndex / (double) nrModels, "Tree " + treeIndex + "/" + nrModels);
}
}
};
TreeLearnerResult[] modelResults = tp.runInvisible(learnCallable);
checkThrowable(learnThrowableRef);
AbstractTreeModel[] models = new AbstractTreeModel[nrModels];
m_rowSamples = new RowSample[nrModels];
m_columnSampleStrategies = new ColumnSampleStrategy[nrModels];
for (int i = 0; i < nrModels; i++) {
models[i] = modelResults[i].m_treeModel;
m_rowSamples[i] = modelResults[i].m_rowSample;
m_columnSampleStrategies[i] = modelResults[i].m_rootColumnSampleStrategy;
}
m_ensembleModel = new TreeEnsembleModel(m_config, m_data.getMetaData(), models, m_data.getTreeType());
return m_ensembleModel;
}
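The enqueue loop above combines two reusable patterns: a Semaphore that bounds how many tree learners run at once, and a per-tree RandomData derived from the master instance via rd.nextLong(...), so each tree gets an independent, reproducible random stream. Below is a minimal standalone sketch of the same pattern; it substitutes java.util.Random for commons-math RandomData and uses an artificial task body, so names like ThrottledSubmitSketch are illustrative only.
import java.util.Random;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Semaphore;

public class ThrottledSubmitSketch {
    public static void main(String[] args) throws InterruptedException {
        final int nrTasks = 20;
        // same bound as learnEnsemble: ~1.5x the number of available processors
        final int permits = 3 * Runtime.getRuntime().availableProcessors() / 2;
        final Semaphore semaphore = new Semaphore(permits);
        final Random master = new Random(42L); // master RNG seeding one child RNG per task
        final ExecutorService pool = Executors.newCachedThreadPool();
        for (int i = 0; i < nrTasks; i++) {
            semaphore.acquire(); // blocks while 'permits' tasks are still in flight
            final long childSeed = master.nextLong();
            final int taskIndex = i;
            pool.submit(() -> {
                try {
                    Random childRng = new Random(childSeed); // reproducible per-task stream
                    System.out.println("task " + taskIndex + " drew " + childRng.nextInt(100));
                } finally {
                    semaphore.release(); // free the slot for the next submission
                }
            });
        }
        // once all permits are re-acquired, no task can still be running
        semaphore.acquire(permits);
        pool.shutdown();
    }
}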
Use of org.apache.commons.math.random.RandomData in project knime-core by knime.
From class RegressionTreeLearnerNodeModel, method execute:
// /**
// * @param ensembleSpec
// * @param ensembleModel
// * @param inSpec
// * @return
// * @throws InvalidSettingsException
// */
// private TreeEnsemblePredictor createOutOfBagPredictor(final TreeEnsembleModelPortObjectSpec ensembleSpec,
// final TreeEnsembleModelPortObject ensembleModel, final DataTableSpec inSpec) throws InvalidSettingsException {
// TreeEnsemblePredictorConfiguration ooBConfig = new TreeEnsemblePredictorConfiguration(true);
// String targetColumn = m_configuration.getTargetColumn();
// String append = targetColumn + " (Out-of-bag)";
// ooBConfig.setPredictionColumnName(append);
// ooBConfig.setAppendPredictionConfidence(true);
// ooBConfig.setAppendClassConfidences(true);
// ooBConfig.setAppendModelCount(true);
// return new TreeEnsemblePredictor(ensembleSpec, ensembleModel, inSpec, ooBConfig);
// }
/**
* {@inheritDoc}
*/
@Override
protected PortObject[] execute(final PortObject[] inObjects, final ExecutionContext exec) throws Exception {
BufferedDataTable t = (BufferedDataTable) inObjects[0];
DataTableSpec spec = t.getDataTableSpec();
final FilterLearnColumnRearranger learnRearranger = m_configuration.filterLearnColumns(spec);
String warn = learnRearranger.getWarning();
BufferedDataTable learnTable = exec.createColumnRearrangeTable(t, learnRearranger, exec.createSubProgress(0.0));
DataTableSpec learnSpec = learnTable.getDataTableSpec();
TreeEnsembleModelPortObjectSpec ensembleSpec = m_configuration.createPortObjectSpec(learnSpec);
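// overall progress is split 10% / 90% between reading the table into memory and learning the tree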
ExecutionMonitor readInExec = exec.createSubProgress(0.1);
ExecutionMonitor learnExec = exec.createSubProgress(0.9);
TreeDataCreator dataCreator = new TreeDataCreator(m_configuration, learnSpec, learnTable.getRowCount());
exec.setProgress("Reading data into memory");
TreeData data = dataCreator.readData(learnTable, m_configuration, readInExec);
m_hiliteRowSample = dataCreator.getDataRowsForHilite();
m_viewMessage = dataCreator.getViewMessage();
String dataCreationWarning = dataCreator.getAndClearWarningMessage();
if (dataCreationWarning != null) {
if (warn == null) {
warn = dataCreationWarning;
} else {
warn = warn + "\n" + dataCreationWarning;
}
}
readInExec.setProgress(1.0);
exec.setMessage("Learning tree");
// TreeEnsembleLearner learner = new TreeEnsembleLearner(m_configuration, data);
// TreeEnsembleModel model;
// try {
// model = learner.learnEnsemble(learnExec);
// } catch (ExecutionException e) {
// Throwable cause = e.getCause();
// if (cause instanceof Exception) {
// throw (Exception)cause;
// }
// throw e;
// }
RandomData rd = m_configuration.createRandomData();
TreeLearnerRegression treeLearner = new TreeLearnerRegression(m_configuration, data, rd);
TreeModelRegression regTree = treeLearner.learnSingleTree(learnExec, rd);
RegressionTreeModel model = new RegressionTreeModel(m_configuration, data.getMetaData(), regTree, data.getTreeType());
RegressionTreeModelPortObjectSpec treePortObjectSpec = new RegressionTreeModelPortObjectSpec(learnSpec);
RegressionTreeModelPortObject treePortObject = new RegressionTreeModelPortObject(model, treePortObjectSpec);
learnExec.setProgress(1.0);
m_treeModelPortObject = treePortObject;
if (warn != null) {
setWarningMessage(warn);
}
return new PortObject[] { treePortObject };
}