Search in sources :

Example 6 with RandomData

use of org.apache.commons.math.random.RandomData in project knime-core by knime.

The class LKGradientBoostedTreesLearner, method learn.

/**
 * {@inheritDoc}
 *
 * <p>Learns a multi-class gradient boosted trees model by transforming the
 * k-class classification problem into k regression problems (softmax
 * pseudo-residuals, one per class) and fitting one regression tree per class
 * and per boosting iteration. Tree learning within an iteration is
 * parallelized over the classes via the global KNIME thread pool.</p>
 *
 * @throws ExecutionException if one of the parallel tree learner tasks fails
 * @throws InterruptedException if the calling thread is interrupted while waiting
 */
@Override
public MultiClassGradientBoostedTreesModel learn(final ExecutionMonitor exec) throws CanceledExecutionException, InterruptedException, ExecutionException {
    final TreeData data = getData();
    final TreeTargetNominalColumnData target = (TreeTargetNominalColumnData) data.getTargetColumn();
    final NominalValueRepresentation[] classNomVals = target.getMetaData().getValues();
    final int numClasses = classNomVals.length;
    final String[] classLabels = new String[numClasses];
    final int nrModels = getConfig().getNrModels();
    final int nrRows = target.getNrRows();
    // models[i][j] is the tree learned in boosting iteration i for class j
    final TreeModelRegression[][] models = new TreeModelRegression[nrModels][numClasses];
    final ArrayList<ArrayList<Map<TreeNodeSignature, Double>>> coefficientMaps = new ArrayList<ArrayList<Map<TreeNodeSignature, Double>>>(nrModels);
    // variables for parallelization
    final ThreadPool tp = KNIMEConstants.GLOBAL_THREAD_POOL;
    final AtomicReference<Throwable> learnThrowableRef = new AtomicReference<Throwable>();
    // permit count oversubscribes the cores by 1.5x — presumably to keep the pool busy
    // while tasks block; TODO confirm this matches the other ensemble learners
    final int procCount = 3 * Runtime.getRuntime().availableProcessors() / 2;
    exec.setMessage("Transforming problem");
    // transform the original k class classification problem into k regression problems
    final TreeData[] actual = new TreeData[numClasses];
    for (int i = 0; i < numClasses; i++) {
        final double[] newTarget = calculateNewTarget(target, i);
        actual[i] = createNumericDataFromArray(newTarget);
        classLabels[i] = classNomVals[i].getNominalValue();
    }
    final RandomData rd = getConfig().createRandomData();
    // previousFunctions[j][r] accumulates the additive model value F_j(row r)
    final double[][] previousFunctions = new double[numClasses][nrRows];
    TreeNodeSignatureFactory signatureFactory = null;
    final int maxLevels = getConfig().getMaxLevels();
    if (maxLevels < TreeEnsembleLearnerConfiguration.MAX_LEVEL_INFINITE) {
        // a tree of bounded depth has at most 2^(maxLevels - 1) node signatures; presize the factory
        int capacity = IntMath.pow(2, maxLevels - 1);
        signatureFactory = new TreeNodeSignatureFactory(capacity);
    } else {
        signatureFactory = new TreeNodeSignatureFactory();
    }
    exec.setMessage("Learn trees");
    for (int i = 0; i < nrModels; i++) {
        final Semaphore semaphore = new Semaphore(procCount);
        final ArrayList<Map<TreeNodeSignature, Double>> classCoefficientMaps = new ArrayList<Map<TreeNodeSignature, Double>>(numClasses);
        // prepare calculation of pseudoResiduals: softmax over the current function values
        final double[][] probs = new double[numClasses][nrRows];
        for (int r = 0; r < nrRows; r++) {
            double sumExpF = 0;
            for (int j = 0; j < numClasses; j++) {
                sumExpF += Math.exp(previousFunctions[j][r]);
            }
            for (int j = 0; j < numClasses; j++) {
                probs[j][r] = Math.exp(previousFunctions[j][r]) / sumExpF;
            }
        }
        // learn one tree per class in parallel, throttled by the semaphore
        final Future<?>[] treeCoefficientMapPairs = new Future<?>[numClasses];
        for (int j = 0; j < numClasses; j++) {
            checkThrowable(learnThrowableRef);
            // derive an independent random stream per task so results are reproducible per seed
            final RandomData rdSingle = TreeEnsembleLearnerConfiguration.createRandomData(rd.nextLong(Long.MIN_VALUE, Long.MAX_VALUE));
            final ExecutionMonitor subExec = exec.createSubProgress(0.0);
            semaphore.acquire();
            treeCoefficientMapPairs[j] = tp.enqueue(new TreeLearnerCallable(rdSingle, probs[j], actual[j], subExec, numClasses, previousFunctions[j], semaphore, learnThrowableRef, signatureFactory));
        }
        // collect results in class order
        for (int j = 0; j < numClasses; j++) {
            checkThrowable(learnThrowableRef);
            // NOTE(review): the acquire/release bracket around get() mirrors the submission
            // throttle above — presumably to hold a permit while blocking; confirm intent
            semaphore.acquire();
            @SuppressWarnings("unchecked")
            final Pair<TreeModelRegression, Map<TreeNodeSignature, Double>> pair = (Pair<TreeModelRegression, Map<TreeNodeSignature, Double>>) treeCoefficientMapPairs[j].get();
            models[i][j] = pair.getFirst();
            classCoefficientMaps.add(pair.getSecond());
            semaphore.release();
        }
        checkThrowable(learnThrowableRef);
        coefficientMaps.add(classCoefficientMaps);
        // i is 0-based: after this iteration, i + 1 of nrModels levels are finished
        // (the original reported i / nrModels, under-reporting progress by one level)
        exec.setProgress((i + 1.0) / nrModels, "Finished level " + (i + 1) + "/" + nrModels);
    }
    return MultiClassGradientBoostedTreesModel.createMultiClassGradientBoostedTreesModel(getConfig(), data.getMetaData(), models, data.getTreeType(), 0, numClasses, coefficientMaps, classLabels);
}
Also used : RandomData(org.apache.commons.math.random.RandomData) ArrayList(java.util.ArrayList) ThreadPool(org.knime.core.util.ThreadPool) NominalValueRepresentation(org.knime.base.node.mine.treeensemble2.data.NominalValueRepresentation) Semaphore(java.util.concurrent.Semaphore) TreeNodeSignature(org.knime.base.node.mine.treeensemble2.model.TreeNodeSignature) TreeModelRegression(org.knime.base.node.mine.treeensemble2.model.TreeModelRegression) ExecutionMonitor(org.knime.core.node.ExecutionMonitor) TreeNodeSignatureFactory(org.knime.base.node.mine.treeensemble2.learner.TreeNodeSignatureFactory) Pair(org.knime.core.util.Pair) AtomicReference(java.util.concurrent.atomic.AtomicReference) Future(java.util.concurrent.Future) TreeData(org.knime.base.node.mine.treeensemble2.data.TreeData) HashMap(java.util.HashMap) Map(java.util.Map) TreeTargetNominalColumnData(org.knime.base.node.mine.treeensemble2.data.TreeTargetNominalColumnData)

Example 7 with RandomData

use of org.apache.commons.math.random.RandomData in project knime-core by knime.

The class TreeNominalColumnDataTest, method testCalcBestSplitRegressionBinaryXGBoostMissingValueHandling.

/**
 * Tests the XGBoost missing value handling in case of a regression with binary splits.
 *
 * @throws Exception if the test setup or the split calculation fails
 */
@Test
public void testCalcBestSplitRegressionBinaryXGBoostMissingValueHandling() throws Exception {
    final TreeEnsembleLearnerConfiguration config = createConfig(true);
    config.setMissingValueHandling(MissingValueHandling.XGBoost);
    final TestDataGenerator gen = new TestDataGenerator(config);
    // Case 1: no missing values during training — a missing value direction
    // must nevertheless be chosen for prediction time.
    final String noMissingCSV = "A, A, A, B, B, B, B, C, C";
    final String noMissingsTarget = "1, 2, 2, 7, 6, 5, 2, 3, 1";
    TreeNominalColumnData column = gen.createNominalAttributeColumn(noMissingCSV, "noMissings", 0);
    TreeTargetNumericColumnData regressionTarget = TestDataGenerator.createNumericTargetColumn(noMissingsTarget);
    double[] rowWeights = new double[9];
    Arrays.fill(rowWeights, 1.0);
    int[] rowIndices = new int[9];
    Arrays.setAll(rowIndices, idx -> idx);
    final RandomData random = config.createRandomData();
    DataMemberships memberships = new MockDataColMem(rowIndices, rowIndices, rowWeights);
    SplitCandidate candidate = column.calcBestSplitRegression(memberships, regressionTarget.getPriors(rowWeights, config), regressionTarget, random);
    assertNotNull("SplitCandidate may not be null", candidate);
    assertThat(candidate, instanceOf(NominalBinarySplitCandidate.class));
    assertEquals("Wrong gain.", 22.755555, candidate.getGainValue(), 1e-5);
    assertTrue("No missing values in dataCol therefore the missedRows BitSet must be empty.", candidate.getMissedRows().isEmpty());
    NominalBinarySplitCandidate binaryCandidate = (NominalBinarySplitCandidate) candidate;
    TreeNodeNominalBinaryCondition[] childConditions = binaryCandidate.getChildConditions();
    assertEquals("Binary split candidate must have two children.", 2, childConditions.length);
    final String[] expectedValues = new String[] { "A", "C" };
    assertArrayEquals("Wrong values in split condition.", expectedValues, childConditions[0].getValues());
    assertArrayEquals("Wrong values in split condition.", expectedValues, childConditions[1].getValues());
    assertFalse("Missings should go with majority", childConditions[0].acceptsMissings());
    assertTrue("Missings should go with majority", childConditions[1].acceptsMissings());
    assertEquals("Wrong set logic.", SetLogic.IS_NOT_IN, childConditions[0].getSetLogic());
    assertEquals("Wrong set logic.", SetLogic.IS_IN, childConditions[1].getSetLogic());
    // Case 2: the training data contains a missing value ("?").
    final String missingCSV = "A, A, A, B, B, B, B, C, C, ?";
    final String missingTarget = "1, 2, 2, 7, 6, 5, 2, 3, 1, 8";
    column = gen.createNominalAttributeColumn(missingCSV, "missing", 0);
    regressionTarget = TestDataGenerator.createNumericTargetColumn(missingTarget);
    rowWeights = new double[10];
    Arrays.fill(rowWeights, 1.0);
    rowIndices = new int[10];
    Arrays.setAll(rowIndices, idx -> idx);
    memberships = new MockDataColMem(rowIndices, rowIndices, rowWeights);
    candidate = column.calcBestSplitRegression(memberships, regressionTarget.getPriors(rowWeights, config), regressionTarget, random);
    assertNotNull("SplitCandidate may not be null.", candidate);
    assertThat(candidate, instanceOf(NominalBinarySplitCandidate.class));
    assertEquals("Wrong gain.", 36.1, candidate.getGainValue(), 1e-5);
    assertTrue("Conditions should handle missing values therefore the missedRows BitSet must be empty.", candidate.getMissedRows().isEmpty());
    binaryCandidate = (NominalBinarySplitCandidate) candidate;
    childConditions = binaryCandidate.getChildConditions();
    assertEquals("Binary split candidate must have two children.", 2, childConditions.length);
    assertArrayEquals("Wrong values in split condition.", expectedValues, childConditions[0].getValues());
    assertArrayEquals("Wrong values in split condition.", expectedValues, childConditions[1].getValues());
    assertTrue("Missings should go with B (because there target values are similar)", childConditions[0].acceptsMissings());
    assertFalse("Missings should go with B (because there target values are similar)", childConditions[1].acceptsMissings());
    assertEquals("Wrong set logic.", SetLogic.IS_NOT_IN, childConditions[0].getSetLogic());
    assertEquals("Wrong set logic.", SetLogic.IS_IN, childConditions[1].getSetLogic());
}
Also used : TreeEnsembleLearnerConfiguration(org.knime.base.node.mine.treeensemble2.node.learner.TreeEnsembleLearnerConfiguration) RandomData(org.apache.commons.math.random.RandomData) NominalMultiwaySplitCandidate(org.knime.base.node.mine.treeensemble2.learner.NominalMultiwaySplitCandidate) NominalBinarySplitCandidate(org.knime.base.node.mine.treeensemble2.learner.NominalBinarySplitCandidate) SplitCandidate(org.knime.base.node.mine.treeensemble2.learner.SplitCandidate) DataMemberships(org.knime.base.node.mine.treeensemble2.data.memberships.DataMemberships) RootDataMemberships(org.knime.base.node.mine.treeensemble2.data.memberships.RootDataMemberships) TreeNodeNominalBinaryCondition(org.knime.base.node.mine.treeensemble2.model.TreeNodeNominalBinaryCondition) NominalBinarySplitCandidate(org.knime.base.node.mine.treeensemble2.learner.NominalBinarySplitCandidate) Test(org.junit.Test)

Example 8 with RandomData

use of org.apache.commons.math.random.RandomData in project knime-core by knime.

The class TreeNominalColumnDataTest, method testCalcBestSplitRegressionMultiwayXGBoostMissingValueHandling.

/**
 * This method tests the XGBoost missing value handling in case of a regression task and multiway splits.
 *
 * @throws Exception if the test setup or the split calculation fails
 */
@Test
public void testCalcBestSplitRegressionMultiwayXGBoostMissingValueHandling() throws Exception {
    final TreeEnsembleLearnerConfiguration config = createConfig(true);
    config.setMissingValueHandling(MissingValueHandling.XGBoost);
    config.setUseBinaryNominalSplits(false);
    final TestDataGenerator gen = new TestDataGenerator(config);
    // Case 1: no missing values during training — a missing value direction
    // must nevertheless be chosen for prediction time.
    final String noMissingCSV = "A, A, A, B, B, B, B, C, C";
    final String noMissingsTarget = "1, 2, 2, 7, 6, 5, 2, 3, 1";
    TreeNominalColumnData column = gen.createNominalAttributeColumn(noMissingCSV, "noMissings", 0);
    TreeTargetNumericColumnData regressionTarget = TestDataGenerator.createNumericTargetColumn(noMissingsTarget);
    double[] rowWeights = new double[9];
    Arrays.fill(rowWeights, 1.0);
    int[] rowIndices = new int[9];
    Arrays.setAll(rowIndices, idx -> idx);
    final RandomData random = config.createRandomData();
    DataMemberships memberships = new MockDataColMem(rowIndices, rowIndices, rowWeights);
    SplitCandidate candidate = column.calcBestSplitRegression(memberships, regressionTarget.getPriors(rowWeights, config), regressionTarget, random);
    assertNotNull("SplitCandidate may not be null", candidate);
    assertThat(candidate, instanceOf(NominalMultiwaySplitCandidate.class));
    assertEquals("Wrong gain.", 22.888888, candidate.getGainValue(), 1e-5);
    assertTrue("No missing values in dataCol therefore the missedRows BitSet must be empty.", candidate.getMissedRows().isEmpty());
    NominalMultiwaySplitCandidate multiwayCandidate = (NominalMultiwaySplitCandidate) candidate;
    TreeNodeNominalCondition[] childConditions = multiwayCandidate.getChildConditions();
    assertEquals("3 nominal values therefore there must be 3 children.", 3, childConditions.length);
    assertEquals("Wrong value.", "A", childConditions[0].getValue());
    assertEquals("Wrong value.", "B", childConditions[1].getValue());
    assertEquals("Wrong value.", "C", childConditions[2].getValue());
    assertFalse("Missings should go with majority", childConditions[0].acceptsMissings());
    assertTrue("Missings should go with majority", childConditions[1].acceptsMissings());
    assertFalse("Missings should go with majority", childConditions[2].acceptsMissings());
    // Case 2: the training data contains a missing value ("?").
    final String missingCSV = "A, A, A, B, B, B, B, C, C, ?";
    final String missingTarget = "1, 2, 2, 7, 6, 5, 2, 3, 1, 8";
    column = gen.createNominalAttributeColumn(missingCSV, "missing", 0);
    regressionTarget = TestDataGenerator.createNumericTargetColumn(missingTarget);
    rowWeights = new double[10];
    Arrays.fill(rowWeights, 1.0);
    rowIndices = new int[10];
    Arrays.setAll(rowIndices, idx -> idx);
    memberships = new MockDataColMem(rowIndices, rowIndices, rowWeights);
    candidate = column.calcBestSplitRegression(memberships, regressionTarget.getPriors(rowWeights, config), regressionTarget, random);
    assertNotNull("SplitCandidate may not be null.", candidate);
    assertThat(candidate, instanceOf(NominalMultiwaySplitCandidate.class));
    // gain check disabled in the original test:
    // assertEquals("Wrong gain.", 36.233333333, split.getGainValue(), 1e-5);
    assertTrue("Conditions should handle missing values therefore the missedRows BitSet must be empty.", candidate.getMissedRows().isEmpty());
    multiwayCandidate = (NominalMultiwaySplitCandidate) candidate;
    childConditions = multiwayCandidate.getChildConditions();
    assertEquals("3 values (not counting missing values) therefore there must be 3 children.", 3, childConditions.length);
    assertEquals("Wrong value.", "A", childConditions[0].getValue());
    assertEquals("Wrong value.", "B", childConditions[1].getValue());
    assertEquals("Wrong value.", "C", childConditions[2].getValue());
    assertFalse("Missings should go with majority", childConditions[0].acceptsMissings());
    assertTrue("Missings should go with majority", childConditions[1].acceptsMissings());
    assertFalse("Missings should go with majority", childConditions[2].acceptsMissings());
}
Also used : TreeEnsembleLearnerConfiguration(org.knime.base.node.mine.treeensemble2.node.learner.TreeEnsembleLearnerConfiguration) RandomData(org.apache.commons.math.random.RandomData) TreeNodeNominalCondition(org.knime.base.node.mine.treeensemble2.model.TreeNodeNominalCondition) NominalMultiwaySplitCandidate(org.knime.base.node.mine.treeensemble2.learner.NominalMultiwaySplitCandidate) NominalBinarySplitCandidate(org.knime.base.node.mine.treeensemble2.learner.NominalBinarySplitCandidate) SplitCandidate(org.knime.base.node.mine.treeensemble2.learner.SplitCandidate) DataMemberships(org.knime.base.node.mine.treeensemble2.data.memberships.DataMemberships) RootDataMemberships(org.knime.base.node.mine.treeensemble2.data.memberships.RootDataMemberships) NominalMultiwaySplitCandidate(org.knime.base.node.mine.treeensemble2.learner.NominalMultiwaySplitCandidate) Test(org.junit.Test)

Example 9 with RandomData

use of org.apache.commons.math.random.RandomData in project knime-core by knime.

The class TreeNominalColumnDataTest, method testCalcBestSplitCassificationBinaryTwoClassXGBoostMissingValue.

/**
 * Tests the XGBoost Missing value handling in case of a two class problem <br>
 * currently not tested because missing value handling will probably be implemented differently.
 *
 * @throws Exception if the test setup or the split calculation fails
 */
// @Test
public void testCalcBestSplitCassificationBinaryTwoClassXGBoostMissingValue() throws Exception {
    final TreeEnsembleLearnerConfiguration config = createConfig(false);
    config.setMissingValueHandling(MissingValueHandling.XGBoost);
    final TestDataGenerator dataGen = new TestDataGenerator(config);
    // check correct behavior if no missing values are encountered during split search
    Pair<TreeNominalColumnData, TreeTargetNominalColumnData> twoClassTennisData = twoClassTennisData(config);
    TreeData treeData = dataGen.createTreeData(twoClassTennisData.getSecond(), twoClassTennisData.getFirst());
    IDataIndexManager indexManager = new DefaultDataIndexManager(treeData);
    double[] rowWeights = new double[TWO_CLASS_INDICES.length];
    Arrays.fill(rowWeights, 1.0);
    DataMemberships dataMemberships = new RootDataMemberships(rowWeights, treeData, indexManager);
    TreeTargetNominalColumnData targetData = twoClassTennisData.getSecond();
    TreeNominalColumnData columnData = twoClassTennisData.getFirst();
    ClassificationPriors priors = targetData.getDistribution(rowWeights, config);
    RandomData rd = TestDataGenerator.createRandomData();
    SplitCandidate splitCandidate = columnData.calcBestSplitClassification(dataMemberships, priors, targetData, rd);
    assertNotNull(splitCandidate);
    assertThat(splitCandidate, instanceOf(NominalBinarySplitCandidate.class));
    NominalBinarySplitCandidate binarySplitCandidate = (NominalBinarySplitCandidate) splitCandidate;
    TreeNodeNominalBinaryCondition[] childConditions = binarySplitCandidate.getChildConditions();
    assertEquals(2, childConditions.length);
    assertArrayEquals(new String[] { "R" }, childConditions[0].getValues());
    assertArrayEquals(new String[] { "R" }, childConditions[1].getValues());
    assertEquals(SetLogic.IS_NOT_IN, childConditions[0].getSetLogic());
    assertEquals(SetLogic.IS_IN, childConditions[1].getSetLogic());
    // check if missing values go left
    assertTrue(childConditions[0].acceptsMissings());
    assertFalse(childConditions[1].acceptsMissings());
    // check correct behavior if missing values are encountered during split search
    String dataContainingMissingsCSV = "S,?,O,R,S,R,S,O,O,?";
    columnData = dataGen.createNominalAttributeColumn(dataContainingMissingsCSV, "column containing missing values", 0);
    treeData = dataGen.createTreeData(targetData, columnData);
    indexManager = new DefaultDataIndexManager(treeData);
    dataMemberships = new RootDataMemberships(rowWeights, treeData, indexManager);
    // NOTE(review): null is passed for the RandomData here although the first call uses rd —
    // confirm this is intentional (test is disabled, see the commented-out @Test above)
    splitCandidate = columnData.calcBestSplitClassification(dataMemberships, priors, targetData, null);
    assertNotNull(splitCandidate);
    binarySplitCandidate = (NominalBinarySplitCandidate) splitCandidate;
    assertEquals("Gain was not as expected", 0.08, binarySplitCandidate.getGainValue(), 1e-8);
    childConditions = binarySplitCandidate.getChildConditions();
    String[] conditionValues = new String[] { "O", "?" };
    assertArrayEquals("Values in nominal condition did not match", conditionValues, childConditions[0].getValues());
    assertArrayEquals("Values in nominal condition did not match", conditionValues, childConditions[1].getValues());
    assertEquals("Wrong set logic.", SetLogic.IS_NOT_IN, childConditions[0].getSetLogic());
    assertEquals("Wrong set logic.", SetLogic.IS_IN, childConditions[1].getSetLogic());
    // fixed typo "Missig" -> "Missing" in the two messages below
    assertFalse("Missing values are not sent to the correct child.", childConditions[0].acceptsMissings());
    assertTrue("Missing values are not sent to the correct child.", childConditions[1].acceptsMissings());
}
Also used : TreeEnsembleLearnerConfiguration(org.knime.base.node.mine.treeensemble2.node.learner.TreeEnsembleLearnerConfiguration) RootDataMemberships(org.knime.base.node.mine.treeensemble2.data.memberships.RootDataMemberships) RandomData(org.apache.commons.math.random.RandomData) IDataIndexManager(org.knime.base.node.mine.treeensemble2.data.memberships.IDataIndexManager) NominalMultiwaySplitCandidate(org.knime.base.node.mine.treeensemble2.learner.NominalMultiwaySplitCandidate) NominalBinarySplitCandidate(org.knime.base.node.mine.treeensemble2.learner.NominalBinarySplitCandidate) SplitCandidate(org.knime.base.node.mine.treeensemble2.learner.SplitCandidate) DefaultDataIndexManager(org.knime.base.node.mine.treeensemble2.data.memberships.DefaultDataIndexManager) DataMemberships(org.knime.base.node.mine.treeensemble2.data.memberships.DataMemberships) RootDataMemberships(org.knime.base.node.mine.treeensemble2.data.memberships.RootDataMemberships) TreeNodeNominalBinaryCondition(org.knime.base.node.mine.treeensemble2.model.TreeNodeNominalBinaryCondition) NominalBinarySplitCandidate(org.knime.base.node.mine.treeensemble2.learner.NominalBinarySplitCandidate)

Example 10 with RandomData

use of org.apache.commons.math.random.RandomData in project knime-core by knime.

The class EqualSizeRowSamplerTest, method testCreateRowSampleWithReplacement.

@Test
public void testCreateRowSampleWithReplacement() throws Exception {
    final SubsetSelector<SubsetWithReplacementRowSample> selector = SubsetWithReplacementSelector.getInstance();
    // fraction 0.5: half of the minority class size is drawn from every class
    EqualSizeRowSampler<SubsetWithReplacementRowSample> sampler =
        new EqualSizeRowSampler<SubsetWithReplacementRowSample>(0.5, selector, SamplerTestUtil.TARGET);
    final RandomData random = TestDataGenerator.createRandomData();
    SubsetWithReplacementRowSample rowSample = sampler.createRowSample(random);
    assertEquals(6, SamplerTestUtil.countRows(rowSample));
    assertEquals(15, rowSample.getNrRows());
    // fraction 1.0: the full minority class size is drawn from every class
    sampler = new EqualSizeRowSampler<SubsetWithReplacementRowSample>(1.0, selector, SamplerTestUtil.TARGET);
    rowSample = sampler.createRowSample(random);
    assertEquals(12, SamplerTestUtil.countRows(rowSample));
    assertEquals(15, rowSample.getNrRows());
    // rows 11..14 belong to the minority class; their sampled counts must sum to 4
    int minorityRowCount = 0;
    for (int row = 11; row < 15; row++) {
        minorityRowCount += rowSample.getCountFor(row);
    }
    assertEquals(4, minorityRowCount);
}
Also used : RandomData(org.apache.commons.math.random.RandomData) Test(org.junit.Test)

Aggregations

RandomData (org.apache.commons.math.random.RandomData)36 Test (org.junit.Test)21 TreeEnsembleLearnerConfiguration (org.knime.base.node.mine.treeensemble2.node.learner.TreeEnsembleLearnerConfiguration)16 DataMemberships (org.knime.base.node.mine.treeensemble2.data.memberships.DataMemberships)11 RootDataMemberships (org.knime.base.node.mine.treeensemble2.data.memberships.RootDataMemberships)11 SplitCandidate (org.knime.base.node.mine.treeensemble2.learner.SplitCandidate)11 TreeData (org.knime.base.node.mine.treeensemble2.data.TreeData)8 DefaultDataIndexManager (org.knime.base.node.mine.treeensemble2.data.memberships.DefaultDataIndexManager)7 IDataIndexManager (org.knime.base.node.mine.treeensemble2.data.memberships.IDataIndexManager)6 NumericMissingSplitCandidate (org.knime.base.node.mine.treeensemble2.learner.NumericMissingSplitCandidate)6 NumericSplitCandidate (org.knime.base.node.mine.treeensemble2.learner.NumericSplitCandidate)6 TreeNodeNumericCondition (org.knime.base.node.mine.treeensemble2.model.TreeNodeNumericCondition)6 TreeAttributeColumnData (org.knime.base.node.mine.treeensemble2.data.TreeAttributeColumnData)5 NominalBinarySplitCandidate (org.knime.base.node.mine.treeensemble2.learner.NominalBinarySplitCandidate)5 NominalMultiwaySplitCandidate (org.knime.base.node.mine.treeensemble2.learner.NominalMultiwaySplitCandidate)5 ExecutionMonitor (org.knime.core.node.ExecutionMonitor)5 BitSet (java.util.BitSet)4 TreeTargetNumericColumnData (org.knime.base.node.mine.treeensemble2.data.TreeTargetNumericColumnData)4 ArrayList (java.util.ArrayList)3 Future (java.util.concurrent.Future)3