Use of org.knime.base.node.mine.treeensemble2.data.TreeTargetNumericColumnData in project knime-core by knime.
In class AbstractGradientBoostingLearner, the method createResidualDataFromArray:
/**
 * Creates a {@link TreeData} object that uses the values in <b>residualData</b> as target.
 *
 * @param residualData array containing the residuals
 * @param actualData the TreeData as it is provided by the user
 * @return data using the residuals as targets
 */
protected TreeData createResidualDataFromArray(final double[] residualData, final TreeData actualData) {
    TreeTargetNumericColumnData actual = (TreeTargetNumericColumnData) actualData.getTargetColumn();
    RowKey[] rowKeysAsArray = new RowKey[actual.getNrRows()];
    for (int i = 0; i < rowKeysAsArray.length; i++) {
        rowKeysAsArray[i] = actual.getRowKeyFor(i);
    }
    TreeTargetNumericColumnMetaData metaData = actual.getMetaData();
    TreeTargetNumericColumnData residualTarget =
        new TreeTargetNumericColumnData(metaData, rowKeysAsArray, residualData);
    return new TreeData(getData().getColumns(), residualTarget, getData().getTreeType());
}
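A minimal sketch of how this helper is typically fed from inside a subclass of AbstractGradientBoostingLearner. The predictions array is hypothetical and stands for the ensemble's current per-row output; the learn method of MGradientBoostedTreesLearner below builds essentially the same thing.

// Sketch only: assumes we are inside a subclass of AbstractGradientBoostingLearner,
// so getTarget(), getData() and createResidualDataFromArray(...) are inherited.
// "predictions" is a hypothetical double[] holding the current model output per row.
TreeTargetNumericColumnData target = getTarget();
double[] residuals = new double[target.getNrRows()];
for (int i = 0; i < residuals.length; i++) {
    residuals[i] = target.getValueFor(i) - predictions[i];
}
TreeData residualData = createResidualDataFromArray(residuals, getData());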
Use of org.knime.base.node.mine.treeensemble2.data.TreeTargetNumericColumnData in project knime-core by knime.
In class MGradientBoostedTreesLearner, the method learn:
/**
 * {@inheritDoc}
 */
@Override
public AbstractGradientBoostingModel learn(final ExecutionMonitor exec) throws CanceledExecutionException {
    final TreeData actualData = getData();
    final GradientBoostingLearnerConfiguration config = getConfig();
    final int nrModels = config.getNrModels();
    final TreeTargetNumericColumnData actualTarget = getTarget();
    final double initialValue = actualTarget.getMedian();
    final ArrayList<TreeModelRegression> models = new ArrayList<TreeModelRegression>(nrModels);
    final ArrayList<Map<TreeNodeSignature, Double>> coefficientMaps =
        new ArrayList<Map<TreeNodeSignature, Double>>(nrModels);
    final double[] previousPrediction = new double[actualTarget.getNrRows()];
    Arrays.fill(previousPrediction, initialValue);
    final RandomData rd = config.createRandomData();
    final double alpha = config.getAlpha();
    TreeNodeSignatureFactory signatureFactory = null;
    final int maxLevels = config.getMaxLevels();
    // this should be the default
    if (maxLevels < TreeEnsembleLearnerConfiguration.MAX_LEVEL_INFINITE) {
        final int capacity = IntMath.pow(2, maxLevels - 1);
        signatureFactory = new TreeNodeSignatureFactory(capacity);
    } else {
        signatureFactory = new TreeNodeSignatureFactory();
    }
    exec.setMessage("Learning model");
    TreeData residualData;
    for (int i = 0; i < nrModels; i++) {
        final double[] residuals = new double[actualTarget.getNrRows()];
        for (int j = 0; j < actualTarget.getNrRows(); j++) {
            residuals[j] = actualTarget.getValueFor(j) - previousPrediction[j];
        }
        final double quantile = calculateAlphaQuantile(residuals, alpha);
        final double[] gradients = new double[residuals.length];
        for (int j = 0; j < gradients.length; j++) {
            gradients[j] = Math.abs(residuals[j]) <= quantile ? residuals[j] : quantile * Math.signum(residuals[j]);
        }
        residualData = createResidualDataFromArray(gradients, actualData);
        final RandomData rdSingle =
            TreeEnsembleLearnerConfiguration.createRandomData(rd.nextLong(Long.MIN_VALUE, Long.MAX_VALUE));
        final RowSample rowSample = getRowSampler().createRowSample(rdSingle);
        final TreeLearnerRegression treeLearner = new TreeLearnerRegression(getConfig(), residualData,
            getIndexManager(), signatureFactory, rdSingle, rowSample);
        final TreeModelRegression tree = treeLearner.learnSingleTree(exec, rdSingle);
        final Map<TreeNodeSignature, Double> coefficientMap = calcCoefficientMap(residuals, quantile, tree);
        adaptPreviousPrediction(previousPrediction, tree, coefficientMap);
        models.add(tree);
        coefficientMaps.add(coefficientMap);
        exec.setProgress(((double) i) / nrModels, "Finished level " + i + "/" + nrModels);
    }
    return new GradientBoostedTreesModel(getConfig(), actualData.getMetaData(),
        models.toArray(new TreeModelRegression[models.size()]), actualData.getTreeType(), initialValue, coefficientMaps);
}
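The loss-specific part of the loop above is the Huber-style clamping of the residuals at the alpha quantile of their absolute values (calculateAlphaQuantile is a private helper that is not shown in this snippet). The following dependency-free sketch illustrates that step, using a simple sort-based quantile as a stand-in rather than the actual KNIME implementation.

import java.util.Arrays;

public class HuberGradientSketch {

    // Simple alpha-quantile of the absolute residuals (sort-based approximation,
    // assumed behavior, not the KNIME helper).
    static double alphaQuantile(double[] residuals, double alpha) {
        double[] abs = new double[residuals.length];
        for (int i = 0; i < abs.length; i++) {
            abs[i] = Math.abs(residuals[i]);
        }
        Arrays.sort(abs);
        int index = Math.min(abs.length - 1, (int) (alpha * abs.length));
        return abs[index];
    }

    // Clamp residuals at the quantile while keeping their sign, as in the loop above.
    static double[] clampedGradients(double[] residuals, double alpha) {
        double quantile = alphaQuantile(residuals, alpha);
        double[] gradients = new double[residuals.length];
        for (int i = 0; i < gradients.length; i++) {
            gradients[i] = Math.abs(residuals[i]) <= quantile
                ? residuals[i]
                : quantile * Math.signum(residuals[i]);
        }
        return gradients;
    }

    public static void main(String[] args) {
        double[] residuals = {0.2, -3.5, 1.1, 7.0, -0.4};
        // Large residuals are cut back to the 0.9-quantile of |residuals|.
        System.out.println(Arrays.toString(clampedGradients(residuals, 0.9)));
    }
}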
Use of org.knime.base.node.mine.treeensemble2.data.TreeTargetNumericColumnData in project knime-core by knime.
In class TreeNumericColumnDataTest, the method testCalcBestSplitRegression:
@Test
public void testCalcBestSplitRegression() throws InvalidSettingsException {
    String dataCSV = "1,2,3,4,5,6,7,8,9,10";
    String targetCSV = "1,5,4,4.3,6.5,6.5,4,3,3,4";
    TreeEnsembleLearnerConfiguration config = new TreeEnsembleLearnerConfiguration(true);
    config.setNrModels(1);
    config.setDataSelectionWithReplacement(false);
    config.setUseDifferentAttributesAtEachNode(false);
    config.setDataFractionPerTree(1.0);
    config.setColumnSamplingMode(ColumnSamplingMode.None);
    TestDataGenerator dataGen = new TestDataGenerator(config);
    RandomData rd = config.createRandomData();
    TreeTargetNumericColumnData target = TestDataGenerator.createNumericTargetColumn(targetCSV);
    TreeNumericColumnData attribute = dataGen.createNumericAttributeColumn(dataCSV, "test-col", 0);
    TreeData data = new TreeData(new TreeAttributeColumnData[] { attribute }, target, TreeType.Ordinary);
    double[] weights = new double[10];
    Arrays.fill(weights, 1.0);
    DataMemberships rootMem = new RootDataMemberships(weights, data, new DefaultDataIndexManager(data));
    SplitCandidate firstSplit = attribute.calcBestSplitRegression(rootMem, target.getPriors(rootMem, config), target, rd);
    // calculated via OpenOffice calc
    assertEquals(10.885444, firstSplit.getGainValue(), 1e-5);
    TreeNodeCondition[] firstConditions = firstSplit.getChildConditions();
    assertEquals(2, firstConditions.length);
    for (int i = 0; i < firstConditions.length; i++) {
        assertThat(firstConditions[i], instanceOf(TreeNodeNumericCondition.class));
        TreeNodeNumericCondition numCond = (TreeNodeNumericCondition) firstConditions[i];
        assertEquals(1.5, numCond.getSplitValue(), 0);
    }
    // left child contains only one row therefore only look at right child
    BitSet expectedInChild = new BitSet(10);
    expectedInChild.set(1, 10);
    BitSet inChild = attribute.updateChildMemberships(firstConditions[1], rootMem);
    assertEquals(expectedInChild, inChild);
    DataMemberships childMem = rootMem.createChildMemberships(inChild);
    SplitCandidate secondSplit = attribute.calcBestSplitRegression(childMem, target.getPriors(childMem, config), target, rd);
    assertEquals(6.883555, secondSplit.getGainValue(), 1e-5);
    TreeNodeCondition[] secondConditions = secondSplit.getChildConditions();
    for (int i = 0; i < secondConditions.length; i++) {
        assertThat(secondConditions[i], instanceOf(TreeNodeNumericCondition.class));
        TreeNodeNumericCondition numCond = (TreeNodeNumericCondition) secondConditions[i];
        assertEquals(6.5, numCond.getSplitValue(), 0);
    }
}
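The asserted gain values can be reproduced without KNIME, assuming the gain of a regression split is the reduction in the sum of squared deviations from the mean (parent minus both children), which is what the test's "calculated via OpenOffice calc" comment refers to. A standalone check of both constants:

public class RegressionGainCheck {

    // Sum of squared deviations from the mean over values[from..to).
    static double ssd(double[] values, int from, int to) {
        double sum = 0;
        for (int i = from; i < to; i++) {
            sum += values[i];
        }
        double mean = sum / (to - from);
        double ssd = 0;
        for (int i = from; i < to; i++) {
            double d = values[i] - mean;
            ssd += d * d;
        }
        return ssd;
    }

    public static void main(String[] args) {
        // Target values ordered by the ascending attribute 1..10 from the test.
        double[] y = {1, 5, 4, 4.3, 6.5, 6.5, 4, 3, 3, 4};
        // First split at 1.5: {row 0} vs. {rows 1..9}.
        double firstGain = ssd(y, 0, 10) - ssd(y, 0, 1) - ssd(y, 1, 10);
        // Second split within rows 1..9 at attribute value 6.5: {rows 1..5} vs. {rows 6..9}.
        double secondGain = ssd(y, 1, 10) - ssd(y, 1, 6) - ssd(y, 6, 10);
        // Prints approximately 10.885444 and 6.883555, the values asserted above.
        System.out.printf("%.6f %.6f%n", firstGain, secondGain);
    }
}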
Use of org.knime.base.node.mine.treeensemble2.data.TreeTargetNumericColumnData in project knime-core by knime.
In class TreeTargetNumericColumnDataTest, the method testGetPriors:
/**
 * Tests the {@link TreeTargetNumericColumnData#getPriors(DataMemberships, TreeEnsembleLearnerConfiguration)} and
 * {@link TreeTargetNumericColumnData#getPriors(double[], TreeEnsembleLearnerConfiguration)} methods.
 */
@Test
public void testGetPriors() {
    String targetCSV = "1,4,3,5,6,7,8,12,22,1";
    // irrelevant but necessary to build the TreeData object
    String someAttributeCSV = "A,B,A,B,A,A,B,A,A,B";
    TreeEnsembleLearnerConfiguration config = new TreeEnsembleLearnerConfiguration(true);
    TestDataGenerator dataGen = new TestDataGenerator(config);
    TreeTargetNumericColumnData target = TestDataGenerator.createNumericTargetColumn(targetCSV);
    TreeNominalColumnData attribute = dataGen.createNominalAttributeColumn(someAttributeCSV, "test-col", 0);
    TreeData data = new TreeData(new TreeAttributeColumnData[] { attribute }, target, TreeType.Ordinary);
    double[] weights = new double[10];
    Arrays.fill(weights, 1.0);
    DataMemberships rootMem = new RootDataMemberships(weights, data, new DefaultDataIndexManager(data));
    RegressionPriors datMemPriors = target.getPriors(rootMem, config);
    assertEquals(6.9, datMemPriors.getMean(), DELTA);
    assertEquals(69, datMemPriors.getYSum(), DELTA);
    assertEquals(352.9, datMemPriors.getSumSquaredDeviation(), DELTA);
}
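The expected priors are plain aggregate statistics of the target column: its sum, its mean, and the sum of squared deviations from the mean. A standalone check of the three asserted values:

public class PriorsCheck {

    public static void main(String[] args) {
        // The target column from the test above.
        double[] y = {1, 4, 3, 5, 6, 7, 8, 12, 22, 1};
        double sum = 0;
        for (double v : y) {
            sum += v;
        }
        double mean = sum / y.length;
        double ssd = 0;
        for (double v : y) {
            ssd += (v - mean) * (v - mean);
        }
        // Prints sum=69.0, mean=6.9, ssd=352.9, matching the assertions above.
        System.out.printf("sum=%.1f mean=%.1f ssd=%.1f%n", sum, mean, ssd);
    }
}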
Use of org.knime.base.node.mine.treeensemble2.data.TreeTargetNumericColumnData in project knime-core by knime.
In class TreeNominalColumnDataTest, the method testCalcBestSplitRegressionMultiway:
/**
 * Tests the method
 * {@link TreeNominalColumnData#calcBestSplitRegression(DataMemberships, RegressionPriors, TreeTargetNumericColumnData, RandomData)}
 * using multiway splits.
 *
 * @throws Exception
 */
@Test
public void testCalcBestSplitRegressionMultiway() throws Exception {
    TreeEnsembleLearnerConfiguration config = createConfig(true);
    config.setUseBinaryNominalSplits(false);
    Pair<TreeNominalColumnData, TreeTargetNumericColumnData> tennisDataRegression = tennisDataRegression(config);
    TreeNominalColumnData columnData = tennisDataRegression.getFirst();
    TreeTargetNumericColumnData targetData = tennisDataRegression.getSecond();
    TreeData treeData = createTreeDataRegression(tennisDataRegression);
    double[] rowWeights = new double[SMALL_COLUMN_DATA.length];
    Arrays.fill(rowWeights, 1.0);
    IDataIndexManager indexManager = new DefaultDataIndexManager(treeData);
    DataMemberships dataMemberships = new RootDataMemberships(rowWeights, treeData, indexManager);
    RegressionPriors priors = targetData.getPriors(rowWeights, config);
    SplitCandidate splitCandidate = columnData.calcBestSplitRegression(dataMemberships, priors, targetData, null);
    assertNotNull(splitCandidate);
    assertThat(splitCandidate, instanceOf(NominalMultiwaySplitCandidate.class));
    assertFalse(splitCandidate.canColumnBeSplitFurther());
    assertEquals(36.9643, splitCandidate.getGainValue(), 0.0001);
    NominalMultiwaySplitCandidate multiwaySplitCandidate = (NominalMultiwaySplitCandidate) splitCandidate;
    TreeNodeNominalCondition[] childConditions = multiwaySplitCandidate.getChildConditions();
    assertEquals(3, childConditions.length);
    assertEquals("S", childConditions[0].getValue());
    assertEquals("O", childConditions[1].getValue());
    assertEquals("R", childConditions[2].getValue());
}
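The gain of a multiway nominal split follows the same pattern as in the binary regression case: the parent's sum of squared deviations from the mean minus the sum over all per-category children. The tennis data used by the test is built elsewhere (tennisDataRegression), so the following dependency-free sketch uses hypothetical nominal and target values only to illustrate the computation.

import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

public class MultiwayGainSketch {

    // Sum of squared deviations from the mean.
    static double ssd(List<Double> values) {
        double mean = values.stream().mapToDouble(Double::doubleValue).average().orElse(0);
        return values.stream().mapToDouble(v -> (v - mean) * (v - mean)).sum();
    }

    // Gain of splitting the target by nominal value: parent SSD minus the children's SSDs.
    static double multiwayGain(String[] nominal, double[] target) {
        Map<String, List<Double>> children = new LinkedHashMap<>();
        List<Double> parent = new ArrayList<>();
        for (int i = 0; i < nominal.length; i++) {
            children.computeIfAbsent(nominal[i], k -> new ArrayList<>()).add(target[i]);
            parent.add(target[i]);
        }
        double gain = ssd(parent);
        for (List<Double> child : children.values()) {
            gain -= ssd(child);
        }
        return gain;
    }

    public static void main(String[] args) {
        // Hypothetical outlook categories and numeric targets, not the test's tennis data.
        String[] outlook = {"S", "S", "O", "R", "R", "O", "S", "R"};
        double[] hours = {25, 30, 46, 45, 52, 44, 23, 43};
        System.out.println(multiwayGain(outlook, hours));
    }
}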