Use of org.knime.base.node.mine.treeensemble2.model.TreeModelRegression in project knime-core by KNIME.
Class LKGradientBoostedTreesLearner, method calculateCoefficientMap.
/**
 * Computes the per-leaf coefficients for one regression tree of the multinomial
 * log-loss boosting update. For each leaf the coefficient is the one-step Newton
 * approximation (K-1)/K * sum(r) / sum(|r| * (1 - |r|)) over the pseudo-residuals r
 * of the rows in that leaf, scaled by the configured learning rate.
 *
 * @param tree the regression tree whose leaves are assigned coefficients
 * @param pseudoResiduals data whose target column holds the pseudo-residuals
 * @param numClasses the number of classes K of the original classification problem
 * @return map from each leaf's signature to its learning-rate-scaled coefficient
 */
private Map<TreeNodeSignature, Double> calculateCoefficientMap(final TreeModelRegression tree, final TreeData pseudoResiduals, final double numClasses) {
    final List<TreeNodeRegression> leafs = tree.getLeafs();
    final Map<TreeNodeSignature, Double> coefficientMap = new HashMap<TreeNodeSignature, Double>();
    final TreeTargetNumericColumnData pseudoTarget = (TreeTargetNumericColumnData) pseudoResiduals.getTargetColumn();
    final double learningRate = getConfig().getLearningRate();
    for (TreeNodeRegression leaf : leafs) {
        final int[] indices = leaf.getRowIndicesInTreeData();
        double sumTop = 0;
        double sumBottom = 0;
        for (int index : indices) {
            final double val = pseudoTarget.getValueFor(index);
            sumTop += val;
            // absVal is already non-negative; the original re-applied Math.abs twice redundantly
            final double absVal = Math.abs(val);
            sumBottom += absVal * (1 - absVal);
        }
        // NOTE(review): sumBottom can be 0 if all residuals in a leaf are 0 or +/-1,
        // yielding a NaN/Infinite coefficient -- TODO confirm this cannot occur upstream
        final double coefficient = (numClasses - 1) / numClasses * (sumTop / sumBottom);
        coefficientMap.put(leaf.getSignature(), learningRate * coefficient);
    }
    return coefficientMap;
}
Use of org.knime.base.node.mine.treeensemble2.model.TreeModelRegression in project knime-core by KNIME.
Class LKGradientBoostedTreesLearner, method adaptPreviousFunction.
/**
 * Updates the additive model values in place: every row is routed through the newly
 * learned tree and the coefficient of its matching leaf is added to the row's entry.
 *
 * @param previousFunction per-row function values accumulated so far; modified in place
 * @param tree the regression tree learned in the current boosting iteration
 * @param coefficientMap leaf-signature to coefficient mapping produced for {@code tree}
 */
private void adaptPreviousFunction(final double[] previousFunction, final TreeModelRegression tree, final Map<TreeNodeSignature, Double> coefficientMap) {
    final TreeData data = getData();
    final IDataIndexManager indexManager = getIndexManager();
    for (int row = 0; row < previousFunction.length; row++) {
        final PredictorRecord record = createPredictorRecord(data, indexManager, row);
        final TreeNodeSignature leafSignature = tree.findMatchingNode(record).getSignature();
        previousFunction[row] += coefficientMap.get(leafSignature);
    }
}
Use of org.knime.base.node.mine.treeensemble2.model.TreeModelRegression in project knime-core by KNIME.
Class LKGradientBoostedTreesLearner, method learn.
/**
 * {@inheritDoc}
 *
 * Learns a multi-class gradient boosted trees model: the k-class classification
 * problem is transformed into k regression problems (one per class), and in each
 * boosting iteration one regression tree per class is learned, with the per-class
 * tree learners running concurrently on the global thread pool.
 *
 * @throws ExecutionException if a per-class tree learning task fails
 * @throws InterruptedException if interrupted while waiting for a worker slot or result
 */
@Override
public MultiClassGradientBoostedTreesModel learn(final ExecutionMonitor exec) throws CanceledExecutionException, InterruptedException, ExecutionException {
final TreeData data = getData();
final TreeTargetNominalColumnData target = (TreeTargetNominalColumnData) data.getTargetColumn();
final NominalValueRepresentation[] classNomVals = target.getMetaData().getValues();
final int numClasses = classNomVals.length;
final String[] classLabels = new String[numClasses];
final int nrModels = getConfig().getNrModels();
final int nrRows = target.getNrRows();
// models[i][j] is the tree learned in boosting iteration i for class j
final TreeModelRegression[][] models = new TreeModelRegression[nrModels][numClasses];
final ArrayList<ArrayList<Map<TreeNodeSignature, Double>>> coefficientMaps = new ArrayList<ArrayList<Map<TreeNodeSignature, Double>>>(nrModels);
// variables for parallelization
final ThreadPool tp = KNIMEConstants.GLOBAL_THREAD_POOL;
// first throwable raised by any worker; checked between task submissions
final AtomicReference<Throwable> learnThrowableRef = new AtomicReference<Throwable>();
// cap concurrent tree learners at 1.5x the available processors
final int procCount = 3 * Runtime.getRuntime().availableProcessors() / 2;
exec.setMessage("Transforming problem");
// transform the original k class classification problem into k regression problems
final TreeData[] actual = new TreeData[numClasses];
for (int i = 0; i < numClasses; i++) {
// one-vs-all numeric target for class i -- exact encoding defined by calculateNewTarget
final double[] newTarget = calculateNewTarget(target, i);
actual[i] = createNumericDataFromArray(newTarget);
classLabels[i] = classNomVals[i].getNominalValue();
}
final RandomData rd = getConfig().createRandomData();
// previousFunctions[j][r]: additive model value for class j on row r, accumulated over iterations
final double[][] previousFunctions = new double[numClasses][nrRows];
TreeNodeSignatureFactory signatureFactory = null;
final int maxLevels = getConfig().getMaxLevels();
if (maxLevels < TreeEnsembleLearnerConfiguration.MAX_LEVEL_INFINITE) {
// bounded tree depth: pre-size the signature factory accordingly
int capacity = IntMath.pow(2, maxLevels - 1);
signatureFactory = new TreeNodeSignatureFactory(capacity);
} else {
signatureFactory = new TreeNodeSignatureFactory();
}
exec.setMessage("Learn trees");
for (int i = 0; i < nrModels; i++) {
final Semaphore semaphore = new Semaphore(procCount);
final ArrayList<Map<TreeNodeSignature, Double>> classCoefficientMaps = new ArrayList<Map<TreeNodeSignature, Double>>(numClasses);
// prepare calculation of pseudoResiduals
final double[][] probs = new double[numClasses][nrRows];
for (int r = 0; r < nrRows; r++) {
// per-row softmax over the current function values: p_j = exp(F_j) / sum_k exp(F_k)
double sumExpF = 0;
for (int j = 0; j < numClasses; j++) {
sumExpF += Math.exp(previousFunctions[j][r]);
}
for (int j = 0; j < numClasses; j++) {
probs[j][r] = Math.exp(previousFunctions[j][r]) / sumExpF;
}
}
final Future<?>[] treeCoefficientMapPairs = new Future<?>[numClasses];
for (int j = 0; j < numClasses; j++) {
checkThrowable(learnThrowableRef);
final RandomData rdSingle = TreeEnsembleLearnerConfiguration.createRandomData(rd.nextLong(Long.MIN_VALUE, Long.MAX_VALUE));
final ExecutionMonitor subExec = exec.createSubProgress(0.0);
// throttle submissions: a permit is taken per task; presumably the callable
// releases it on completion -- TODO confirm against TreeLearnerCallable
semaphore.acquire();
treeCoefficientMapPairs[j] = tp.enqueue(new TreeLearnerCallable(rdSingle, probs[j], actual[j], subExec, numClasses, previousFunctions[j], semaphore, learnThrowableRef, signatureFactory));
}
for (int j = 0; j < numClasses; j++) {
checkThrowable(learnThrowableRef);
semaphore.acquire();
// unchecked cast: the callable is expected to yield a (tree, coefficient map) pair
final Pair<TreeModelRegression, Map<TreeNodeSignature, Double>> pair = (Pair<TreeModelRegression, Map<TreeNodeSignature, Double>>) treeCoefficientMapPairs[j].get();
models[i][j] = pair.getFirst();
classCoefficientMaps.add(pair.getSecond());
semaphore.release();
}
checkThrowable(learnThrowableRef);
coefficientMaps.add(classCoefficientMaps);
exec.setProgress((double) i / nrModels, "Finished level " + i + "/" + nrModels);
}
return MultiClassGradientBoostedTreesModel.createMultiClassGradientBoostedTreesModel(getConfig(), data.getMetaData(), models, data.getTreeType(), 0, numClasses, coefficientMaps, classLabels);
}
Use of org.knime.base.node.mine.treeensemble2.model.TreeModelRegression in project knime-core by KNIME.
Class MGradientBoostedTreesLearner, method learn.
/**
 * {@inheritDoc}
 *
 * Learns a robust (Huber-style) gradient boosted regression model: starting from the
 * target median, each iteration clips the residuals at their alpha-quantile, fits a
 * regression tree to the clipped gradients, and updates the running prediction with
 * the tree's per-leaf coefficients.
 */
@Override
public AbstractGradientBoostingModel learn(final ExecutionMonitor exec) throws CanceledExecutionException {
    final TreeData actualData = getData();
    final GradientBoostingLearnerConfiguration config = getConfig();
    final int nrModels = config.getNrModels();
    final TreeTargetNumericColumnData actualTarget = getTarget();
    final int nrRows = actualTarget.getNrRows();
    // robust starting point for the additive model
    final double initialValue = actualTarget.getMedian();
    final ArrayList<TreeModelRegression> models = new ArrayList<TreeModelRegression>(nrModels);
    final ArrayList<Map<TreeNodeSignature, Double>> coefficientMaps = new ArrayList<Map<TreeNodeSignature, Double>>(nrModels);
    final double[] previousPrediction = new double[nrRows];
    Arrays.fill(previousPrediction, initialValue);
    final RandomData rd = config.createRandomData();
    final double alpha = config.getAlpha();
    final TreeNodeSignatureFactory signatureFactory;
    final int maxLevels = config.getMaxLevels();
    // bounded depth is the default; pre-size the signature factory in that case
    if (maxLevels < TreeEnsembleLearnerConfiguration.MAX_LEVEL_INFINITE) {
        final int capacity = IntMath.pow(2, maxLevels - 1);
        signatureFactory = new TreeNodeSignatureFactory(capacity);
    } else {
        signatureFactory = new TreeNodeSignatureFactory();
    }
    exec.setMessage("Learning model");
    for (int i = 0; i < nrModels; i++) {
        // residual of the current additive model per row
        final double[] residuals = new double[nrRows];
        for (int j = 0; j < nrRows; j++) {
            residuals[j] = actualTarget.getValueFor(j) - previousPrediction[j];
        }
        final double quantile = calculateAlphaQuantile(residuals, alpha);
        // clip gradients at the alpha-quantile to limit the influence of outliers
        final double[] gradients = new double[residuals.length];
        for (int j = 0; j < gradients.length; j++) {
            gradients[j] = Math.abs(residuals[j]) <= quantile ? residuals[j] : quantile * Math.signum(residuals[j]);
        }
        final TreeData residualData = createResidualDataFromArray(gradients, actualData);
        final RandomData rdSingle = TreeEnsembleLearnerConfiguration.createRandomData(rd.nextLong(Long.MIN_VALUE, Long.MAX_VALUE));
        final RowSample rowSample = getRowSampler().createRowSample(rdSingle);
        // use the captured config consistently (original mixed it with repeated getConfig() calls)
        final TreeLearnerRegression treeLearner = new TreeLearnerRegression(config, residualData, getIndexManager(), signatureFactory, rdSingle, rowSample);
        final TreeModelRegression tree = treeLearner.learnSingleTree(exec, rdSingle);
        final Map<TreeNodeSignature, Double> coefficientMap = calcCoefficientMap(residuals, quantile, tree);
        adaptPreviousPrediction(previousPrediction, tree, coefficientMap);
        models.add(tree);
        coefficientMaps.add(coefficientMap);
        exec.setProgress(((double) i) / nrModels, "Finished level " + i + "/" + nrModels);
    }
    return new GradientBoostedTreesModel(config, actualData.getMetaData(), models.toArray(new TreeModelRegression[models.size()]), actualData.getTreeType(), initialValue, coefficientMaps);
}
Use of org.knime.base.node.mine.treeensemble2.model.TreeModelRegression in project knime-core by KNIME.
Class TreeEnsembleModel, method createDecisionTree.
/**
 * Builds a {@link DecisionTree} view model for the tree at the given ensemble index,
 * choosing the regression or classification variant based on the ensemble meta data.
 * If a sample table is supplied, its rows are replayed through the tree to populate
 * hiliting coverage information; row-level failures are logged and stop the replay.
 *
 * @param modelIndex index of the tree within the ensemble
 * @param sampleForHiliting optional table used to fill hilite coverage, may be null
 * @return the decision tree view model
 */
public DecisionTree createDecisionTree(final int modelIndex, final DataTable sampleForHiliting) {
    final TreeMetaData metaData = getMetaData();
    final DecisionTree result = metaData.isRegression()
        ? getTreeModelRegression(modelIndex).createDecisionTree(metaData)
        : getTreeModelClassification(modelIndex).createDecisionTree(metaData);
    if (sampleForHiliting == null) {
        return result;
    }
    final DataTableSpec dataSpec = sampleForHiliting.getDataTableSpec();
    final DataTableSpec learnSpec = getLearnAttributeSpec(dataSpec);
    for (DataRow row : sampleForHiliting) {
        try {
            final DataRow fullAttributeRow = createLearnAttributeRow(row, learnSpec);
            result.addCoveredPattern(fullAttributeRow, learnSpec);
        } catch (Exception e) {
            // best effort only: log the failure and stop replaying the sample
            NodeLogger.getLogger(getClass()).error("Error updating hilite info in tree view", e);
            break;
        }
    }
    return result;
}
Aggregations