use of org.knime.base.node.mine.treeensemble2.model.TreeModelRegression in project knime-core by knime.
the class RegressionGBTModelExporter method doWrite.
/**
 * {@inheritDoc}
 */
@Override
protected void doWrite(final MiningModel model) {
    // write the initial value
    Targets targets = model.addNewTargets();
    Target target = targets.addNewTarget();
    GradientBoostedTreesModel gbtModel = getGBTModel();
    target.setField(gbtModel.getMetaData().getTargetMetaData().getAttributeName());
    target.setRescaleConstant(gbtModel.getInitialValue());
    // write the model
    Segmentation segmentation = model.addNewSegmentation();
    List<TreeModelRegression> trees = IntStream.range(0, gbtModel.getNrModels())
        .mapToObj(gbtModel::getTreeModelRegression)
        .collect(Collectors.toList());
    writeSumSegmentation(segmentation, trees, gbtModel.getCoeffientMaps());
}
use of org.knime.base.node.mine.treeensemble2.model.TreeModelRegression in project knime-core by knime.
the class TreeEnsembleRegressionPredictorCellFactory method getCells.
/**
 * {@inheritDoc}
 */
@Override
public DataCell[] getCells(final DataRow row) {
    TreeEnsembleModelPortObject modelObject = m_predictor.getModelObject();
    TreeEnsemblePredictorConfiguration cfg = m_predictor.getConfiguration();
    final TreeEnsembleModel ensembleModel = modelObject.getEnsembleModel();
    int size = 1;
    final boolean appendConfidence = cfg.isAppendPredictionConfidence();
    final boolean appendModelCount = cfg.isAppendModelCount();
    if (appendConfidence) {
        size += 1;
    }
    if (appendModelCount) {
        size += 1;
    }
    final boolean hasOutOfBagFilter = m_predictor.hasOutOfBagFilter();
    DataCell[] result = new DataCell[size];
    DataRow filterRow = new FilterColumnRow(row, m_learnColumnInRealDataIndices);
    PredictorRecord record = ensembleModel.createPredictorRecord(filterRow, m_learnSpec);
    if (record == null) {
        // missing value
        Arrays.fill(result, DataType.getMissingCell());
        return result;
    }
    Mean mean = new Mean();
    Variance variance = new Variance();
    final int nrModels = ensembleModel.getNrModels();
    for (int i = 0; i < nrModels; i++) {
        if (hasOutOfBagFilter && m_predictor.isRowPartOfTrainingData(row.getKey(), i)) {
            // ignore, row was used to train the model
        } else {
            TreeModelRegression m = ensembleModel.getTreeModelRegression(i);
            TreeNodeRegression match = m.findMatchingNode(record);
            double nodeMean = match.getMean();
            mean.increment(nodeMean);
            variance.increment(nodeMean);
        }
    }
    int nrValidModels = (int) mean.getN();
    int index = 0;
    result[index++] = nrValidModels == 0 ? DataType.getMissingCell() : new DoubleCell(mean.getResult());
    if (appendConfidence) {
        result[index++] = nrValidModels == 0 ? DataType.getMissingCell() : new DoubleCell(variance.getResult());
    }
    if (appendModelCount) {
        result[index++] = new IntCell(nrValidModels);
    }
    return result;
}
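The ensemble prediction is simply the running mean of the per-tree leaf means, with the variance reported as the optional confidence column. A small standalone sketch of that aggregation using the same Apache Commons Math classes; the hard-coded tree outputs are made up for illustration.

import org.apache.commons.math3.stat.descriptive.moment.Mean;
import org.apache.commons.math3.stat.descriptive.moment.Variance;

final class EnsembleAggregationSketch {

    public static void main(final String[] args) {
        // pretend these are the leaf means returned by findMatchingNode(...).getMean()
        double[] perTreePredictions = {4.2, 3.9, 4.5, 4.1};
        Mean mean = new Mean();
        Variance variance = new Variance();
        for (double p : perTreePredictions) {
            mean.increment(p);
            variance.increment(p);
        }
        System.out.printf("prediction=%.3f, confidence(variance)=%.3f, models=%d%n",
            mean.getResult(), variance.getResult(), mean.getN());
    }
}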
use of org.knime.base.node.mine.treeensemble2.model.TreeModelRegression in project knime-core by knime.
the class MGradientBoostedTreesLearner method calcCoefficientMap.
private Map<TreeNodeSignature, Double> calcCoefficientMap(final double[] residuals, final double quantile,
    final TreeModelRegression tree) {
    final List<TreeNodeRegression> leafs = tree.getLeafs();
    final Map<TreeNodeSignature, Double> coefficientMap =
        new HashMap<TreeNodeSignature, Double>((int) (leafs.size() / 0.75 + 1));
    final double learningRate = getConfig().getLearningRate();
    for (TreeNodeRegression leaf : leafs) {
        final int[] indices = leaf.getRowIndicesInTreeData();
        final double[] values = new double[indices.length];
        for (int i = 0; i < indices.length; i++) {
            values[i] = residuals[indices[i]];
        }
        final double median = calcMedian(values);
        double sum = 0;
        for (int i = 0; i < values.length; i++) {
            sum += Math.signum(values[i] - median) * Math.min(quantile, Math.abs(values[i] - median));
        }
        final double coefficient = median + (1.0 / values.length) * sum;
        coefficientMap.put(leaf.getSignature(), coefficient * learningRate);
    }
    return coefficientMap;
}
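The per-leaf coefficient follows a robust, Huber-style update: start from the median of the leaf's residuals and add the average of the deviations clipped at the quantile. A self-contained sketch of just that arithmetic; calcMedian here is a simple sort-based stand-in, since the learner's own helper is not shown above.

import java.util.Arrays;

final class HuberLeafCoefficientSketch {

    /** Median via sorting; stand-in for the learner's calcMedian helper. */
    static double calcMedian(final double[] values) {
        double[] sorted = values.clone();
        Arrays.sort(sorted);
        int mid = sorted.length / 2;
        return sorted.length % 2 == 1 ? sorted[mid] : 0.5 * (sorted[mid - 1] + sorted[mid]);
    }

    /** median + (1/n) * sum(sign(r - median) * min(quantile, |r - median|)), as in calcCoefficientMap. */
    static double leafCoefficient(final double[] residuals, final double quantile) {
        double median = calcMedian(residuals);
        double sum = 0;
        for (double r : residuals) {
            sum += Math.signum(r - median) * Math.min(quantile, Math.abs(r - median));
        }
        return median + sum / residuals.length;
    }
}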
use of org.knime.base.node.mine.treeensemble2.model.TreeModelRegression in project knime-core by knime.
the class RegressionTreeLearnerNodeModel method execute.
/**
 * {@inheritDoc}
 */
@Override
protected PortObject[] execute(final PortObject[] inObjects, final ExecutionContext exec) throws Exception {
    BufferedDataTable t = (BufferedDataTable) inObjects[0];
    DataTableSpec spec = t.getDataTableSpec();
    final FilterLearnColumnRearranger learnRearranger = m_configuration.filterLearnColumns(spec);
    String warn = learnRearranger.getWarning();
    BufferedDataTable learnTable = exec.createColumnRearrangeTable(t, learnRearranger, exec.createSubProgress(0.0));
    DataTableSpec learnSpec = learnTable.getDataTableSpec();
    ExecutionMonitor readInExec = exec.createSubProgress(0.1);
    ExecutionMonitor learnExec = exec.createSubProgress(0.9);
    TreeDataCreator dataCreator = new TreeDataCreator(m_configuration, learnSpec, learnTable.getRowCount());
    exec.setProgress("Reading data into memory");
    TreeData data = dataCreator.readData(learnTable, m_configuration, readInExec);
    m_hiliteRowSample = dataCreator.getDataRowsForHilite();
    m_viewMessage = dataCreator.getViewMessage();
    String dataCreationWarning = dataCreator.getAndClearWarningMessage();
    if (dataCreationWarning != null) {
        if (warn == null) {
            warn = dataCreationWarning;
        } else {
            warn = warn + "\n" + dataCreationWarning;
        }
    }
    readInExec.setProgress(1.0);
    exec.setMessage("Learning tree");
    RandomData rd = m_configuration.createRandomData();
    final IDataIndexManager indexManager;
    if (data.getTreeType() == TreeType.BitVector) {
        indexManager = new BitVectorDataIndexManager(data.getNrRows());
    } else {
        indexManager = new DefaultDataIndexManager(data);
    }
    TreeNodeSignatureFactory signatureFactory = null;
    int maxLevels = m_configuration.getMaxLevels();
    if (maxLevels < TreeEnsembleLearnerConfiguration.MAX_LEVEL_INFINITE) {
        int capacity = IntMath.pow(2, maxLevels - 1);
        signatureFactory = new TreeNodeSignatureFactory(capacity);
    } else {
        signatureFactory = new TreeNodeSignatureFactory();
    }
    final RowSample rowSample = m_configuration.createRowSampler(data).createRowSample(rd);
    TreeLearnerRegression treeLearner =
        new TreeLearnerRegression(m_configuration, data, indexManager, signatureFactory, rd, rowSample);
    TreeModelRegression regTree = treeLearner.learnSingleTree(learnExec, rd);
    RegressionTreeModel model =
        new RegressionTreeModel(m_configuration, data.getMetaData(), regTree, data.getTreeType());
    RegressionTreeModelPortObjectSpec treePortObjectSpec = new RegressionTreeModelPortObjectSpec(learnSpec);
    RegressionTreeModelPortObject treePortObject = new RegressionTreeModelPortObject(model, treePortObjectSpec);
    learnExec.setProgress(1.0);
    m_treeModelPortObject = treePortObject;
    if (warn != null) {
        setWarningMessage(warn);
    }
    return new PortObject[] { treePortObject };
}
use of org.knime.base.node.mine.treeensemble2.model.TreeModelRegression in project knime-core by knime.
the class RegressionTreePredictorCellFactory method getCells.
/**
 * {@inheritDoc}
 */
@Override
public DataCell[] getCells(final DataRow row) {
    final RegressionTreeModel treeModel = m_predictor.getModel();
    int size = 1;
    DataCell[] result = new DataCell[size];
    DataRow filterRow = new FilterColumnRow(row, m_learnColumnInRealDataIndices);
    PredictorRecord record = treeModel.createPredictorRecord(filterRow, m_learnSpec);
    if (record == null) {
        // missing value
        Arrays.fill(result, DataType.getMissingCell());
        return result;
    }
    TreeModelRegression tree = treeModel.getTreeModel();
    TreeNodeRegression match = tree.findMatchingNode(record);
    double nodeMean = match.getMean();
    result[0] = new DoubleCell(nodeMean);
    return result;
}
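For the single-tree case, all of the prediction work happens in findMatchingNode: the record is routed down the split conditions until a leaf is reached, and that leaf's mean becomes the prediction. A simplified, self-contained sketch of that descent follows; the node type and predicate-based splits are invented for illustration and do not reflect KNIME's internal node classes.

import java.util.Map;
import java.util.function.Predicate;

final class LeafLookupSketch {

    /** Toy regression-tree node: either a leaf with a mean or an inner node with a split test. */
    static final class Node {
        final Double mean; // non-null for leaves
        final Predicate<Map<String, Double>> goLeft;
        final Node left;
        final Node right;

        Node(final double mean) {
            this.mean = mean;
            this.goLeft = null;
            this.left = null;
            this.right = null;
        }

        Node(final Predicate<Map<String, Double>> goLeft, final Node left, final Node right) {
            this.mean = null;
            this.goLeft = goLeft;
            this.left = left;
            this.right = right;
        }
    }

    /** Conceptual analogue of findMatchingNode(record).getMean(). */
    static double predict(final Node root, final Map<String, Double> record) {
        Node node = root;
        while (node.mean == null) {
            node = node.goLeft.test(record) ? node.left : node.right;
        }
        return node.mean;
    }
}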