Example use of org.knime.base.node.mine.treeensemble2.data.memberships.DefaultDataIndexManager in the knime-core project (by KNIME): class TreeTargetNominalColumnDataTest, method testGetDistribution.
/**
 * Tests the {@link TreeTargetNominalColumnData#getDistribution(DataMemberships, TreeEnsembleLearnerConfiguration)}
 * and {@link TreeTargetNominalColumnData#getDistribution(double[], TreeEnsembleLearnerConfiguration)} methods.
 * Both overloads must agree for every split criterion on a uniformly weighted data set.
 *
 * @throws InvalidSettingsException if the learner configuration rejects its settings
 */
@Test
public void testGetDistribution() throws InvalidSettingsException {
    final String nominalTargetCsv = "A,A,A,B,B,B,A";
    final String numericAttributeCsv = "1,2,3,4,5,6,7";
    final TreeEnsembleLearnerConfiguration config = new TreeEnsembleLearnerConfiguration(false);
    final TestDataGenerator dataGen = new TestDataGenerator(config);
    final TreeTargetNominalColumnData target = TestDataGenerator.createNominalTargetColumn(nominalTargetCsv);
    final TreeNumericColumnData attribute = dataGen.createNumericAttributeColumn(numericAttributeCsv, "test-col", 0);
    final TreeData data = new TreeData(new TreeAttributeColumnData[] { attribute }, target, TreeType.Ordinary);
    // Uniform unit weights: every row contributes equally to the priors.
    final double[] weights = new double[7];
    Arrays.fill(weights, 1.0);
    final DataMemberships rootMemberships = new RootDataMemberships(weights, data, new DefaultDataIndexManager(data));
    // 4 x "A" and 3 x "B" in the target column.
    final double[] expectedDistribution = new double[] { 4.0, 3.0 };
    // Gini impurity: 1 - (4/7)^2 - (3/7)^2
    config.setSplitCriterion(SplitCriterion.Gini);
    final double expectedGini = 0.4897959184;
    checkPriors(target.getDistribution(rootMemberships, config), expectedGini, expectedDistribution);
    checkPriors(target.getDistribution(weights, config), expectedGini, expectedDistribution);
    // Shannon entropy: -(4/7)log2(4/7) - (3/7)log2(3/7)
    config.setSplitCriterion(SplitCriterion.InformationGain);
    final double expectedEntropy = 0.985228136;
    checkPriors(target.getDistribution(rootMemberships, config), expectedEntropy, expectedDistribution);
    checkPriors(target.getDistribution(weights, config), expectedEntropy, expectedDistribution);
    // Information gain ratio shares the entropy-based prior impurity with plain information gain.
    config.setSplitCriterion(SplitCriterion.InformationGainRatio);
    checkPriors(target.getDistribution(rootMemberships, config), expectedEntropy, expectedDistribution);
    checkPriors(target.getDistribution(weights, config), expectedEntropy, expectedDistribution);
}

/**
 * Asserts that the given priors carry the expected impurity and class distribution.
 *
 * @param priors the priors under test
 * @param expectedImpurity expected prior impurity value
 * @param expectedDistribution expected per-class weight distribution
 */
private static void checkPriors(final ClassificationPriors priors, final double expectedImpurity,
    final double[] expectedDistribution) {
    assertEquals(expectedImpurity, priors.getPriorImpurity(), DELTA);
    assertArrayEquals(expectedDistribution, priors.getDistribution(), DELTA);
}
Example use of org.knime.base.node.mine.treeensemble2.data.memberships.DefaultDataIndexManager in the knime-core project (by KNIME): class RegressionTreeLearnerNodeModel, method execute.
/**
 * {@inheritDoc}
 */
@Override
protected PortObject[] execute(final PortObject[] inObjects, final ExecutionContext exec) throws Exception {
    final BufferedDataTable inTable = (BufferedDataTable) inObjects[0];
    final DataTableSpec inSpec = inTable.getDataTableSpec();
    // Restrict the input to the configured learn columns; the rearranger may produce a warning.
    final FilterLearnColumnRearranger learnRearranger = m_configuration.filterLearnColumns(inSpec);
    String warning = learnRearranger.getWarning();
    final BufferedDataTable learnTable =
        exec.createColumnRearrangeTable(inTable, learnRearranger, exec.createSubProgress(0.0));
    final DataTableSpec learnSpec = learnTable.getDataTableSpec();
    // Progress budget: 10% for reading the data into memory, 90% for learning the tree.
    final ExecutionMonitor readInExec = exec.createSubProgress(0.1);
    final ExecutionMonitor learnExec = exec.createSubProgress(0.9);
    final TreeDataCreator dataCreator = new TreeDataCreator(m_configuration, learnSpec, learnTable.getRowCount());
    exec.setProgress("Reading data into memory");
    final TreeData data = dataCreator.readData(learnTable, m_configuration, readInExec);
    m_hiliteRowSample = dataCreator.getDataRowsForHilite();
    m_viewMessage = dataCreator.getViewMessage();
    // Append any warning raised during data creation to the rearranger warning.
    final String dataCreationWarning = dataCreator.getAndClearWarningMessage();
    if (dataCreationWarning != null) {
        warning = (warning == null) ? dataCreationWarning : warning + "\n" + dataCreationWarning;
    }
    readInExec.setProgress(1.0);
    exec.setMessage("Learning tree");
    final RandomData rd = m_configuration.createRandomData();
    // Bit-vector data uses its own index manager; everything else gets the default one.
    final IDataIndexManager indexManager = (data.getTreeType() == TreeType.BitVector)
        ? new BitVectorDataIndexManager(data.getNrRows())
        : new DefaultDataIndexManager(data);
    // When the tree depth is bounded, presize the signature factory to 2^(maxLevels-1) nodes.
    final int maxLevels = m_configuration.getMaxLevels();
    final TreeNodeSignatureFactory signatureFactory =
        (maxLevels < TreeEnsembleLearnerConfiguration.MAX_LEVEL_INFINITE)
            ? new TreeNodeSignatureFactory(IntMath.pow(2, maxLevels - 1))
            : new TreeNodeSignatureFactory();
    final RowSample rowSample = m_configuration.createRowSampler(data).createRowSample(rd);
    final TreeLearnerRegression treeLearner =
        new TreeLearnerRegression(m_configuration, data, indexManager, signatureFactory, rd, rowSample);
    final TreeModelRegression regTree = treeLearner.learnSingleTree(learnExec, rd);
    final RegressionTreeModel model =
        new RegressionTreeModel(m_configuration, data.getMetaData(), regTree, data.getTreeType());
    final RegressionTreeModelPortObjectSpec treePortObjectSpec = new RegressionTreeModelPortObjectSpec(learnSpec);
    final RegressionTreeModelPortObject treePortObject =
        new RegressionTreeModelPortObject(model, treePortObjectSpec);
    learnExec.setProgress(1.0);
    m_treeModelPortObject = treePortObject;
    if (warning != null) {
        setWarningMessage(warning);
    }
    return new PortObject[] { treePortObject };
}
Aggregations