Use of ml.shifu.shifu.core.binning.MunroPatBinning in project shifu by ShifuML.
The exec method of class BinningDataUDF.
/*
* (non-Javadoc)
*
* @see org.apache.pig.EvalFunc#exec(org.apache.pig.data.Tuple)
*/
@Override
public Tuple exec(Tuple input) throws IOException {
    if (input == null || input.size() < 2) {
        return null;
    }

    Integer columnId = (Integer) input.get(0);
    DataBag databag = (DataBag) input.get(1);
    ColumnConfig columnConfig = super.columnConfigList.get(columnId);

    // Pick a binning implementation based on the column type and the configured algorithm.
    AbstractBinning<?> binning = null;
    if (columnConfig.isCategorical()) {
        binning = new CategoricalBinning(-1, super.modelConfig.getMissingOrInvalidValues(), this.maxCategorySize);
    } else {
        if (super.modelConfig.getBinningMethod().equals(BinningMethod.EqualInterval)) {
            binning = new EqualIntervalBinning(modelConfig.getStats().getMaxNumBin());
        } else {
            switch(this.modelConfig.getBinningAlgorithm()) {
                case Native:
                    log.info("Invoke Native binning method, memory consuming!!");
                    // always merge bins
                    binning = new NativeBinning(modelConfig.getStats().getMaxNumBin(), true);
                    break;
                case SPDT:
                case SPDTI:
                    log.info("Invoke SPDT (Streaming Parallel Decision Tree) binning method.");
                    binning = new EqualPopulationBinning(modelConfig.getStats().getMaxNumBin());
                    break;
                case MunroPat:
                case MunroPatI:
                    log.info("Invoke Munro & Paterson selecting algorithm");
                    binning = new MunroPatBinning(modelConfig.getStats().getMaxNumBin());
                    break;
                default:
                    log.info("Default: Invoke Munro & Paterson selecting algorithm");
                    binning = new MunroPatBinning(modelConfig.getStats().getMaxNumBin());
                    break;
            }
        }
    }

    // Feed every non-null value of this column into the selected binning instance.
    Iterator<Tuple> iterator = databag.iterator();
    while (iterator.hasNext()) {
        Tuple element = iterator.next();
        if (element == null || element.size() < 2) {
            continue;
        }
        Object value = element.get(1);
        if (value != null) {
            binning.addData(value.toString());
        }
    }

    Tuple output = TupleFactory.getInstance().newTuple(2);
    output.set(0, columnId);
    // Check the bin count here: if a categorical variable has too many distinct values,
    // joining them together consumes too much memory and can cause an OOM exception.
    List<?> dataBin = binning.getDataBin();
    if (dataBin.size() > this.maxCategorySize) {
        output.set(1, "");
    } else {
        output.set(1, StringUtils.join(dataBin, CalculateStatsUDF.CATEGORY_VAL_SEPARATOR));
    }

    log.info("Finish merging bin info for columnId - " + columnId);
    return output;
}
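
For context, the binning API exercised by this UDF can also be driven directly. The snippet below is a minimal sketch, not code from the project: it relies only on the calls already visible in exec (the single int-argument MunroPatBinning constructor, addData(String), and getDataBin()); the class name MunroPatBinningSketch, the bin count, and the sample values are made up for illustration.

import java.util.List;

import ml.shifu.shifu.core.binning.MunroPatBinning;

public class MunroPatBinningSketch {

    public static void main(String[] args) {
        // Hypothetical example: build 4 bins over a small numeric sample.
        MunroPatBinning binning = new MunroPatBinning(4);

        double[] samples = { 0.5, 1.2, 3.7, 4.4, 5.9, 7.1, 8.6, 9.3 };
        for (double sample : samples) {
            // Values are fed in as strings, exactly as the UDF does with Tuple fields.
            binning.addData(String.valueOf(sample));
        }

        // The merged bin information; the UDF joins this list with
        // CalculateStatsUDF.CATEGORY_VAL_SEPARATOR before emitting it.
        List<?> dataBin = binning.getDataBin();
        System.out.println("bins: " + dataBin);
    }
}

In the UDF itself, the same getDataBin() result is joined with CalculateStatsUDF.CATEGORY_VAL_SEPARATOR and written as the second field of the output tuple.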