Example 1 with SourceFile

use of ml.shifu.shifu.fs.SourceFile in project shifu by ShifuML.

the class StatsModelProcessor method run.

/**
 * runner for statistics
 */
@Override
public int run() throws Exception {
    log.info("Step Start: stats");
    long start = System.currentTimeMillis();
    try {
        // 0. set up and sync to HDFS
        setUp(ModelStep.STATS);
        // resync ModelConfig.json/ColumnConfig.json to HDFS
        syncDataToHdfs(modelConfig.getDataSet().getSource());
        if (getBooleanParam(this.params, Constants.IS_COMPUTE_CORR)) {
            // 1. validate that 'shifu stats' has been run before 'shifu stats -correlation'
            boolean foundValidMeanValueColumn = isMeanCalculated();
            if (!foundValidMeanValueColumn) {
                log.warn("Some column mean values are null; please check whether 'shifu stats' has been run first.");
                return -1;
            }
            // 2. compute correlation
            log.info("Start computing correlation value ...");
            SourceType source = this.modelConfig.getDataSet().getSource();
            String corrPath = super.getPathFinder().getCorrelationPath(source);
            // check if can start from existing output
            boolean reuseCorrResult = Environment.getBoolean("shifu.stats.corr.reuse", Boolean.FALSE);
            if (reuseCorrResult && ShifuFileUtils.isFileExists(corrPath, SourceType.HDFS)) {
                dumpAndCalculateCorrelationResult(source, corrPath);
            } else {
                runCorrMapReduceJob();
            }
            // 3. save column config list
            saveColumnConfigList();
        } else if (getBooleanParam(this.params, Constants.IS_COMPUTE_PSI)) {
            boolean foundValidMeanValueColumn = isMeanCalculated();
            if (!foundValidMeanValueColumn) {
                log.warn("Some column mean values are null; please check whether 'shifu stats' has been run first.");
                return -1;
            }
            if (StringUtils.isNotEmpty(modelConfig.getPsiColumnName())) {
                new MapReducerStatsWorker(this, modelConfig, columnConfigList).runPSI();
                // save column config list after running PSI successfully
                saveColumnConfigList();
            } else {
                log.warn("To run PSI, please set the PSI column in dataSet::psiColumnName.");
            }
        } else if (getBooleanParam(this.params, Constants.IS_REBIN)) {
            // run the re-binning
            String backupColumnConfigPath = this.pathFinder.getBackupColumnConfig();
            if (!ShifuFileUtils.isFileExists(new Path(backupColumnConfigPath), SourceType.LOCAL)) {
                ShifuFileUtils.createDirIfNotExists(new SourceFile(Constants.TMP, SourceType.LOCAL));
                saveColumnConfigList(backupColumnConfigPath, this.columnConfigList);
            } else {
                // backup ColumnConfig.json already exists; reuse its binning info for the re-bin
                List<ColumnConfig> backColumnConfigList = CommonUtils.loadColumnConfigList(backupColumnConfigPath, SourceType.LOCAL, false);
                for (ColumnConfig backupColumnConfig : backColumnConfigList) {
                    for (ColumnConfig columnConfig : this.columnConfigList) {
                        if (NSColumnUtils.isColumnEqual(backupColumnConfig.getColumnName(), columnConfig.getColumnName())) {
                            columnConfig.setColumnBinning(backupColumnConfig.getColumnBinning());
                        }
                    }
                }
            }
            // check whether the user provided a candidate variable list
            boolean hasCandidates = CommonUtils.hasCandidateColumns(this.columnConfigList);
            List<ColumnConfig> rebinColumns = new ArrayList<ColumnConfig>();
            List<String> catVariables = getStringList(this.params, Constants.REQUEST_VARS, ",");
            for (ColumnConfig columnConfig : this.columnConfigList) {
                if (CollectionUtils.isEmpty(catVariables) || isRequestColumn(catVariables, columnConfig)) {
                    if (CommonUtils.isGoodCandidate(columnConfig, hasCandidates)) {
                        rebinColumns.add(columnConfig);
                    } else {
                        log.warn("Column - {} is not a good candidate. Skip it.", columnConfig.getColumnName());
                    }
                }
            }
            if (CollectionUtils.isNotEmpty(rebinColumns)) {
                for (ColumnConfig columnConfig : rebinColumns) {
                    doReBin(columnConfig);
                }
            }
            // use the merged ColumnConfig.json to replace the current one
            saveColumnConfigList();
        } else {
            AbstractStatsExecutor statsExecutor = null;
            if (modelConfig.isMapReduceRunMode()) {
                if (modelConfig.getBinningAlgorithm().equals(ModelStatsConf.BinningAlgorithm.DynamicBinning)) {
                    statsExecutor = new DIBStatsExecutor(this, modelConfig, columnConfigList);
                } else if (modelConfig.getBinningAlgorithm().equals(ModelStatsConf.BinningAlgorithm.MunroPat)) {
                    statsExecutor = new MunroPatStatsExecutor(this, modelConfig, columnConfigList);
                } else if (modelConfig.getBinningAlgorithm().equals(ModelStatsConf.BinningAlgorithm.MunroPatI)) {
                    statsExecutor = new MunroPatIStatsExecutor(this, modelConfig, columnConfigList);
                } else if (modelConfig.getBinningAlgorithm().equals(ModelStatsConf.BinningAlgorithm.SPDT)) {
                    statsExecutor = new SPDTStatsExecutor(this, modelConfig, columnConfigList);
                } else if (modelConfig.getBinningAlgorithm().equals(ModelStatsConf.BinningAlgorithm.SPDTI)) {
                    statsExecutor = new SPDTIStatsExecutor(this, modelConfig, columnConfigList);
                } else {
                    statsExecutor = new SPDTIStatsExecutor(this, modelConfig, columnConfigList);
                }
            } else if (modelConfig.isLocalRunMode()) {
                statsExecutor = new AkkaStatsWorker(this, modelConfig, columnConfigList);
            } else {
                throw new ShifuException(ShifuErrorCode.ERROR_UNSUPPORT_MODE);
            }
            statsExecutor.doStats();
            // update the backup ColumnConfig.json after running stats
            String backupColumnConfigPath = this.pathFinder.getBackupColumnConfig();
            ShifuFileUtils.createDirIfNotExists(new SourceFile(Constants.TMP, SourceType.LOCAL));
            saveColumnConfigList(backupColumnConfigPath, this.columnConfigList);
        }
        // back up current column config each time as stats will always change CC.json
        this.backupCurrentColumnConfigToLocal(SDF.format(new Date()));
        syncDataToHdfs(modelConfig.getDataSet().getSource());
        clearUp(ModelStep.STATS);
    } catch (ShifuException e) {
        log.error("Error:" + e.getError().toString() + "; msg:" + e.getMessage(), e);
        return -1;
    } catch (Exception e) {
        log.error("Error:" + e.getMessage(), e);
        return -1;
    }
    log.info("Step Finished: stats with {} ms", (System.currentTimeMillis() - start));
    return 0;
}
Also used : Path(org.apache.hadoop.fs.Path) MapReducerStatsWorker(ml.shifu.shifu.core.processor.stats.MapReducerStatsWorker) ColumnConfig(ml.shifu.shifu.container.obj.ColumnConfig) SourceType(ml.shifu.shifu.container.obj.RawSourceData.SourceType) ArrayList(java.util.ArrayList) AbstractStatsExecutor(ml.shifu.shifu.core.processor.stats.AbstractStatsExecutor) AkkaStatsWorker(ml.shifu.shifu.core.processor.stats.AkkaStatsWorker) Date(java.util.Date) ShifuException(ml.shifu.shifu.exception.ShifuException) UnsupportedEncodingException(java.io.UnsupportedEncodingException) JexlException(org.apache.commons.jexl2.JexlException) IOException(java.io.IOException) DIBStatsExecutor(ml.shifu.shifu.core.processor.stats.DIBStatsExecutor) MunroPatIStatsExecutor(ml.shifu.shifu.core.processor.stats.MunroPatIStatsExecutor) SPDTIStatsExecutor(ml.shifu.shifu.core.processor.stats.SPDTIStatsExecutor) SPDTStatsExecutor(ml.shifu.shifu.core.processor.stats.SPDTStatsExecutor) MunroPatStatsExecutor(ml.shifu.shifu.core.processor.stats.MunroPatStatsExecutor) SourceFile(ml.shifu.shifu.fs.SourceFile)
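The re-bin and default branches above share one small idiom: guard with ShifuFileUtils.isFileExists, create the local tmp folder through a SourceFile, then write the ColumnConfig backup. Below is a minimal, hedged sketch of that idiom; the import packages for ShifuFileUtils and Constants, and the ColumnConfigWriter callback standing in for the processor's non-static saveColumnConfigList(path, list), are assumptions for illustration only.

import java.io.IOException;

import org.apache.hadoop.fs.Path;

import ml.shifu.shifu.container.obj.RawSourceData.SourceType;
import ml.shifu.shifu.fs.ShifuFileUtils;   // assumed package, by analogy with SourceFile
import ml.shifu.shifu.fs.SourceFile;
import ml.shifu.shifu.util.Constants;      // assumed package for Constants

public class BackupOnceSketch {

    /** Hypothetical stand-in for the processor's own saveColumnConfigList(path, list). */
    public interface ColumnConfigWriter {
        void write(String path) throws IOException;
    }

    /** Write a ColumnConfig backup only if none exists yet, mirroring the guard in Example 1. */
    public static void backupIfAbsent(String backupColumnConfigPath, ColumnConfigWriter writer)
            throws IOException {
        if (!ShifuFileUtils.isFileExists(new Path(backupColumnConfigPath), SourceType.LOCAL)) {
            // ensure the local tmp folder exists before writing into it
            ShifuFileUtils.createDirIfNotExists(new SourceFile(Constants.TMP, SourceType.LOCAL));
            writer.write(backupColumnConfigPath);
        }
    }
}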

Example 2 with SourceFile

use of ml.shifu.shifu.fs.SourceFile in project shifu by ShifuML.

the class VarSelectModelProcessor method run.

/**
 * Run for the variable selection
 */
@Override
public int run() throws Exception {
    log.info("Step Start: varselect");
    long start = System.currentTimeMillis();
    try {
        setUp(ModelStep.VARSELECT);
        validateParameters();
        // reset all selections if user specify or select by absolute number
        if (getIsToReset()) {
            log.info("Reset all selection data, including type, final select, etc.!");
            resetAllFinalSelect();
        } else if (getIsToList()) {
            log.info("Below variables are selected - ");
            for (ColumnConfig columnConfig : this.columnConfigList) {
                if (columnConfig.isFinalSelect()) {
                    log.info(columnConfig.getColumnName());
                }
            }
            log.info("-----  Done -----");
        } else if (getIsToAutoFilter()) {
            log.info("Start to run variable auto filter.");
            runAutoVarFilter();
            log.info("-----  Done -----");
        } else if (getIsRecoverAuto()) {
            String varselHistory = pathFinder.getVarSelHistory();
            if (ShifuFileUtils.isFileExists(varselHistory, SourceType.LOCAL)) {
                log.info("!!! Auto filtered variables will be recovered from history.");
                recoverVarselStatusFromHist(varselHistory);
                log.info("-----  Done -----");
            } else {
                log.warn("No variables auto filter history is found.");
            }
        } else {
            // sync to make sure load from hdfs config is consistent with local configuration
            syncDataToHdfs(super.modelConfig.getDataSet().getSource());
            String filterExpressions = super.modelConfig.getSegmentFilterExpressionsAsString();
            Environment.getProperties().put("shifu.segment.expressions", filterExpressions);
            if (StringUtils.isNotBlank(filterExpressions)) {
                String[] splits = CommonUtils.split(filterExpressions, Constants.SHIFU_STATS_FILTER_EXPRESSIONS_DELIMETER);
                for (int i = 0; i < super.columnConfigList.size(); i++) {
                    ColumnConfig config = super.columnConfigList.get(i);
                    int rawSize = super.columnConfigList.size() / (1 + splits.length);
                    if (config.isTarget()) {
                        for (int j = 0; j < splits.length; j++) {
                            ColumnConfig otherConfig = super.columnConfigList.get((j + 1) * rawSize + i);
                            otherConfig.setColumnFlag(ColumnFlag.ForceRemove);
                            otherConfig.setFinalSelect(false);
                        }
                        break;
                    }
                }
                this.saveColumnConfigList();
                // sync to make sure load from hdfs config is consistent with local configuration
                syncDataToHdfs(super.modelConfig.getDataSet().getSource());
            }
            if (modelConfig.isRegression()) {
                String filterBy = this.modelConfig.getVarSelectFilterBy();
                if (filterBy.equalsIgnoreCase(Constants.FILTER_BY_KS) || filterBy.equalsIgnoreCase(Constants.FILTER_BY_IV) || filterBy.equalsIgnoreCase(Constants.FILTER_BY_PARETO) || filterBy.equalsIgnoreCase(Constants.FILTER_BY_MIX)) {
                    VariableSelector selector = new VariableSelector(this.modelConfig, this.columnConfigList);
                    this.columnConfigList = selector.selectByFilter();
                } else if (filterBy.equalsIgnoreCase(Constants.FILTER_BY_FI)) {
                    if (!CommonUtils.isTreeModel(modelConfig.getAlgorithm())) {
                        throw new IllegalArgumentException("Filter by FI only works well in GBT/RF. Please check your modelconfig::train.");
                    }
                    selectByFeatureImportance();
                } else if (filterBy.equalsIgnoreCase(Constants.FILTER_BY_SE) || filterBy.equalsIgnoreCase(Constants.FILTER_BY_ST)) {
                    if (!Constants.NN.equalsIgnoreCase(modelConfig.getAlgorithm()) && !Constants.LR.equalsIgnoreCase(modelConfig.getAlgorithm())) {
                        throw new IllegalArgumentException("Filter by SE/ST only works well in NN/LR. Please check your modelconfig::train.");
                    }
                    int recursiveCnt = getRecursiveCnt();
                    int i = 0;
                    // create varsel directory and write original copy of ColumnConfig.json
                    ShifuFileUtils.createDirIfNotExists(pathFinder.getVarSelDir(), SourceType.LOCAL);
                    super.saveColumnConfigList(pathFinder.getVarSelColumnConfig(i), this.columnConfigList);
                    while ((i++) < recursiveCnt) {
                        String trainLogFile = TRAIN_LOG_PREFIX + "-" + (i - 1) + ".log";
                        distributedSEWrapper(trainLogFile);
                        // copy training log to SE train.log
                        ShifuFileUtils.move(trainLogFile, new File(pathFinder.getVarSelDir(), trainLogFile).getPath(), SourceType.LOCAL);
                        String varSelectMSEOutputPath = pathFinder.getVarSelectMSEOutputPath(modelConfig.getDataSet().getSource());
                        // even if SE fails to run, still create an empty se.x file
                        String varSelMSEHistPath = pathFinder.getVarSelMSEHistPath(i - 1);
                        ShifuFileUtils.createFileIfNotExists(varSelMSEHistPath, SourceType.LOCAL);
                        ShifuFileUtils.copyToLocal(new SourceFile(varSelectMSEOutputPath, modelConfig.getDataSet().getSource()), Constants.SHIFU_VARSELECT_SE_OUTPUT_NAME, varSelMSEHistPath);
                        // save as backup
                        super.saveColumnConfigList(pathFinder.getVarSelColumnConfig(i), this.columnConfigList);
                        // save as current copy
                        super.saveColumnConfigList();
                    }
                } else if (filterBy.equalsIgnoreCase(Constants.FILTER_BY_VOTED)) {
                    votedVariablesSelection();
                }
            } else {
                boolean hasCandidates = CommonUtils.hasCandidateColumns(this.columnConfigList);
                if (this.modelConfig.getVarSelect().getForceEnable() && CollectionUtils.isNotEmpty(this.modelConfig.getListForceSelect())) {
                    log.info("Force Selection is enabled ... " + "for multi-classification, it is currently only used to select variables.");
                    for (ColumnConfig config : this.columnConfigList) {
                        if (config.isForceSelect()) {
                            if (!CommonUtils.isGoodCandidate(config, hasCandidates, modelConfig.isRegression())) {
                                log.warn("!! Variable - {} is not a good candidate. But it is in forceselect list", config.getColumnName());
                            }
                            config.setFinalSelect(true);
                        }
                    }
                    log.info("{} variables are selected by force.", this.modelConfig.getListForceSelect().size());
                } else {
                    // multi-classification: select all candidates at first; TODO add SE for multi-classification
                    for (ColumnConfig config : this.columnConfigList) {
                        if (CommonUtils.isGoodCandidate(config, hasCandidates, modelConfig.isRegression())) {
                            config.setFinalSelect(true);
                        }
                    }
                }
            }
            // clean shadow targets for multi-segments
            cleanShadowTargetsForSegments();
            if (modelConfig.getVarSelect().getAutoFilterEnable()) {
                runAutoVarFilter();
            }
        }
        // save column config to file and sync to HDFS
        clearUp(ModelStep.VARSELECT);
    } catch (ShifuException e) {
        log.error("Error:" + e.getError().toString() + "; msg:" + e.getMessage(), e);
        return -1;
    } catch (Exception e) {
        log.error("Error:" + e.getMessage(), e);
        return -1;
    }
    log.info("Step Finished: varselect with {} ms", (System.currentTimeMillis() - start));
    return 0;
}
Also used : ColumnConfig(ml.shifu.shifu.container.obj.ColumnConfig) VariableSelector(ml.shifu.shifu.core.VariableSelector) SourceFile(ml.shifu.shifu.fs.SourceFile) File(java.io.File) ShifuException(ml.shifu.shifu.exception.ShifuException) JexlException(org.apache.commons.jexl2.JexlException) IOException(java.io.IOException)
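Inside the SE/ST loop, the per-iteration history handling boils down to two utility calls: make sure a local history file exists, then copy the remote SE output down beside it via a SourceFile. A minimal sketch follows; the class, method, and parameter names are illustrative, and the ShifuFileUtils package is an assumption made by analogy with SourceFile.

import java.io.IOException;

import ml.shifu.shifu.container.obj.RawSourceData.SourceType;
import ml.shifu.shifu.fs.ShifuFileUtils;   // assumed package
import ml.shifu.shifu.fs.SourceFile;

public class SeOutputCopySketch {

    /**
     * Mirror of Example 2's per-iteration history step: keep an (empty) local history file
     * even if the SE job failed, then pull the remote SE output down next to it.
     */
    public static void copySeOutput(String remoteSeOutputPath, SourceType remoteSource,
            String localHistPath, String outputFileName) throws IOException {
        ShifuFileUtils.createFileIfNotExists(localHistPath, SourceType.LOCAL);
        // SourceFile pairs a path with its source type, so the same copy call
        // works whether the SE output lives on HDFS or on local disk.
        ShifuFileUtils.copyToLocal(new SourceFile(remoteSeOutputPath, remoteSource),
                outputFileName, localHistPath);
    }
}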

Example 3 with SourceFile

use of ml.shifu.shifu.fs.SourceFile in project shifu by ShifuML.

the class BasicModelProcessor method backupCurrentColumnConfigToLocal.

/**
 * Back up the current {@link #columnConfigList} to the local tmp folder as a ColumnConfig.json copy, named with a timestamp in 'YYYY-MM-dd-HH:mm:SS' format.
 *
 * @param timestamp - timestamp used to name the ColumnConfig backup
 * @throws IOException
 *             any IO exception
 * @throws IllegalArgumentException
 *             bad input in backupColumnConfigPath
 */
public void backupCurrentColumnConfigToLocal(String timestamp) throws IllegalArgumentException, IOException {
    String backupColumnConfigPath = this.pathFinder.getBackupColumnConfig(timestamp);
    if (!ShifuFileUtils.isFileExists(new Path(backupColumnConfigPath), SourceType.LOCAL)) {
        ShifuFileUtils.createDirIfNotExists(new SourceFile(Constants.TMP, SourceType.LOCAL));
        saveColumnConfigList(backupColumnConfigPath, this.columnConfigList);
    }
}
Also used : Path(org.apache.hadoop.fs.Path) SourceFile(ml.shifu.shifu.fs.SourceFile)
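A hedged usage sketch for this helper: build a timestamp string and hand it to backupCurrentColumnConfigToLocal. The SimpleDateFormat pattern and the fully qualified processor package below are assumptions (the processors above share their own SDF constant); only the call itself comes from the examples.

import java.text.SimpleDateFormat;
import java.util.Date;

public class BackupCallSketch {

    // The processor package is assumed from the stats executors' package
    // (ml.shifu.shifu.core.processor.stats suggests ml.shifu.shifu.core.processor).
    public static void backup(ml.shifu.shifu.core.processor.BasicModelProcessor processor)
            throws Exception {
        // illustrative timestamp pattern; the javadoc documents 'YYYY-MM-dd-HH:mm:SS'
        String timestamp = new SimpleDateFormat("yyyy-MM-dd-HH.mm.ss").format(new Date());
        // no-op if a backup for this timestamp already exists (see the isFileExists guard above)
        processor.backupCurrentColumnConfigToLocal(timestamp);
    }
}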

Example 4 with SourceFile

use of ml.shifu.shifu.fs.SourceFile in project shifu by ShifuML.

the class MapReducerStatsWorker method runPSI.

/**
 * Calculate the PSI
 *
 * @throws IOException
 *             if reading the PSI output scanners fails
 */
public void runPSI() throws IOException {
    log.info("Run PSI to use {} to compute the PSI ", modelConfig.getPsiColumnName());
    ColumnConfig columnConfig = CommonUtils.findColumnConfigByName(columnConfigList, modelConfig.getPsiColumnName());
    if (columnConfig == null || (!columnConfig.isMeta() && !columnConfig.isCategorical())) {
        log.warn("Unable to use the PSI column {} specified in ModelConfig to compute PSI: " + "it is neither meta nor categorical type.", columnConfig != null ? columnConfig.getColumnName() : "unknown");
        return;
    }
    log.info("Start to use {} to compute the PSI ", columnConfig.getColumnName());
    Map<String, String> paramsMap = new HashMap<>();
    paramsMap.put("delimiter", CommonUtils.escapePigString(modelConfig.getDataSetDelimiter()));
    paramsMap.put("PSIColumn", modelConfig.getPsiColumnName().trim());
    paramsMap.put("column_parallel", Integer.toString(columnConfigList.size() / 10));
    paramsMap.put("value_index", "2");
    PigExecutor.getExecutor().submitJob(modelConfig, pathFinder.getScriptPath("scripts/PSI.pig"), paramsMap);
    List<Scanner> scanners = ShifuFileUtils.getDataScanners(pathFinder.getPSIInfoPath(), modelConfig.getDataSet().getSource());
    if (CollectionUtils.isEmpty(scanners)) {
        log.info("PSI computation failed: no output data found.");
        return;
    }
    String delimiter = Environment.getProperty(Constants.SHIFU_OUTPUT_DATA_DELIMITER, Constants.DEFAULT_DELIMITER);
    Splitter splitter = Splitter.on(delimiter).trimResults();
    List<String> unitStats = new ArrayList<String>(this.columnConfigList.size());
    for (Scanner scanner : scanners) {
        while (scanner.hasNext()) {
            // String[] output = scanner.nextLine().trim().split("\\|");
            String[] output = Lists.newArrayList(splitter.split(scanner.nextLine())).toArray(new String[0]);
            try {
                int columnNum = Integer.parseInt(output[0]);
                ColumnConfig config = this.columnConfigList.get(columnNum);
                config.setPSI(Double.parseDouble(output[1]));
                unitStats.add(output[0] + "|" + output[2]);
            // config.setUnitStats(
            // Arrays.asList(StringUtils.split(output[2], CalculateStatsUDF.CATEGORY_VAL_SEPARATOR)));
            } catch (Exception e) {
                log.error("error in parsing", e);
            }
        }
        // close scanner
        IOUtils.closeQuietly(scanner);
    }
    // write unit stat into a temporary file
    ShifuFileUtils.createDirIfNotExists(new SourceFile(Constants.TMP, RawSourceData.SourceType.LOCAL));
    String ccUnitStatsFile = this.pathFinder.getColumnConfigUnitStatsPath();
    ShifuFileUtils.writeLines(unitStats, ccUnitStatsFile, RawSourceData.SourceType.LOCAL);
    log.info("The Unit Stats is stored in - {}.", ccUnitStatsFile);
    log.info("Run PSI - done.");
}
Also used : Scanner(java.util.Scanner) Splitter(com.google.common.base.Splitter) ColumnConfig(ml.shifu.shifu.container.obj.ColumnConfig) HashMap(java.util.HashMap) ArrayList(java.util.ArrayList) ShifuException(ml.shifu.shifu.exception.ShifuException) JexlException(org.apache.commons.jexl2.JexlException) IOException(java.io.IOException) SourceFile(ml.shifu.shifu.fs.SourceFile)
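The per-line parsing in runPSI is a small, reusable idiom: split the Pig output record with a Guava Splitter, then read the column index and PSI value from the first two fields. A standalone sketch is below; the sample record and the '|' delimiter are illustrative only (the real code reads the delimiter from Environment via Constants.SHIFU_OUTPUT_DATA_DELIMITER).

import com.google.common.base.Splitter;
import com.google.common.collect.Lists;

public class PsiLineParseSketch {

    public static void main(String[] args) {
        // illustrative delimiter and record; the project resolves the delimiter at runtime
        String delimiter = "|";
        String line = "12|0.0351|unit-stats-payload";

        Splitter splitter = Splitter.on(delimiter).trimResults();
        // same conversion as Example 4: materialize the split Iterable into a String[]
        String[] output = Lists.newArrayList(splitter.split(line)).toArray(new String[0]);

        int columnNum = Integer.parseInt(output[0]);     // index into columnConfigList
        double psi = Double.parseDouble(output[1]);      // PSI value for that column
        String unitStats = output[0] + "|" + output[2];  // unit stats record, as in Example 4

        System.out.println("column " + columnNum + " -> PSI " + psi + ", unit stats: " + unitStats);
    }
}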

Aggregations

SourceFile (ml.shifu.shifu.fs.SourceFile) 4
IOException (java.io.IOException) 3
ColumnConfig (ml.shifu.shifu.container.obj.ColumnConfig) 3
ShifuException (ml.shifu.shifu.exception.ShifuException) 3
JexlException (org.apache.commons.jexl2.JexlException) 3
ArrayList (java.util.ArrayList) 2
Path (org.apache.hadoop.fs.Path) 2
Splitter (com.google.common.base.Splitter) 1
File (java.io.File) 1
UnsupportedEncodingException (java.io.UnsupportedEncodingException) 1
Date (java.util.Date) 1
HashMap (java.util.HashMap) 1
Scanner (java.util.Scanner) 1
SourceType (ml.shifu.shifu.container.obj.RawSourceData.SourceType) 1
VariableSelector (ml.shifu.shifu.core.VariableSelector) 1
AbstractStatsExecutor (ml.shifu.shifu.core.processor.stats.AbstractStatsExecutor) 1
AkkaStatsWorker (ml.shifu.shifu.core.processor.stats.AkkaStatsWorker) 1
DIBStatsExecutor (ml.shifu.shifu.core.processor.stats.DIBStatsExecutor) 1
MapReducerStatsWorker (ml.shifu.shifu.core.processor.stats.MapReducerStatsWorker) 1
MunroPatIStatsExecutor (ml.shifu.shifu.core.processor.stats.MunroPatIStatsExecutor) 1