
Example 1 with RequiredFieldList

Use of org.apache.pig.LoadPushDown.RequiredFieldList in project parquet-mr by apache.

In class TestParquetLoader, method testSchema:

@Test
public void testSchema() throws Exception {
    // a dummy location; the schema comes from the loader constructor
    String location = "garbage";
    ParquetLoader pLoader = new ParquetLoader("a:chararray, " + "b:{t:(c:chararray, d:chararray)}, " + "p:[(q:chararray, r:chararray)]");
    Job job = new Job();
    pLoader.getSchema(location, job);
    RequiredFieldList list = new RequiredFieldList();
    // project the top-level chararray column "a"
    RequiredField field = new RequiredField("a", 0, null, DataType.CHARARRAY);
    list.add(field);
    // within bag "b", keep only field "d" of the inner tuple "t"
    field = new RequiredField("b", 0, Arrays.asList(new RequiredField("t", 0, Arrays.asList(new RequiredField("d", 1, null, DataType.CHARARRAY)), DataType.TUPLE)), DataType.BAG);
    list.add(field);
    pLoader.pushProjection(list);
    pLoader.setLocation(location, job);
    assertEquals("{a: chararray,b: {t: (d: chararray)}}", TupleReadSupport.getPigSchema(job.getConfiguration()).toString());
}
Also used : RequiredFieldList(org.apache.pig.LoadPushDown.RequiredFieldList) RequiredField(org.apache.pig.LoadPushDown.RequiredField) ExecJob(org.apache.pig.backend.executionengine.ExecJob) Job(org.apache.hadoop.mapreduce.Job) Test(org.junit.Test)
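
For contrast with the nested projection above, a flat projection needs no sub-field lists. The following is a minimal sketch of a hypothetical companion test (the loader schema is reused from testSchema; the method name and the pushed columns are assumptions for illustration):

@Test
public void testFlatProjection() throws Exception {
    String location = "garbage";
    ParquetLoader pLoader = new ParquetLoader("a:chararray, " + "b:{t:(c:chararray, d:chararray)}, " + "p:[(q:chararray, r:chararray)]");
    Job job = new Job();
    pLoader.getSchema(location, job);
    RequiredFieldList list = new RequiredFieldList();
    // indexes are positions in the declared schema: a=0, b=1, p=2
    list.add(new RequiredField("a", 0, null, DataType.CHARARRAY));
    // no sub-field list, so the whole map "p" is requested
    list.add(new RequiredField("p", 2, null, DataType.MAP));
    pLoader.pushProjection(list);
    pLoader.setLocation(location, job);
}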

Example 2 with RequiredFieldList

Use of org.apache.pig.LoadPushDown.RequiredFieldList in project shifu by ShifuML.

In class TrainModelProcessor, method runDistributedTrain:

protected int runDistributedTrain() throws IOException, InterruptedException, ClassNotFoundException {
    LOG.info("Started {}distributed training.", isDryTrain ? "dry " : "");
    int status = 0;
    Configuration conf = new Configuration();
    SourceType sourceType = super.getModelConfig().getDataSet().getSource();
    final List<String> args = new ArrayList<String>();
    GridSearch gs = new GridSearch(modelConfig.getTrain().getParams(), modelConfig.getTrain().getGridConfigFileContent());
    prepareCommonParams(gs.hasHyperParam(), args, sourceType);
    String alg = super.getModelConfig().getTrain().getAlgorithm();
    // add tmp models folder to config
    FileSystem fileSystem = ShifuFileUtils.getFileSystemBySourceType(sourceType);
    Path tmpModelsPath = fileSystem.makeQualified(new Path(super.getPathFinder().getPathBySourceType(new Path(Constants.TMP, Constants.DEFAULT_MODELS_TMP_FOLDER), sourceType)));
    args.add(String.format(CommonConstants.MAPREDUCE_PARAM_FORMAT, CommonConstants.SHIFU_TMP_MODELS_FOLDER, tmpModelsPath.toString()));
    int baggingNum = isForVarSelect ? 1 : super.getModelConfig().getBaggingNum();
    if (modelConfig.isClassification()) {
        int classes = modelConfig.getTags().size();
        if (classes == 2) {
            // binary classification, only need one job
            baggingNum = 1;
        } else {
            if (modelConfig.getTrain().isOneVsAll()) {
                // one-vs-all multi-class classification needs one bagging job per class
                baggingNum = modelConfig.getTags().size();
            } else {
            // native multi-class classification uses the configured baggingNum; nothing to set here
            }
        }
        if (baggingNum != super.getModelConfig().getBaggingNum()) {
            LOG.warn("'train:baggingNum' is set to {} because of ONEVSALL multiple classification.", baggingNum);
        }
    }
    boolean isKFoldCV = false;
    Integer kCrossValidation = this.modelConfig.getTrain().getNumKFold();
    if (kCrossValidation != null && kCrossValidation > 0) {
        isKFoldCV = true;
        baggingNum = modelConfig.getTrain().getNumKFold();
        if (baggingNum != super.getModelConfig().getBaggingNum() && gs.hasHyperParam()) {
            // in grid search mode, k-fold mode is disabled
            LOG.warn("'train:baggingNum' is set to {} because k-fold cross validation is enabled ('numKFold' is not -1).", baggingNum);
        }
    }
    long start = System.currentTimeMillis();
    boolean isParallel = Boolean.valueOf(Environment.getProperty(Constants.SHIFU_DTRAIN_PARALLEL, SHIFU_DEFAULT_DTRAIN_PARALLEL)).booleanValue();
    GuaguaMapReduceClient guaguaClient;
    int[] inputOutputIndex = DTrainUtils.getInputOutputCandidateCounts(modelConfig.getNormalizeType(), this.columnConfigList);
    int inputNodeCount = inputOutputIndex[0] == 0 ? inputOutputIndex[2] : inputOutputIndex[0];
    int candidateCount = inputOutputIndex[2];
    boolean isAfterVarSelect = (inputOutputIndex[0] != 0);
    // cache all feature list for sampling features
    List<Integer> allFeatures = NormalUtils.getAllFeatureList(this.columnConfigList, isAfterVarSelect);
    if (modelConfig.getNormalize().getIsParquet()) {
        guaguaClient = new GuaguaParquetMapReduceClient();
        // set required field list to make sure we only load selected columns.
        RequiredFieldList requiredFieldList = new RequiredFieldList();
        boolean hasCandidates = CommonUtils.hasCandidateColumns(columnConfigList);
        for (ColumnConfig columnConfig : super.columnConfigList) {
            if (columnConfig.isTarget()) {
                requiredFieldList.add(new RequiredField(columnConfig.getColumnName(), columnConfig.getColumnNum(), null, DataType.FLOAT));
            } else {
                if (inputNodeCount == candidateCount) {
                    // no variables have been selected yet, so take all good candidate columns
                    if (!columnConfig.isMeta() && !columnConfig.isTarget() && CommonUtils.isGoodCandidate(columnConfig, hasCandidates)) {
                        requiredFieldList.add(new RequiredField(columnConfig.getColumnName(), columnConfig.getColumnNum(), null, DataType.FLOAT));
                    }
                } else {
                    if (!columnConfig.isMeta() && !columnConfig.isTarget() && columnConfig.isFinalSelect()) {
                        requiredFieldList.add(new RequiredField(columnConfig.getColumnName(), columnConfig.getColumnNum(), null, DataType.FLOAT));
                    }
                }
            }
        }
        // weight is added manually
        requiredFieldList.add(new RequiredField("weight", columnConfigList.size(), null, DataType.DOUBLE));
        args.add(String.format(CommonConstants.MAPREDUCE_PARAM_FORMAT, "parquet.private.pig.required.fields", serializeRequiredFieldList(requiredFieldList)));
        args.add(String.format(CommonConstants.MAPREDUCE_PARAM_FORMAT, "parquet.private.pig.column.index.access", "true"));
    } else {
        guaguaClient = new GuaguaMapReduceClient();
    }
    int parallelNum = Integer.parseInt(Environment.getProperty(CommonConstants.SHIFU_TRAIN_BAGGING_INPARALLEL, "5"));
    int parallelGroups = 1;
    if (gs.hasHyperParam()) {
        parallelGroups = (gs.getFlattenParams().size() % parallelNum == 0 ? gs.getFlattenParams().size() / parallelNum : gs.getFlattenParams().size() / parallelNum + 1);
        baggingNum = gs.getFlattenParams().size();
        LOG.warn("'train:baggingNum' is set to {} because of grid search enabled by settings in 'train#params'.", gs.getFlattenParams().size());
    } else {
        parallelGroups = baggingNum % parallelNum == 0 ? baggingNum / parallelNum : baggingNum / parallelNum + 1;
    }
    LOG.info("Distributed trainning with baggingNum: {}", baggingNum);
    List<String> progressLogList = new ArrayList<String>(baggingNum);
    boolean isOneJobNotContinuous = false;
    for (int j = 0; j < parallelGroups; j++) {
        int currBags = baggingNum;
        if (gs.hasHyperParam()) {
            if (j == parallelGroups - 1) {
                currBags = gs.getFlattenParams().size() % parallelNum == 0 ? parallelNum : gs.getFlattenParams().size() % parallelNum;
            } else {
                currBags = parallelNum;
            }
        } else {
            if (j == parallelGroups - 1) {
                currBags = baggingNum % parallelNum == 0 ? parallelNum : baggingNum % parallelNum;
            } else {
                currBags = parallelNum;
            }
        }
        for (int k = 0; k < currBags; k++) {
            int i = j * parallelNum + k;
            if (gs.hasHyperParam()) {
                LOG.info("Start the {}th grid search job with params: {}", i, gs.getParams(i));
            } else if (isKFoldCV) {
                LOG.info("Start the {}th k-fold cross validation job with params.", i);
            }
            List<String> localArgs = new ArrayList<String>(args);
            // set name for each bagging job.
            localArgs.add("-n");
            localArgs.add(String.format("Shifu Master-Workers %s Training Iteration: %s id:%s", alg, super.getModelConfig().getModelSetName(), i));
            LOG.info("Start trainer with id: {}", i);
            String modelName = getModelName(i);
            Path modelPath = fileSystem.makeQualified(new Path(super.getPathFinder().getModelsPath(sourceType), modelName));
            Path bModelPath = fileSystem.makeQualified(new Path(super.getPathFinder().getNNBinaryModelsPath(sourceType), modelName));
            // check whether the job is continuous training; this can be set multiple times and only the last value is used
            boolean isContinuous = false;
            if (gs.hasHyperParam()) {
                isContinuous = false;
            } else {
                int intContinuous = checkContinuousTraining(fileSystem, localArgs, modelPath, modelConfig.getTrain().getParams());
                if (intContinuous == -1) {
                    LOG.warn("Model with index {} with size of trees is over treeNum, such training will not be started.", i);
                    continue;
                } else {
                    isContinuous = (intContinuous == 1);
                }
            }
            // training
            if (gs.hasHyperParam() || isKFoldCV) {
                isContinuous = false;
            }
            if (!isContinuous && !isOneJobNotContinuous) {
                isOneJobNotContinuous = true;
                // delete all old models if not continuous
                String srcModelPath = super.getPathFinder().getModelsPath(sourceType);
                String mvModelPath = srcModelPath + "_" + System.currentTimeMillis();
                LOG.info("Old model path has been moved to {}", mvModelPath);
                fileSystem.rename(new Path(srcModelPath), new Path(mvModelPath));
                fileSystem.mkdirs(new Path(srcModelPath));
                FileSystem.getLocal(conf).delete(new Path(super.getPathFinder().getModelsPath(SourceType.LOCAL)), true);
            }
            if (NNConstants.NN_ALG_NAME.equalsIgnoreCase(alg)) {
                // feature subset strategy parameters initialization
                Map<String, Object> params = gs.hasHyperParam() ? gs.getParams(i) : this.modelConfig.getTrain().getParams();
                Object fssObj = params.get("FeatureSubsetStrategy");
                FeatureSubsetStrategy featureSubsetStrategy = null;
                double featureSubsetRate = 0d;
                if (fssObj != null) {
                    try {
                        featureSubsetRate = Double.parseDouble(fssObj.toString());
                        // no need to validate that featureSubsetRate is in (0,1]; it is already validated in ModelInspector
                        featureSubsetStrategy = null;
                    } catch (NumberFormatException ee) {
                        featureSubsetStrategy = FeatureSubsetStrategy.of(fssObj.toString());
                    }
                } else {
                    LOG.warn("FeatureSubsetStrategy is not set, set to ALL by default.");
                    featureSubsetStrategy = FeatureSubsetStrategy.ALL;
                    featureSubsetRate = 0;
                }
                Set<Integer> subFeatures = null;
                if (isContinuous) {
                    BasicFloatNetwork existingModel = (BasicFloatNetwork) ModelSpecLoaderUtils.getBasicNetwork(ModelSpecLoaderUtils.loadModel(modelConfig, modelPath, ShifuFileUtils.getFileSystemBySourceType(this.modelConfig.getDataSet().getSource())));
                    if (existingModel == null) {
                        subFeatures = new HashSet<Integer>(getSubsamplingFeatures(allFeatures, featureSubsetStrategy, featureSubsetRate, inputNodeCount));
                    } else {
                        subFeatures = existingModel.getFeatureSet();
                    }
                } else {
                    subFeatures = new HashSet<Integer>(getSubsamplingFeatures(allFeatures, featureSubsetStrategy, featureSubsetRate, inputNodeCount));
                }
                if (subFeatures == null || subFeatures.size() == 0) {
                    localArgs.add(String.format(CommonConstants.MAPREDUCE_PARAM_FORMAT, CommonConstants.SHIFU_NN_FEATURE_SUBSET, ""));
                } else {
                    localArgs.add(String.format(CommonConstants.MAPREDUCE_PARAM_FORMAT, CommonConstants.SHIFU_NN_FEATURE_SUBSET, StringUtils.join(subFeatures, ',')));
                    LOG.debug("Size: {}, list: {}.", subFeatures.size(), StringUtils.join(subFeatures, ','));
                }
            }
            localArgs.add(String.format(CommonConstants.MAPREDUCE_PARAM_FORMAT, CommonConstants.GUAGUA_OUTPUT, modelPath.toString()));
            localArgs.add(String.format(CommonConstants.MAPREDUCE_PARAM_FORMAT, Constants.SHIFU_NN_BINARY_MODEL_PATH, bModelPath.toString()));
            if (gs.hasHyperParam() || isKFoldCV) {
                // grid search and k-fold CV need the validation error
                Path valErrPath = fileSystem.makeQualified(new Path(super.getPathFinder().getValErrorPath(sourceType), "val_error_" + i));
                localArgs.add(String.format(CommonConstants.MAPREDUCE_PARAM_FORMAT, CommonConstants.GS_VALIDATION_ERROR, valErrPath.toString()));
            }
            localArgs.add(String.format(CommonConstants.MAPREDUCE_PARAM_FORMAT, CommonConstants.SHIFU_TRAINER_ID, String.valueOf(i)));
            final String progressLogFile = getProgressLogFile(i);
            progressLogList.add(progressLogFile);
            localArgs.add(String.format(CommonConstants.MAPREDUCE_PARAM_FORMAT, CommonConstants.SHIFU_DTRAIN_PROGRESS_FILE, progressLogFile));
            String hdpVersion = HDPUtils.getHdpVersionForHDP224();
            if (StringUtils.isNotBlank(hdpVersion)) {
                localArgs.add(String.format(CommonConstants.MAPREDUCE_PARAM_FORMAT, "hdp.version", hdpVersion));
                HDPUtils.addFileToClassPath(HDPUtils.findContainingFile("hdfs-site.xml"), conf);
                HDPUtils.addFileToClassPath(HDPUtils.findContainingFile("core-site.xml"), conf);
                HDPUtils.addFileToClassPath(HDPUtils.findContainingFile("mapred-site.xml"), conf);
                HDPUtils.addFileToClassPath(HDPUtils.findContainingFile("yarn-site.xml"), conf);
            }
            if (isParallel) {
                guaguaClient.addJob(localArgs.toArray(new String[0]));
            } else {
                TailThread tailThread = startTailThread(new String[] { progressLogFile });
                boolean ret = guaguaClient.createJob(localArgs.toArray(new String[0])).waitForCompletion(true);
                status += (ret ? 0 : 1);
                stopTailThread(tailThread);
            }
        }
        if (isParallel) {
            TailThread tailThread = startTailThread(progressLogList.toArray(new String[0]));
            status += guaguaClient.run();
            stopTailThread(tailThread);
        }
    }
    if (isKFoldCV) {
        // for k-fold, also copy model files at the end; these models can be used for evaluation
        for (int i = 0; i < baggingNum; i++) {
            String modelName = getModelName(i);
            Path modelPath = fileSystem.makeQualified(new Path(super.getPathFinder().getModelsPath(sourceType), modelName));
            if (ShifuFileUtils.getFileSystemBySourceType(sourceType).exists(modelPath)) {
                copyModelToLocal(modelName, modelPath, sourceType);
            } else {
                LOG.warn("Model {} isn't there, maybe job is failed, for bagging it can be ignored.", modelPath.toString());
                status += 1;
            }
        }
        List<Double> valErrs = readAllValidationErrors(sourceType, fileSystem, kCrossValidation);
        double sum = 0d;
        for (Double err : valErrs) {
            sum += err;
        }
        LOG.info("Average validation error for current k-fold cross validation is {}.", sum / valErrs.size());
        LOG.info("K-fold cross validation on distributed training finished in {}ms.", System.currentTimeMillis() - start);
    } else if (gs.hasHyperParam()) {
        // select the best parameter combination from grid search
        LOG.info("Original grid search params: {}", modelConfig.getParams());
        Map<String, Object> params = findBestParams(sourceType, fileSystem, gs);
        // temp copy all models for evaluation
        for (int i = 0; i < baggingNum; i++) {
            String modelName = getModelName(i);
            Path modelPath = fileSystem.makeQualified(new Path(super.getPathFinder().getModelsPath(sourceType), modelName));
            if (ShifuFileUtils.getFileSystemBySourceType(sourceType).exists(modelPath) && (status == 0)) {
                copyModelToLocal(modelName, modelPath, sourceType);
            } else {
                LOG.warn("Model {} isn't there, maybe job is failed, for bagging it can be ignored.", modelPath.toString());
            }
        }
        LOG.info("The best parameters in grid search is {}", params);
        LOG.info("Grid search on distributed training finished in {}ms.", System.currentTimeMillis() - start);
    } else {
        // copy model files at last.
        for (int i = 0; i < baggingNum; i++) {
            String modelName = getModelName(i);
            Path modelPath = fileSystem.makeQualified(new Path(super.getPathFinder().getModelsPath(sourceType), modelName));
            if (ShifuFileUtils.getFileSystemBySourceType(sourceType).exists(modelPath) && (status == 0)) {
                copyModelToLocal(modelName, modelPath, sourceType);
            } else {
                LOG.warn("Model {} isn't there, maybe job is failed, for bagging it can be ignored.", modelPath.toString());
            }
        }
        // copy temp model files; for RF/GBT tmp models are not copied because they need
        // much more space, while for other algorithms tmp models are copied to local by default
        boolean copyTmpModelsToLocal = Boolean.TRUE.toString().equalsIgnoreCase(Environment.getProperty(Constants.SHIFU_TMPMODEL_COPYTOLOCAL, "true"));
        if (copyTmpModelsToLocal) {
            copyTmpModelsToLocal(tmpModelsPath, sourceType);
        } else {
            LOG.info("Tmp models are not copied into local, please find them in hdfs path: {}", tmpModelsPath);
        }
        LOG.info("Distributed training finished in {}ms.", System.currentTimeMillis() - start);
    }
    if (CommonUtils.isTreeModel(modelConfig.getAlgorithm())) {
        List<BasicML> models = ModelSpecLoaderUtils.loadBasicModels(this.modelConfig, null);
        // compute feature importance and write to local file after models are trained
        Map<Integer, MutablePair<String, Double>> featureImportances = CommonUtils.computeTreeModelFeatureImportance(models);
        String localFsFolder = pathFinder.getLocalFeatureImportanceFolder();
        String localFIPath = pathFinder.getLocalFeatureImportancePath();
        processRollupForFIFiles(localFsFolder, localFIPath);
        CommonUtils.writeFeatureImportance(localFIPath, featureImportances);
    }
    if (status != 0) {
        LOG.error("Error may occurred. There is no model generated. Please check!");
    }
    return status;
}
Also used : Configuration(org.apache.hadoop.conf.Configuration) ColumnConfig(ml.shifu.shifu.container.obj.ColumnConfig) SourceType(ml.shifu.shifu.container.obj.RawSourceData.SourceType) FeatureSubsetStrategy(ml.shifu.shifu.core.dtrain.FeatureSubsetStrategy) BasicML(org.encog.ml.BasicML) GuaguaMapReduceClient(ml.shifu.guagua.mapreduce.GuaguaMapReduceClient) MutablePair(org.apache.commons.lang3.tuple.MutablePair) RequiredField(org.apache.pig.LoadPushDown.RequiredField) FileSystem(org.apache.hadoop.fs.FileSystem) BasicFloatNetwork(ml.shifu.shifu.core.dtrain.dataset.BasicFloatNetwork) Path(org.apache.hadoop.fs.Path) GuaguaParquetMapReduceClient(ml.shifu.shifu.guagua.GuaguaParquetMapReduceClient) GridSearch(ml.shifu.shifu.core.dtrain.gs.GridSearch) RequiredFieldList(org.apache.pig.LoadPushDown.RequiredFieldList)
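
The serializeRequiredFieldList helper used above is not shown in this excerpt. Below is a plausible sketch, assuming Pig's ObjectSerializer is used (RequiredFieldList is Serializable, and the read side in parquet-mr deserializes the same property); the actual Shifu implementation may differ:

import java.io.IOException;
import org.apache.pig.LoadPushDown.RequiredFieldList;
import org.apache.pig.impl.util.ObjectSerializer;

// Sketch only: turn the projection into a string that can travel through the
// job configuration under "parquet.private.pig.required.fields".
private static String serializeRequiredFieldList(RequiredFieldList requiredFieldList) {
    try {
        return ObjectSerializer.serialize(requiredFieldList);
    } catch (IOException e) {
        throw new RuntimeException("Failed to serialize required field list.", e);
    }
}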

Example 3 with RequiredFieldList

Use of org.apache.pig.LoadPushDown.RequiredFieldList in project parquet-mr by apache.

In class TupleReadSupport, method init:

@Override
public ReadContext init(InitContext initContext) {
    Schema pigSchema = getPigSchema(initContext.getConfiguration());
    RequiredFieldList requiredFields = getRequiredFields(initContext.getConfiguration());
    boolean columnIndexAccess = initContext.getConfiguration().getBoolean(PARQUET_COLUMN_INDEX_ACCESS, false);
    if (pigSchema == null) {
        return new ReadContext(initContext.getFileSchema());
    } else {
        // project the file schema according to the requested Pig schema
        MessageType parquetRequestedSchema = new PigSchemaConverter(columnIndexAccess).filter(initContext.getFileSchema(), pigSchema, requiredFields);
        return new ReadContext(parquetRequestedSchema);
    }
}
Also used : RequiredFieldList(org.apache.pig.LoadPushDown.RequiredFieldList) PigSchemaConverter.parsePigSchema(org.apache.parquet.pig.PigSchemaConverter.parsePigSchema) Schema(org.apache.pig.impl.logicalLayer.schema.Schema) FieldSchema(org.apache.pig.impl.logicalLayer.schema.Schema.FieldSchema) MessageType(org.apache.parquet.schema.MessageType)
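
getRequiredFields is the read-side counterpart of the projection serialization shown in Example 2. A minimal sketch, assuming the property key matches the one the loader sets ("parquet.private.pig.required.fields") and that Pig's ObjectSerializer is used; details in the actual parquet-mr source may differ:

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.pig.LoadPushDown.RequiredFieldList;
import org.apache.pig.impl.util.ObjectSerializer;

// Sketch only: read back the projection pushed by the loader; null means no
// projection was pushed and the full file schema should be used.
static RequiredFieldList getRequiredFields(Configuration configuration) {
    String serialized = configuration.get("parquet.private.pig.required.fields");
    if (serialized == null) {
        return null;
    }
    try {
        return (RequiredFieldList) ObjectSerializer.deserialize(serialized);
    } catch (IOException e) {
        throw new RuntimeException("Failed to deserialize required field list.", e);
    }
}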

Example 4 with RequiredFieldList

Use of org.apache.pig.LoadPushDown.RequiredFieldList in project elephant-bird by twitter.

In class TestProtoToPig, method evenFields:

private static RequiredFieldList evenFields(List<FieldDescriptor> protoFields) {
    RequiredFieldList reqList = new RequiredFieldList();
    int i = 0;
    for (FieldDescriptor fd : protoFields) {
        if (i % 2 == 0) {
            RequiredField field = new RequiredField();
            field.setAlias(fd.getName());
            field.setIndex(i);
            // the field type is left unset; it is not used here
            reqList.add(field);
        }
        i++;
    }
    return reqList;
}
Also used : RequiredFieldList(org.apache.pig.LoadPushDown.RequiredFieldList) RequiredField(org.apache.pig.LoadPushDown.RequiredField) FieldDescriptor(com.google.protobuf.Descriptors.FieldDescriptor)
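
A short usage sketch for the helper above (hypothetical; Person stands in for any generated protobuf message class):

// Hypothetical usage: build the even-indexed projection and inspect it.
List<FieldDescriptor> protoFields = Person.getDescriptor().getFields();
RequiredFieldList projection = evenFields(protoFields);
for (RequiredField rf : projection.getFields()) {
    // prints the original field index and its alias, e.g. "0 -> name"
    System.out.println(rf.getIndex() + " -> " + rf.getAlias());
}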

Example 5 with RequiredFieldList

Use of org.apache.pig.LoadPushDown.RequiredFieldList in project elephant-bird by twitter.

In class TestThriftToPig, method thriftToPig:

static <M extends TBase<?, ?>> Tuple thriftToPig(M obj) throws TException {
    // it is very inefficient to create one ThriftToPig for each Thrift object,
    // but good enough for unit testing.
    TypeRef<M> typeRef = new TypeRef<M>(obj.getClass()) {
    };
    ThriftToPig<M> thriftToPig = ThriftToPig.newInstance(typeRef);
    Tuple t = thriftToPig.getPigTuple(obj);
    // test the projected tuple: project a subset of fields selected by field-name hash.
    List<Field> tFields = thriftToPig.getTStructDescriptor().getFields();
    List<Integer> idxList = Lists.newArrayList();
    RequiredFieldList reqFieldList = new RequiredFieldList();
    for (int i = 0; i < tFields.size(); i++) {
        String name = tFields.get(i).getName();
        if (name.hashCode() % 2 == 0) {
            RequiredField rf = new RequiredField();
            rf.setAlias(name);
            rf.setIndex(i);
            reqFieldList.add(rf);
            idxList.add(i);
        }
    }
    try {
        Tuple pt = new ProjectedThriftTupleFactory<M>(typeRef, reqFieldList).newTuple(obj);
        int pidx = 0;
        for (int idx : idxList) {
            if (t.get(idx) != pt.get(pidx)) {
                // references differ; compare string forms (assumes both are non-null)
                assertEquals(t.get(idx).toString(), pt.get(pidx).toString());
            }
            pidx++;
        }
    } catch (ExecException e) {
        // not expected
        throw new TException(e);
    }
    // return the full tuple
    return t;
}
Also used : TException(org.apache.thrift.TException) TypeRef(com.twitter.elephantbird.util.TypeRef) ExecException(org.apache.pig.backend.executionengine.ExecException) Field(com.twitter.elephantbird.thrift.TStructDescriptor.Field) RequiredField(org.apache.pig.LoadPushDown.RequiredField) RequiredFieldList(org.apache.pig.LoadPushDown.RequiredFieldList) RequiredField(org.apache.pig.LoadPushDown.RequiredField) ThriftBytesToTuple(com.twitter.elephantbird.pig.piggybank.ThriftBytesToTuple) Tuple(org.apache.pig.data.Tuple)
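
A hypothetical driver for the helper above (SimpleStruct stands in for any Thrift-generated TBase class; its setters are assumptions for illustration):

static void demo() throws TException, ExecException {
    SimpleStruct s = new SimpleStruct();
    s.setId(42);
    s.setName("alice");
    // also exercises the projected-tuple comparison inside thriftToPig
    Tuple full = thriftToPig(s);
    System.out.println(full.toDelimitedString("\t"));
}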

Aggregations

RequiredFieldList (org.apache.pig.LoadPushDown.RequiredFieldList): 5
RequiredField (org.apache.pig.LoadPushDown.RequiredField): 4
FieldDescriptor (com.google.protobuf.Descriptors.FieldDescriptor): 1
ThriftBytesToTuple (com.twitter.elephantbird.pig.piggybank.ThriftBytesToTuple): 1
Field (com.twitter.elephantbird.thrift.TStructDescriptor.Field): 1
TypeRef (com.twitter.elephantbird.util.TypeRef): 1
GuaguaMapReduceClient (ml.shifu.guagua.mapreduce.GuaguaMapReduceClient): 1
ColumnConfig (ml.shifu.shifu.container.obj.ColumnConfig): 1
SourceType (ml.shifu.shifu.container.obj.RawSourceData.SourceType): 1
FeatureSubsetStrategy (ml.shifu.shifu.core.dtrain.FeatureSubsetStrategy): 1
BasicFloatNetwork (ml.shifu.shifu.core.dtrain.dataset.BasicFloatNetwork): 1
GridSearch (ml.shifu.shifu.core.dtrain.gs.GridSearch): 1
GuaguaParquetMapReduceClient (ml.shifu.shifu.guagua.GuaguaParquetMapReduceClient): 1
MutablePair (org.apache.commons.lang3.tuple.MutablePair): 1
Configuration (org.apache.hadoop.conf.Configuration): 1
FileSystem (org.apache.hadoop.fs.FileSystem): 1
Path (org.apache.hadoop.fs.Path): 1
Job (org.apache.hadoop.mapreduce.Job): 1
PigSchemaConverter.parsePigSchema (org.apache.parquet.pig.PigSchemaConverter.parsePigSchema): 1
MessageType (org.apache.parquet.schema.MessageType): 1