Search in sources:

Example 1 with VectorToColumns

Use of com.alibaba.alink.pipeline.dataproc.format.VectorToColumns in project Alink by alibaba.

From the class VectorToColumnsBatchOpTest, the testPipeline method:

@Test
public void testPipeline() throws Exception {
    // "$5$1:2 3:5" is a sparse vector of length 5 with value 2 at index 1 and value 5 at index 3.
    Row[] rows = new Row[] { Row.of(1, 2, "$5$1:2 3:5") };
    BatchOperator data = new MemSourceBatchOp(rows, new String[] { "id", "id2", "vec" });
    // Expand the vector column into five double columns f0..f4, keeping the two id columns.
    VectorToColumns pipeline = new VectorToColumns()
        .setVectorCol("vec")
        .setSchemaStr("f0 double, f1 double, f2 double, f3 double, f4 double")
        .setReservedCols("id", "id2");
    pipeline.transform(data).print();
}
Also used: MemSourceBatchOp (com.alibaba.alink.operator.batch.source.MemSourceBatchOp), VectorToColumns (com.alibaba.alink.pipeline.dataproc.format.VectorToColumns), Row (org.apache.flink.types.Row), BatchOperator (com.alibaba.alink.operator.batch.BatchOperator), Test (org.junit.Test)
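
For comparison, here is a minimal, self-contained sketch of the same transform applied to a dense vector input (written as space-separated values in Alink's vector string format). The class name, the sample values, and the expected-output comment are illustrative assumptions, not output captured from the test above.

import org.apache.flink.types.Row;

import com.alibaba.alink.operator.batch.BatchOperator;
import com.alibaba.alink.operator.batch.source.MemSourceBatchOp;
import com.alibaba.alink.pipeline.dataproc.format.VectorToColumns;

public class VectorToColumnsDenseSketch {
    public static void main(String[] args) throws Exception {
        // A dense Alink vector is written as space-separated values.
        Row[] rows = new Row[] { Row.of(1, "0.1 0.2 0.3") };
        BatchOperator<?> data = new MemSourceBatchOp(rows, new String[] { "id", "vec" });

        // Expand the 3-dimensional vector into three double columns, keeping the id column.
        new VectorToColumns()
            .setVectorCol("vec")
            .setSchemaStr("f0 double, f1 double, f2 double")
            .setReservedCols("id")
            .transform(data)
            .print();
        // Expected shape (assumed): id | f0 | f1 | f2  ->  1 | 0.1 | 0.2 | 0.3
    }
}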

Example 2 with VectorToColumns

Use of com.alibaba.alink.pipeline.dataproc.format.VectorToColumns in project Alink by alibaba.

From the class Chap13, the c_5 method:

static void c_5() throws Exception {
    BatchOperator.setParallelism(4);
    if (!new File(DATA_DIR + TABLE_TRAIN_FILE).exists()) {
        AkSourceBatchOp train_sparse = new AkSourceBatchOp().setFilePath(DATA_DIR + SPARSE_TRAIN_FILE);
        AkSourceBatchOp test_sparse = new AkSourceBatchOp().setFilePath(DATA_DIR + SPARSE_TEST_FILE);
        StringBuilder sbd = new StringBuilder();
        sbd.append("c_0 double");
        for (int i = 1; i < 784; i++) {
            sbd.append(", c_").append(i).append(" double");
        }
        // Expand each sparse vector into 784 double columns (c_0 ... c_783) and save the results as table-format files.
        new VectorToColumns()
            .setVectorCol(VECTOR_COL_NAME)
            .setSchemaStr(sbd.toString())
            .setReservedCols(LABEL_COL_NAME)
            .transform(train_sparse)
            .link(new AkSinkBatchOp().setFilePath(DATA_DIR + TABLE_TRAIN_FILE));
        new VectorToColumns()
            .setVectorCol(VECTOR_COL_NAME)
            .setSchemaStr(sbd.toString())
            .setReservedCols(LABEL_COL_NAME)
            .transform(test_sparse)
            .link(new AkSinkBatchOp().setFilePath(DATA_DIR + TABLE_TEST_FILE));
        BatchOperator.execute();
    }
    AkSourceBatchOp train_data = new AkSourceBatchOp().setFilePath(DATA_DIR + TABLE_TRAIN_FILE);
    AkSourceBatchOp test_data = new AkSourceBatchOp().setFilePath(DATA_DIR + TABLE_TEST_FILE);
    final String[] featureColNames = ArrayUtils.removeElement(train_data.getColNames(), LABEL_COL_NAME);
    train_data.lazyPrint(5);
    Stopwatch sw = new Stopwatch();
    for (TreeType treeType : new TreeType[] { TreeType.GINI, TreeType.INFOGAIN, TreeType.INFOGAINRATIO }) {
        sw.reset();
        sw.start();
        new DecisionTreeClassifier()
            .setTreeType(treeType)
            .setFeatureCols(featureColNames)
            .setLabelCol(LABEL_COL_NAME)
            .setPredictionCol(PREDICTION_COL_NAME)
            .enableLazyPrintModelInfo()
            .fit(train_data)
            .transform(test_data)
            .link(new EvalMultiClassBatchOp()
                .setLabelCol(LABEL_COL_NAME)
                .setPredictionCol(PREDICTION_COL_NAME)
                .lazyPrintMetrics("DecisionTreeClassifier " + treeType.toString()));
        BatchOperator.execute();
        sw.stop();
        System.out.println(sw.getElapsedTimeSpan());
    }
    for (int numTrees : new int[] { 2, 4, 8, 16, 32, 64, 128 }) {
        sw.reset();
        sw.start();
        new RandomForestClassifier()
            .setSubsamplingRatio(0.6)
            .setNumTreesOfInfoGain(numTrees)
            .setFeatureCols(featureColNames)
            .setLabelCol(LABEL_COL_NAME)
            .setPredictionCol(PREDICTION_COL_NAME)
            .enableLazyPrintModelInfo()
            .fit(train_data)
            .transform(test_data)
            .link(new EvalMultiClassBatchOp()
                .setLabelCol(LABEL_COL_NAME)
                .setPredictionCol(PREDICTION_COL_NAME)
                .lazyPrintMetrics("RandomForestClassifier : " + numTrees));
        BatchOperator.execute();
        sw.stop();
        System.out.println(sw.getElapsedTimeSpan());
    }
}
Also used: TreeType (com.alibaba.alink.params.shared.tree.HasIndividualTreeType.TreeType), EvalMultiClassBatchOp (com.alibaba.alink.operator.batch.evaluation.EvalMultiClassBatchOp), Stopwatch (com.alibaba.alink.common.utils.Stopwatch), RandomForestClassifier (com.alibaba.alink.pipeline.classification.RandomForestClassifier), AkSourceBatchOp (com.alibaba.alink.operator.batch.source.AkSourceBatchOp), VectorToColumns (com.alibaba.alink.pipeline.dataproc.format.VectorToColumns), DecisionTreeClassifier (com.alibaba.alink.pipeline.classification.DecisionTreeClassifier), AkSinkBatchOp (com.alibaba.alink.operator.batch.sink.AkSinkBatchOp), File (java.io.File)
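
The schema string for the 784 pixel columns above is assembled by hand with a StringBuilder. A small helper makes that step reusable; the sketch below simply mirrors the loop in c_5, and the class and method names are illustrative assumptions rather than part of the Alink API or the original Chap13 code.

public class ColumnSchemaSketch {
    // Builds "c_0 double, c_1 double, ..., c_{n-1} double", the schema string that c_5
    // passes to VectorToColumns.setSchemaStr(...). Class and method names are hypothetical.
    static String columnSchema(int n) {
        StringBuilder sbd = new StringBuilder("c_0 double");
        for (int i = 1; i < n; i++) {
            sbd.append(", c_").append(i).append(" double");
        }
        return sbd.toString();
    }

    public static void main(String[] args) {
        // For the 784-dimensional vectors used above this yields 784 column definitions.
        String schema = columnSchema(784);
        System.out.println(schema.split(", ").length + " columns, starting with: " + schema.substring(0, 22));
    }
}

Calling columnSchema(784) reproduces the exact string built in c_5, and the same helper works for vectors of any other dimension.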

Aggregations

VectorToColumns (com.alibaba.alink.pipeline.dataproc.format.VectorToColumns) 2
Stopwatch (com.alibaba.alink.common.utils.Stopwatch) 1
BatchOperator (com.alibaba.alink.operator.batch.BatchOperator) 1
EvalMultiClassBatchOp (com.alibaba.alink.operator.batch.evaluation.EvalMultiClassBatchOp) 1
AkSinkBatchOp (com.alibaba.alink.operator.batch.sink.AkSinkBatchOp) 1
AkSourceBatchOp (com.alibaba.alink.operator.batch.source.AkSourceBatchOp) 1
MemSourceBatchOp (com.alibaba.alink.operator.batch.source.MemSourceBatchOp) 1
TreeType (com.alibaba.alink.params.shared.tree.HasIndividualTreeType.TreeType) 1
DecisionTreeClassifier (com.alibaba.alink.pipeline.classification.DecisionTreeClassifier) 1
RandomForestClassifier (com.alibaba.alink.pipeline.classification.RandomForestClassifier) 1
File (java.io.File) 1
Row (org.apache.flink.types.Row) 1
Test (org.junit.Test) 1