Usage example of com.alibaba.alink.pipeline.dataproc.format.VectorToColumns from the Alink project by Alibaba.
From the class VectorToColumnsBatchOpTest, method testPipeline:
@Test
public void testPipeline() throws Exception {
    // One-row in-memory source: a sparse vector of size 5 with entries 1:2 and 3:5,
    // plus two id columns that should be carried through unchanged.
    Row[] rows = new Row[] { Row.of(1, 2, "$5$1:2 3:5") };
    // Use the parameterized type instead of the raw BatchOperator (avoids raw-type warnings).
    BatchOperator<?> data = new MemSourceBatchOp(rows, new String[] { "id", "id2", "vec" });

    // Expand the 5-dimensional vector column into five double columns f0..f4,
    // keeping the reserved id columns in the output.
    VectorToColumns pipeline = new VectorToColumns()
        .setVectorCol("vec")
        .setSchemaStr("f0 double, f1 double, f2 double, f3 double, f4 double")
        .setReservedCols("id", "id2");

    pipeline.transform(data).print();
}
Usage example of com.alibaba.alink.pipeline.dataproc.format.VectorToColumns from the Alink project by Alibaba.
From the class Chap13, method c_5:
static void c_5() throws Exception {
    BatchOperator.setParallelism(4);

    // One-time conversion: expand the sparse 784-dim vector into 784 double
    // columns (c_0..c_783) and persist as table-format Ak files.
    if (!new File(DATA_DIR + TABLE_TRAIN_FILE).exists()) {
        AkSourceBatchOp train_sparse = new AkSourceBatchOp().setFilePath(DATA_DIR + SPARSE_TRAIN_FILE);
        AkSourceBatchOp test_sparse = new AkSourceBatchOp().setFilePath(DATA_DIR + SPARSE_TEST_FILE);

        // Build the output schema string "c_0 double, c_1 double, ..., c_783 double".
        StringBuilder sbd = new StringBuilder();
        sbd.append("c_0 double");
        for (int i = 1; i < 784; i++) {
            sbd.append(", c_").append(i).append(" double");
        }

        // The train and test exports were identical pipelines; share one helper.
        expandVectorToTable(train_sparse, sbd.toString(), DATA_DIR + TABLE_TRAIN_FILE);
        expandVectorToTable(test_sparse, sbd.toString(), DATA_DIR + TABLE_TEST_FILE);
        BatchOperator.execute();
    }

    AkSourceBatchOp train_data = new AkSourceBatchOp().setFilePath(DATA_DIR + TABLE_TRAIN_FILE);
    AkSourceBatchOp test_data = new AkSourceBatchOp().setFilePath(DATA_DIR + TABLE_TEST_FILE);

    // All columns except the label are features.
    final String[] featureColNames = ArrayUtils.removeElement(train_data.getColNames(), LABEL_COL_NAME);

    train_data.lazyPrint(5);

    Stopwatch sw = new Stopwatch();

    // Compare decision-tree split criteria, timing each run.
    for (TreeType treeType : new TreeType[] { TreeType.GINI, TreeType.INFOGAIN, TreeType.INFOGAINRATIO }) {
        sw.reset();
        sw.start();
        new DecisionTreeClassifier()
            .setTreeType(treeType)
            .setFeatureCols(featureColNames)
            .setLabelCol(LABEL_COL_NAME)
            .setPredictionCol(PREDICTION_COL_NAME)
            .enableLazyPrintModelInfo()
            .fit(train_data)
            .transform(test_data)
            .link(
                new EvalMultiClassBatchOp()
                    .setLabelCol(LABEL_COL_NAME)
                    .setPredictionCol(PREDICTION_COL_NAME)
                    .lazyPrintMetrics("DecisionTreeClassifier " + treeType.toString())
            );
        BatchOperator.execute();
        sw.stop();
        System.out.println(sw.getElapsedTimeSpan());
    }

    // Sweep random-forest sizes, timing each run.
    for (int numTrees : new int[] { 2, 4, 8, 16, 32, 64, 128 }) {
        sw.reset();
        sw.start();
        new RandomForestClassifier()
            .setSubsamplingRatio(0.6)
            .setNumTreesOfInfoGain(numTrees)
            .setFeatureCols(featureColNames)
            .setLabelCol(LABEL_COL_NAME)
            .setPredictionCol(PREDICTION_COL_NAME)
            .enableLazyPrintModelInfo()
            .fit(train_data)
            .transform(test_data)
            .link(
                new EvalMultiClassBatchOp()
                    .setLabelCol(LABEL_COL_NAME)
                    .setPredictionCol(PREDICTION_COL_NAME)
                    .lazyPrintMetrics("RandomForestClassifier : " + numTrees)
            );
        BatchOperator.execute();
        sw.stop();
        System.out.println(sw.getElapsedTimeSpan());
    }
}

/**
 * Expands the vector column of {@code source} into individual double columns per
 * {@code schemaStr}, keeps the label column, and sinks the result to {@code filePath}.
 * Execution is deferred until {@code BatchOperator.execute()} is called.
 */
private static void expandVectorToTable(AkSourceBatchOp source, String schemaStr, String filePath) {
    new VectorToColumns()
        .setVectorCol(VECTOR_COL_NAME)
        .setSchemaStr(schemaStr)
        .setReservedCols(LABEL_COL_NAME)
        .transform(source)
        .link(new AkSinkBatchOp().setFilePath(filePath));
}
Aggregations