Use of com.alibaba.alink.common.io.filesystem.FilePath in project Alink by alibaba.
The class PipelineSaveAndLoadTest, method test.
@Test
public void test() throws Exception {
    String schemaStr = "sepal_length double, sepal_width double, petal_length double, petal_width double, category string";
    CsvSourceBatchOp source = new CsvSourceBatchOp()
        .setSchemaStr(schemaStr)
        .setFilePath("https://alink-test-data.oss-cn-hangzhou.aliyuncs.com/iris.csv");
    String modelFilename = "/tmp/model123";
    // Chain three feature-engineering stages into one pipeline and fit it on the iris data.
    QuantileDiscretizer stage1 = new QuantileDiscretizer().setNumBuckets(2).setSelectedCols("sepal_length");
    Binarizer stage2 = new Binarizer().setSelectedCol("petal_width").setThreshold(1.);
    QuantileDiscretizer stage3 = new QuantileDiscretizer().setNumBuckets(4).setSelectedCols("petal_length");
    PipelineModel pipelineModel = new Pipeline(stage1, stage2, stage3).fit(source);
    // System.out.println(pipelineModel.transform(source).getSchema().toString());
    // Save the fitted model to disk, then load it into a LocalPredictor for single-row scoring.
    pipelineModel.save(new FilePath(modelFilename), true);
    BatchOperator.execute();
    LocalPredictor predictor = new LocalPredictor(modelFilename, schemaStr);
    Row res = predictor.map(Row.of(1.2, 3.4, 2.4, 3.6, "1"));
    // All three stages replace their input columns in place, so the output keeps the input arity of 5.
    Assert.assertEquals(5, res.getArity());
}
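The saved directory can also be reloaded on the cluster side as a PipelineModel rather than a LocalPredictor. A minimal sketch, assuming the static PipelineModel.load(FilePath) factory available in recent Alink releases:

// Hypothetical follow-up: reload the saved pipeline and apply it in batch mode.
PipelineModel loaded = PipelineModel.load(new FilePath(modelFilename));
loaded.transform(source).firstN(5).print();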
Use of com.alibaba.alink.common.io.filesystem.FilePath in project Alink by alibaba.
The class AkExample, method main.
public static void main(String[] args) throws Exception {
    String URL = "https://alink-release.oss-cn-beijing.aliyuncs.com/data-files/iris.csv";
    String SCHEMA_STR = "sepal_length double, sepal_width double, petal_length double, petal_width double, category string";
    // Note: complete the parameters below with the right OSS configuration.
    BaseFileSystem<?> ossFileSystem = new OssFileSystem("OssVersion", "OssEndPoint", "OssBucket", "OssId", "OssKey");
    // Note: complete the parameters below with the right HDFS configuration.
    BaseFileSystem<?> hadoopFileSystem = new HadoopFileSystem("HadoopVersion", "HdfsFileSystemUri");
    // CSV to OSS
    CsvSourceBatchOp csvSourceBatchOp = new CsvSourceBatchOp().setFilePath(URL).setSchemaStr(SCHEMA_STR);
    AkSinkBatchOp akSinkToOss = new AkSinkBatchOp()
        .setFilePath(new FilePath("iris", ossFileSystem))
        .setOverwriteSink(true);
    csvSourceBatchOp.link(akSinkToOss);
    BatchOperator.execute();
    // OSS to HDFS
    AkSourceBatchOp akSourceFromOss = new AkSourceBatchOp().setFilePath(new FilePath("iris", ossFileSystem));
    AkSinkBatchOp akSinkToHdfs = new AkSinkBatchOp()
        .setFilePath(new FilePath("iris", hadoopFileSystem))
        .setOverwriteSink(true);
    akSourceFromOss.link(akSinkToHdfs);
    BatchOperator.execute();
    // HDFS to stdout
    AkSourceBatchOp akSourceFromHdfs = new AkSourceBatchOp().setFilePath(new FilePath("iris", hadoopFileSystem));
    akSourceFromHdfs.firstN(10).print();
}
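If no OSS or HDFS credentials are at hand, the same round trip can be exercised against the local file system, since FilePath also accepts a plain path string (as the pipeline-save test above does). A minimal sketch; the /tmp path is illustrative:

// CSV to a local .ak file (illustrative path), then back to stdout.
new CsvSourceBatchOp().setFilePath(URL).setSchemaStr(SCHEMA_STR)
    .link(new AkSinkBatchOp().setFilePath(new FilePath("/tmp/iris.ak")).setOverwriteSink(true));
BatchOperator.execute();
new AkSourceBatchOp().setFilePath(new FilePath("/tmp/iris.ak")).firstN(10).print();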
Use of com.alibaba.alink.common.io.filesystem.FilePath in project Alink by alibaba.
The class DeepARTrainBatchOpTest, method testSingleVar.
@Test
public void testSingleVar() throws Exception {
    BatchOperator.setParallelism(1);
    final String timeColName = "ts";
    // Batch side: random rows, an appended id as the hour index, then an hourly SUM aggregation.
    BatchOperator<?> source = new RandomTableSourceBatchOp().setNumRows(1000L).setNumCols(1);
    String colName = source.getColNames()[0];
    AppendIdBatchOp appendIdBatchOp = new AppendIdBatchOp().setIdCol(timeColName).linkFrom(source);
    BatchOperator<?> timeBatchOp = new SelectBatchOp()
        .setClause(String.format("%s, FLOOR(TO_TIMESTAMP(%s * 3600000) TO HOUR) as %s", colName, timeColName, timeColName))
        .linkFrom(appendIdBatchOp);
    String selectClause = timeColName + String.format(", SUM(%s) as %s", colName, colName);
    BatchOperator<?> groupedTimeBatchOp = new GroupByBatchOp()
        .setSelectClause(selectClause).setGroupByPredicate(timeColName)
        .linkFrom(timeBatchOp);
    // Train DeepAR on one-week windows (24 * 7 hourly points) with a one-day stride.
    BatchOperator<?> deepArTrainBatchOp = new DeepARTrainBatchOp()
        .setSelectedCol(colName).setTimeCol(timeColName)
        .setWindow(24 * 7).setStride(24).setNumEpochs(1)
        .linkFrom(groupedTimeBatchOp);
    // Stream side: the same hourly aggregation, built with a tumbling window.
    StreamOperator<?> sourceStreamOp = new RandomTableSourceStreamOp().setNumCols(1).setMaxRows(1000L);
    AppendIdStreamOp appendIdStreamOp = new AppendIdStreamOp().setIdCol(timeColName).linkFrom(sourceStreamOp);
    StreamOperator<?> timeStreamOp = new SelectStreamOp()
        .setClause(String.format("%s, FLOOR(TO_TIMESTAMP(%s * 3600000) TO HOUR) as %s", colName, timeColName, timeColName))
        .linkFrom(appendIdStreamOp);
    String selectClausePred = String.format("TUMBLE_START() as %s", timeColName)
        + String.format(", SUM(%s) as %s", colName, colName);
    TumbleTimeWindowStreamOp timeWindowStreamOp = new TumbleTimeWindowStreamOp()
        .setWindowTime(3600).setTimeCol(timeColName).setClause(selectClausePred)
        .linkFrom(timeStreamOp);
    // Once a day, collect the trailing six days (24 * 7 - 24 hours) into an MTable for prediction.
    HopTimeWindowStreamOp hopTimeWindowStreamOp = new HopTimeWindowStreamOp()
        .setTimeCol(timeColName)
        .setClause(String.format("MTABLE_AGG(%s, %s) as %s", timeColName, colName, "mt"))
        .setHopTime(24 * 3600).setWindowTime((24 * 7 - 24) * 3600)
        .linkFrom(timeWindowStreamOp);
    DeepARPredictStreamOp deepARPredictStreamOp = new DeepARPredictStreamOp(deepArTrainBatchOp)
        .setValueCol("mt").setPredictionCol("pred").setPredictNum(24)
        .linkFrom(hopTimeWindowStreamOp);
    FilePath tmpAkFile = new FilePath(new Path(folder.getRoot().getPath(), "deepar_test_stream_single_var_result.ak"));
    deepARPredictStreamOp.link(new AkSinkStreamOp().setOverwriteSink(true).setFilePath(tmpAkFile));
    StreamOperator.execute();
}
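The window arithmetic: training sees one-week windows with a one-day stride, while the hop window hands the model the trailing six days and asks for the next 24 points. Once StreamOperator.execute() returns, the sink file can be inspected in batch mode; a minimal sketch reusing the same FilePath:

// Read the streamed predictions back as a batch source and print a few rows.
new AkSourceBatchOp().setFilePath(tmpAkFile).firstN(5).print();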
Use of com.alibaba.alink.common.io.filesystem.FilePath in project Alink by alibaba.
The class DeepARTrainBatchOpTest, method testMultiVar.
@Test
public void testMultiVar() throws Exception {
    BatchOperator.setParallelism(1);
    final String timeColName = "ts";
    final int numCols = 10;
    final String vecColName = "vec";
    // Batch side: random rows, an appended id as the hour index, then an hourly SUM per column.
    BatchOperator<?> source = new RandomTableSourceBatchOp().setNumRows(1000L).setNumCols(numCols);
    String[] colNames = source.getColNames();
    AppendIdBatchOp appendIdBatchOp = new AppendIdBatchOp().setIdCol(timeColName).linkFrom(source);
    BatchOperator<?> timeBatchOp = new SelectBatchOp()
        .setClause(String.format("%s, FLOOR(TO_TIMESTAMP(%s * 3600000) TO HOUR) as %s",
            Joiner.on(",").join(colNames), timeColName, timeColName))
        .linkFrom(appendIdBatchOp);
    StringBuilder selectClause = new StringBuilder();
    selectClause.append(timeColName);
    for (int i = 0; i < numCols; ++i) {
        selectClause.append(String.format(", SUM(%s) as %s", colNames[i], colNames[i]));
    }
    BatchOperator<?> groupedTimeBatchOp = new GroupByBatchOp()
        .setSelectClause(selectClause.toString()).setGroupByPredicate(timeColName)
        .linkFrom(timeBatchOp);
    // Pack the ten aggregated columns into one vector column, then train DeepAR on the vectors.
    ColumnsToVectorBatchOp columnsToVectorBatchOp = new ColumnsToVectorBatchOp()
        .setSelectedCols(colNames).setVectorCol(vecColName)
        .linkFrom(groupedTimeBatchOp);
    BatchOperator<?> deepArTrainBatchOp = new DeepARTrainBatchOp()
        .setVectorCol(vecColName).setTimeCol(timeColName)
        .setWindow(24 * 7).setStride(24).setNumEpochs(1)
        .linkFrom(columnsToVectorBatchOp);
    // Stream side: the same hourly aggregation and vectorization, built with a tumbling window.
    StreamOperator<?> sourceStreamOp = new RandomTableSourceStreamOp().setNumCols(numCols).setMaxRows(1000L);
    AppendIdStreamOp appendIdStreamOp = new AppendIdStreamOp().setIdCol(timeColName).linkFrom(sourceStreamOp);
    StreamOperator<?> timeStreamOp = new SelectStreamOp()
        .setClause(String.format("%s, FLOOR(TO_TIMESTAMP(%s * 3600000) TO HOUR) as %s",
            Joiner.on(",").join(colNames), timeColName, timeColName))
        .linkFrom(appendIdStreamOp);
    StringBuilder selectClausePred = new StringBuilder();
    selectClausePred.append(String.format("TUMBLE_START() as %s", timeColName));
    for (int i = 0; i < numCols; ++i) {
        selectClausePred.append(String.format(", SUM(%s) as %s", colNames[i], colNames[i]));
    }
    TumbleTimeWindowStreamOp timeWindowStreamOp = new TumbleTimeWindowStreamOp()
        .setWindowTime(3600).setTimeCol(timeColName).setClause(selectClausePred.toString())
        .linkFrom(timeStreamOp);
    ColumnsToVectorStreamOp columnsToVectorStreamOp = new ColumnsToVectorStreamOp()
        .setSelectedCols(colNames).setVectorCol(vecColName)
        .linkFrom(timeWindowStreamOp);
    // Once a day, collect the trailing six days of vectors into an MTable for prediction.
    HopTimeWindowStreamOp hopTimeWindowStreamOp = new HopTimeWindowStreamOp()
        .setTimeCol(timeColName)
        .setClause(String.format("MTABLE_AGG(%s, %s) as %s", timeColName, vecColName, "mt"))
        .setHopTime(24 * 3600).setWindowTime((24 * 7 - 24) * 3600)
        .linkFrom(columnsToVectorStreamOp);
    DeepARPredictStreamOp deepARPredictStreamOp = new DeepARPredictStreamOp(deepArTrainBatchOp)
        .setValueCol("mt").setPredictionCol("pred")
        .linkFrom(hopTimeWindowStreamOp);
    FilePath tmpAkFile = new FilePath(new Path(folder.getRoot().getPath(), "deepar_test_stream_multi_var_result.ak"));
    deepARPredictStreamOp.link(new AkSinkStreamOp().setOverwriteSink(true).setFilePath(tmpAkFile));
    StreamOperator.execute();
}
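The multi-variate variant differs from testSingleVar only in packing the ten aggregated columns into one vector column before training and prediction. A sketch of a batch read-back that checks the result schema and a few rows after the job finishes:

AkSourceBatchOp result = new AkSourceBatchOp().setFilePath(tmpAkFile);
System.out.println(result.getSchema());
result.firstN(3).print();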
Use of com.alibaba.alink.common.io.filesystem.FilePath in project Alink by alibaba.
The class LSTNetTrainBatchOpTest, method testStreamSingleVar.
@Test
public void testStreamSingleVar() throws Exception {
    BatchOperator.setParallelism(1);
    final int numCols = 1;
    final String timeColName = "ts";
    // Batch side: random rows with the appended id cast to a timestamp column.
    BatchOperator<?> source = new RandomTableSourceBatchOp().setNumRows(1000L).setNumCols(numCols);
    String colName = source.getColNames()[0];
    final String selectClause = "TO_TIMESTAMP(" + timeColName + ") as " + timeColName + ", " + colName;
    AppendIdBatchOp appendIdBatchOp = new AppendIdBatchOp().setIdCol(timeColName).linkFrom(source);
    BatchOperator<?> timeBatchOp = new SelectBatchOp().setClause(selectClause).linkFrom(appendIdBatchOp);
    // Train LSTNet on windows of 24 * 7 points with a horizon of 12.
    LSTNetTrainBatchOp trainOp = new LSTNetTrainBatchOp()
        .setSelectedCol(colName).setTimeCol(timeColName)
        .setWindow(24 * 7).setHorizon(12).setNumEpochs(1)
        .linkFrom(timeBatchOp);
    // Stream side: aggregate the preceding 24 * 7 rows into an MTable and score each window.
    StreamOperator<?> sourceStreamOp = new RandomTableSourceStreamOp().setNumCols(numCols).setMaxRows(6000L);
    AppendIdStreamOp appendIdStreamOp = new AppendIdStreamOp().setIdCol(timeColName).linkFrom(sourceStreamOp);
    StreamOperator<?> timestampStreamOp = new SelectStreamOp().setClause(selectClause).linkFrom(appendIdStreamOp);
    OverCountWindowStreamOp overTimeWindowStreamOp = new OverCountWindowStreamOp()
        .setClause("MTABLE_AGG_PRECEDING(" + timeColName + ", " + colName + ") as col_agg")
        .setTimeCol(timeColName).setPrecedingRows(24 * 7)
        .linkFrom(timestampStreamOp);
    LSTNetPredictStreamOp predictStreamOp = new LSTNetPredictStreamOp(trainOp)
        .setValueCol("col_agg").setPredictionCol("pred")
        .setReservedCols(timeColName).setNumThreads(4)
        .linkFrom(overTimeWindowStreamOp);
    FilePath tmpAkFile = new FilePath(new Path(folder.getRoot().getPath(), "lstnet_test_stream_single_var_result.ak"));
    predictStreamOp.link(new AkSinkStreamOp().setOverwriteSink(true).setFilePath(tmpAkFile));
    StreamOperator.execute();
}
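The stored predictions can also be re-consumed as a stream, for example to chain further stream operators onto them. A minimal sketch, assuming the matching AkSourceStreamOp source operator is available:

// Hypothetical follow-up: replay the sink file as a stream and print it.
new AkSourceStreamOp().setFilePath(tmpAkFile).print();
StreamOperator.execute();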