Use of com.alibaba.alink.operator.batch.source.AkSourceBatchOp in project Alink by alibaba.
From class Chap01, method c_5_3:
static void c_5_3() throws Exception {
    MemSourceStreamOp pred_set = new MemSourceStreamOp(new Integer[] {2018, 2019}, "x");

    BatchOperator<?> lr_model = new AkSourceBatchOp().setFilePath(DATA_DIR + "gmv_reg.model");

    LinearRegPredictStreamOp predictor = new LinearRegPredictStreamOp(lr_model).setPredictionCol("pred");

    pred_set.select("x, x*x AS x2").link(predictor).print();

    StreamOperator.execute();
}
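The stream predictions above are simply printed; if they need to be persisted instead, the print() call can be swapped for a stream sink. A minimal sketch under that assumption, reusing the model file written by c_5_2 and writing to a hypothetical output file named gmv_pred.ak:

MemSourceStreamOp pred_set = new MemSourceStreamOp(new Integer[] {2018, 2019}, "x");
BatchOperator<?> lr_model = new AkSourceBatchOp().setFilePath(DATA_DIR + "gmv_reg.model");

// Same scoring chain as above, but the results go to an .ak file
// instead of stdout; "gmv_pred.ak" is a hypothetical file name.
pred_set
    .select("x, x*x AS x2")
    .link(new LinearRegPredictStreamOp(lr_model).setPredictionCol("pred"))
    .link(new AkSinkStreamOp()
        .setFilePath(DATA_DIR + "gmv_pred.ak")
        .setOverwriteSink(true));
StreamOperator.execute();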
Use of com.alibaba.alink.operator.batch.source.AkSourceBatchOp in project Alink by alibaba.
From class Chap01, method c_5_2:
static void c_5_2() throws Exception {
    BatchOperator<?> train_set = new MemSourceBatchOp(
        new Row[] {
            Row.of(2009, 0.5), Row.of(2010, 9.36), Row.of(2011, 52.0),
            Row.of(2012, 191.0), Row.of(2013, 350.0), Row.of(2014, 571.0),
            Row.of(2015, 912.0), Row.of(2016, 1207.0), Row.of(2017, 1682.0)
        },
        new String[] {"x", "gmv"});
    BatchOperator<?> pred_set = new MemSourceBatchOp(new Integer[] {2018, 2019}, "x");

    train_set = train_set.select("x, x*x AS x2, gmv");

    LinearRegTrainBatchOp trainer = new LinearRegTrainBatchOp()
        .setFeatureCols("x", "x2")
        .setLabelCol("gmv");
    train_set.link(trainer);

    trainer.link(new AkSinkBatchOp().setFilePath(DATA_DIR + "gmv_reg.model").setOverwriteSink(true));
    BatchOperator.execute();

    BatchOperator<?> lr_model = new AkSourceBatchOp().setFilePath(DATA_DIR + "gmv_reg.model");
    pred_set = pred_set.select("x, x*x AS x2");

    LinearRegPredictBatchOp predictor = new LinearRegPredictBatchOp().setPredictionCol("pred");
    predictor.linkFrom(lr_model, pred_set).print();
}
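Because the .ak model file written above is an ordinary Alink table, it can be read back with AkSourceBatchOp and printed for inspection. A minimal sketch, assuming gmv_reg.model has already been produced by the code above:

// Read the saved linear-regression model back and show its first rows.
new AkSourceBatchOp()
    .setFilePath(DATA_DIR + "gmv_reg.model")
    .lazyPrint(10, "< gmv_reg.model rows >");
BatchOperator.execute();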
Use of com.alibaba.alink.operator.batch.source.AkSourceBatchOp in project Alink by alibaba.
From class Chap03, method c_2_3_1:
static void c_2_3_1() throws Exception {
    HadoopFileSystem hdfs = new HadoopFileSystem(HADOOP_VERSION, HDFS_URI);
    OssFileSystem oss = new OssFileSystem(OSS_VERSION, OSS_END_POINT, OSS_BUCKET_NAME, OSS_ACCESS_ID, OSS_ACCESS_KEY);

    FilePath[] filePaths = new FilePath[] {
        new FilePath(LOCAL_DIR + "iris.ak"),
        new FilePath(HDFS_URI + "user/yangxu/alink/data/temp/iris.ak", hdfs),
        new FilePath(OSS_PREFIX_URI + "alink/data/temp/iris.ak", oss)
    };

    for (FilePath filePath : filePaths) {
        new CsvSourceBatchOp()
            .setFilePath(IRIS_HTTP_URL)
            .setSchemaStr(IRIS_SCHEMA_STR)
            .link(new AkSinkBatchOp().setFilePath(filePath).setOverwriteSink(true));
        BatchOperator.execute();

        System.out.println(new AkSourceBatchOp().setFilePath(filePath).count());
    }

    for (FilePath filePath : filePaths) {
        new CsvSourceStreamOp()
            .setFilePath(IRIS_HTTP_URL)
            .setSchemaStr(IRIS_SCHEMA_STR)
            .link(new AkSinkStreamOp().setFilePath(filePath).setOverwriteSink(true));
        StreamOperator.execute();

        new AkSourceStreamOp().setFilePath(filePath).filter("sepal_length < 4.5").print();
        StreamOperator.execute();
    }
}
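The loops above exercise local, HDFS and OSS targets in one pass. When only the local file system is available, the same batch round trip reduces to a single FilePath; a minimal local-only sketch using the same constants:

// Write the iris CSV to a local .ak file, then read it back and show a sample.
FilePath localPath = new FilePath(LOCAL_DIR + "iris.ak");

new CsvSourceBatchOp()
    .setFilePath(IRIS_HTTP_URL)
    .setSchemaStr(IRIS_SCHEMA_STR)
    .link(new AkSinkBatchOp().setFilePath(localPath).setOverwriteSink(true));
BatchOperator.execute();

new AkSourceBatchOp()
    .setFilePath(localPath)
    .lazyPrint(5, "< iris.ak sample >");
BatchOperator.execute();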
Use of com.alibaba.alink.operator.batch.source.AkSourceBatchOp in project Alink by alibaba.
From class Chap08, method c_5:
static void c_5() throws Exception {
    AkSourceBatchOp train_data = new AkSourceBatchOp().setFilePath(DATA_DIR + TRAIN_FILE);
    AkSourceBatchOp test_data = new AkSourceBatchOp().setFilePath(DATA_DIR + TEST_FILE);

    LogisticRegressionTrainBatchOp lrTrainer = new LogisticRegressionTrainBatchOp()
        .setFeatureCols(FEATURE_COL_NAMES)
        .setLabelCol(LABEL_COL_NAME);

    LogisticRegressionPredictBatchOp lrPredictor = new LogisticRegressionPredictBatchOp()
        .setPredictionCol(PREDICTION_COL_NAME)
        .setPredictionDetailCol(PRED_DETAIL_COL_NAME);

    train_data.link(lrTrainer);
    lrPredictor.linkFrom(lrTrainer, test_data);

    lrTrainer.lazyPrintTrainInfo().lazyPrintModelInfo();
    lrPredictor
        .lazyPrint(5, "< Prediction >")
        .link(new AkSinkBatchOp().setFilePath(DATA_DIR + LR_PRED_FILE).setOverwriteSink(true));

    BatchOperator.execute();
}
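The predictions written to LR_PRED_FILE can then be scored with Alink's binary-classification evaluator. A minimal sketch, assuming EvalBinaryClassBatchOp with lazyPrintMetrics is available in this Alink version and that "2" is the positive label value (a placeholder; the actual value depends on the data set):

new EvalBinaryClassBatchOp()
    .setPositiveLabelValueString("2")   // placeholder positive label, adjust to the data set
    .setLabelCol(LABEL_COL_NAME)
    .setPredictionDetailCol(PRED_DETAIL_COL_NAME)
    .linkFrom(new AkSourceBatchOp().setFilePath(DATA_DIR + LR_PRED_FILE))
    .lazyPrintMetrics("LogisticRegression");
BatchOperator.execute();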
Use of com.alibaba.alink.operator.batch.source.AkSourceBatchOp in project Alink by alibaba.
From class Chap08, method c_6:
static void c_6() throws Exception {
    AkSourceBatchOp train_data = new AkSourceBatchOp().setFilePath(DATA_DIR + TRAIN_FILE);
    AkSourceBatchOp test_data = new AkSourceBatchOp().setFilePath(DATA_DIR + TEST_FILE);

    LinearSvmTrainBatchOp svmTrainer = new LinearSvmTrainBatchOp()
        .setFeatureCols(FEATURE_COL_NAMES)
        .setLabelCol(LABEL_COL_NAME);

    LinearSvmPredictBatchOp svmPredictor = new LinearSvmPredictBatchOp()
        .setPredictionCol(PREDICTION_COL_NAME)
        .setPredictionDetailCol(PRED_DETAIL_COL_NAME);

    train_data.link(svmTrainer);
    svmPredictor.linkFrom(svmTrainer, test_data);

    svmTrainer.lazyPrintTrainInfo().lazyPrintModelInfo();
    svmPredictor
        .lazyPrint(5, "< Prediction >")
        .link(new AkSinkBatchOp().setFilePath(DATA_DIR + SVM_PRED_FILE).setOverwriteSink(true));

    BatchOperator.execute();
}
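A quick round-trip check (a sketch, using only operators already shown in this section) reads the SVM predictions just written and prints the label and prediction columns side by side, which makes it easy to eyeball misclassifications:

// Read SVM_PRED_FILE back and show a few label/prediction pairs.
new AkSourceBatchOp()
    .setFilePath(DATA_DIR + SVM_PRED_FILE)
    .select(LABEL_COL_NAME + ", " + PREDICTION_COL_NAME)
    .lazyPrint(5, "< SVM predictions read back >");
BatchOperator.execute();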