Use of com.alibaba.alink.common.io.filesystem.OssFileSystem in project Alink by alibaba.
The class Chap03, method c_1_3_1.
static void c_1_3_1() throws Exception {
    OssFileSystem oss = new OssFileSystem(OSS_VERSION, OSS_END_POINT, OSS_BUCKET_NAME, OSS_ACCESS_ID, OSS_ACCESS_KEY);
    System.out.println(oss.getKind());

    // Create the temporary directory on OSS if it does not exist yet.
    final String ossDir = OSS_PREFIX_URI + "alink/data/temp/";
    if (!oss.exists(new Path(ossDir))) {
        oss.mkdirs(new Path(ossDir));
    }

    // Write a small text file, then read it back and print its content.
    String path = ossDir + "hello.txt";
    OutputStream outputStream = oss.create(path, WriteMode.OVERWRITE);
    outputStream.write("Hello Alink!".getBytes());
    outputStream.close();

    InputStream inputStream = oss.open(path);
    String readString = IOUtils.toString(inputStream);
    System.out.println(readString);
}
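The snippet above relies on constants and imports defined elsewhere in Chap03 that this page does not show. The following is a minimal sketch of what they might look like, assuming commons-io for IOUtils and Flink's Path/WriteMode types; the version, endpoint, bucket, credential and directory values are placeholders, not the book's actual configuration.

// Hypothetical setting for the constants used above (placeholders; adjust to your environment).
import org.apache.commons.io.IOUtils;
import org.apache.flink.core.fs.FileStatus;
import org.apache.flink.core.fs.FileSystem.WriteMode;
import org.apache.flink.core.fs.Path;
// plus the java.io stream classes and java.util.Date used by the snippets

public class Chap03 {
    static final String OSS_VERSION = "3.4.1";                         // assumed OSS plugin version
    static final String OSS_END_POINT = "oss-cn-beijing.aliyuncs.com"; // placeholder endpoint
    static final String OSS_BUCKET_NAME = "<your-bucket>";
    static final String OSS_ACCESS_ID = "<your-access-id>";
    static final String OSS_ACCESS_KEY = "<your-access-key>";
    static final String OSS_PREFIX_URI = "oss://<your-bucket>/";
    static final String LOCAL_DIR = "/tmp/alink/";                     // any writable local directory
    // ... methods c_1_3_1, c_1_3_2, etc. as shown on this page
}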
The class Chap03, method c_1_3_2.
static void c_1_3_2() throws Exception {
    LocalFileSystem local = new LocalFileSystem();
    OssFileSystem oss = new OssFileSystem(OSS_VERSION, OSS_END_POINT, OSS_BUCKET_NAME, OSS_ACCESS_ID, OSS_ACCESS_KEY);

    // Copy the file written in c_1_3_1 from OSS to the local disk, then back to OSS under a new name.
    copy(oss.open(OSS_PREFIX_URI + "alink/data/temp/hello.txt"),
        local.create(LOCAL_DIR + "hello_1.txt", WriteMode.OVERWRITE));
    copy(local.open(LOCAL_DIR + "hello_1.txt"),
        oss.create(OSS_PREFIX_URI + "alink/data/temp/hello_2.txt", WriteMode.OVERWRITE));

    // List the OSS temp directory: path, size in bytes and last modification time.
    for (FileStatus status : oss.listStatus(new Path(OSS_PREFIX_URI + "alink/data/temp/"))) {
        System.out.println(status.getPath().toUri() + " \t" + status.getLen()
            + " \t" + new Date(status.getModificationTime()));
    }
}
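The copy(...) helper called here is not shown on this page. Below is a minimal sketch of what it could look like, assuming commons-io is on the classpath; the real Chap03 implementation may differ.

// Hypothetical helper: stream everything from the source to the sink, then close both.
// Uses java.io.InputStream/OutputStream/IOException and org.apache.commons.io.IOUtils.
static void copy(InputStream in, OutputStream out) throws IOException {
    try {
        IOUtils.copy(in, out);
    } finally {
        in.close();
        out.close();
    }
}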
The class Chap03, method c_2_3_1.
static void c_2_3_1() throws Exception {
    HadoopFileSystem hdfs = new HadoopFileSystem(HADOOP_VERSION, HDFS_URI);
    OssFileSystem oss = new OssFileSystem(OSS_VERSION, OSS_END_POINT, OSS_BUCKET_NAME, OSS_ACCESS_ID, OSS_ACCESS_KEY);

    // The same Ak file is written to the local disk, to HDFS and to OSS.
    FilePath[] filePaths = new FilePath[] {
        new FilePath(LOCAL_DIR + "iris.ak"),
        new FilePath(HDFS_URI + "user/yangxu/alink/data/temp/iris.ak", hdfs),
        new FilePath(OSS_PREFIX_URI + "alink/data/temp/iris.ak", oss)
    };

    // Batch mode: download the iris CSV, sink it in Ak format, then count the rows read back.
    for (FilePath filePath : filePaths) {
        new CsvSourceBatchOp().setFilePath(IRIS_HTTP_URL).setSchemaStr(IRIS_SCHEMA_STR)
            .link(new AkSinkBatchOp().setFilePath(filePath).setOverwriteSink(true));
        BatchOperator.execute();
        System.out.println(new AkSourceBatchOp().setFilePath(filePath).count());
    }

    // Stream mode: the same round trip, plus a filtered print of the smallest flowers.
    for (FilePath filePath : filePaths) {
        new CsvSourceStreamOp().setFilePath(IRIS_HTTP_URL).setSchemaStr(IRIS_SCHEMA_STR)
            .link(new AkSinkStreamOp().setFilePath(filePath).setOverwriteSink(true));
        StreamOperator.execute();
        new AkSourceStreamOp().setFilePath(filePath).filter("sepal_length < 4.5").print();
        StreamOperator.execute();
    }
}
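This method also uses HDFS and iris constants defined elsewhere in Chap03. A rough sketch of plausible values follows; the iris URL and schema are taken from the AkExample further down this page, while the Hadoop version and HDFS URI are placeholders.

// Hypothetical values for the remaining constants (placeholders; adjust to your cluster).
static final String HADOOP_VERSION = "2.8.3";              // assumed Hadoop plugin version
static final String HDFS_URI = "hdfs://192.168.0.1:9000/"; // placeholder NameNode address
static final String IRIS_HTTP_URL =
    "https://alink-release.oss-cn-beijing.aliyuncs.com/data-files/iris.csv";
static final String IRIS_SCHEMA_STR =
    "sepal_length double, sepal_width double, petal_length double, petal_width double, category string";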
The class AkExample, method main.
public static void main(String[] args) throws Exception {
    String URL = "https://alink-release.oss-cn-beijing.aliyuncs.com/data-files/iris.csv";
    String SCHEMA_STR = "sepal_length double, sepal_width double, petal_length double, petal_width double, category string";

    // Note: Complete the parameters below with the right OSS configuration.
    BaseFileSystem<?> ossFileSystem = new OssFileSystem("OssVersion", "OssEndPoint", "OssBucket", "OssId", "OssKey");
    // Note: Complete the parameters below with the right HDFS configuration.
    BaseFileSystem<?> hadoopFileSystem = new HadoopFileSystem("HadoopVersion", "HdfsFileSystemUri");

    // CSV over HTTP -> Ak file on OSS
    CsvSourceBatchOp csvSourceBatchOp = new CsvSourceBatchOp().setFilePath(URL).setSchemaStr(SCHEMA_STR);
    AkSinkBatchOp akSinkToOss = new AkSinkBatchOp().setFilePath(new FilePath("iris", ossFileSystem)).setOverwriteSink(true);
    csvSourceBatchOp.link(akSinkToOss);
    BatchOperator.execute();

    // Ak file on OSS -> Ak file on HDFS
    AkSourceBatchOp akSourceFromOss = new AkSourceBatchOp().setFilePath(new FilePath("iris", ossFileSystem));
    AkSinkBatchOp akSinkToHdfs = new AkSinkBatchOp().setFilePath(new FilePath("iris", hadoopFileSystem)).setOverwriteSink(true);
    akSourceFromOss.link(akSinkToHdfs);
    BatchOperator.execute();

    // Ak file on HDFS -> stdout
    AkSourceBatchOp akSourceFromHdfs = new AkSourceBatchOp().setFilePath(new FilePath("iris", hadoopFileSystem));
    akSourceFromHdfs.firstN(10).print();
}
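To make AkExample compile on its own, the imports would look roughly like the following; the filesystem package is the one named at the top of this page, and the operator packages follow Alink's usual layout, but verify them against the Alink version you use.

// Likely imports for AkExample (sketch; check against your Alink version).
import com.alibaba.alink.common.io.filesystem.BaseFileSystem;
import com.alibaba.alink.common.io.filesystem.FilePath;
import com.alibaba.alink.common.io.filesystem.HadoopFileSystem;
import com.alibaba.alink.common.io.filesystem.OssFileSystem;
import com.alibaba.alink.operator.batch.BatchOperator;
import com.alibaba.alink.operator.batch.sink.AkSinkBatchOp;
import com.alibaba.alink.operator.batch.source.AkSourceBatchOp;
import com.alibaba.alink.operator.batch.source.CsvSourceBatchOp;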
The class Chap03, method c_2_1_2.
static void c_2_1_2() throws Exception {
    HadoopFileSystem hdfs = new HadoopFileSystem(HADOOP_VERSION, HDFS_URI);
    OssFileSystem oss = new OssFileSystem(OSS_VERSION, OSS_END_POINT, OSS_BUCKET_NAME, OSS_ACCESS_ID, OSS_ACCESS_KEY);

    // The same CSV file is written to the local disk, to HDFS and to OSS.
    FilePath[] filePaths = new FilePath[] {
        new FilePath(LOCAL_DIR + "iris.csv"),
        new FilePath(HDFS_URI + "user/yangxu/alink/data/temp/iris.csv", hdfs),
        new FilePath(OSS_PREFIX_URI + "alink/data/temp/iris.csv", oss)
    };

    // Batch mode: download the iris CSV, sink it as CSV, then count the rows read back.
    for (FilePath filePath : filePaths) {
        new CsvSourceBatchOp().setFilePath(IRIS_HTTP_URL).setSchemaStr(IRIS_SCHEMA_STR)
            .link(new CsvSinkBatchOp().setFilePath(filePath).setOverwriteSink(true));
        BatchOperator.execute();
        System.out.println(new CsvSourceBatchOp().setFilePath(filePath).setSchemaStr(IRIS_SCHEMA_STR).count());
    }

    // Stream mode: the same round trip, plus a filtered print of the smallest flowers.
    for (FilePath filePath : filePaths) {
        new CsvSourceStreamOp().setFilePath(IRIS_HTTP_URL).setSchemaStr(IRIS_SCHEMA_STR)
            .link(new CsvSinkStreamOp().setFilePath(filePath).setOverwriteSink(true));
        StreamOperator.execute();
        new CsvSourceStreamOp().setFilePath(filePath).setSchemaStr(IRIS_SCHEMA_STR).filter("sepal_length < 4.5").print();
        StreamOperator.execute();
    }
}
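After running these examples, the temporary files can be removed through the same file system objects. The following is a small sketch that could sit at the end of c_2_1_2, assuming BaseFileSystem exposes a Flink-style delete(path, recursive) method; check your Alink version before relying on it.

// Hypothetical cleanup of the temp directories written above (delete API assumed, verify before use).
Path ossTemp = new Path(OSS_PREFIX_URI + "alink/data/temp/");
if (oss.exists(ossTemp)) {
    oss.delete(ossTemp, true);   // recursive delete, mirroring Flink's FileSystem#delete
}
Path hdfsTemp = new Path(HDFS_URI + "user/yangxu/alink/data/temp/");
if (hdfs.exists(hdfsTemp)) {
    hdfs.delete(hdfsTemp, true);
}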