Use of com.alibaba.datax.common.plugin.RecordReceiver in project plugins by qlangtech.

The class TestDataXHudiWriterTask, method testCsvWrite: the test builds a single record with random values derived from the writer's column metadata, feeds it to the Hudi writer task through a stub RecordReceiver, and captures the resulting CSV in an in-memory buffer.
@Test
public void testCsvWrite() throws Exception {
    // Single-element array so the anonymous Task below can populate the record from init().
    DefaultRecord[] record = new DefaultRecord[1];
    // Capture the writer's output in memory instead of writing to a file system.
    ByteArrayOutputStream output = new ByteArrayOutputStream();
    TisDataXHudiWriter.Task task = new TisDataXHudiWriter.Task() {

        @Override
        public void init() {
            this.fileType = "csv";
            this.writerSliceConfig = Configuration.from(
                    IOUtils.loadResourceFromClasspath(TestDataXHudiWriterTask.class,
                            TestDataXHudiWriter.hudi_datax_writer_assert_without_optional))
                    .getConfiguration("parameter");
            List<HdfsColMeta> colsMeta = HdfsColMeta.getColsMeta(this.writerSliceConfig);
            record[0] = new DefaultRecord();
            // Populate the record with a random value matching each column's CSV type.
            for (HdfsColMeta col : colsMeta) {
                switch (col.csvType) {
                    case STRING:
                        record[0].addColumn(new StringColumn(
                                "{\"name\":\"" + RandomStringUtils.randomAlphanumeric(4) + "\"}"));
                        break;
                    case BOOLEAN:
                        // No column is added for BOOLEAN in this test.
                        break;
                    case NUMBER:
                        record[0].addColumn(new LongColumn((long) (Math.random() * 1000)));
                        break;
                }
            }
        }

        @Override
        public void prepare() {
            super.prepare();
        }

        @Override
        protected OutputStream getOutputStream(Path targetPath) {
            // Redirect the task's file output into the in-memory buffer.
            return output;
        }
    };
    // Stub receiver: hands the single prepared record to the writer,
    // then signals end-of-input by returning null.
    RecordReceiver records = new RecordReceiver() {

        int index = 0;

        @Override
        public Record getFromReader() {
            if (index++ < 1) {
                return record[0];
            }
            return null;
        }

        @Override
        public void shutdown() {
        }
    };
    task.init();
    task.prepare();
    task.startWrite(records);
    // Dump the CSV produced by the task.
    System.out.println(new String(output.toByteArray()));
}
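
The anonymous RecordReceiver above is a one-off stub. The same contract, visible in the test itself, is that getFromReader() hands back records one at a time and returns null at end of input, while shutdown() releases resources. For reuse across writer tests, that contract can be factored into a small helper; the following is a minimal sketch (the class name ListRecordReceiver is hypothetical and not part of DataX or the qlangtech plugins):

import java.util.Iterator;
import java.util.List;

import com.alibaba.datax.common.element.Record;
import com.alibaba.datax.common.plugin.RecordReceiver;

// Reusable stub that feeds a prepared list of records to any DataX writer task under test.
public class ListRecordReceiver implements RecordReceiver {

    private final Iterator<Record> it;

    public ListRecordReceiver(List<Record> records) {
        this.it = records.iterator();
    }

    @Override
    public Record getFromReader() {
        // Returning null tells the writer there are no more records.
        return it.hasNext() ? it.next() : null;
    }

    @Override
    public void shutdown() {
        // Nothing to release for an in-memory stub.
    }
}

With such a helper, the test above could pass its prepared record as, for example, new ListRecordReceiver(Collections.singletonList(record[0])) instead of defining an anonymous class with its own index counter.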