
Example 1 with RecordReceiver

Use of com.alibaba.datax.common.plugin.RecordReceiver in project plugins by qlangtech.

From the class TestDataXHudiWriterTask, the method testCsvWrite below builds a single DefaultRecord from the writer's column metadata, replaces the task's output stream with an in-memory buffer, and drives TisDataXHudiWriter.Task through init/prepare/startWrite with a one-shot RecordReceiver.

@Test
public void testCsvWrite() throws Exception {
    // one-element array so the anonymous inner classes below can populate the record
    DefaultRecord[] record = new DefaultRecord[1];
    ByteArrayOutputStream output = new ByteArrayOutputStream();
    TisDataXHudiWriter.Task task = new TisDataXHudiWriter.Task() {

        @Override
        public void init() {
            this.fileType = "csv";
            // load the writer configuration fixture from the test classpath
            this.writerSliceConfig = Configuration.from(
                    IOUtils.loadResourceFromClasspath(TestDataXHudiWriterTask.class,
                            TestDataXHudiWriter.hudi_datax_writer_assert_without_optional))
                    .getConfiguration("parameter");
            List<HdfsColMeta> colsMeta = HdfsColMeta.getColsMeta(this.writerSliceConfig);
            // build a single record whose columns follow the configured schema
            record[0] = new DefaultRecord();
            for (HdfsColMeta col : colsMeta) {
                // add one column value matching the declared CSV type
                switch (col.csvType) {
                    case STRING:
                        record[0].addColumn(new StringColumn("{\"name\":\"" + RandomStringUtils.randomAlphanumeric(4) + "\"}"));
                        break;
                    case BOOLEAN:
                        // no column is added for BOOLEAN fields in this test
                        break;
                    case NUMBER:
                        record[0].addColumn(new LongColumn((long) (Math.random() * 1000)));
                        break;
                }
            }
        }

        @Override
        public void prepare() {
            super.prepare();
        }

        @Override
        protected OutputStream getOutputStream(Path targetPath) {
            // capture the task's CSV output in memory instead of writing to the target path
            return output;
        }
    };
    RecordReceiver records = new RecordReceiver() {

        int index = 0;

        @Override
        public Record getFromReader() {
            // hand out the single prepared record once, then return null to signal end of input
            if (index++ < 1) {
                return record[0];
            }
            return null;
        }

        @Override
        public void shutdown() {
            // nothing to clean up for this in-memory stub
        }
    };
    // drive the task lifecycle: the receiver streams the single record into the writer
    task.init();
    task.prepare();
    task.startWrite(records);
    // print the CSV captured by the in-memory output stream
    System.out.println(new String(output.toByteArray()));
}
Also used:

Path (org.apache.hadoop.fs.Path)
StringColumn (com.alibaba.datax.common.element.StringColumn)
DefaultRecord (com.alibaba.datax.core.transport.record.DefaultRecord)
ByteArrayOutputStream (java.io.ByteArrayOutputStream)
RecordReceiver (com.alibaba.datax.common.plugin.RecordReceiver)
LongColumn (com.alibaba.datax.common.element.LongColumn)
HdfsColMeta (com.alibaba.datax.plugin.writer.hdfswriter.HdfsColMeta)
TisDataXHudiWriter (com.alibaba.datax.plugin.writer.hudi.TisDataXHudiWriter)
Test (org.junit.Test)
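
The anonymous RecordReceiver above hands out exactly one record. For tests that feed several records, a list-backed receiver following the same contract is a natural generalization. The sketch below is a hypothetical helper, not part of the qlangtech plugins project; it assumes only the DataX RecordReceiver interface shown in the example (getFromReader returning null at end of input, shutdown for cleanup).

import java.util.Iterator;
import java.util.List;

import com.alibaba.datax.common.element.Record;
import com.alibaba.datax.common.plugin.RecordReceiver;

// Hypothetical test helper (an assumption, not project code): feeds a fixed
// list of records to a writer task, generalizing the one-shot receiver above.
public class ListRecordReceiver implements RecordReceiver {

    private final Iterator<Record> remaining;

    public ListRecordReceiver(List<Record> records) {
        this.remaining = records.iterator();
    }

    @Override
    public Record getFromReader() {
        // DataX contract: return the next record, or null to signal end of input
        return remaining.hasNext() ? remaining.next() : null;
    }

    @Override
    public void shutdown() {
        // nothing to release for an in-memory list
    }
}

With such a helper, the test body above would reduce to task.startWrite(new ListRecordReceiver(java.util.Collections.singletonList(record[0]))).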
