Search in sources :

Example 1 with Transformer

Use of org.apache.hudi.sink.transform.Transformer in the Apache Hudi project.

From the class HoodieFlinkStreamer, method main:

/**
 * Entry point of the Flink streamer: parses the CLI config, wires a Kafka
 * source through optional transformers into the Hudi write pipeline, and
 * submits the job.
 *
 * @param args command line arguments parsed into {@link FlinkStreamerConfig}
 * @throws Exception if job submission or execution fails
 */
public static void main(String[] args) throws Exception {
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    final FlinkStreamerConfig config = new FlinkStreamerConfig();
    JCommander commander = new JCommander(config, null, args);
    // Show usage and quit when help was requested or no arguments were supplied.
    if (config.help || args.length == 0) {
        commander.usage();
        System.exit(1);
    }
    env.enableCheckpointing(config.checkpointInterval);
    env.getConfig().setGlobalJobParameters(config);
    // Checkpoints drive the write operation (instant generation and commit),
    // so at most one checkpoint may be in flight at a time.
    env.getCheckpointConfig().setMaxConcurrentCheckpoints(1);
    env.setStateBackend(config.stateBackend);
    if (config.flinkCheckPointPath != null) {
        env.getCheckpointConfig().setCheckpointStorage(config.flinkCheckPointPath);
    }
    // Merge the global DFS properties with the Kafka props derived from the CLI config.
    TypedProperties kafkaProperties = DFSPropertiesConfiguration.getGlobalProps();
    kafkaProperties.putAll(StreamerUtil.appendKafkaProps(config));
    // Resolve the source row type from the configured Avro schema.
    RowType sourceRowType =
        (RowType) AvroSchemaConverter.convertToDataType(StreamerUtil.getSourceSchema(config)).getLogicalType();
    Configuration flinkConf = FlinkStreamerConfig.toFlinkConfig(config);
    long checkpointTimeout = env.getCheckpointConfig().getCheckpointTimeout();
    int parallelism = env.getParallelism();
    // The commit ack must not outlive the checkpoint timeout.
    flinkConf.setLong(FlinkOptions.WRITE_COMMIT_ACK_TIMEOUT, checkpointTimeout);
    // Kafka source emitting JSON-decoded RowData records.
    DataStream<RowData> rowDataStream = env
        .addSource(new FlinkKafkaConsumer<>(
            config.kafkaTopic,
            new JsonRowDataDeserializationSchema(
                sourceRowType, InternalTypeInfo.of(sourceRowType), false, true, TimestampFormat.ISO_8601),
            kafkaProperties))
        .name("kafka_source")
        .uid("uid_kafka_source");
    // Apply the optional user-configured transformer chain before writing.
    if (config.transformerClassNames != null && !config.transformerClassNames.isEmpty()) {
        Option<Transformer> transformer = StreamerUtil.createTransformer(config.transformerClassNames);
        if (transformer.isPresent()) {
            rowDataStream = transformer.get().apply(rowDataStream);
        }
    }
    DataStream<HoodieRecord> recordStream = Pipelines.bootstrap(flinkConf, sourceRowType, parallelism, rowDataStream);
    DataStream<Object> writePipeline = Pipelines.hoodieStreamWrite(flinkConf, parallelism, recordStream);
    // MOR tables with async compaction enabled get a compaction stage; otherwise clean.
    if (StreamerUtil.needsAsyncCompaction(flinkConf)) {
        Pipelines.compact(flinkConf, writePipeline);
    } else {
        Pipelines.clean(flinkConf, writePipeline);
    }
    env.execute(config.targetTableName);
}
Also used : Transformer(org.apache.hudi.sink.transform.Transformer) Configuration(org.apache.flink.configuration.Configuration) DFSPropertiesConfiguration(org.apache.hudi.common.config.DFSPropertiesConfiguration) HoodieRecord(org.apache.hudi.common.model.HoodieRecord) RowType(org.apache.flink.table.types.logical.RowType) TypedProperties(org.apache.hudi.common.config.TypedProperties) JsonRowDataDeserializationSchema(org.apache.flink.formats.json.JsonRowDataDeserializationSchema) RowData(org.apache.flink.table.data.RowData) JCommander(com.beust.jcommander.JCommander) StreamExecutionEnvironment(org.apache.flink.streaming.api.environment.StreamExecutionEnvironment)

Example 2 with Transformer

Use of org.apache.hudi.sink.transform.Transformer in the Apache Hudi project.

From the class ITTestDataStreamWrite, method testChainedTransformersBeforeWriting:

@Test
public void testChainedTransformersBeforeWriting() throws Exception {
    Transformer t1 = (ds) -> ds.map((rowdata) -> {
        if (rowdata instanceof GenericRowData) {
            GenericRowData genericRD = (GenericRowData) rowdata;
            // update age field to age + 1
            genericRD.setField(2, genericRD.getInt(2) + 1);
            return genericRD;
        } else {
            throw new RuntimeException("Unrecognized row type : " + rowdata.getClass().getSimpleName());
        }
    });
    ChainedTransformer chainedTransformer = new ChainedTransformer(Arrays.asList(t1, t1));
    testWriteToHoodie(chainedTransformer, EXPECTED_CHAINED_TRANSFORMER);
}
Also used : FilePathFilter(org.apache.flink.api.common.io.FilePathFilter) Arrays(java.util.Arrays) FileProcessingMode(org.apache.flink.streaming.api.functions.source.FileProcessingMode) TestConfigurations(org.apache.hudi.utils.TestConfigurations) CheckpointingMode(org.apache.flink.streaming.api.CheckpointingMode) HashMap(java.util.HashMap) JobStatus(org.apache.flink.api.common.JobStatus) RowType(org.apache.flink.table.types.logical.RowType) ChainedTransformer(org.apache.hudi.sink.transform.ChainedTransformer) BasicTypeInfo(org.apache.flink.api.common.typeinfo.BasicTypeInfo) HoodieTableType(org.apache.hudi.common.model.HoodieTableType) GenericRowData(org.apache.flink.table.data.GenericRowData) Path(org.apache.flink.core.fs.Path) Map(java.util.Map) TestLogger(org.apache.flink.util.TestLogger) StreamerUtil(org.apache.hudi.util.StreamerUtil) TypeInformation(org.apache.flink.api.common.typeinfo.TypeInformation) Pipelines(org.apache.hudi.sink.utils.Pipelines) ValueSource(org.junit.jupiter.params.provider.ValueSource) HoodieRecord(org.apache.hudi.common.model.HoodieRecord) RowData(org.apache.flink.table.data.RowData) AvroSchemaConverter(org.apache.hudi.util.AvroSchemaConverter) Configuration(org.apache.flink.configuration.Configuration) TestData(org.apache.hudi.utils.TestData) TimestampFormat(org.apache.flink.formats.common.TimestampFormat) JobClient(org.apache.flink.core.execution.JobClient) File(java.io.File) StandardCharsets(java.nio.charset.StandardCharsets) DataStream(org.apache.flink.streaming.api.datastream.DataStream) Test(org.junit.jupiter.api.Test) Objects(java.util.Objects) TimeUnit(java.util.concurrent.TimeUnit) ContinuousFileSource(org.apache.hudi.utils.source.ContinuousFileSource) TextInputFormat(org.apache.flink.api.java.io.TextInputFormat) ParameterizedTest(org.junit.jupiter.params.ParameterizedTest) List(java.util.List) InternalTypeInfo(org.apache.flink.table.runtime.typeutils.InternalTypeInfo) TempDir(org.junit.jupiter.api.io.TempDir) 
JsonRowDataDeserializationSchema(org.apache.flink.formats.json.JsonRowDataDeserializationSchema) FlinkOptions(org.apache.hudi.configuration.FlinkOptions) Transformer(org.apache.hudi.sink.transform.Transformer) StreamExecutionEnvironment(org.apache.flink.streaming.api.environment.StreamExecutionEnvironment) ChainedTransformer(org.apache.hudi.sink.transform.ChainedTransformer) Transformer(org.apache.hudi.sink.transform.Transformer) ChainedTransformer(org.apache.hudi.sink.transform.ChainedTransformer) GenericRowData(org.apache.flink.table.data.GenericRowData) Test(org.junit.jupiter.api.Test) ParameterizedTest(org.junit.jupiter.params.ParameterizedTest)

Example 3 with Transformer

Use of org.apache.hudi.sink.transform.Transformer in the Apache Hudi project.

From the class ITTestDataStreamWrite, method testTransformerBeforeWriting:

@Test
public void testTransformerBeforeWriting() throws Exception {
    Transformer transformer = (ds) -> ds.map((rowdata) -> {
        if (rowdata instanceof GenericRowData) {
            GenericRowData genericRD = (GenericRowData) rowdata;
            // update age field to age + 1
            genericRD.setField(2, genericRD.getInt(2) + 1);
            return genericRD;
        } else {
            throw new RuntimeException("Unrecognized row type information: " + rowdata.getClass().getSimpleName());
        }
    });
    testWriteToHoodie(transformer, EXPECTED_TRANSFORMER);
}
Also used : FilePathFilter(org.apache.flink.api.common.io.FilePathFilter) Arrays(java.util.Arrays) FileProcessingMode(org.apache.flink.streaming.api.functions.source.FileProcessingMode) TestConfigurations(org.apache.hudi.utils.TestConfigurations) CheckpointingMode(org.apache.flink.streaming.api.CheckpointingMode) HashMap(java.util.HashMap) JobStatus(org.apache.flink.api.common.JobStatus) RowType(org.apache.flink.table.types.logical.RowType) ChainedTransformer(org.apache.hudi.sink.transform.ChainedTransformer) BasicTypeInfo(org.apache.flink.api.common.typeinfo.BasicTypeInfo) HoodieTableType(org.apache.hudi.common.model.HoodieTableType) GenericRowData(org.apache.flink.table.data.GenericRowData) Path(org.apache.flink.core.fs.Path) Map(java.util.Map) TestLogger(org.apache.flink.util.TestLogger) StreamerUtil(org.apache.hudi.util.StreamerUtil) TypeInformation(org.apache.flink.api.common.typeinfo.TypeInformation) Pipelines(org.apache.hudi.sink.utils.Pipelines) ValueSource(org.junit.jupiter.params.provider.ValueSource) HoodieRecord(org.apache.hudi.common.model.HoodieRecord) RowData(org.apache.flink.table.data.RowData) AvroSchemaConverter(org.apache.hudi.util.AvroSchemaConverter) Configuration(org.apache.flink.configuration.Configuration) TestData(org.apache.hudi.utils.TestData) TimestampFormat(org.apache.flink.formats.common.TimestampFormat) JobClient(org.apache.flink.core.execution.JobClient) File(java.io.File) StandardCharsets(java.nio.charset.StandardCharsets) DataStream(org.apache.flink.streaming.api.datastream.DataStream) Test(org.junit.jupiter.api.Test) Objects(java.util.Objects) TimeUnit(java.util.concurrent.TimeUnit) ContinuousFileSource(org.apache.hudi.utils.source.ContinuousFileSource) TextInputFormat(org.apache.flink.api.java.io.TextInputFormat) ParameterizedTest(org.junit.jupiter.params.ParameterizedTest) List(java.util.List) InternalTypeInfo(org.apache.flink.table.runtime.typeutils.InternalTypeInfo) TempDir(org.junit.jupiter.api.io.TempDir) 
JsonRowDataDeserializationSchema(org.apache.flink.formats.json.JsonRowDataDeserializationSchema) FlinkOptions(org.apache.hudi.configuration.FlinkOptions) Transformer(org.apache.hudi.sink.transform.Transformer) StreamExecutionEnvironment(org.apache.flink.streaming.api.environment.StreamExecutionEnvironment) ChainedTransformer(org.apache.hudi.sink.transform.ChainedTransformer) Transformer(org.apache.hudi.sink.transform.Transformer) GenericRowData(org.apache.flink.table.data.GenericRowData) Test(org.junit.jupiter.api.Test) ParameterizedTest(org.junit.jupiter.params.ParameterizedTest)

Example 4 with Transformer

Use of org.apache.hudi.sink.transform.Transformer in the Apache Hudi project.

From the class ITTestDataStreamWrite, method testWriteToHoodie:

/**
 * Runs the bounded file-source-to-Hudi pipeline with an optional transformer
 * and verifies the written table data against the expectation.
 *
 * @param transformer optional row transformer applied before writing; may be null
 * @param expected    expected table contents keyed by partition
 * @throws Exception if the streaming job fails or the verification errors out
 */
private void testWriteToHoodie(Transformer transformer, Map<String, List<String>> expected) throws Exception {
    Configuration conf = TestConfigurations.getDefaultConf(tempFile.getAbsolutePath());
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.getConfig().disableObjectReuse();
    env.setParallelism(4);
    // Checkpoints drive the Hudi commits; allow only one in flight at a time.
    env.enableCheckpointing(4000, CheckpointingMode.EXACTLY_ONCE);
    env.getCheckpointConfig().setMaxConcurrentCheckpoints(1);
    // Derive the row type from the test Avro schema and build a JSON decoder for it.
    RowType rowType =
        (RowType) AvroSchemaConverter.convertToDataType(StreamerUtil.getSourceSchema(conf)).getLogicalType();
    JsonRowDataDeserializationSchema deserializationSchema =
        new JsonRowDataDeserializationSchema(rowType, InternalTypeInfo.of(rowType), false, true, TimestampFormat.ISO_8601);
    String sourcePath = Objects.requireNonNull(
        Thread.currentThread().getContextClassLoader().getResource("test_source.data")).toString();
    // Bounded file source (2 checkpoint rounds) feeding JSON lines, decoded to RowData.
    DataStream<String> rawLines = env
        .addSource(new ContinuousFileSource.BoundedSourceFunction(new Path(sourcePath), 2))
        .name("continuous_file_source")
        .setParallelism(1);
    DataStream<RowData> dataStream = rawLines
        .map(line -> deserializationSchema.deserialize(line.getBytes(StandardCharsets.UTF_8)))
        .setParallelism(4);
    if (transformer != null) {
        dataStream = transformer.apply(dataStream);
    }
    int parallelism = env.getParallelism();
    DataStream<HoodieRecord> hoodieRecordDataStream = Pipelines.bootstrap(conf, rowType, parallelism, dataStream);
    DataStream<Object> pipeline = Pipelines.hoodieStreamWrite(conf, parallelism, hoodieRecordDataStream);
    env.addOperator(pipeline.getTransformation());
    JobClient client = env.executeAsync(conf.getString(FlinkOptions.TABLE_NAME));
    // Block until the bounded streaming job finishes before checking results.
    client.getJobExecutionResult().get();
    TestData.checkWrittenFullData(tempFile, expected);
}
Also used : Path(org.apache.flink.core.fs.Path) FilePathFilter(org.apache.flink.api.common.io.FilePathFilter) Arrays(java.util.Arrays) FileProcessingMode(org.apache.flink.streaming.api.functions.source.FileProcessingMode) TestConfigurations(org.apache.hudi.utils.TestConfigurations) CheckpointingMode(org.apache.flink.streaming.api.CheckpointingMode) HashMap(java.util.HashMap) JobStatus(org.apache.flink.api.common.JobStatus) RowType(org.apache.flink.table.types.logical.RowType) ChainedTransformer(org.apache.hudi.sink.transform.ChainedTransformer) BasicTypeInfo(org.apache.flink.api.common.typeinfo.BasicTypeInfo) HoodieTableType(org.apache.hudi.common.model.HoodieTableType) GenericRowData(org.apache.flink.table.data.GenericRowData) Path(org.apache.flink.core.fs.Path) Map(java.util.Map) TestLogger(org.apache.flink.util.TestLogger) StreamerUtil(org.apache.hudi.util.StreamerUtil) TypeInformation(org.apache.flink.api.common.typeinfo.TypeInformation) Pipelines(org.apache.hudi.sink.utils.Pipelines) ValueSource(org.junit.jupiter.params.provider.ValueSource) HoodieRecord(org.apache.hudi.common.model.HoodieRecord) RowData(org.apache.flink.table.data.RowData) AvroSchemaConverter(org.apache.hudi.util.AvroSchemaConverter) Configuration(org.apache.flink.configuration.Configuration) TestData(org.apache.hudi.utils.TestData) TimestampFormat(org.apache.flink.formats.common.TimestampFormat) JobClient(org.apache.flink.core.execution.JobClient) File(java.io.File) StandardCharsets(java.nio.charset.StandardCharsets) DataStream(org.apache.flink.streaming.api.datastream.DataStream) Test(org.junit.jupiter.api.Test) Objects(java.util.Objects) TimeUnit(java.util.concurrent.TimeUnit) ContinuousFileSource(org.apache.hudi.utils.source.ContinuousFileSource) TextInputFormat(org.apache.flink.api.java.io.TextInputFormat) ParameterizedTest(org.junit.jupiter.params.ParameterizedTest) List(java.util.List) InternalTypeInfo(org.apache.flink.table.runtime.typeutils.InternalTypeInfo) 
TempDir(org.junit.jupiter.api.io.TempDir) JsonRowDataDeserializationSchema(org.apache.flink.formats.json.JsonRowDataDeserializationSchema) FlinkOptions(org.apache.hudi.configuration.FlinkOptions) Transformer(org.apache.hudi.sink.transform.Transformer) StreamExecutionEnvironment(org.apache.flink.streaming.api.environment.StreamExecutionEnvironment) Configuration(org.apache.flink.configuration.Configuration) HoodieRecord(org.apache.hudi.common.model.HoodieRecord) RowType(org.apache.flink.table.types.logical.RowType) JobClient(org.apache.flink.core.execution.JobClient) ContinuousFileSource(org.apache.hudi.utils.source.ContinuousFileSource) JsonRowDataDeserializationSchema(org.apache.flink.formats.json.JsonRowDataDeserializationSchema) GenericRowData(org.apache.flink.table.data.GenericRowData) RowData(org.apache.flink.table.data.RowData) StreamExecutionEnvironment(org.apache.flink.streaming.api.environment.StreamExecutionEnvironment)

Aggregations

Configuration (org.apache.flink.configuration.Configuration)4 JsonRowDataDeserializationSchema (org.apache.flink.formats.json.JsonRowDataDeserializationSchema)4 StreamExecutionEnvironment (org.apache.flink.streaming.api.environment.StreamExecutionEnvironment)4 RowData (org.apache.flink.table.data.RowData)4 RowType (org.apache.flink.table.types.logical.RowType)4 HoodieRecord (org.apache.hudi.common.model.HoodieRecord)4 Transformer (org.apache.hudi.sink.transform.Transformer)4 File (java.io.File)3 StandardCharsets (java.nio.charset.StandardCharsets)3 Arrays (java.util.Arrays)3 HashMap (java.util.HashMap)3 List (java.util.List)3 Map (java.util.Map)3 Objects (java.util.Objects)3 TimeUnit (java.util.concurrent.TimeUnit)3 JobStatus (org.apache.flink.api.common.JobStatus)3 FilePathFilter (org.apache.flink.api.common.io.FilePathFilter)3 BasicTypeInfo (org.apache.flink.api.common.typeinfo.BasicTypeInfo)3 TypeInformation (org.apache.flink.api.common.typeinfo.TypeInformation)3 TextInputFormat (org.apache.flink.api.java.io.TextInputFormat)3