Example 1 with InputBatch

Use of org.apache.hudi.utilities.sources.InputBatch in the Apache Hudi project.

From the class TestAbstractDebeziumSource, method testDebeziumEvents:

@ParameterizedTest
@MethodSource("testArguments")
public void testDebeziumEvents(Operation operation) throws Exception {
    String sourceClass = getSourceClass();
    // topic setup.
    testUtils.createTopic(TEST_TOPIC_NAME, 2);
    TypedProperties props = createPropsForJsonSource();
    SchemaProvider schemaProvider = new MockSchemaRegistryProvider(props, jsc, this);
    SourceFormatAdapter debeziumSource = new SourceFormatAdapter(UtilHelpers.createSource(sourceClass, props, jsc, sparkSession, schemaProvider, metrics));
    testUtils.sendMessages(TEST_TOPIC_NAME, new String[] { generateDebeziumEvent(operation).toString() });
    InputBatch<Dataset<Row>> fetch = debeziumSource.fetchNewDataInRowFormat(Option.empty(), 10);
    assertEquals(1, fetch.getBatch().get().count());
    // Ensure the before fields are picked for DELETE CDC Events,
    // and after fields are picked for INSERT and UPDATE CDC Events.
    final String fieldPrefix = (operation.equals(Operation.DELETE)) ? "before_" : "after_";
    assertTrue(fetch.getBatch().get().select("type").collectAsList().stream().allMatch(r -> r.getString(0).startsWith(fieldPrefix)));
    assertTrue(fetch.getBatch().get().select("type").collectAsList().stream().allMatch(r -> r.getString(0).startsWith(fieldPrefix)));
    // Validate DB specific meta fields
    validateMetaFields(fetch.getBatch().get());
}
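
The test above fetches at most 10 records starting from an empty checkpoint. As a rough sketch (not part of the original test), a follow-up poll would feed the checkpoint carried by the returned InputBatch back into the adapter, so that only offsets beyond the previous batch are read; it reuses only calls already shown above:

// Hypothetical follow-up fetch, sketched for illustration only.
String checkpoint = fetch.getCheckpointForNextBatch();
InputBatch<Dataset<Row>> nextFetch = debeziumSource.fetchNewDataInRowFormat(Option.of(checkpoint), 10);
if (!nextFetch.getBatch().isPresent()) {
    // An absent batch means no new events arrived after the checkpoint.
}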

Example 2 with InputBatch

Use of org.apache.hudi.utilities.sources.InputBatch in the Apache Hudi project.

From the class DistributedTestDataSource, method fetchNewData:

@Override
protected InputBatch<JavaRDD<GenericRecord>> fetchNewData(Option<String> lastCkptStr, long sourceLimit) {
    int nextCommitNum = lastCkptStr.map(s -> Integer.parseInt(s) + 1).orElse(0);
    String instantTime = String.format("%05d", nextCommitNum);
    LOG.info("Source Limit is set to " + sourceLimit);
    // No new data.
    if (sourceLimit <= 0) {
        return new InputBatch<>(Option.empty(), instantTime);
    }
    TypedProperties newProps = new TypedProperties();
    newProps.putAll(props);
    // Set the maxUniqueRecords per partition for TestDataSource
    int maxUniqueRecords = props.getInteger(SourceConfigs.MAX_UNIQUE_RECORDS_PROP, SourceConfigs.DEFAULT_MAX_UNIQUE_RECORDS);
    String maxUniqueRecordsPerPartition = String.valueOf(Math.max(1, maxUniqueRecords / numTestSourcePartitions));
    newProps.setProperty(SourceConfigs.MAX_UNIQUE_RECORDS_PROP, maxUniqueRecordsPerPartition);
    int perPartitionSourceLimit = Math.max(1, (int) (sourceLimit / numTestSourcePartitions));
    JavaRDD<GenericRecord> avroRDD = sparkContext.parallelize(IntStream.range(0, numTestSourcePartitions).boxed().collect(Collectors.toList()), numTestSourcePartitions).mapPartitionsWithIndex((p, idx) -> {
        LOG.info("Initializing source with newProps=" + newProps);
        if (!dataGeneratorMap.containsKey(p)) {
            initDataGen(newProps, p);
        }
        return fetchNextBatch(newProps, perPartitionSourceLimit, instantTime, p).iterator();
    }, true);
    return new InputBatch<>(Option.of(avroRDD), instantTime);
}
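
To make the sizing above concrete, here is a worked example with assumed, purely illustrative values: with sourceLimit = 1000, numTestSourcePartitions = 4, and MAX_UNIQUE_RECORDS_PROP = 100, each partition is capped at max(1, 100 / 4) = 25 unique records and max(1, 1000 / 4) = 250 fetched records. With no prior checkpoint the batch gets instant time "00000" from String.format("%05d", 0), and a subsequent call that resumes from checkpoint "00000" advances to instant time "00001".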

Example 3 with InputBatch

Use of org.apache.hudi.utilities.sources.InputBatch in the Apache Hudi project.

From the class DeltaSync, method fetchFromSource:

private Pair<SchemaProvider, Pair<String, JavaRDD<HoodieRecord>>> fetchFromSource(Option<String> resumeCheckpointStr) {
    final Option<JavaRDD<GenericRecord>> avroRDDOptional;
    final String checkpointStr;
    SchemaProvider schemaProvider;
    if (transformer.isPresent()) {
        // Transformation is needed. Fetch New rows in Row Format, apply transformation and then convert them
        // to generic records for writing
        InputBatch<Dataset<Row>> dataAndCheckpoint = formatAdapter.fetchNewDataInRowFormat(resumeCheckpointStr, cfg.sourceLimit);
        Option<Dataset<Row>> transformed = dataAndCheckpoint.getBatch().map(data -> transformer.get().apply(jssc, sparkSession, data, props));
        checkpointStr = dataAndCheckpoint.getCheckpointForNextBatch();
        boolean reconcileSchema = props.getBoolean(DataSourceWriteOptions.RECONCILE_SCHEMA().key());
        if (this.userProvidedSchemaProvider != null && this.userProvidedSchemaProvider.getTargetSchema() != null) {
            // If the target schema is specified through Avro schema,
            // pass in the schema for the Row-to-Avro conversion
            // to avoid nullability mismatch between Avro schema and Row schema
            avroRDDOptional = transformed.map(t -> HoodieSparkUtils.createRdd(t, HOODIE_RECORD_STRUCT_NAME, HOODIE_RECORD_NAMESPACE, reconcileSchema, Option.of(this.userProvidedSchemaProvider.getTargetSchema())).toJavaRDD());
            schemaProvider = this.userProvidedSchemaProvider;
        } else {
            // Use Transformed Row's schema if not overridden. If target schema is not specified
            // default to RowBasedSchemaProvider
            schemaProvider = transformed.map(r -> {
                // determine the targetSchemaProvider. use latestTableSchema if reconcileSchema is enabled.
                SchemaProvider targetSchemaProvider = null;
                if (reconcileSchema) {
                    targetSchemaProvider = UtilHelpers.createLatestSchemaProvider(r.schema(), jssc, fs, cfg.targetBasePath);
                } else {
                    targetSchemaProvider = UtilHelpers.createRowBasedSchemaProvider(r.schema(), props, jssc);
                }
                return (SchemaProvider) new DelegatingSchemaProvider(props, jssc, dataAndCheckpoint.getSchemaProvider(), targetSchemaProvider);
            }).orElse(dataAndCheckpoint.getSchemaProvider());
            avroRDDOptional = transformed.map(t -> HoodieSparkUtils.createRdd(t, HOODIE_RECORD_STRUCT_NAME, HOODIE_RECORD_NAMESPACE, reconcileSchema, Option.ofNullable(schemaProvider.getTargetSchema())).toJavaRDD());
        }
    } else {
        // Pull the data from the source & prepare the write
        InputBatch<JavaRDD<GenericRecord>> dataAndCheckpoint = formatAdapter.fetchNewDataInAvroFormat(resumeCheckpointStr, cfg.sourceLimit);
        avroRDDOptional = dataAndCheckpoint.getBatch();
        checkpointStr = dataAndCheckpoint.getCheckpointForNextBatch();
        schemaProvider = dataAndCheckpoint.getSchemaProvider();
    }
    if (!cfg.allowCommitOnNoCheckpointChange && Objects.equals(checkpointStr, resumeCheckpointStr.orElse(null))) {
        LOG.info("No new data, source checkpoint has not changed. Nothing to commit. Old checkpoint=(" + resumeCheckpointStr + "). New Checkpoint=(" + checkpointStr + ")");
        String commitActionType = CommitUtils.getCommitActionType(cfg.operation, HoodieTableType.valueOf(cfg.tableType));
        hoodieMetrics.updateMetricsForEmptyData(commitActionType);
        return null;
    }
    jssc.setJobGroup(this.getClass().getSimpleName(), "Checking if input is empty");
    if ((!avroRDDOptional.isPresent()) || (avroRDDOptional.get().isEmpty())) {
        LOG.info("No new data, perform empty commit.");
        return Pair.of(schemaProvider, Pair.of(checkpointStr, jssc.emptyRDD()));
    }
    boolean shouldCombine = cfg.filterDupes || cfg.operation.equals(WriteOperationType.UPSERT);
    JavaRDD<GenericRecord> avroRDD = avroRDDOptional.get();
    JavaRDD<HoodieRecord> records = avroRDD.map(gr -> {
        HoodieRecordPayload payload = shouldCombine ? DataSourceUtils.createPayload(cfg.payloadClassName, gr, (Comparable) HoodieAvroUtils.getNestedFieldVal(gr, cfg.sourceOrderingField, false, props.getBoolean(KeyGeneratorOptions.KEYGENERATOR_CONSISTENT_LOGICAL_TIMESTAMP_ENABLED.key(), Boolean.parseBoolean(KeyGeneratorOptions.KEYGENERATOR_CONSISTENT_LOGICAL_TIMESTAMP_ENABLED.defaultValue())))) : DataSourceUtils.createPayload(cfg.payloadClassName, gr);
        return new HoodieAvroRecord<>(keyGenerator.getKey(gr), payload);
    });
    return Pair.of(schemaProvider, Pair.of(checkpointStr, records));
}
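
The transformer branch above only needs an implementation of org.apache.hudi.utilities.transform.Transformer whose apply signature matches the call site shown (JavaSparkContext, SparkSession, Dataset<Row>, TypedProperties, returning Dataset<Row>). A minimal sketch, assuming a hypothetical temporary column "_tmp_col" that the pipeline wants to drop before writing:

import org.apache.hudi.common.config.TypedProperties;
import org.apache.hudi.utilities.transform.Transformer;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;

// Sketch only: a pass-through transformer that drops one assumed column.
public class DropTempColumnTransformer implements Transformer {
    @Override
    public Dataset<Row> apply(JavaSparkContext jsc, SparkSession sparkSession, Dataset<Row> rowDataset, TypedProperties properties) {
        // "_tmp_col" is an assumed column name used purely for illustration.
        return rowDataset.drop("_tmp_col");
    }
}

DeltaSync would invoke such a transformer exactly as in the transformed branch above; when no transformer is configured, the else branch pulls Avro records directly and reuses the source's own SchemaProvider.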
