Use of org.apache.hudi.common.config.TypedProperties in project hudi by apache.
The class HoodieTestHiveBase, method getTimeWait:

private int getTimeWait() {
  try (InputStream stream = HoodieTestHiveBase.class.getClassLoader().getResourceAsStream("hoodie-docker.properties")) {
    if (stream == null) {
      // Resource missing: fall back explicitly rather than hitting an NPE in properties.load(null),
      // which the IOException catch below would not handle.
      LOG.warn("Cannot find hoodie-docker.properties, using default time wait for hiveserver.");
      return DEFAULT_TIME_WAIT;
    }
    TypedProperties properties = new TypedProperties();
    properties.load(stream);
    return properties.getInteger("hoodie.hiveserver.time.wait", DEFAULT_TIME_WAIT);
  } catch (IOException e) {
    LOG.warn("Cannot load the properties file, using default time wait for hiveserver.");
    return DEFAULT_TIME_WAIT;
  }
}
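
The typed getter is what makes the fallback above work: TypedProperties extends java.util.Properties, and getInteger parses the stored string when the key is present, otherwise it returns the supplied default. A minimal sketch of that behavior (the key names are illustrative only):

TypedProperties props = new TypedProperties();
props.setProperty("hoodie.hiveserver.time.wait", "45");
int present = props.getInteger("hoodie.hiveserver.time.wait", 30); // 45, parsed from the stored string
int absent = props.getInteger("some.unset.key", 30);               // 30, the default is returned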

Use of org.apache.hudi.common.config.TypedProperties in project hudi by apache.
The class TestHoodieDeltaStreamer, method prepareJsonKafkaDFSSource:

private void prepareJsonKafkaDFSSource(String propsFileName, String autoResetValue, String topicName) throws IOException {
  // Properties used for testing delta-streamer with JsonKafka source
  TypedProperties props = new TypedProperties();
  populateAllCommonProps(props, dfsBasePath, testUtils.brokerAddress());
  props.setProperty("include", "base.properties");
  props.setProperty("hoodie.embed.timeline.server", "false");
  props.setProperty("hoodie.datasource.write.recordkey.field", "_row_key");
  props.setProperty("hoodie.datasource.write.partitionpath.field", "not_there");
  props.setProperty("hoodie.deltastreamer.source.dfs.root", JSON_KAFKA_SOURCE_ROOT);
  props.setProperty("hoodie.deltastreamer.source.kafka.topic", topicName);
  props.setProperty("hoodie.deltastreamer.source.kafka.checkpoint.type", kafkaCheckpointType);
  props.setProperty("hoodie.deltastreamer.schemaprovider.source.schema.file", dfsBasePath + "/source_uber.avsc");
  props.setProperty("hoodie.deltastreamer.schemaprovider.target.schema.file", dfsBasePath + "/target_uber.avsc");
  props.setProperty("auto.offset.reset", autoResetValue);
  UtilitiesTestBase.Helpers.savePropsToDFS(props, dfs, dfsBasePath + "/" + propsFileName);
}
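
A test would then point the delta-streamer at the file written above. A hypothetical invocation, assuming a PROPS_FILENAME_TEST_JSON_KAFKA constant and a topic name for illustration ("earliest" is a standard auto.offset.reset value):

prepareJsonKafkaDFSSource(PROPS_FILENAME_TEST_JSON_KAFKA, "earliest", "topic_test_json_kafka");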

Use of org.apache.hudi.common.config.TypedProperties in project hudi by apache.
The class TestHoodieDeltaStreamer, method testORCDFSSource:

private void testORCDFSSource(boolean useSchemaProvider, List<String> transformerClassNames) throws Exception {
  // prepare ORCDFSSource
  TypedProperties orcProps = new TypedProperties();
  // Properties used for testing delta-streamer with orc source
  orcProps.setProperty("include", "base.properties");
  orcProps.setProperty("hoodie.embed.timeline.server", "false");
  orcProps.setProperty("hoodie.datasource.write.recordkey.field", "_row_key");
  orcProps.setProperty("hoodie.datasource.write.partitionpath.field", "not_there");
  if (useSchemaProvider) {
    orcProps.setProperty("hoodie.deltastreamer.schemaprovider.source.schema.file", dfsBasePath + "/" + "source.avsc");
    if (transformerClassNames != null) {
      orcProps.setProperty("hoodie.deltastreamer.schemaprovider.target.schema.file", dfsBasePath + "/" + "target.avsc");
    }
  }
  orcProps.setProperty("hoodie.deltastreamer.source.dfs.root", ORC_SOURCE_ROOT);
  UtilitiesTestBase.Helpers.savePropsToDFS(orcProps, dfs, dfsBasePath + "/" + PROPS_FILENAME_TEST_ORC);

  String tableBasePath = dfsBasePath + "/test_orc_source_table" + testNum;
  HoodieDeltaStreamer deltaStreamer = new HoodieDeltaStreamer(
      TestHelpers.makeConfig(tableBasePath, WriteOperationType.INSERT, ORCDFSSource.class.getName(),
          transformerClassNames, PROPS_FILENAME_TEST_ORC, false, useSchemaProvider, 100000, false, null, null,
          "timestamp", null),
      jsc);
  deltaStreamer.sync();
  // The source is ORC, but Hudi writes its base files as parquet, hence the parquet glob in the assertion.
  TestHelpers.assertRecordCount(ORC_NUM_RECORDS, tableBasePath + "/*/*.parquet", sqlContext);
  testNum++;
}
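
The two parameters drive the variants a caller would exercise. A sketch of both call shapes, assuming java.util.Collections is imported; the transformer class name is only an example of a fully qualified name, not necessarily one used by this test:

testORCDFSSource(false, null); // no schema provider, no transformers
testORCDFSSource(true, Collections.singletonList("org.apache.hudi.utilities.transform.FlatteningTransformer"));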

Use of org.apache.hudi.common.config.TypedProperties in project hudi by apache.
The class TestHoodieDeltaStreamer, method prepareParquetDFSSource:

private void prepareParquetDFSSource(boolean useSchemaProvider, boolean hasTransformer, String sourceSchemaFile,
    String targetSchemaFile, String propsFileName, String parquetSourceRoot, boolean addCommonProps,
    String partitionPath) throws IOException {
  // Properties used for testing delta-streamer with Parquet source
  TypedProperties parquetProps = new TypedProperties();
  if (addCommonProps) {
    populateCommonProps(parquetProps, dfsBasePath);
  }
  parquetProps.setProperty("include", "base.properties");
  parquetProps.setProperty("hoodie.embed.timeline.server", "false");
  parquetProps.setProperty("hoodie.datasource.write.recordkey.field", "_row_key");
  parquetProps.setProperty("hoodie.datasource.write.partitionpath.field", partitionPath);
  if (useSchemaProvider) {
    parquetProps.setProperty("hoodie.deltastreamer.schemaprovider.source.schema.file", dfsBasePath + "/" + sourceSchemaFile);
    if (hasTransformer) {
      parquetProps.setProperty("hoodie.deltastreamer.schemaprovider.target.schema.file", dfsBasePath + "/" + targetSchemaFile);
    }
  }
  parquetProps.setProperty("hoodie.deltastreamer.source.dfs.root", parquetSourceRoot);
  UtilitiesTestBase.Helpers.savePropsToDFS(parquetProps, dfs, dfsBasePath + "/" + propsFileName);
}
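
Because targetSchemaFile is only read when both useSchemaProvider and hasTransformer are set, a caller without a transformer can pass null for it. A hypothetical invocation, with the constants and the partition path field assumed for illustration:

prepareParquetDFSSource(true, false, "source.avsc", null, PROPS_FILENAME_TEST_PARQUET, PARQUET_SOURCE_ROOT, false, "partition_path");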

Use of org.apache.hudi.common.config.TypedProperties in project hudi by apache.
The class TestHoodieDeltaStreamerWithMultiWriter, method prepareMultiWriterProps:

private static TypedProperties prepareMultiWriterProps(FileSystem fs, String basePath, String propsFilePath) throws IOException {
  TypedProperties props = new TypedProperties();
  HoodieDeltaStreamerTestBase.populateCommonProps(props, basePath);
  HoodieDeltaStreamerTestBase.populateCommonHiveProps(props);
  props.setProperty("include", "sql-transformer.properties");
  props.setProperty("hoodie.datasource.write.keygenerator.class", TestHoodieDeltaStreamer.TestGenerator.class.getName());
  props.setProperty("hoodie.datasource.write.recordkey.field", "_row_key");
  props.setProperty("hoodie.datasource.write.partitionpath.field", "not_there");
  props.setProperty("hoodie.deltastreamer.schemaprovider.source.schema.file", basePath + "/source.avsc");
  props.setProperty("hoodie.deltastreamer.schemaprovider.target.schema.file", basePath + "/target.avsc");
  // Overwrites the "include" value set above; Properties keeps only the last value per key.
  props.setProperty("include", "base.properties");
  // Multi-writer settings: optimistic concurrency with a ZooKeeper-based lock provider.
  props.setProperty("hoodie.write.concurrency.mode", "optimistic_concurrency_control");
  props.setProperty("hoodie.cleaner.policy.failed.writes", "LAZY");
  props.setProperty("hoodie.write.lock.provider", "org.apache.hudi.client.transaction.lock.ZookeeperBasedLockProvider");
  props.setProperty("hoodie.write.lock.hivemetastore.database", "testdb1");
  props.setProperty("hoodie.write.lock.hivemetastore.table", "table1");
  props.setProperty("hoodie.write.lock.zookeeper.url", "127.0.0.1");
  props.setProperty("hoodie.write.lock.zookeeper.port", "2828");
  props.setProperty("hoodie.write.lock.wait_time_ms", "1200000");
  props.setProperty("hoodie.write.lock.num_retries", "10");
  props.setProperty("hoodie.write.lock.zookeeper.lock_key", "test_table");
  props.setProperty("hoodie.write.lock.zookeeper.base_path", "/test");
  props.setProperty(INSERT_PARALLELISM_VALUE.key(), "4");
  props.setProperty(UPSERT_PARALLELISM_VALUE.key(), "4");
  props.setProperty(BULKINSERT_PARALLELISM_VALUE.key(), "4");
  props.setProperty(FINALIZE_WRITE_PARALLELISM_VALUE.key(), "4");
  props.setProperty(BULK_INSERT_SORT_MODE.key(), BulkInsertSortMode.NONE.name());
  UtilitiesTestBase.Helpers.savePropsToDFS(props, fs, propsFilePath);
  return props;
}
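
Round-tripping the saved file works with the same load/typed-get pattern as getTimeWait above. A sketch, assuming the fs and propsFilePath arguments from this method (the org.apache.hadoop.fs.Path and java.io.InputStream imports are omitted, matching the listings above):

try (InputStream in = fs.open(new Path(propsFilePath))) {
  TypedProperties loaded = new TypedProperties();
  loaded.load(in);
  // Typed access with a fallback, exactly as in getTimeWait().
  int lockRetries = loaded.getInteger("hoodie.write.lock.num_retries", 3);
}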