Example usage of org.apache.hudi.common.config.TypedProperties in the Apache Hudi project.
From class TestHoodieAvroKeyGeneratorFactory, method getCommonProps:
/**
 * Builds the baseline key-generator configuration shared by the tests:
 * record key "_row_key", Hive-style partitioning enabled, and "timestamp"
 * as the partition-path field.
 */
private TypedProperties getCommonProps() {
  TypedProperties commonProps = new TypedProperties();
  commonProps.put(KeyGeneratorOptions.RECORDKEY_FIELD_NAME.key(), "_row_key");
  commonProps.put(KeyGeneratorOptions.PARTITIONPATH_FIELD_NAME.key(), "timestamp");
  commonProps.put(KeyGeneratorOptions.HIVE_STYLE_PARTITIONING_ENABLE.key(), "true");
  return commonProps;
}
Example usage of org.apache.hudi.common.config.TypedProperties in the Apache Hudi project.
From class TestMetricsReporterFactory, method metricsReporterFactoryShouldReturnUserDefinedReporter:
@Test
public void metricsReporterFactoryShouldReturnUserDefinedReporter() {
  // Point the mocked write config at the custom reporter implementation.
  when(config.getMetricReporterClassName()).thenReturn(DummyMetricsReporter.class.getName());
  // Supply a recognizable props object so we can verify it is forwarded verbatim.
  TypedProperties reporterProps = new TypedProperties();
  reporterProps.setProperty("testKey", "testValue");
  when(config.getProps()).thenReturn(reporterProps);

  MetricsReporter created = MetricsReporterFactory.createReporter(config, registry);

  // The factory must instantiate the user-defined class and pass through
  // both the properties and the metric registry unchanged.
  assertTrue(created instanceof CustomizableMetricsReporter);
  assertEquals(reporterProps, ((DummyMetricsReporter) created).getProps());
  assertEquals(registry, ((DummyMetricsReporter) created).getRegistry());
}
Example usage of org.apache.hudi.common.config.TypedProperties in the Apache Hudi project.
From class TestHoodieKeyLocationFetchHandle, method testFetchHandle:
@ParameterizedTest
@ValueSource(booleans = { true, false })
public void testFetchHandle(boolean populateMetaFields) throws Exception {
// Initialize a COW table; when meta fields are disabled, key-gen props are needed
// so keys can be recomputed from the data columns instead of the meta columns.
metaClient = HoodieTestUtils.init(hadoopConf, basePath, HoodieTableType.COPY_ON_WRITE, populateMetaFields ? new Properties() : getPropertiesForKeyGen());
config = getConfigBuilder().withProperties(getPropertiesForKeyGen()).withIndexConfig(HoodieIndexConfig.newBuilder().build()).build();
// Generate 100 inserts and group them by partition path.
List<HoodieRecord> records = dataGen.generateInserts(makeNewCommitTime(), 100);
Map<String, List<HoodieRecord>> partitionRecordsMap = recordsToPartitionRecordsMap(records);
HoodieTable hoodieTable = HoodieSparkTable.create(config, context, metaClient);
HoodieSparkWriteableTestTable testTable = HoodieSparkWriteableTestTable.of(hoodieTable, AVRO_SCHEMA_WITH_METADATA_FIELDS);
// Write the records to parquet and capture the expected (key -> location) pairs
// per (partitionPath, fileId), which the fetch handle should reproduce.
Map<Tuple2<String, String>, List<Tuple2<HoodieKey, HoodieRecordLocation>>> expectedList = writeToParquetAndGetExpectedRecordLocations(partitionRecordsMap, testTable);
List<Tuple2<String, HoodieBaseFile>> partitionPathFileIdPairs = loadAllFilesForPartitions(new ArrayList<>(partitionRecordsMap.keySet()), context, hoodieTable);
// Key generator is only consulted when meta fields are absent from the files.
BaseKeyGenerator keyGenerator = (BaseKeyGenerator) HoodieSparkKeyGeneratorFactory.createKeyGenerator(new TypedProperties(getPropertiesForKeyGen()));
for (Tuple2<String, HoodieBaseFile> entry : partitionPathFileIdPairs) {
HoodieKeyLocationFetchHandle fetcherHandle = new HoodieKeyLocationFetchHandle(config, hoodieTable, Pair.of(entry._1, entry._2), populateMetaFields ? Option.empty() : Option.of(keyGenerator));
Iterator<Pair<HoodieKey, HoodieRecordLocation>> result = fetcherHandle.locations().iterator();
List<Tuple2<HoodieKey, HoodieRecordLocation>> actualList = new ArrayList<>();
result.forEachRemaining(x -> actualList.add(new Tuple2<>(x.getLeft(), x.getRight())));
// Locations fetched from each base file must match what was written to it.
assertEquals(expectedList.get(new Tuple2<>(entry._1, entry._2.getFileId())), actualList);
}
}
Example usage of org.apache.hudi.common.config.TypedProperties in the Apache Hudi project.
From class UtilHelpers, method buildProperties:
/**
 * Overlays a list of {@code key=value} strings on top of the global DFS properties.
 *
 * @param props command-line style property overrides, each of the form {@code key=value}
 * @return the global {@link TypedProperties} with the overrides applied
 * @throws IllegalArgumentException if an entry does not contain a '=' separator
 */
public static TypedProperties buildProperties(List<String> props) {
  TypedProperties properties = DFSPropertiesConfiguration.getGlobalProps();
  props.forEach(raw -> {
    // Split on the FIRST '=' only, so values that themselves contain '='
    // (JDBC URLs, base64-encoded secrets, etc.) are preserved intact.
    // The previous unbounded split rejected such entries outright.
    String[] kv = raw.split("=", 2);
    ValidationUtils.checkArgument(kv.length == 2,
        "Invalid property: `" + raw + "` (expected key=value)");
    properties.setProperty(kv[0], kv[1]);
  });
  return properties;
}
Example usage of org.apache.hudi.common.config.TypedProperties in the Apache Hudi project.
From class S3EventsMetaSelector, method createSourceSelector:
/**
 * Factory method for creating custom CloudObjectsMetaSelector. Default selector to use is {@link
 * S3EventsMetaSelector}
 *
 * @param props selector configuration; {@code Config.SOURCE_INPUT_SELECTOR} may name a subclass
 * @return the reflectively instantiated selector
 * @throws HoodieException if the configured class cannot be loaded or constructed
 */
public static S3EventsMetaSelector createSourceSelector(TypedProperties props) {
  // Fall back to this class itself when no custom selector is configured.
  String selectorClassName =
      props.getString(S3EventsMetaSelector.Config.SOURCE_INPUT_SELECTOR, S3EventsMetaSelector.class.getName());
  try {
    // The selector is expected to expose a single-arg (TypedProperties) constructor.
    S3EventsMetaSelector metaSelector = (S3EventsMetaSelector) ReflectionUtils.loadClass(
        selectorClassName, new Class<?>[] {TypedProperties.class}, props);
    log.info("Using path selector " + metaSelector.getClass().getName());
    return metaSelector;
  } catch (Exception e) {
    throw new HoodieException("Could not load source selector class " + selectorClassName, e);
  }
}
End of aggregated TypedProperties usage examples.