Use of org.apache.hudi.common.config.TypedProperties in project hudi by apache.
From the class TestDFSPropertiesConfiguration, method testIncludes.
@Test
public void testIncludes() {
  DFSPropertiesConfiguration cfg = new DFSPropertiesConfiguration(dfs.getConf(), new Path(dfsBasePath + "/t3.props"));
  TypedProperties props = cfg.getProps();
  assertEquals(123, props.getInteger("int.prop"));
  assertEquals(243.4, props.getDouble("double.prop"), 0.001);
  assertTrue(props.getBoolean("boolean.prop"));
  assertEquals("t3.value", props.getString("string.prop"));
  assertEquals(1354354354, props.getLong("long.prop"));
  assertThrows(IllegalStateException.class, () -> {
    cfg.addPropsFromFile(new Path(dfsBasePath + "/t4.props"));
  }, "Should error out on a self-included file.");
}
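The test name and the failure message suggest that DFSPropertiesConfiguration resolves include directives inside a properties file and refuses to follow a cycle back to a file that is already being loaded. A minimal sketch of tolerating that failure at a call site, reusing the paths from the test above (treat the file layout and the overall pattern as assumptions, not Hudi's recommended usage):

// Sketch: layer an optional overrides file on top of a base config, tolerating a cyclic include.
DFSPropertiesConfiguration cfg = new DFSPropertiesConfiguration(dfs.getConf(), new Path(dfsBasePath + "/t3.props"));
try {
  cfg.addPropsFromFile(new Path(dfsBasePath + "/t4.props"));
} catch (IllegalStateException e) {
  // t4.props leads back to a file already on the include chain; keep the base configuration.
}
TypedProperties merged = cfg.getProps();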
Use of org.apache.hudi.common.config.TypedProperties in project hudi by apache.
From the class TestDFSPropertiesConfiguration, method testLocalFileSystemLoading.
@Test
public void testLocalFileSystemLoading() throws IOException {
  DFSPropertiesConfiguration cfg = new DFSPropertiesConfiguration(dfs.getConf(), new Path(dfsBasePath + "/t1.props"));
  cfg.addPropsFromFile(new Path(String.format("file:%s", getClass().getClassLoader().getResource("props/test.properties").getPath())));
  TypedProperties props = cfg.getProps();
  assertEquals(123, props.getInteger("int.prop"));
  assertEquals(113.4, props.getDouble("double.prop"), 0.001);
  assertTrue(props.getBoolean("boolean.prop"));
  assertEquals("str", props.getString("string.prop"));
  assertEquals(1354354354, props.getLong("long.prop"));
  assertEquals(123, props.getInteger("some.random.prop"));
}
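The same file: scheme trick can be used outside of tests to layer a properties file bundled on the classpath over a DFS-backed one. A small sketch; the resource name props/defaults.properties is a hypothetical placeholder:

// Sketch: merge a classpath-local properties file into a DFS-backed configuration.
DFSPropertiesConfiguration cfg = new DFSPropertiesConfiguration(dfs.getConf(), new Path(dfsBasePath + "/t1.props"));
java.net.URL localDefaults = getClass().getClassLoader().getResource("props/defaults.properties"); // hypothetical resource
if (localDefaults != null) {
  cfg.addPropsFromFile(new Path(String.format("file:%s", localDefaults.getPath())));
}
TypedProperties props = cfg.getProps();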
Use of org.apache.hudi.common.config.TypedProperties in project hudi by apache.
From the class TestDFSPropertiesConfiguration, method testParsing.
@Test
public void testParsing() {
  DFSPropertiesConfiguration cfg = new DFSPropertiesConfiguration(dfs.getConf(), new Path(dfsBasePath + "/t1.props"));
  TypedProperties props = cfg.getProps();
  assertEquals(5, props.size());
  // Reading an absent key without a default errors out
  assertThrows(IllegalArgumentException.class, () -> {
    props.getString("invalid.key");
  }, "Should error out here.");
  // Typed getters without defaults
  assertEquals(123, props.getInteger("int.prop"));
  assertEquals(113.4, props.getDouble("double.prop"), 0.001);
  assertTrue(props.getBoolean("boolean.prop"));
  assertEquals("str", props.getString("string.prop"));
  assertEquals(1354354354, props.getLong("long.prop"));
  // Typed getters with defaults, key present: the stored value wins
  assertEquals(123, props.getInteger("int.prop", 456));
  assertEquals(113.4, props.getDouble("double.prop", 223.4), 0.001);
  assertTrue(props.getBoolean("boolean.prop", false));
  assertEquals("str", props.getString("string.prop", "default"));
  assertEquals(1354354354, props.getLong("long.prop", 8578494434L));
  // Typed getters with defaults, key absent: the default is returned
  assertEquals(456, props.getInteger("bad.int.prop", 456));
  assertEquals(223.4, props.getDouble("bad.double.prop", 223.4), 0.001);
  assertFalse(props.getBoolean("bad.boolean.prop", false));
  assertEquals("default", props.getString("bad.string.prop", "default"));
  assertEquals(8578494434L, props.getLong("bad.long.prop", 8578494434L));
}
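The assertions above cover the two ways TypedProperties is normally read: typed getters without a default are for required keys and throw when the key is absent, while the overloads that take a default fall back to it. A minimal sketch of that pattern; the key names are illustrative only:

// Sketch: required vs. optional reads from TypedProperties (key names are illustrative only).
TypedProperties props = cfg.getProps();
int parallelism = props.getInteger("example.required.parallelism");          // throws if the key is missing
long sourceLimit = props.getLong("example.optional.source.limit", 1000000L); // falls back to the default
boolean enabled = props.getBoolean("example.optional.enabled", false);
String label = props.getString("example.optional.label", "default-label");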
Use of org.apache.hudi.common.config.TypedProperties in project hudi by apache.
From the class TestJsonDFSSource, method prepareDFSSource.
@Override
public Source prepareDFSSource() {
  TypedProperties props = new TypedProperties();
  props.setProperty("hoodie.deltastreamer.source.dfs.root", dfsRoot);
  return new JsonDFSSource(props, jsc, sparkSession, schemaProvider);
}
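Preparing a source boils down to populating a TypedProperties instance before constructing it. A small sketch of doing the same outside the test harness, where the input path is illustrative and jsc, sparkSession, and schemaProvider are assumed to exist as in the snippet above:

// Sketch: build a JsonDFSSource from explicit properties and wrap it for format conversion.
TypedProperties props = new TypedProperties();
props.setProperty("hoodie.deltastreamer.source.dfs.root", "/data/input/json"); // illustrative path
Source jsonSource = new JsonDFSSource(props, jsc, sparkSession, schemaProvider);
SourceFormatAdapter adapter = new SourceFormatAdapter(jsonSource);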
Use of org.apache.hudi.common.config.TypedProperties in project hudi by apache.
From the class TestJsonKafkaSource, method testJsonKafkaSourceFilterNullMsg.
// test whether empty messages can be filtered
@Test
public void testJsonKafkaSourceFilterNullMsg() {
  // topic setup.
  final String topic = TEST_TOPIC_PREFIX + "testJsonKafkaSourceFilterNullMsg";
  testUtils.createTopic(topic, 2);
  HoodieTestDataGenerator dataGenerator = new HoodieTestDataGenerator();
  TypedProperties props = createPropsForJsonSource(topic, null, "earliest");
  Source jsonSource = new JsonKafkaSource(props, jsc(), spark(), schemaProvider, metrics);
  SourceFormatAdapter kafkaSource = new SourceFormatAdapter(jsonSource);
  // 1. Extract without any checkpoint => get all the data, respecting sourceLimit
  assertEquals(Option.empty(), kafkaSource.fetchNewDataInAvroFormat(Option.empty(), Long.MAX_VALUE).getBatch());
  // Send 1000 non-null messages to Kafka
  testUtils.sendMessages(topic, jsonifyRecords(dataGenerator.generateInserts("000", 1000)));
  // Send 100 null messages to Kafka
  testUtils.sendMessages(topic, new String[100]);
  InputBatch<JavaRDD<GenericRecord>> fetch1 = kafkaSource.fetchNewDataInAvroFormat(Option.empty(), Long.MAX_VALUE);
  // Verify that messages with null values are filtered
  assertEquals(1000, fetch1.getBatch().get().count());
}
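The createPropsForJsonSource helper is not shown in this snippet. A plausible sketch of what such a helper has to provide follows; the property keys and the broker address are assumptions for illustration, not the test's actual implementation:

// Plausible sketch of a props builder for a JSON Kafka source; keys are assumed, not verbatim.
private TypedProperties createPropsForJsonSource(String topic, Long maxEvents, String resetStrategy) {
  TypedProperties props = new TypedProperties();
  props.setProperty("hoodie.deltastreamer.source.kafka.topic", topic); // assumed Hudi key for the topic name
  props.setProperty("bootstrap.servers", "localhost:9092");            // the real helper presumably points at the embedded broker
  props.setProperty("auto.offset.reset", resetStrategy);               // "earliest" in the test above
  if (maxEvents != null) {
    props.setProperty("hoodie.deltastreamer.kafka.source.maxEvents", String.valueOf(maxEvents)); // assumed key
  }
  return props;
}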