Use of info.batey.kafka.unit.KafkaUnit in project apex-malhar by apache.
The class ApplicationTest, method writeToTopic.
private void writeToTopic()
{
  KafkaUnit ku = kafkaUnitRule.getKafkaUnit();
  ku.createTopic(TOPIC);
  for (String line : lines) {
    KeyedMessage<String, String> kMsg = new KeyedMessage<>(TOPIC, line);
    ku.sendMessages(kMsg);
  }
  LOG.debug("Sent messages to topic {}", TOPIC);
}
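The method relies on a kafkaUnitRule field plus TOPIC and lines members defined elsewhere in ApplicationTest. A minimal sketch of that surrounding fixture, with illustrative ports and test data rather than the project's actual values, might look like this:

import info.batey.kafka.unit.KafkaUnitRule;
import org.junit.Rule;

public class ApplicationTest
{
  // hypothetical fixture: the rule starts an embedded ZooKeeper/Kafka pair for each test
  @Rule
  public final KafkaUnitRule kafkaUnitRule = new KafkaUnitRule(2181, 9092);

  // illustrative stand-ins for the topic name and test lines used by writeToTopic()
  private static final String TOPIC = "test-topic";
  private final String[] lines = {"line one", "line two", "line three"};
}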
Use of info.batey.kafka.unit.KafkaUnit in project apex-malhar by apache.
The class ExactlyOnceJdbcOutputTest, method testApplication.
@Test
public void testApplication() throws Exception
{
  KafkaUnit ku = kafkaUnitRule.getKafkaUnit();
  String topicName = "testTopic";
  // topic creation is async and the producer may also auto-create it
  ku.createTopic(topicName, 1);

  // produce test data
  String[] words = "count the words from kafka and store them in the db".split("\\s+");
  for (String word : words) {
    ku.sendMessages(new KeyedMessage<String, String>(topicName, word));
  }

  Configuration conf = new Configuration(false);
  conf.addResource(this.getClass().getResourceAsStream("/META-INF/properties.xml"));
  conf.set("apex.operator.kafkaInput.prop.topics", topicName);
  conf.set("apex.operator.kafkaInput.prop.clusters", "localhost:" + brokerPort);
  // consume one word per window
  conf.set("apex.operator.kafkaInput.prop.maxTuplesPerWindow", "1");
  conf.set("apex.operator.kafkaInput.prop.initialOffset", "EARLIEST");
  conf.set("apex.operator.store.prop.store.databaseDriver", DB_DRIVER);
  conf.set("apex.operator.store.prop.store.databaseUrl", DB_URL);

  EmbeddedAppLauncher<?> launcher = Launcher.getLauncher(LaunchMode.EMBEDDED);
  Attribute.AttributeMap launchAttributes = new Attribute.AttributeMap.DefaultAttributeMap();
  // run the app asynchronously; the test polls the database and shuts the app down once results arrive
  launchAttributes.put(EmbeddedAppLauncher.RUN_ASYNC, true);
  AppHandle appHandle = launcher.launchApp(new ExactlyOnceJdbcOutputApp(), conf, launchAttributes);

  HashSet<String> wordsSet = Sets.newHashSet(words);
  Connection con = DriverManager.getConnection(DB_URL);
  Statement stmt = con.createStatement();
  int rowCount = 0;
  // 30s timeout
  long timeout = System.currentTimeMillis() + 30000;
  while (rowCount < wordsSet.size() && timeout > System.currentTimeMillis()) {
    Thread.sleep(500);
    String countQuery = "SELECT count(*) from " + TABLE_NAME;
    ResultSet resultSet = stmt.executeQuery(countQuery);
    resultSet.next();
    rowCount = resultSet.getInt(1);
    resultSet.close();
    LOG.info("current row count in {} is {}", TABLE_NAME, rowCount);
  }
  Assert.assertEquals("number of words", wordsSet.size(), rowCount);
  appHandle.shutdown(ShutdownMode.KILL);
}
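The snippet references brokerPort, DB_DRIVER, DB_URL, and TABLE_NAME without showing where they come from. One plausible way to keep the broker port in sync with the embedded Kafka instance, shown here as an assumption rather than the project's actual setup, is to fix both ports when declaring the rule:

// hypothetical constants; the real test may choose its ports and database differently
private static final int zkPort = 12181;
private static final int brokerPort = 19092;

@Rule
public final KafkaUnitRule kafkaUnitRule = new KafkaUnitRule(zkPort, brokerPort);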
Use of info.batey.kafka.unit.KafkaUnit in project apex-malhar by apache.
The class ApplicationTest, method createTopics.
private void createTopics() throws Exception
{
  KafkaUnit ku = kafkaUnitRule.getKafkaUnit();
  ku.createTopic("exactly-once");
  ku.createTopic("at-least-once");
}
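A natural companion is reading messages back from those topics once the application under test has produced output. The sketch below is an assumption about how that could look with KafkaUnit's readMessages, not code from the project; the helper name and expected count are illustrative:

// hypothetical verification helper; expectedCount is whatever the test produced per topic
private void readBackTopics(int expectedCount) throws Exception
{
  KafkaUnit ku = kafkaUnitRule.getKafkaUnit();
  // read the expected number of messages back from each topic
  List<String> exactlyOnce = ku.readMessages("exactly-once", expectedCount);
  List<String> atLeastOnce = ku.readMessages("at-least-once", expectedCount);
  LOG.debug("exactly-once: {}, at-least-once: {}", exactlyOnce, atLeastOnce);
}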
Use of info.batey.kafka.unit.KafkaUnit in project apex-malhar by apache.
The class ApplicationTest, method chkOutput.
private void chkOutput() throws Exception
{
  KafkaUnit ku = kafkaUnitRule.getKafkaUnit();
  List<String> messages = null;
  // wait for messages to appear in kafka
  Thread.sleep(10000);
  try {
    messages = ku.readMessages(TOPIC, lines.length);
  } catch (Exception e) {
    LOG.error("Error: Got exception", e);
  }
  // guard against a failed read so the loop below cannot dereference null
  assertTrue("Error: no messages read from topic " + TOPIC, messages != null);
  int i = 0;
  for (String msg : messages) {
    assertTrue("Error: message mismatch", msg.equals(lines[i]));
    ++i;
  }
}
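chkOutput compares the messages to lines in order, so it implicitly asserts that ordering is preserved end to end. If ordering were not a requirement, an order-insensitive comparison would be an alternative; the variant below is illustrative, not the project's code:

// hypothetical order-insensitive variant of the comparison in chkOutput
private void chkOutputUnordered(List<String> messages)
{
  assertTrue("Error: message set mismatch",
      new java.util.HashSet<>(messages).equals(new java.util.HashSet<>(java.util.Arrays.asList(lines))));
}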
Use of info.batey.kafka.unit.KafkaUnit in project apex-malhar by apache.
The class ExactlyOnceFileOutputAppTest, method testApplication.
@Test
public void testApplication() throws Exception
{
  File targetDir = new File(TARGET_DIR);
  FileUtils.deleteDirectory(targetDir);
  FileUtils.forceMkdir(targetDir);

  KafkaUnit ku = kafkaUnitRule.getKafkaUnit();
  String topicName = "testTopic";
  // topic creation is async and the producer may also auto-create it
  ku.createTopic(topicName, 1);

  // produce test data
  String[] words = "count count the words from kafka and store them in a file".split("\\s+");
  for (String word : words) {
    ku.sendMessages(new KeyedMessage<String, String>(topicName, word));
  }

  Configuration conf = new Configuration(false);
  conf.addResource(this.getClass().getResourceAsStream("/META-INF/properties.xml"));
  conf.set("apex.operator.kafkaInput.prop.topics", topicName);
  conf.set("apex.operator.kafkaInput.prop.clusters", "localhost:" + brokerPort);
  // consume two words per window
  conf.set("apex.operator.kafkaInput.prop.maxTuplesPerWindow", "2");
  conf.set("apex.operator.kafkaInput.prop.initialOffset", "EARLIEST");
  conf.set("apex.operator.fileWriter.prop.filePath", TARGET_DIR);

  EmbeddedAppLauncher<?> launcher = Launcher.getLauncher(LaunchMode.EMBEDDED);
  Attribute.AttributeMap launchAttributes = new Attribute.AttributeMap.DefaultAttributeMap();
  // run the app asynchronously; the test waits for the output file and then shuts the app down
  launchAttributes.put(EmbeddedAppLauncher.RUN_ASYNC, true);
  AppHandle appHandle = launcher.launchApp(new ExactlyOnceFileOutputApp(), conf, launchAttributes);

  // 60s timeout
  long timeout = System.currentTimeMillis() + 60000;
  File outputFile = new File(TARGET_DIR, ExactlyOnceFileOutputApp.FileWriter.FILE_NAME_PREFIX);
  while (!outputFile.exists() && timeout > System.currentTimeMillis()) {
    Thread.sleep(1000);
    LOG.debug("Waiting for {}", outputFile);
  }
  Assert.assertTrue("output file exists " + ExactlyOnceFileOutputApp.FileWriter.FILE_NAME_PREFIX,
      outputFile.exists() && outputFile.isFile());

  String result = FileUtils.readFileToString(outputFile);
  // the word "count" occurs twice in the input, so its tally in the output must be 2
  Assert.assertTrue(result.contains("count=2"));
  appHandle.shutdown(ShutdownMode.KILL);
}
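The final assertion relies on the output file containing entries of the form word=count, which is why the doubled input word shows up as count=2. Assuming that format holds, a fuller check could parse every line into a map and verify each expected word; the helper below is a sketch under that assumption, not part of the project:

// hypothetical helper; assumes each output line looks like "word=count"
private static Map<String, Integer> parseCounts(File outputFile) throws IOException
{
  Map<String, Integer> counts = new HashMap<>();
  for (String line : FileUtils.readLines(outputFile)) {
    String[] parts = line.split("=");
    if (parts.length == 2) {
      counts.put(parts[0].trim(), Integer.parseInt(parts[1].trim()));
    }
  }
  return counts;
}

A usage line under the same assumption might be: Assert.assertEquals(Integer.valueOf(2), parseCounts(outputFile).get("count"));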