Use of com.datatorrent.api.StreamingApplication in project apex-malhar by Apache.
In class KafkaOutputOperatorTest, the method testPOJOKafkaOutputOperator:
/**
* Test AbstractKafkaOutputOperator (i.e. an output adapter for Kafka, aka producer).
* This module sends data into the Kafka message bus.
*
* [Generate tuple] ==> [send tuple through Kafka output adapter (i.e. producer) into the Kafka message bus]
* ==> [receive data in an external Kafka listener (i.e. consumer)]
*
* @throws Exception
*/
@Test
@SuppressWarnings({ "rawtypes", "unchecked" })
public void testPOJOKafkaOutputOperator() throws Exception {
tupleCount = 0;
// initialize the latch to synchronize the threads
latch = new CountDownLatch(maxTuple);
// Setup a message listener to receive the message
KafkaTestConsumer listener = new KafkaTestConsumer("topic1");
listener.setLatch(latch);
new Thread(listener).start();
// Create DAG for testing.
LocalMode lma = LocalMode.newInstance();
StreamingApplication app = new StreamingApplication() {
@Override
public void populateDAG(DAG dag, Configuration conf) {
}
};
DAG dag = lma.getDAG();
StringGeneratorInputOperator generator = dag.addOperator("TestStringGenerator", StringGeneratorInputOperator.class);
POJOKafkaOutputOperator node = dag.addOperator("KafkaMessageProducer", POJOKafkaOutputOperator.class);
Properties props = new Properties();
props.setProperty("serializer.class", "kafka.serializer.StringEncoder");
props.setProperty("producer.type", "async");
props.setProperty("queue.buffering.max.ms", "200");
props.setProperty("queue.buffering.max.messages", "10");
node.setConfigProperties(props);
node.setTopic("topic1");
node.setBrokerList("localhost:9092");
node.setBatchSize(5);
// Connect ports
dag.addStream("Kafka message", generator.outputPort, node.inputPort).setLocality(DAG.Locality.CONTAINER_LOCAL);
Configuration conf = new Configuration(false);
lma.prepareDAG(app, conf);
// Create local cluster
final LocalMode.Controller lc = lma.getController();
lc.runAsync();
// Wait until all tuples are received, or time out after 20 seconds
latch.await(20, TimeUnit.SECONDS);
lc.shutdown();
// Check values sent vs. received
Assert.assertEquals("Number of emitted tuples", maxTuple, listener.holdingBuffer.size());
logger.debug(String.format("Number of emitted tuples: %d", listener.holdingBuffer.size()));
Assert.assertEquals("First tuple", "testString 1", listener.getMessage(listener.holdingBuffer.peek()));
listener.close();
}
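The StringGeneratorInputOperator used above is a test helper that produces the "testString N" values asserted at the end of the test. A minimal sketch of such a generator is shown below (hypothetical code, not the actual apex-malhar helper; the class name matches the test, but the limit field, its default, and the setter are assumptions):

import com.datatorrent.api.DefaultOutputPort;
import com.datatorrent.api.InputOperator;
import com.datatorrent.common.util.BaseOperator;

public class StringGeneratorInputOperator extends BaseOperator implements InputOperator {
  // Port wired to POJOKafkaOutputOperator.inputPort in the test DAG above.
  public final transient DefaultOutputPort<String> outputPort = new DefaultOutputPort<>();
  private int maxTuple = 20; // total number of tuples to emit (assumed default)
  private int emitted = 0;

  @Override
  public void emitTuples() {
    // Called repeatedly inside each streaming window; stop once the limit is reached.
    if (emitted < maxTuple) {
      emitted++;
      outputPort.emit("testString " + emitted);
    }
  }

  public void setMaxTuple(int maxTuple) {
    this.maxTuple = maxTuple;
  }
}

In the test, the generator's limit would be configured to match maxTuple so that the latch counts down exactly once per emitted tuple.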
Use of com.datatorrent.api.StreamingApplication in project apex-malhar by Apache.
In class HBasePOJOInputOperatorTest, the method test:
@Test
public void test() throws Exception {
// Create DAG for testing.
LocalMode lma = LocalMode.newInstance();
StreamingApplication app = new StreamingApplication() {
@Override
public void populateDAG(DAG dag, Configuration conf) {
}
};
DAG dag = lma.getDAG();
// Create the tuple generator
MyGenerator generator = dag.addOperator(OPERATOR.GENERATOR.name(), MyGenerator.class);
generator.setTupleNum(TUPLE_NUM);
hbaseOutputOperator = dag.addOperator(OPERATOR.HBASEOUTPUT.name(), hbaseOutputOperator);
hbaseInputOperator = dag.addOperator(OPERATOR.HBASEINPUT.name(), hbaseInputOperator);
dag.setOutputPortAttribute(hbaseInputOperator.outputPort, Context.PortContext.TUPLE_CLASS, TestPOJO.class);
TupleCacheOutputOperator output = dag.addOperator(OPERATOR.OUTPUT.name(), TupleCacheOutputOperator.class);
// Connect ports
dag.addStream("queue1", generator.outputPort, hbaseOutputOperator.input).setLocality(DAG.Locality.NODE_LOCAL);
dag.addStream("queue2", hbaseInputOperator.outputPort, output.inputPort).setLocality(DAG.Locality.NODE_LOCAL);
Configuration conf = new Configuration(false);
lma.prepareDAG(app, conf);
// Create local cluster
final LocalMode.Controller lc = lma.getController();
lc.runAsync();
long start = System.currentTimeMillis();
// generator.doneLatch.await();
while (true) {
Thread.sleep(1000);
logger.info("Tuple row key: ", output.getReceivedTuples());
logger.info("Received tuple number {}, instance is {}.", output.getReceivedTuples() == null ? 0 : output.getReceivedTuples().size(), System.identityHashCode(output));
if (output.getReceivedTuples() != null && output.getReceivedTuples().size() == TUPLE_NUM) {
break;
}
if (System.currentTimeMillis() - start > RUN_DURATION) {
throw new RuntimeException("Testcase taking too long");
}
}
lc.shutdown();
validate(generator.getTuples(), output.getReceivedTuples());
}
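The test thread above polls output.getReceivedTuples() while the operator itself runs inside the local cluster, so the cache has to survive operator deployment and serialization. A minimal sketch of such a tuple-caching sink is shown below (hypothetical; the real TupleCacheOutputOperator in apex-malhar differs in details such as how the static store is keyed):

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

import com.datatorrent.api.DefaultInputPort;
import com.datatorrent.common.util.BaseOperator;

public class CachingSinkOperator extends BaseOperator {
  // Static store so the JUnit thread can read tuples processed by the deployed operator copy.
  private static final Map<String, List<Object>> RECEIVED = new ConcurrentHashMap<>();
  private String cacheKey = "default"; // assumed key; lets several sinks coexist in one JVM

  public final transient DefaultInputPort<Object> inputPort = new DefaultInputPort<Object>() {
    @Override
    public void process(Object tuple) {
      RECEIVED.computeIfAbsent(cacheKey, k -> Collections.synchronizedList(new ArrayList<>())).add(tuple);
    }
  };

  public List<Object> getReceivedTuples() {
    return RECEIVED.get(cacheKey);
  }

  public void setCacheKey(String cacheKey) {
    this.cacheKey = cacheKey;
  }
}

With a lazily created entry like this, getReceivedTuples() returns null until the first tuple arrives, which is why the polling loop in the test guards against null.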
Use of com.datatorrent.api.StreamingApplication in project beam by Apache.
In class ApexYarnLauncher, the method main:
/**
* The main method expects the serialized DAG and will launch the YARN application.
* @param args location of launch parameters
* @throws IOException when parameters cannot be read
*/
public static void main(String[] args) throws IOException {
checkArgument(args.length == 1, "exactly one argument expected");
File file = new File(args[0]);
checkArgument(file.exists() && file.isFile(), "invalid file path %s", file);
final LaunchParams params;
try (FileInputStream paramsInput = new FileInputStream(file)) {
  params = (LaunchParams) SerializationUtils.deserialize(paramsInput);
}
StreamingApplication apexApp = new StreamingApplication() {
@Override
public void populateDAG(DAG dag, Configuration conf) {
copyShallow(params.dag, dag);
}
};
// configuration from Hadoop client
Configuration conf = new Configuration();
addProperties(conf, params.configProperties);
AppHandle appHandle = params.getApexLauncher().launchApp(apexApp, conf, params.launchAttributes);
if (appHandle == null) {
throw new AssertionError("Launch returns null handle.");
}
// TODO (future PR)
// At this point the application is running, but this process should remain active to
// allow the parent process to obtain the runner result.
}
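For context, a parent process is expected to serialize the LaunchParams to a file and start this main method in a separate JVM with that file path as the single argument. A rough sketch of that hand-off is below (hypothetical; the real hand-off lives inside ApexYarnLauncher and builds the child classpath differently, so the helper name and the plain classpath reuse are assumptions):

import java.io.File;
import java.io.IOException;
import java.io.Serializable;
import java.nio.file.Files;

import org.apache.commons.lang3.SerializationUtils;

public class LaunchParamsHandoff {
  /** Writes the serialized params to a temp file and runs the launcher main in a child JVM. */
  public static int launchInChildJvm(Serializable params) throws IOException, InterruptedException {
    // In the real code, params is the LaunchParams object deserialized by main(...) above.
    File paramsFile = File.createTempFile("apex-launch-params", ".ser");
    paramsFile.deleteOnExit();
    Files.write(paramsFile.toPath(), SerializationUtils.serialize(params));

    ProcessBuilder pb = new ProcessBuilder(
        "java", "-cp", System.getProperty("java.class.path"),
        "org.apache.beam.runners.apex.ApexYarnLauncher", // the class whose main(...) is shown above
        paramsFile.getAbsolutePath());                   // single argument: location of the launch parameters
    pb.inheritIO();
    return pb.start().waitFor();
  }
}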
Use of com.datatorrent.api.StreamingApplication in project apex-core by Apache.
In class LogicalPlanConfigurationTest, the method testAppNameAttribute:
@Test
public void testAppNameAttribute() {
StreamingApplication app = new AnnotatedApplication();
Configuration conf = new Configuration(false);
conf.addResource(StramClientUtils.DT_SITE_XML_FILE);
LogicalPlanConfiguration builder = new LogicalPlanConfiguration(conf);
Properties properties = new Properties();
properties.put(StreamingApplication.APEX_PREFIX + "application.TestAliasApp.class", app.getClass().getName());
builder.addFromProperties(properties, null);
LogicalPlan dag = new LogicalPlan();
String appPath = app.getClass().getName().replace(".", "/") + ".class";
dag.setAttribute(com.datatorrent.api.Context.DAGContext.APPLICATION_NAME, "testApp");
builder.prepareDAG(dag, app, appPath);
Assert.assertEquals("Application name", "testApp", dag.getAttributes().get(com.datatorrent.api.Context.DAGContext.APPLICATION_NAME));
}
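The AnnotatedApplication under test might look like the sketch below (hypothetical; the annotated name is an assumption for illustration). The point of the assertion is that the APPLICATION_NAME attribute set explicitly on the DAG ("testApp") survives prepareDAG, taking precedence over both the annotation and the "TestAliasApp" alias registered through the properties.

import com.datatorrent.api.DAG;
import com.datatorrent.api.StreamingApplication;
import com.datatorrent.api.annotation.ApplicationAnnotation;
import org.apache.hadoop.conf.Configuration;

@ApplicationAnnotation(name = "AnnotatedAppName") // illustrative name, not from the source
public class AnnotatedApplication implements StreamingApplication {
  @Override
  public void populateDAG(DAG dag, Configuration conf) {
    // Operators would be added here; the test only inspects DAG attributes.
  }
}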
Use of com.datatorrent.api.StreamingApplication in project apex-core by Apache.
In class DAGSetupPluginTests, the method testJavaApplication:
@Test
public void testJavaApplication() {
Configuration conf = getConfiguration();
StreamingAppFactory factory = new StreamingAppFactory(Application.class.getName(), Application.class) {
@Override
public LogicalPlan createApp(LogicalPlanConfiguration planConfig) {
Class<? extends StreamingApplication> c = StramUtils.classForName(Application.class.getName(), StreamingApplication.class);
StreamingApplication app = StramUtils.newInstance(c);
return super.createApp(app, planConfig);
}
};
LogicalPlan dag = factory.createApp(new LogicalPlanConfiguration(conf));
validateProperties(dag);
}
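The two StramUtils calls in the factory are essentially a checked form of plain reflection: resolve the application class by name, verify it implements StreamingApplication, and instantiate it with its no-arg constructor. A rough equivalent is sketched below (assuming a public no-arg constructor; the real StramUtils helpers wrap the checked exceptions differently):

import com.datatorrent.api.StreamingApplication;

public final class ReflectiveAppLoader {
  // Plain-Java counterpart of StramUtils.classForName(...) followed by StramUtils.newInstance(...).
  public static StreamingApplication load(String className) throws ReflectiveOperationException {
    Class<? extends StreamingApplication> appClass =
        Class.forName(className).asSubclass(StreamingApplication.class);
    return appClass.getDeclaredConstructor().newInstance();
  }
}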