Use of org.apache.samza.operators.impl.OperatorImpl in project samza by apache.
The class WatermarkIntegrationTest, method testWatermark. The test runs a small PageView pipeline (map, partitionBy, sink) on two partitions and asserts the input and output watermarks observed at the partitionBy and sink operators of each task.
@Test
public void testWatermark() throws Exception {
  Map<String, String> configs = new HashMap<>();
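  // Run with the mock local runner so the test can later reach into the running tasks via getTaskOperationGraphs().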
  configs.put(ApplicationConfig.APP_RUNNER_CLASS, MockLocalApplicationRunner.class.getName());
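  // The mock "test" system serves the PageView input stream, which has PARTITION_COUNT partitions.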
configs.put("systems.test.samza.factory", TestSystemFactory.class.getName());
configs.put("streams.PageView.samza.system", "test");
configs.put("streams.PageView.partitionCount", String.valueOf(PARTITION_COUNT));
configs.put(JobConfig.JOB_NAME, "test-watermark-job");
configs.put(JobConfig.PROCESSOR_ID, "1");
configs.put(JobCoordinatorConfig.JOB_COORDINATOR_FACTORY, PassthroughJobCoordinatorFactory.class.getName());
configs.put(TaskConfig.GROUPER_FACTORY, SingleContainerGrouperFactory.class.getName());
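  // Kafka is the job's default system, so it backs the intermediate stream created by partitionBy.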
configs.put("systems.kafka.samza.factory", "org.apache.samza.system.kafka.KafkaSystemFactory");
configs.put("systems.kafka.producer.bootstrap.servers", bootstrapUrl());
configs.put("systems.kafka.consumer.zookeeper.connect", zkConnect());
configs.put("systems.kafka.samza.key.serde", "int");
configs.put("systems.kafka.samza.msg.serde", "json");
configs.put("systems.kafka.default.stream.replication.factor", "1");
configs.put("job.default.system", "kafka");
configs.put("serializers.registry.int.class", IntegerSerdeFactory.class.getName());
configs.put("serializers.registry.string.class", StringSerdeFactory.class.getName());
configs.put("serializers.registry.json.class", PageViewJsonSerdeFactory.class.getName());
List<PageView> received = new ArrayList<>();
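  // Pipeline under test: PageView input -> map to value -> repartition by member id -> sink collecting the values.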
  class TestStreamApp implements StreamApplication {
    @Override
    public void describe(StreamApplicationDescriptor appDescriptor) {
      DelegatingSystemDescriptor sd = new DelegatingSystemDescriptor("test");
      GenericInputDescriptor<KV<String, PageView>> isd =
          sd.getInputDescriptor("PageView", KVSerde.of(new NoOpSerde<>(), new NoOpSerde<>()));
      appDescriptor.getInputStream(isd)
          .map(KV::getValue)
          .partitionBy(pv -> pv.getMemberId(), pv -> pv, KVSerde.of(new NoOpSerde<>(), new NoOpSerde<>()), "p1")
          .sink((m, collector, coordinator) -> {
            received.add(m.getValue());
          });
    }
  }
  Config config = new MapConfig(configs);
  final ApplicationRunner runner = ApplicationRunners.getApplicationRunner(new TestStreamApp(), config);
  executeRun(runner, config);

  // processors are only available when the app is running
  Map<String, StreamOperatorTask> tasks = getTaskOperationGraphs((MockLocalApplicationRunner) runner);

  // wait for the completion to ensure that all tasks are actually initialized and the OperatorImplGraph is initialized
  runner.waitForFinish();
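
  // Partition 0: verify the watermarks observed at the partitionBy and sink operators.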
  StreamOperatorTask task0 = tasks.get("Partition 0");
  OperatorImplGraph graph = TestStreamOperatorTask.getOperatorImplGraph(task0);
  OperatorImpl pb = getOperator(graph, OperatorSpec.OpCode.PARTITION_BY);
  assertEquals(TestOperatorImpl.getInputWatermark(pb), 4);
  assertEquals(TestOperatorImpl.getOutputWatermark(pb), 4);
  OperatorImpl sink = getOperator(graph, OperatorSpec.OpCode.SINK);
  assertEquals(TestOperatorImpl.getInputWatermark(sink), 3);
  assertEquals(TestOperatorImpl.getOutputWatermark(sink), 3);
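
  // Partition 1: the same checks for the second task.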
  StreamOperatorTask task1 = tasks.get("Partition 1");
  graph = TestStreamOperatorTask.getOperatorImplGraph(task1);
  pb = getOperator(graph, OperatorSpec.OpCode.PARTITION_BY);
  assertEquals(TestOperatorImpl.getInputWatermark(pb), 3);
  assertEquals(TestOperatorImpl.getOutputWatermark(pb), 3);
  sink = getOperator(graph, OperatorSpec.OpCode.SINK);
  assertEquals(TestOperatorImpl.getInputWatermark(sink), 3);
  assertEquals(TestOperatorImpl.getOutputWatermark(sink), 3);
}
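
The helpers getTaskOperationGraphs and getOperator belong to WatermarkIntegrationTest and are not shown above. A minimal sketch of getOperator, assuming OperatorImplGraph exposes a getAllOperators() collection and TestOperatorImpl provides a getOperatorSpec accessor (both assumptions; the real helper may be implemented differently):

// Sketch only: find the first operator in the graph whose spec carries the given OpCode.
// Assumes graph.getAllOperators() and TestOperatorImpl.getOperatorSpec(op) exist.
private OperatorImpl getOperator(OperatorImplGraph graph, OperatorSpec.OpCode opCode) {
  for (OperatorImpl op : graph.getAllOperators()) {
    if (TestOperatorImpl.getOperatorSpec(op).getOpCode() == opCode) {
      return op;
    }
  }
  return null;
}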