use of org.apache.samza.system.descriptors.GenericInputDescriptor in project samza by apache.
the class EndOfStreamIntegrationTest method testPipeline.
@Test
public void testPipeline() {
  class PipelineApplication implements StreamApplication {

    @Override
    public void describe(StreamApplicationDescriptor appDescriptor) {
      DelegatingSystemDescriptor sd = new DelegatingSystemDescriptor("test");
      GenericInputDescriptor<KV<String, PageView>> isd =
          sd.getInputDescriptor("PageView", KVSerde.of(new NoOpSerde<>(), new NoOpSerde<>()));
      // Repartition the page views by member id and collect each repartitioned message in RECEIVED.
      appDescriptor.getInputStream(isd)
          .map(KV::getValue)
          .partitionBy(PageView::getMemberId, pv -> pv,
              KVSerde.of(new IntegerSerde(), new TestTableData.PageViewJsonSerde()), "p1")
          .sink((m, collector, coordinator) -> {
            RECEIVED.add(m.getValue());
          });
    }
  }

  // Feed 40 page views spread over 4 partitions through the in-memory system and run the app.
  int numPageViews = 40;
  InMemorySystemDescriptor isd = new InMemorySystemDescriptor("test");
  InMemoryInputDescriptor<TestTableData.PageView> inputDescriptor =
      isd.getInputDescriptor("PageView", new NoOpSerde<>());
  TestRunner.of(new PipelineApplication())
      .addInputStream(inputDescriptor, TestTableData.generatePartitionedPageViews(numPageViews, 4))
      .run(Duration.ofSeconds(10));
  assertEquals(RECEIVED.size(), numPageViews);
}
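The RECEIVED collection is not part of this excerpt; presumably it is a static list declared on the test class that the sink appends to. A sketch of such a declaration, where only the field name is taken from the test and the rest is an assumption:

// Assumed declaration on the test class; not shown in the excerpt above.
private static final List<TestTableData.PageView> RECEIVED = new ArrayList<>();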
use of org.apache.samza.system.descriptors.GenericInputDescriptor in project samza by apache.
the class TestStreamApplicationDescriptorImpl method testSetDefaultSystemDescriptorAfterGettingInputStream.
@Test(expected = IllegalStateException.class)
public void testSetDefaultSystemDescriptorAfterGettingInputStream() {
  String streamId = "test-stream-1";
  GenericSystemDescriptor sd = new GenericSystemDescriptor("mockSystem", "mockSystemFactoryClass");
  GenericInputDescriptor isd = sd.getInputDescriptor(streamId, mock(Serde.class));
  new StreamApplicationDescriptorImpl(appDesc -> {
    appDesc.getInputStream(isd);
    // should throw exception
    appDesc.withDefaultSystem(sd);
  }, getConfig());
}
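The test above pins down the ordering the descriptor API enforces: the default system has to be set before any stream is obtained from the application descriptor. For contrast, a minimal sketch of the accepted ordering, reusing the same names and the test's getConfig() helper (the sketch itself is illustrative, not taken from the test class):

// Hedged sketch: withDefaultSystem is called before getInputStream,
// so no IllegalStateException is expected here.
GenericSystemDescriptor sd = new GenericSystemDescriptor("mockSystem", "mockSystemFactoryClass");
GenericInputDescriptor isd = sd.getInputDescriptor("test-stream-1", mock(Serde.class));
new StreamApplicationDescriptorImpl(appDesc -> {
  appDesc.withDefaultSystem(sd);
  appDesc.getInputStream(isd);
}, getConfig());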
use of org.apache.samza.system.descriptors.GenericInputDescriptor in project samza by apache.
the class TestStreamApplicationDescriptorImpl method testGetInputStreamWithValueSerde.
@Test
public void testGetInputStreamWithValueSerde() {
  String streamId = "test-stream-1";
  Serde mockValueSerde = mock(Serde.class);
  GenericSystemDescriptor sd = new GenericSystemDescriptor("mockSystem", "mockSystemFactoryClass");
  GenericInputDescriptor isd = sd.getInputDescriptor(streamId, mockValueSerde);
  StreamApplicationDescriptorImpl streamAppDesc = new StreamApplicationDescriptorImpl(appDesc -> {
    appDesc.getInputStream(isd);
  }, getConfig());
  InputOperatorSpec inputOpSpec = streamAppDesc.getInputOperators().get(streamId);
  assertEquals(OpCode.INPUT, inputOpSpec.getOpCode());
  assertEquals(streamId, inputOpSpec.getStreamId());
  assertEquals(isd, streamAppDesc.getInputDescriptors().get(streamId));
  // when only a value serde is given, the key serde defaults to NoOpSerde
  assertTrue(inputOpSpec.getKeySerde() instanceof NoOpSerde);
  assertEquals(mockValueSerde, inputOpSpec.getValueSerde());
}
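testGetInputStreamWithValueSerde checks that a value-only serde leaves the key serde as NoOpSerde. When both key and value serdes matter, the other examples on this page pass a KVSerde instead; a minimal sketch using serdes that already appear above (the stream id and variable names are illustrative):

// Hedged sketch: explicit key and value serdes via KVSerde, so the input
// stream carries KV<String, PageViewEvent> messages.
KVSerde<String, PageViewEvent> kvSerde = KVSerde.of(new StringSerde(), new JsonSerdeV2<>(PageViewEvent.class));
GenericInputDescriptor<KV<String, PageViewEvent>> kvIsd = sd.getInputDescriptor("test-stream-1", kvSerde);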
use of org.apache.samza.system.descriptors.GenericInputDescriptor in project samza by apache.
the class TestStreamApplicationDescriptorImpl method testGetInputStreamWithNullSerde.
@Test(expected = IllegalArgumentException.class)
public void testGetInputStreamWithNullSerde() {
  GenericSystemDescriptor sd = new GenericSystemDescriptor("mockSystem", "mockSystemFactoryClass");
  GenericInputDescriptor isd = sd.getInputDescriptor("mockStreamId", null);
  new StreamApplicationDescriptorImpl(appDesc -> {
    appDesc.getInputStream(isd);
  }, getConfig());
}
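Passing null for the serde is rejected with an IllegalArgumentException. When messages should be consumed as-is without deserialization, the other tests on this page use NoOpSerde; a minimal sketch of that variant:

// Hedged sketch: NoOpSerde instead of null when no deserialization is wanted.
GenericInputDescriptor isd = sd.getInputDescriptor("mockStreamId", new NoOpSerde<>());
new StreamApplicationDescriptorImpl(appDesc -> appDesc.getInputStream(isd), getConfig());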
use of org.apache.samza.system.descriptors.GenericInputDescriptor in project samza by apache.
the class TestJobGraphJsonGenerator method testRepartitionedWindowStreamApplication.
@Test
public void testRepartitionedWindowStreamApplication() throws Exception {
  Map<String, String> configMap = new HashMap<>();
  configMap.put(JobConfig.JOB_NAME, "test-app");
  configMap.put(JobConfig.JOB_DEFAULT_SYSTEM, "test-system");
  StreamTestUtils.addStreamConfigs(configMap, "PageView", "hdfs", "hdfs:/user/dummy/PageViewEvent");
  StreamTestUtils.addStreamConfigs(configMap, "PageViewCount", "kafka", "PageViewCount");
  Config config = new MapConfig(configMap);
  // set up external partition counts for the two systems
  Map<String, Integer> system1Map = new HashMap<>();
  system1Map.put("hdfs:/user/dummy/PageViewEvent", 512);
  Map<String, Integer> system2Map = new HashMap<>();
  system2Map.put("PageViewCount", 16);
  SystemAdmin systemAdmin1 = createSystemAdmin(system1Map);
  SystemAdmin systemAdmin2 = createSystemAdmin(system2Map);
  SystemAdmins systemAdmins = mock(SystemAdmins.class);
  when(systemAdmins.getSystemAdmin("hdfs")).thenReturn(systemAdmin1);
  when(systemAdmins.getSystemAdmin("kafka")).thenReturn(systemAdmin2);
  StreamManager streamManager = new StreamManager(systemAdmins);
  StreamApplicationDescriptorImpl graphSpec = new StreamApplicationDescriptorImpl(appDesc -> {
    KVSerde<String, PageViewEvent> pvSerde = KVSerde.of(new StringSerde(), new JsonSerdeV2<>(PageViewEvent.class));
    GenericSystemDescriptor isd = new GenericSystemDescriptor("hdfs", "mockSystemFactoryClass");
    GenericInputDescriptor<KV<String, PageViewEvent>> pageView = isd.getInputDescriptor("PageView", pvSerde);
    KVSerde<String, Long> pvcSerde = KVSerde.of(new StringSerde(), new LongSerde());
    GenericSystemDescriptor osd = new GenericSystemDescriptor("kafka", "mockSystemFactoryClass");
    GenericOutputDescriptor<KV<String, Long>> pageViewCount = osd.getOutputDescriptor("PageViewCount", pvcSerde);
    MessageStream<KV<String, PageViewEvent>> inputStream = appDesc.getInputStream(pageView);
    OutputStream<KV<String, Long>> outputStream = appDesc.getOutputStream(pageViewCount);
    // repartition by country, count page views per country in 10s tumbling windows, and emit the counts
    inputStream
        .partitionBy(kv -> kv.getValue().getCountry(), kv -> kv.getValue(), pvSerde, "keyed-by-country")
        .window(Windows.keyedTumblingWindow(kv -> kv.getValue().getCountry(), Duration.ofSeconds(10L),
            () -> 0L, (m, c) -> c + 1L, new StringSerde(), new LongSerde()), "count-by-country")
        .map(pane -> new KV<>(pane.getKey().getKey(), pane.getMessage()))
        .sendTo(outputStream);
  }, config);
  ExecutionPlanner planner = new ExecutionPlanner(config, streamManager);
  ExecutionPlan plan = planner.plan(graphSpec);
  String json = plan.getPlanAsJson();
  System.out.println(json);
  // deserialize the JSON plan and verify its structure
  ObjectMapper mapper = new ObjectMapper();
  JobGraphJsonGenerator.JobGraphJson nodes = mapper.readValue(json, JobGraphJsonGenerator.JobGraphJson.class);
  JobGraphJsonGenerator.OperatorGraphJson operatorGraphJson = nodes.jobs.get(0).operatorGraph;
  assertEquals(2, operatorGraphJson.inputStreams.size());
  assertEquals(4, operatorGraphJson.operators.size());
  assertEquals(1, nodes.sourceStreams.size());
  assertEquals(1, nodes.sinkStreams.size());
  assertEquals(1, nodes.intermediateStreams.size());
  // verify that the partitionBy op outputs to the intermediate stream of the same id
  assertEquals(operatorGraphJson.operators.get("test-app-1-partition_by-keyed-by-country").get("outputStreamId"),
      "test-app-1-partition_by-keyed-by-country");
  assertEquals(operatorGraphJson.operators.get("test-app-1-send_to-5").get("outputStreamId"), "PageViewCount");
}
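createSystemAdmin is a helper on the test class that is not shown in this excerpt. Presumably it stubs a SystemAdmin so that stream metadata reports the requested partition counts; a hedged sketch of what such a helper could look like (everything below is an assumption, not the project's code):

// Assumed shape of the createSystemAdmin helper: a Mockito stub whose
// getSystemStreamMetadata returns the given number of partitions per stream.
private static SystemAdmin createSystemAdmin(Map<String, Integer> streamToPartitionCount) {
  SystemAdmin admin = mock(SystemAdmin.class);
  when(admin.getSystemStreamMetadata(anySet())).thenAnswer(invocation -> {
    Map<String, SystemStreamMetadata> metadata = new HashMap<>();
    streamToPartitionCount.forEach((stream, partitionCount) -> {
      Map<Partition, SystemStreamMetadata.SystemStreamPartitionMetadata> partitions = new HashMap<>();
      for (int i = 0; i < partitionCount; i++) {
        partitions.put(new Partition(i),
            new SystemStreamMetadata.SystemStreamPartitionMetadata(null, null, null));
      }
      metadata.put(stream, new SystemStreamMetadata(stream, partitions));
    });
    return metadata;
  });
  return admin;
}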