Use of org.apache.samza.serializers.NoOpSerde in project beam by apache.
Class ImpulseTranslator, method translate().
/**
 * Translates an Impulse transform into a Samza input stream backed by
 * {@code SamzaImpulseSystemFactory}, which emits the single impulse element.
 */
@Override
public void translate(
    PTransform<PBegin, PCollection<byte[]>> transform, Node node, TranslationContext ctx) {
  final PCollection<byte[]> output = ctx.getOutput(transform);
  // The PCollection id doubles as both the Samza system name and the stream id.
  final String outputId = ctx.getIdForPValue(output);
  final GenericSystemDescriptor systemDescriptor =
      new GenericSystemDescriptor(outputId, SamzaImpulseSystemFactory.class.getName());
  // The KvCoder is needed here for Samza not to crop the key. Cast through raw Serde
  // (same pattern as ReadTranslator) so both serdes can use the diamond operator
  // instead of a raw NoOpSerde for the wildcard key type.
  @SuppressWarnings("unchecked")
  final Serde<KV<?, OpMessage<byte[]>>> kvSerde =
      (Serde) KVSerde.of(new NoOpSerde<>(), new NoOpSerde<>());
  final GenericInputDescriptor<KV<?, OpMessage<byte[]>>> inputDescriptor =
      systemDescriptor.getInputDescriptor(outputId, kvSerde);
  ctx.registerInputMessageStream(output, inputDescriptor);
}
Use of org.apache.samza.serializers.NoOpSerde in project beam by apache.
Class ImpulseTranslator, method translatePortable().
/**
 * Translates an Impulse transform in a portable pipeline into a Samza input stream
 * backed by {@code SamzaImpulseSystemFactory}.
 */
@Override
public void translatePortable(
    PipelineNode.PTransformNode transform,
    QueryablePipeline pipeline,
    PortableTranslationContext ctx) {
  final String outputId = ctx.getOutputId(transform);
  // Escape the Beam PCollection id — presumably to satisfy Samza stream/system
  // naming constraints (see SamzaPipelineTranslatorUtils.escape).
  final String escapedOutputId = SamzaPipelineTranslatorUtils.escape(outputId);
  final GenericSystemDescriptor systemDescriptor =
      new GenericSystemDescriptor(escapedOutputId, SamzaImpulseSystemFactory.class.getName());
  // The KvCoder is needed here for Samza not to crop the key. Cast through raw Serde
  // (same pattern as ReadTranslator) so both serdes can use the diamond operator
  // instead of a raw NoOpSerde for the wildcard key type.
  @SuppressWarnings("unchecked")
  final Serde<KV<?, OpMessage<byte[]>>> kvSerde =
      (Serde) KVSerde.of(new NoOpSerde<>(), new NoOpSerde<>());
  final GenericInputDescriptor<KV<?, OpMessage<byte[]>>> inputDescriptor =
      systemDescriptor.getInputDescriptor(escapedOutputId, kvSerde);
  // Note: registration uses the unescaped id; the descriptor uses the escaped one.
  ctx.registerInputMessageStream(outputId, inputDescriptor);
}
Use of org.apache.samza.serializers.NoOpSerde in project beam by apache.
Class ReadTranslator, method translate().
// Translates a primitive Read (bounded or unbounded) into a Samza input stream whose
// source system reconstructs the Beam Source from serialized config.
@Override
public void translate(PTransform<PBegin, PCollection<T>> transform, TransformHierarchy.Node node, TranslationContext ctx) {
final PCollection<T> output = ctx.getOutput(transform);
// Windowed-value coder for the output elements; shipped to the source system via config below.
final Coder<WindowedValue<T>> coder = SamzaCoders.of(output);
// SplittableParDo wraps the Read as Primitive{Bounded,Unbounded}Read; unwrap the Source.
final Source<?> source = transform instanceof SplittableParDo.PrimitiveBoundedRead ? ((SplittableParDo.PrimitiveBoundedRead) transform).getSource() : ((SplittableParDo.PrimitiveUnboundedRead) transform).getSource();
final String id = ctx.getIdForPValue(output);
// Create system descriptor
final GenericSystemDescriptor systemDescriptor;
if (source instanceof BoundedSource) {
systemDescriptor = new GenericSystemDescriptor(id, BoundedSourceSystem.Factory.class.getName());
} else {
systemDescriptor = new GenericSystemDescriptor(id, UnboundedSourceSystem.Factory.class.getName());
}
// Source and coder are Base64-serialized into the system config so the factory can rebuild them.
final Map<String, String> systemConfig = ImmutableMap.of("source", Base64Serializer.serializeUnchecked(source), "coder", Base64Serializer.serializeUnchecked(coder), "stepName", node.getFullName());
// NOTE(review): withSystemConfigs appears to be a fluent setter whose return value is
// intentionally ignored — confirm it mutates the descriptor in place.
systemDescriptor.withSystemConfigs(systemConfig);
// Create stream descriptor
// Deliberate raw cast: the wildcard key type of KV<?, ...> cannot be inferred by KVSerde.of.
@SuppressWarnings("unchecked") final Serde<KV<?, OpMessage<T>>> kvSerde = (Serde) KVSerde.of(new NoOpSerde<>(), new NoOpSerde<>());
final GenericInputDescriptor<KV<?, OpMessage<T>>> inputDescriptor = systemDescriptor.getInputDescriptor(id, kvSerde);
if (source instanceof BoundedSource) {
// NOTE(review): isBounded() also looks like a fluent mutator (return value ignored) that
// marks the input stream as bounded — confirm against the Samza descriptor API.
inputDescriptor.isBounded();
}
ctx.registerInputMessageStream(output, inputDescriptor);
}
Use of org.apache.samza.serializers.NoOpSerde in project samza by apache.
Class TestJobGraphJsonGenerator, method testRepartitionedJoinStreamApplication().
/**
 * Verifies the JSON job-graph generated for an application with two repartition-joins:
 * the plan must report the expected counts of input/intermediate/sink streams and operators.
 */
@Test
public void testRepartitionedJoinStreamApplication() throws Exception {
  /*
   * The graph looks like the following.
   * A number in parentheses () indicates the number of stream partitions.
   * A number in parentheses in quotes ("") indicates the expected partition count.
   * A number in square brackets [] indicates the operator ID.
   *
   * input3 (32) -> filter [7] -> partitionBy [8] ("64") -> map [10] -> join [14] -> sendTo(output2) [15] (16)
   * |
   * input2 (16) -> partitionBy [3] ("64") -> filter [5] -| -> sink [13]
   * |
   * input1 (64) -> map [1] -> join [11] -> sendTo(output1) [12] (8)
   */
  // Map each logical stream to a physical system/stream.
  Map<String, String> configMap = new HashMap<>();
  configMap.put(JobConfig.JOB_NAME, "test-app");
  configMap.put(JobConfig.JOB_DEFAULT_SYSTEM, "test-system");
  StreamTestUtils.addStreamConfigs(configMap, "input1", "system1", "input1");
  StreamTestUtils.addStreamConfigs(configMap, "input2", "system2", "input2");
  StreamTestUtils.addStreamConfigs(configMap, "input3", "system2", "input3");
  StreamTestUtils.addStreamConfigs(configMap, "output1", "system1", "output1");
  StreamTestUtils.addStreamConfigs(configMap, "output2", "system2", "output2");
  Config config = new MapConfig(configMap);
  // Set up external partition counts returned by the mocked system admins.
  Map<String, Integer> system1Map = new HashMap<>();
  system1Map.put("input1", 64);
  system1Map.put("output1", 8);
  Map<String, Integer> system2Map = new HashMap<>();
  system2Map.put("input2", 16);
  system2Map.put("input3", 32);
  system2Map.put("output2", 16);
  SystemAdmin systemAdmin1 = createSystemAdmin(system1Map);
  SystemAdmin systemAdmin2 = createSystemAdmin(system2Map);
  SystemAdmins systemAdmins = mock(SystemAdmins.class);
  when(systemAdmins.getSystemAdmin("system1")).thenReturn(systemAdmin1);
  when(systemAdmins.getSystemAdmin("system2")).thenReturn(systemAdmin2);
  StreamManager streamManager = new StreamManager(systemAdmins);
  // Build the application graph described in the diagram above.
  StreamApplicationDescriptorImpl graphSpec = new StreamApplicationDescriptorImpl(appDesc -> {
    // Diamond on both NoOpSerde instances — avoids raw-type use of NoOpSerde.
    KVSerde<Object, Object> kvSerde = new KVSerde<>(new NoOpSerde<>(), new NoOpSerde<>());
    String mockSystemFactoryClass = "factory.class.name";
    GenericSystemDescriptor system1 = new GenericSystemDescriptor("system1", mockSystemFactoryClass);
    GenericSystemDescriptor system2 = new GenericSystemDescriptor("system2", mockSystemFactoryClass);
    GenericInputDescriptor<KV<Object, Object>> input1Descriptor = system1.getInputDescriptor("input1", kvSerde);
    GenericInputDescriptor<KV<Object, Object>> input2Descriptor = system2.getInputDescriptor("input2", kvSerde);
    GenericInputDescriptor<KV<Object, Object>> input3Descriptor = system2.getInputDescriptor("input3", kvSerde);
    GenericOutputDescriptor<KV<Object, Object>> output1Descriptor = system1.getOutputDescriptor("output1", kvSerde);
    GenericOutputDescriptor<KV<Object, Object>> output2Descriptor = system2.getOutputDescriptor("output2", kvSerde);
    MessageStream<KV<Object, Object>> messageStream1 = appDesc.getInputStream(input1Descriptor).map(m -> m);
    MessageStream<KV<Object, Object>> messageStream2 = appDesc.getInputStream(input2Descriptor).partitionBy(m -> m.key, m -> m.value, mock(KVSerde.class), "p1").filter(m -> true);
    MessageStream<KV<Object, Object>> messageStream3 = appDesc.getInputStream(input3Descriptor).filter(m -> true).partitionBy(m -> m.key, m -> m.value, mock(KVSerde.class), "p2").map(m -> m);
    OutputStream<KV<Object, Object>> outputStream1 = appDesc.getOutputStream(output1Descriptor);
    OutputStream<KV<Object, Object>> outputStream2 = appDesc.getOutputStream(output2Descriptor);
    messageStream1.join(messageStream2, (JoinFunction<Object, KV<Object, Object>, KV<Object, Object>, KV<Object, Object>>) mock(JoinFunction.class), mock(Serde.class), mock(Serde.class), mock(Serde.class), Duration.ofHours(2), "j1").sendTo(outputStream1);
    messageStream2.sink((message, collector, coordinator) -> {
    });
    messageStream3.join(messageStream2, (JoinFunction<Object, KV<Object, Object>, KV<Object, Object>, KV<Object, Object>>) mock(JoinFunction.class), mock(Serde.class), mock(Serde.class), mock(Serde.class), Duration.ofHours(1), "j2").sendTo(outputStream2);
  }, config);
  // Plan the application and serialize the plan to JSON.
  ExecutionPlanner planner = new ExecutionPlanner(config, streamManager);
  ExecutionPlan plan = planner.plan(graphSpec);
  String json = plan.getPlanAsJson();
  System.out.println(json);
  // Deserialize the JSON and verify the graph shape.
  ObjectMapper mapper = new ObjectMapper();
  JobGraphJsonGenerator.JobGraphJson nodes = mapper.readValue(json, JobGraphJsonGenerator.JobGraphJson.class);
  assertEquals(5, nodes.jobs.get(0).operatorGraph.inputStreams.size());
  assertEquals(11, nodes.jobs.get(0).operatorGraph.operators.size());
  assertEquals(3, nodes.sourceStreams.size());
  assertEquals(2, nodes.sinkStreams.size());
  assertEquals(2, nodes.intermediateStreams.size());
}
Aggregations