Use of com.datatorrent.stram.engine.DefaultUnifier in project apex-core by apache.
The example below is taken from the class StreamingContainerManagerTest, method testStaticPartitioning.
@Test
public void testStaticPartitioning() {
  //
  //            ,---> node2----,
  //            |              |
  //    node1---+---> node2----+---> unifier---> node3
  //            |              |
  //            '---> node2----'
  //
  GenericTestOperator node1 = dag.addOperator("node1", GenericTestOperator.class);
  PhysicalPlanTest.PartitioningTestOperator node2 = dag.addOperator("node2", PhysicalPlanTest.PartitioningTestOperator.class);
  node2.setPartitionCount(3);
  dag.setOperatorAttribute(node2, OperatorContext.SPIN_MILLIS, 10);
  /* this should not affect anything materially */
  dag.setOutputPortAttribute(node2.outport1, PortContext.QUEUE_CAPACITY, 1111);
  GenericTestOperator node3 = dag.addOperator("node3", GenericTestOperator.class);
  dag.setInputPortAttribute(node3.inport1, PortContext.QUEUE_CAPACITY, 2222);
  LogicalPlan.StreamMeta n1n2 = dag.addStream("n1n2", node1.outport1, node2.inport1);
  LogicalPlan.StreamMeta n2n3 = dag.addStream("n2n3", node2.outport1, node3.inport1);
  dag.setAttribute(LogicalPlan.CONTAINERS_MAX_COUNT, Integer.MAX_VALUE);
  MemoryStorageAgent msa = new MemoryStorageAgent();
  dag.setAttribute(OperatorContext.STORAGE_AGENT, msa);
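  // Build the physical plan: node1, the three static node2 partitions, and the
  // unifier co-located with node3 are expected to occupy five containers in total.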
  StreamingContainerManager dnm = new StreamingContainerManager(dag);
  PhysicalPlan plan = dnm.getPhysicalPlan();
  Assert.assertEquals("number containers", 5, plan.getContainers().size());
  List<StreamingContainerAgent> containerAgents = Lists.newArrayList();
  for (int i = 0; i < plan.getContainers().size(); i++) {
    containerAgents.add(assignContainer(dnm, "container" + (i + 1)));
  }
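  // node1 should be the only operator deployed in its container.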
  PTContainer c = plan.getOperators(dag.getMeta(node1)).get(0).getContainer();
  StreamingContainerAgent sca1 = dnm.getContainerAgent(c.getExternalId());
  List<OperatorDeployInfo> c1 = getDeployInfo(sca1);
  Assert.assertEquals("number operators assigned to container", 1, c1.size());
  Assert.assertTrue(dag.getMeta(node1) + " assigned to " + sca1.container.getExternalId(), containsNodeContext(c1, dag.getMeta(node1)));
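  // Each node2 partition gets its own container with a single n1n2 input carrying
  // that partition's keys and a single output toward the unifier.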
  List<PTOperator> o2Partitions = plan.getOperators(dag.getMeta(node2));
  Assert.assertEquals("number partitions", TestStaticPartitioningSerDe.partitions.length, o2Partitions.size());
  for (int i = 0; i < o2Partitions.size(); i++) {
    String containerId = o2Partitions.get(i).getContainer().getExternalId();
    List<OperatorDeployInfo> cc = getDeployInfo(dnm.getContainerAgent(containerId));
    Assert.assertEquals("number operators assigned to container", 1, cc.size());
    Assert.assertTrue(dag.getMeta(node2) + " assigned to " + containerId, containsNodeContext(cc, dag.getMeta(node2)));
    // n1n2 in, mergeStream out
    OperatorDeployInfo ndi = cc.get(0);
    Assert.assertEquals("type " + ndi, OperatorDeployInfo.OperatorType.GENERIC, ndi.type);
    Assert.assertEquals("inputs " + ndi, 1, ndi.inputs.size());
    Assert.assertEquals("outputs " + ndi, 1, ndi.outputs.size());
    InputDeployInfo nidi = ndi.inputs.get(0);
    Assert.assertEquals("stream " + nidi, n1n2.getName(), nidi.declaredStreamId);
    Assert.assertEquals("partition for " + containerId, Sets.newHashSet(node2.partitionKeys[i]), nidi.partitionKeys);
    Assert.assertEquals("number stream codecs for " + nidi, 1, nidi.streamCodecs.size());
  }
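  // The unifier merging node2's partition outputs is deployed alongside node3; it
  // should expose one n2n3 input per partition, and the 1111 queue capacity set on
  // node2.outport1 should propagate to those inputs.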
  List<OperatorDeployInfo> cUnifier = getDeployInfo(dnm.getContainerAgent(plan.getOperators(dag.getMeta(node3)).get(0).getContainer().getExternalId()));
  Assert.assertEquals("number operators " + cUnifier, 2, cUnifier.size());
  OperatorDeployInfo mergeNodeDI = getNodeDeployInfo(cUnifier, dag.getMeta(node2).getMeta(node2.outport1).getUnifierMeta());
  Assert.assertNotNull("unifier for " + node2, mergeNodeDI);
  Assert.assertEquals("type " + mergeNodeDI, OperatorDeployInfo.OperatorType.UNIFIER, mergeNodeDI.type);
  Assert.assertEquals("inputs " + mergeNodeDI, 3, mergeNodeDI.inputs.size());
  List<Integer> sourceNodeIds = Lists.newArrayList();
  for (InputDeployInfo nidi : mergeNodeDI.inputs) {
    Assert.assertEquals("streamName " + nidi, n2n3.getName(), nidi.declaredStreamId);
    String mergePortName = "<merge#" + dag.getMeta(node2).getMeta(node2.outport1).getPortName() + ">";
    Assert.assertEquals("portName " + nidi, mergePortName, nidi.portName);
    Assert.assertNotNull("sourceNodeId " + nidi, nidi.sourceNodeId);
    Assert.assertNotNull("contextAttributes " + nidi, nidi.contextAttributes);
    Assert.assertEquals("contextAttributes ", new Integer(1111), nidi.getValue(PortContext.QUEUE_CAPACITY));
    sourceNodeIds.add(nidi.sourceNodeId);
  }
  for (PTOperator node : dnm.getPhysicalPlan().getOperators(dag.getMeta(node2))) {
    Assert.assertTrue(sourceNodeIds + " contains " + node.getId(), sourceNodeIds.contains(node.getId()));
  }
  Assert.assertEquals("outputs " + mergeNodeDI, 1, mergeNodeDI.outputs.size());
  for (OutputDeployInfo odi : mergeNodeDI.outputs) {
    Assert.assertNotNull("contextAttributes " + odi, odi.contextAttributes);
    Assert.assertEquals("contextAttributes ", new Integer(2222), odi.getValue(PortContext.QUEUE_CAPACITY));
  }
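  // The state checkpointed by the storage agent for the merge operator must
  // deserialize to the engine's DefaultUnifier.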
  try {
    Object operator = msa.load(mergeNodeDI.id, Stateless.WINDOW_ID);
    Assert.assertTrue("" + operator, operator instanceof DefaultUnifier);
  } catch (IOException ex) {
    throw new RuntimeException(ex);
  }
  // node3 container
  c = plan.getOperators(dag.getMeta(node3)).get(0).getContainer();
  List<OperatorDeployInfo> cmerge = getDeployInfo(dnm.getContainerAgent(c.getExternalId()));
  Assert.assertEquals("number operators " + cmerge, 2, cmerge.size());
  OperatorDeployInfo node3DI = getNodeDeployInfo(cmerge, dag.getMeta(node3));
  Assert.assertNotNull(dag.getMeta(node3) + " assigned", node3DI);
  Assert.assertEquals("inputs " + node3DI, 1, node3DI.inputs.size());
  InputDeployInfo node3In = node3DI.inputs.get(0);
  Assert.assertEquals("streamName " + node3In, n2n3.getName(), node3In.declaredStreamId);
  Assert.assertEquals("portName " + node3In, dag.getMeta(node3).getMeta(node3.inport1).getPortName(), node3In.portName);
  Assert.assertNotNull("sourceNodeId " + node3DI, node3In.sourceNodeId);
  Assert.assertEquals("sourcePortName " + node3DI, mergeNodeDI.outputs.get(0).portName, node3In.sourcePortName);
}
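
For context, DefaultUnifier is the pass-through unifier the engine falls back to when a partitioned operator's output port does not supply a unifier of its own, which is why the checkpointed merge operator above deserializes to that class. Below is a minimal sketch of a custom pass-through unifier written against the public Operator.Unifier API; the class name PassThroughUnifier and the Object tuple type are illustrative choices, not the actual DefaultUnifier implementation.

import com.datatorrent.api.Context.OperatorContext;
import com.datatorrent.api.DefaultOutputPort;
import com.datatorrent.api.Operator;

// Illustrative pass-through unifier: re-emits every tuple received from any
// upstream partition on a single output port (names here are examples only).
public class PassThroughUnifier implements Operator.Unifier<Object> {
  public final transient DefaultOutputPort<Object> output = new DefaultOutputPort<>();

  @Override
  public void process(Object tuple) {
    // merge step: forward the tuple downstream unchanged
    output.emit(tuple);
  }

  @Override
  public void beginWindow(long windowId) {
  }

  @Override
  public void endWindow() {
  }

  @Override
  public void setup(OperatorContext context) {
  }

  @Override
  public void teardown() {
  }
}

A unifier like this would typically be attached by overriding DefaultOutputPort.getUnifier() on the operator's output port; when that hook is left at its default, the physical plan inserts DefaultUnifier, as the msa.load assertion in the test verifies.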