Use of com.datatorrent.stram.plan.logical.LogicalPlan in project apex-core by Apache.
From the class PhysicalPlanTest, method testContainerSize:
@Test
public void testContainerSize()
{
  LogicalPlan dag = new LogicalPlan();
  dag.setAttribute(OperatorContext.STORAGE_AGENT, new StramTestSupport.MemoryStorageAgent());

  GenericTestOperator o1 = dag.addOperator("o1", GenericTestOperator.class);
  GenericTestOperator o2 = dag.addOperator("o2", GenericTestOperator.class);
  GenericTestOperator o3 = dag.addOperator("o3", GenericTestOperator.class);

  dag.setOperatorAttribute(o1, OperatorContext.VCORES, 1);
  dag.setOperatorAttribute(o2, OperatorContext.VCORES, 2);

  dag.addStream("o1.outport1", o1.outport1, o2.inport1);
  dag.addStream("o2.outport1", o2.outport1, o3.inport1);

  dag.setOperatorAttribute(o2, OperatorContext.MEMORY_MB, 4000);
  dag.setAttribute(LogicalPlan.CONTAINERS_MAX_COUNT, 2);

  PhysicalPlan plan = new PhysicalPlan(dag, new TestPlanContext());

  Assert.assertEquals("number of containers", 2, plan.getContainers().size());
  Assert.assertEquals("memory container 1", 2560, plan.getContainers().get(0).getRequiredMemoryMB());
  Assert.assertEquals("vcores container 1", 1, plan.getContainers().get(0).getRequiredVCores());
  Assert.assertEquals("memory container 2", 4512, plan.getContainers().get(1).getRequiredMemoryMB());
  Assert.assertEquals("vcores container 2", 2, plan.getContainers().get(1).getRequiredVCores());
  Assert.assertEquals("number of operators in container 1", 2, plan.getContainers().get(0).getOperators().size());
}
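The asserted container sizes follow from simple addition. The sketch below walks through one plausible accounting, assuming a 1024 MB default operator memory and a 512 MB per-container buffer server allocation; both values are assumptions about Apex defaults, not something stated in the test itself.

// Minimal sketch of the assumed arithmetic (not part of PhysicalPlanTest):
public class ContainerSizeSketch
{
  public static void main(String[] args)
  {
    int defaultOperatorMemoryMB = 1024; // assumed OperatorContext.MEMORY_MB default
    int bufferServerMemoryMB = 512;     // assumed per-container buffer server memory
    int container1 = defaultOperatorMemoryMB + defaultOperatorMemoryMB + bufferServerMemoryMB; // o1 + o3 -> 2560
    int container2 = 4000 + bufferServerMemoryMB; // o2 with MEMORY_MB = 4000 -> 4512
    System.out.println("container 1: " + container1 + " MB, container 2: " + container2 + " MB");
  }
}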
Use of com.datatorrent.stram.plan.logical.LogicalPlan in project apex-core by Apache.
From the class PhysicalPlanTest, method testInlineMultipleInputs:
@Test
public void testInlineMultipleInputs()
{
  LogicalPlan dag = new LogicalPlan();
  GenericTestOperator node1 = dag.addOperator("node1", GenericTestOperator.class);
  GenericTestOperator node2 = dag.addOperator("node2", GenericTestOperator.class);
  GenericTestOperator node3 = dag.addOperator("node3", GenericTestOperator.class);

  dag.addStream("n1Output1", node1.outport1, node3.inport1).setLocality(Locality.CONTAINER_LOCAL);
  dag.addStream("n2Output1", node2.outport1, node3.inport2).setLocality(Locality.CONTAINER_LOCAL);

  int maxContainers = 5;
  dag.setAttribute(LogicalPlan.CONTAINERS_MAX_COUNT, maxContainers);
  dag.setAttribute(OperatorContext.STORAGE_AGENT, new TestPlanContext());

  PhysicalPlan deployer = new PhysicalPlan(dag, new TestPlanContext());
  Assert.assertEquals("number of containers", 1, deployer.getContainers().size());

  PTOutput node1Out = deployer.getOperators(dag.getMeta(node1)).get(0).getOutputs().get(0);
  Assert.assertTrue("inline " + node1Out, node1Out.isDownStreamInline());

  // per current logic, different container is assigned to second input node
  PTOutput node2Out = deployer.getOperators(dag.getMeta(node2)).get(0).getOutputs().get(0);
  Assert.assertTrue("inline " + node2Out, node2Out.isDownStreamInline());
}
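Because both streams are marked CONTAINER_LOCAL and the test asserts a single container, all three operators should end up co-located. A small follow-up check along those lines (not part of the original test, reusing the same getOperators() accessor exercised in testContainerSize) could look like:

Assert.assertEquals("operators in container 1", 3,
    deployer.getContainers().get(0).getOperators().size());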
Use of com.datatorrent.stram.plan.logical.LogicalPlan in project apex-core by Apache.
From the class PhysicalPlanTest, method testParallelPartitionForSlidingWindow:
@Test
public void testParallelPartitionForSlidingWindow()
{
  LogicalPlan dag = new LogicalPlan();
  dag.setAttribute(OperatorContext.STORAGE_AGENT, new StramTestSupport.MemoryStorageAgent());

  GenericTestOperator o1 = dag.addOperator("o1", GenericTestOperator.class);
  GenericTestOperator o2 = dag.addOperator("o2", GenericTestOperator.class);
  GenericTestOperator o3 = dag.addOperator("o3", GenericTestOperator.class);

  dag.setOperatorAttribute(o1, OperatorContext.SLIDE_BY_WINDOW_COUNT, 2);
  dag.setOperatorAttribute(o1, OperatorContext.PARTITIONER, new StatelessPartitioner<>(2));
  dag.setInputPortAttribute(o2.inport1, PortContext.PARTITION_PARALLEL, true);
  dag.setOperatorAttribute(o1, OperatorContext.APPLICATION_WINDOW_COUNT, 4);

  dag.addStream("o1.outport1", o1.outport1, o2.inport1);
  dag.addStream("o2.outport1", o2.outport1, o3.inport1);

  PhysicalPlan plan = new PhysicalPlan(dag, new TestPlanContext());
  Assert.assertEquals("number of containers", 7, plan.getContainers().size());
}
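To see how partitioning and the sliding-window unifiers add up to seven containers, one could dump the operator-to-container assignment. A short inspection sketch (not in the original test, and assuming PTContainer is the element type returned by getContainers(), as in apex-core's physical plan classes):

for (PTContainer c : plan.getContainers()) {
  System.out.println(c + " -> " + c.getOperators());
}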
Use of com.datatorrent.stram.plan.logical.LogicalPlan in project apex-malhar by Apache.
From the class SerDeTest, method testSQLSelectInsertWithAPI:
@Test
public void testSQLSelectInsertWithAPI() throws IOException, ClassNotFoundException
{
  LogicalPlan dag = new LogicalPlan();

  String schemaIn = "{\"separator\":\",\",\"quoteChar\":\"\\\"\",\"fields\":[" + "{\"name\":\"RowTime\",\"type\":\"Date\",\"constraints\":{\"format\":\"dd/MM/yyyy hh:mm:ss\"}}," + "{\"name\":\"id\",\"type\":\"Integer\"}," + "{\"name\":\"Product\",\"type\":\"String\"}," + "{\"name\":\"units\",\"type\":\"Integer\"}]}";
  String schemaOut = "{\"separator\":\",\",\"quoteChar\":\"\\\"\",\"fields\":[" + "{\"name\":\"RowTime\",\"type\":\"Date\",\"constraints\":{\"format\":\"dd/MM/yyyy hh:mm:ss\"}}," + "{\"name\":\"Product\",\"type\":\"String\"}]}";

  SQLExecEnvironment.getEnvironment()
      .registerTable("ORDERS", new FileEndpoint("dummyFilePathInput", new CSVMessageFormat(schemaIn)))
      .registerTable("SALES", new FileEndpoint("dummyFilePathOutput", "out.tmp", new CSVMessageFormat(schemaOut)))
      .executeSQL(dag, "INSERT INTO SALES SELECT STREAM FLOOR(ROWTIME TO HOUR), SUBSTRING(PRODUCT, 0, 5) " + "FROM ORDERS WHERE id > 3");

  dag.validate();
}
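executeSQL translates the statement into operators on the supplied DAG, which validate() then checks. A quick sanity assertion along those lines (not in the original test, and assuming LogicalPlan exposes the getAllOperators() accessor from the Apex DAG API) might be:

Assert.assertFalse("operators added by executeSQL", dag.getAllOperators().isEmpty());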
Use of com.datatorrent.stram.plan.logical.LogicalPlan in project apex-malhar by Apache.
From the class SerDeTest, method testJoin:
@Test
public void testJoin() throws IOException, ClassNotFoundException
{
  LogicalPlan dag = new LogicalPlan();

  String schemaIn0 = "{\"separator\":\",\",\"quoteChar\":\"\\\"\",\"fields\":[" + "{\"name\":\"RowTime\",\"type\":\"Date\",\"constraints\":{\"format\":\"dd/MM/yyyy hh:mm:ss Z\"}}," + "{\"name\":\"id\",\"type\":\"Integer\"}," + "{\"name\":\"Product\",\"type\":\"String\"}," + "{\"name\":\"units\",\"type\":\"Integer\"}]}";
  String schemaIn1 = "{\"separator\":\",\",\"quoteChar\":\"\\\"\",\"fields\":[" + "{\"name\":\"id\",\"type\":\"Integer\"}," + "{\"name\":\"Category\",\"type\":\"String\"}]}";
  String schemaOut = "{\"separator\":\",\",\"quoteChar\":\"\\\"\",\"fields\":[" + "{\"name\":\"RowTime1\",\"type\":\"Date\",\"constraints\":{\"format\":\"dd/MM/yyyy hh:mm:ss Z\"}}," + "{\"name\":\"RowTime2\",\"type\":\"Date\",\"constraints\":{\"format\":\"dd/MM/yyyy hh:mm:ss Z\"}}," + "{\"name\":\"Product\",\"type\":\"String\"}," + "{\"name\":\"Category\",\"type\":\"String\"}]}";

  String sql = "INSERT INTO SALES SELECT STREAM A.ROWTIME, FLOOR(A.ROWTIME TO DAY), "
      + "APEXCONCAT('OILPAINT', SUBSTRING(A.PRODUCT, 6, 7)), B.CATEGORY "
      + "FROM ORDERS AS A "
      + "JOIN CATEGORY AS B ON A.id = B.id "
      + "WHERE A.id > 3 AND A.PRODUCT LIKE 'paint%'";

  SQLExecEnvironment.getEnvironment()
      .registerTable("ORDERS", new KafkaEndpoint("localhost:9092", "testdata0", new CSVMessageFormat(schemaIn0)))
      .registerTable("CATEGORY", new KafkaEndpoint("localhost:9092", "testdata1", new CSVMessageFormat(schemaIn1)))
      .registerTable("SALES", new KafkaEndpoint("localhost:9092", "testresult", new CSVMessageFormat(schemaOut)))
      .registerFunction("APEXCONCAT", FileEndpointTest.class, "apex_concat_str")
      .executeSQL(dag, sql);

  dag.validate();
}
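registerFunction binds the SQL name APEXCONCAT to a static method on FileEndpointTest. The actual body of apex_concat_str is not shown on this page, but a scalar UDF registered this way would typically be a public static method with a matching signature, roughly like the following sketch (an assumption, not the real FileEndpointTest code):

// Hypothetical shape of the registered UDF; the real implementation may differ.
public static String apex_concat_str(String s1, String s2)
{
  return s1 + s2;
}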