
Example 61 with LogicalPlan

use of com.datatorrent.stram.plan.logical.LogicalPlan in project apex-core by apache.

the class LogicalPlanSerializer method convertToMap.

/**
   * Converts the given logical plan into a map of plain collections suitable for JSON serialization.
   *
   * @param dag the logical plan to convert
   * @param includeModules whether module details should be included in the result
   * @return a map describing the DAG's attributes, operators, ports, and streams
   */
public static Map<String, Object> convertToMap(LogicalPlan dag, boolean includeModules) {
    HashMap<String, Object> result = new HashMap<>();
    ArrayList<Object> operatorArray = new ArrayList<>();
    ArrayList<Object> streamMap = new ArrayList<>();
    //result.put("applicationName", appConfig.getName());
    result.put("operators", operatorArray);
    result.put("streams", streamMap);
    //LogicalPlan dag = StramAppLauncher.prepareDAG(appConfig, StreamingApplication.LAUNCHMODE_YARN);
    //
    // should we put the DAGContext info here?
    Map<String, Object> dagAttrs = new HashMap<>();
    for (Map.Entry<Attribute<Object>, Object> e : Attribute.AttributeMap.AttributeInitializer.getAllAttributes(dag, Context.DAGContext.class).entrySet()) {
        dagAttrs.put(e.getKey().getSimpleName(), e.getValue());
    }
    result.put("attributes", dagAttrs);
    Collection<OperatorMeta> allOperators = dag.getAllOperators();
    ObjectMapper propertyObjectMapper = new ObjectMapper();
    propertyObjectMapper.configure(JsonGenerator.Feature.WRITE_NUMBERS_AS_STRINGS, true);
    propertyObjectMapper.configure(SerializationConfig.Feature.FAIL_ON_EMPTY_BEANS, false);
    StdTypeResolverBuilder typer = new PropertyTypeResolverBuilder();
    typer.init(JsonTypeInfo.Id.CLASS, null);
    typer = typer.inclusion(JsonTypeInfo.As.PROPERTY);
    propertyObjectMapper.setDefaultTyping(typer);
    for (OperatorMeta operatorMeta : allOperators) {
        HashMap<String, Object> operatorDetailMap = new HashMap<>();
        ArrayList<Map<String, Object>> portList = new ArrayList<>();
        Map<String, Object> attributeMap = new HashMap<>();
        String operatorName = operatorMeta.getName();
        operatorArray.add(operatorDetailMap);
        operatorDetailMap.put("name", operatorName);
        operatorDetailMap.put("ports", portList);
        operatorDetailMap.put("class", operatorMeta.getOperator().getClass().getName());
        operatorDetailMap.put("attributes", attributeMap);
        Map<Attribute<Object>, Object> rawAttributes = Attribute.AttributeMap.AttributeInitializer.getAllAttributes(operatorMeta, Context.OperatorContext.class);
        for (Map.Entry<Attribute<Object>, Object> entry : rawAttributes.entrySet()) {
            attributeMap.put(entry.getKey().getSimpleName(), entry.getValue());
        }
        ObjectMapperString str;
        try {
            str = new ObjectMapperString(propertyObjectMapper.writeValueAsString(operatorMeta.getOperator()));
        } catch (Throwable ex) {
            LOG.error("Got exception when trying to get properties for operator {}", operatorMeta.getName(), ex);
            str = null;
        }
        operatorDetailMap.put("properties", str);
        Operators.PortMappingDescriptor pmd = new Operators.PortMappingDescriptor();
        Operators.describe(operatorMeta.getOperator(), pmd);
        for (Map.Entry<String, PortContextPair<InputPort<?>>> entry : pmd.inputPorts.entrySet()) {
            HashMap<String, Object> portDetailMap = new HashMap<>();
            HashMap<String, Object> portAttributeMap = new HashMap<>();
            InputPortMeta portMeta = operatorMeta.getMeta(entry.getValue().component);
            String portName = portMeta.getPortName();
            portDetailMap.put("name", portName);
            portDetailMap.put("type", "input");
            portDetailMap.put("attributes", portAttributeMap);
            rawAttributes = Attribute.AttributeMap.AttributeInitializer.getAllAttributes(portMeta, Context.PortContext.class);
            for (Map.Entry<Attribute<Object>, Object> attEntry : rawAttributes.entrySet()) {
                portAttributeMap.put(attEntry.getKey().getSimpleName(), attEntry.getValue());
            }
            portList.add(portDetailMap);
        }
        for (Map.Entry<String, PortContextPair<OutputPort<?>>> entry : pmd.outputPorts.entrySet()) {
            HashMap<String, Object> portDetailMap = new HashMap<>();
            HashMap<String, Object> portAttributeMap = new HashMap<>();
            OutputPortMeta portMeta = operatorMeta.getMeta(entry.getValue().component);
            String portName = portMeta.getPortName();
            portDetailMap.put("name", portName);
            portDetailMap.put("type", "output");
            portDetailMap.put("attributes", portAttributeMap);
            rawAttributes = Attribute.AttributeMap.AttributeInitializer.getAllAttributes(portMeta, Context.PortContext.class);
            for (Map.Entry<Attribute<Object>, Object> attEntry : rawAttributes.entrySet()) {
                portAttributeMap.put(attEntry.getKey().getSimpleName(), attEntry.getValue());
            }
            portList.add(portDetailMap);
        }
    }
    Collection<StreamMeta> allStreams = dag.getAllStreams();
    for (StreamMeta streamMeta : allStreams) {
        HashMap<String, Object> streamDetailMap = new HashMap<>();
        String streamName = streamMeta.getName();
        streamMap.add(streamDetailMap);
        String sourcePortName = streamMeta.getSource().getPortName();
        OperatorMeta operatorMeta = streamMeta.getSource().getOperatorMeta();
        HashMap<String, Object> sourcePortDetailMap = new HashMap<>();
        sourcePortDetailMap.put("operatorName", operatorMeta.getName());
        sourcePortDetailMap.put("portName", sourcePortName);
        streamDetailMap.put("name", streamName);
        streamDetailMap.put("source", sourcePortDetailMap);
        Collection<InputPortMeta> sinks = streamMeta.getSinks();
        ArrayList<HashMap<String, Object>> sinkPortList = new ArrayList<>();
        for (InputPortMeta sinkPort : sinks) {
            HashMap<String, Object> sinkPortDetailMap = new HashMap<>();
            sinkPortDetailMap.put("operatorName", sinkPort.getOperatorMeta().getName());
            sinkPortDetailMap.put("portName", sinkPort.getPortName());
            sinkPortList.add(sinkPortDetailMap);
        }
        streamDetailMap.put("sinks", sinkPortList);
        if (streamMeta.getLocality() != null) {
            streamDetailMap.put("locality", streamMeta.getLocality().name());
        }
    }
    if (includeModules) {
        ArrayList<Map<String, Object>> modulesArray = new ArrayList<>();
        result.put("modules", modulesArray);
        for (LogicalPlan.ModuleMeta meta : dag.getAllModules()) {
            modulesArray.add(getLogicalModuleDetails(dag, meta));
        }
    }
    return result;
}
Also used : HashMap(java.util.HashMap) Attribute(com.datatorrent.api.Attribute) StdTypeResolverBuilder(org.codehaus.jackson.map.jsontype.impl.StdTypeResolverBuilder) ArrayList(java.util.ArrayList) ObjectMapperString(com.datatorrent.common.util.ObjectMapperString) PortContextPair(com.datatorrent.stram.plan.logical.Operators.PortContextPair) StreamMeta(com.datatorrent.stram.plan.logical.LogicalPlan.StreamMeta) OutputPortMeta(com.datatorrent.stram.plan.logical.LogicalPlan.OutputPortMeta) ObjectMapperString(com.datatorrent.common.util.ObjectMapperString) ObjectMapper(org.codehaus.jackson.map.ObjectMapper) Context(com.datatorrent.api.Context) Operators(com.datatorrent.stram.plan.logical.Operators) OperatorMeta(com.datatorrent.stram.plan.logical.LogicalPlan.OperatorMeta) InputPortMeta(com.datatorrent.stram.plan.logical.LogicalPlan.InputPortMeta) JSONObject(org.codehaus.jettison.json.JSONObject) LogicalPlan(com.datatorrent.stram.plan.logical.LogicalPlan) HashMap(java.util.HashMap) Map(java.util.Map) BeanMap(org.apache.commons.beanutils.BeanMap)
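
The map produced by convertToMap consists of plain Java collections plus a few ObjectMapperString wrappers, so it can be rendered to JSON directly. A minimal usage sketch, assuming this helper sits alongside LogicalPlanSerializer (whose package the snippet does not show) and Jackson 1.x (org.codehaus.jackson) on the classpath, as above:

import java.util.Map;

import org.codehaus.jackson.map.ObjectMapper;
import org.codehaus.jackson.map.SerializationConfig;

import com.datatorrent.stram.plan.logical.LogicalPlan;

public class ConvertToMapExample {

    // Renders the DAG description produced by convertToMap as a JSON string.
    public static String dagAsJson(LogicalPlan dag) throws Exception {
        // Include module details as well; pass false to skip them.
        Map<String, Object> plan = LogicalPlanSerializer.convertToMap(dag, true);
        ObjectMapper mapper = new ObjectMapper();
        // Mirror the method above: tolerate value types without getters (e.g. ObjectMapperString).
        mapper.configure(SerializationConfig.Feature.FAIL_ON_EMPTY_BEANS, false);
        return mapper.writeValueAsString(plan);
    }
}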

Example 62 with LogicalPlan

use of com.datatorrent.stram.plan.logical.LogicalPlan in project apex-core by apache.

the class AffinityRulesTest method testAntiAffinityInOperators.

@Test
public void testAntiAffinityInOperators() {
    LogicalPlan dag = new LogicalPlan();
    dag.getAttributes().put(com.datatorrent.api.Context.DAGContext.APPLICATION_PATH, testMeta.getAbsolutePath());
    dag.setAttribute(OperatorContext.STORAGE_AGENT, new MemoryStorageAgent());
    GenericTestOperator o1 = dag.addOperator("O1", GenericTestOperator.class);
    dag.setOperatorAttribute(o1, OperatorContext.MEMORY_MB, 256);
    GenericTestOperator o2 = dag.addOperator("O2", GenericTestOperator.class);
    dag.setOperatorAttribute(o2, OperatorContext.MEMORY_MB, 256);
    dag.getMeta(o1).getAttributes().put(OperatorContext.LOCALITY_HOST, "host1");
    AffinityRulesSet ruleSet = new AffinityRulesSet();
    List<AffinityRule> rules = new ArrayList<>();
    ruleSet.setAffinityRules(rules);
    AffinityRule rule1 = new AffinityRule(Type.ANTI_AFFINITY, Locality.NODE_LOCAL, false, "O1", "O2");
    rules.add(rule1);
    dag.setAttribute(DAGContext.AFFINITY_RULES_SET, ruleSet);
    // .setLocality(Locality.NODE_LOCAL);
    dag.addStream("o1_outport1", o1.outport1, o2.inport1);
    StreamingContainerManager scm = new StreamingContainerManager(dag);
    ResourceRequestHandler rr = new ResourceRequestHandler();
    int containerMem = 1000;
    Map<String, NodeReport> nodeReports = Maps.newHashMap();
    NodeReport nr = BuilderUtils.newNodeReport(BuilderUtils.newNodeId("host1", 0), NodeState.RUNNING, "httpAddress", "rackName", BuilderUtils.newResource(0, 0), BuilderUtils.newResource(containerMem * 2, 2), 0, null, 0);
    nodeReports.put(nr.getNodeId().getHost(), nr);
    nr = BuilderUtils.newNodeReport(BuilderUtils.newNodeId("host2", 0), NodeState.RUNNING, "httpAddress", "rackName", BuilderUtils.newResource(0, 0), BuilderUtils.newResource(containerMem * 2, 2), 0, null, 0);
    nodeReports.put(nr.getNodeId().getHost(), nr);
    // set resources
    rr.updateNodeReports(Lists.newArrayList(nodeReports.values()));
    for (ContainerStartRequest csr : scm.containerStartRequests) {
        String host = rr.getHost(csr, true);
        csr.container.host = host;
        if (csr.container.getOperators().get(0).getName().equals("O1")) {
            Assert.assertEquals("Hosts set to host1 for Operator O1", "host1", host);
        }
        if (csr.container.getOperators().get(0).getName().equals("O2")) {
            Assert.assertEquals("Hosts set to host2 for Operator O2", "host2", host);
        }
    }
}
Also used : ContainerStartRequest(com.datatorrent.stram.StreamingContainerAgent.ContainerStartRequest) AffinityRule(com.datatorrent.api.AffinityRule) ArrayList(java.util.ArrayList) AffinityRulesSet(com.datatorrent.api.AffinityRulesSet) GenericTestOperator(com.datatorrent.stram.engine.GenericTestOperator) MemoryStorageAgent(com.datatorrent.stram.support.StramTestSupport.MemoryStorageAgent) LogicalPlan(com.datatorrent.stram.plan.logical.LogicalPlan) NodeReport(org.apache.hadoop.yarn.api.records.NodeReport) Test(org.junit.Test)
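
The same rule-set mechanism covers co-location as well as separation. A minimal sketch, assuming the AffinityRule constructor used in the test above, that Type is the nested enum on AffinityRule, and that Type.AFFINITY is the counterpart of the Type.ANTI_AFFINITY value shown; the operator names are illustrative:

import java.util.ArrayList;
import java.util.List;

import com.datatorrent.api.AffinityRule;
import com.datatorrent.api.AffinityRule.Type;
import com.datatorrent.api.AffinityRulesSet;
import com.datatorrent.api.Context.DAGContext;
import com.datatorrent.api.DAG.Locality;
import com.datatorrent.stram.plan.logical.LogicalPlan;

public class AffinityRuleSketch {

    // Keeps "reader" and "parser" on the same node, and keeps "parser" and "writer" apart.
    public static void applyRules(LogicalPlan dag) {
        List<AffinityRule> rules = new ArrayList<>();
        // Assumed: Type.AFFINITY exists alongside the Type.ANTI_AFFINITY used in the test above.
        rules.add(new AffinityRule(Type.AFFINITY, Locality.NODE_LOCAL, false, "reader", "parser"));
        rules.add(new AffinityRule(Type.ANTI_AFFINITY, Locality.NODE_LOCAL, false, "parser", "writer"));
        AffinityRulesSet ruleSet = new AffinityRulesSet();
        ruleSet.setAffinityRules(rules);
        dag.setAttribute(DAGContext.AFFINITY_RULES_SET, ruleSet);
    }
}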

Example 63 with LogicalPlan

use of com.datatorrent.stram.plan.logical.LogicalPlan in project apex-core by apache.

the class TypeDiscoverer method getAppAttributes.

/**
   * Fetches application attributes.
   *
   * @return all application attributes exposed to the user.
   * @throws JSONException
   * @throws IllegalAccessException
   */
public static JSONObject getAppAttributes() throws JSONException, IllegalAccessException {
    Field[] fields = Context.DAGContext.class.getFields();
    Collection<Field> attributes = Collections2.filter(Arrays.asList(fields), attrPredicate);
    return getAttrDescription(new LogicalPlan(), attributes);
}
Also used : PortContext(com.datatorrent.stram.engine.PortContext) Context(com.datatorrent.api.Context) BaseContext(com.datatorrent.stram.api.BaseContext) Field(java.lang.reflect.Field) LogicalPlan(com.datatorrent.stram.plan.logical.LogicalPlan)
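
The attrPredicate used above is not shown in this snippet; it narrows the reflected DAGContext fields down to the Attribute constants. A sketch of one way such a predicate could look, assuming Guava's Predicate as implied by the Collections2.filter call (the actual predicate in TypeDiscoverer may differ):

import java.lang.reflect.Field;
import java.lang.reflect.Modifier;

import com.datatorrent.api.Attribute;
import com.google.common.base.Predicate;

public class AttributeFieldPredicate {

    // Accepts only static final fields whose type is Attribute, i.e. the constants
    // declared on context interfaces such as DAGContext. getFields() already limits
    // the candidates to public fields.
    public static final Predicate<Field> ATTR_PREDICATE = new Predicate<Field>() {
        @Override
        public boolean apply(Field field) {
            int mods = field.getModifiers();
            return Modifier.isStatic(mods) && Modifier.isFinal(mods)
                && Attribute.class.isAssignableFrom(field.getType());
        }
    };
}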

Example 64 with LogicalPlan

use of com.datatorrent.stram.plan.logical.LogicalPlan in project apex-core by apache.

the class HostLocalTest method testContainerLocalWithVCores.

@Test
public void testContainerLocalWithVCores() {
    LogicalPlan dag = new LogicalPlan();
    dag.getAttributes().put(com.datatorrent.api.Context.DAGContext.APPLICATION_PATH, new File("target", HostLocalTest.class.getName()).getAbsolutePath());
    dag.setAttribute(OperatorContext.STORAGE_AGENT, new MemoryStorageAgent());
    GenericTestOperator o1 = dag.addOperator("o1", GenericTestOperator.class);
    dag.getMeta(o1).getAttributes().put(OperatorContext.LOCALITY_HOST, "host2");
    GenericTestOperator partitioned = dag.addOperator("partitioned", GenericTestOperator.class);
    dag.addStream("o1_outport1", o1.outport1, partitioned.inport1).setLocality(Locality.CONTAINER_LOCAL);
    dag.setOperatorAttribute(o1, OperatorContext.MEMORY_MB, 256);
    dag.setOperatorAttribute(o1, OperatorContext.VCORES, 1);
    dag.setOperatorAttribute(partitioned, OperatorContext.VCORES, 1);
    StreamingContainerManager scm = new StreamingContainerManager(dag);
    ResourceRequestHandler rr = new ResourceRequestHandler();
    int containerMem = 1000;
    Map<String, NodeReport> nodeReports = Maps.newHashMap();
    NodeReport nr = BuilderUtils.newNodeReport(BuilderUtils.newNodeId("host1", 0), NodeState.RUNNING, "httpAddress", "rackName", BuilderUtils.newResource(0, 0), BuilderUtils.newResource(containerMem * 2, 2), 0, null, 0);
    nodeReports.put(nr.getNodeId().getHost(), nr);
    nr = BuilderUtils.newNodeReport(BuilderUtils.newNodeId("host2", 0), NodeState.RUNNING, "httpAddress", "rackName", BuilderUtils.newResource(0, 0), BuilderUtils.newResource(containerMem * 2, 2), 0, null, 0);
    nodeReports.put(nr.getNodeId().getHost(), nr);
    // set resources
    rr.updateNodeReports(Lists.newArrayList(nodeReports.values()));
    Assert.assertEquals("number of containers is 1", 1, scm.containerStartRequests.size());
    for (ContainerStartRequest csr : scm.containerStartRequests) {
        String host = rr.getHost(csr, true);
        csr.container.host = host;
        Assert.assertEquals("number of vcores", 2, csr.container.getRequiredVCores());
        Assert.assertEquals("Hosts set to host2", "host2", host);
    }
}
Also used : ContainerStartRequest(com.datatorrent.stram.StreamingContainerAgent.ContainerStartRequest) GenericTestOperator(com.datatorrent.stram.engine.GenericTestOperator) MemoryStorageAgent(com.datatorrent.stram.support.StramTestSupport.MemoryStorageAgent) LogicalPlan(com.datatorrent.stram.plan.logical.LogicalPlan) File(java.io.File) NodeReport(org.apache.hadoop.yarn.api.records.NodeReport) Test(org.junit.Test)
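
These host-locality tests repeat the same node-report setup: a handful of RUNNING hosts with identical capacity. A small helper sketch that factors out that boilerplate, built from the BuilderUtils calls already used above; the method name is illustrative and the BuilderUtils import path is assumed (the test's import list does not show it):

import java.util.Map;

import org.apache.hadoop.yarn.api.records.NodeReport;
import org.apache.hadoop.yarn.api.records.NodeState;
import org.apache.hadoop.yarn.server.utils.BuilderUtils;

import com.google.common.collect.Maps;

public class NodeReportFixture {

    // Builds one RUNNING node report per host, each with 2 * containerMem MB and 2 vcores,
    // keyed by host name, matching the inline setup in the tests above.
    public static Map<String, NodeReport> runningNodes(int containerMem, String... hosts) {
        Map<String, NodeReport> nodeReports = Maps.newHashMap();
        for (String host : hosts) {
            NodeReport nr = BuilderUtils.newNodeReport(BuilderUtils.newNodeId(host, 0), NodeState.RUNNING,
                "httpAddress", "rackName", BuilderUtils.newResource(0, 0),
                BuilderUtils.newResource(containerMem * 2, 2), 0, null, 0);
            nodeReports.put(nr.getNodeId().getHost(), nr);
        }
        return nodeReports;
    }
}

With such a helper, the setup in the tests above reduces to rr.updateNodeReports(Lists.newArrayList(runningNodes(1000, "host1", "host2").values())).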

Example 65 with LogicalPlan

use of com.datatorrent.stram.plan.logical.LogicalPlan in project apex-core by apache.

the class HostLocalTest method testNodeLocal.

@Test
public void testNodeLocal() {
    LogicalPlan dag = new LogicalPlan();
    dag.getAttributes().put(com.datatorrent.api.Context.DAGContext.APPLICATION_PATH, new File("target", HostLocalTest.class.getName()).getAbsolutePath());
    dag.setAttribute(OperatorContext.STORAGE_AGENT, new MemoryStorageAgent());
    GenericTestOperator o1 = dag.addOperator("o1", GenericTestOperator.class);
    dag.setOperatorAttribute(o1, OperatorContext.MEMORY_MB, 256);
    GenericTestOperator partitioned = dag.addOperator("partitioned", GenericTestOperator.class);
    dag.setOperatorAttribute(partitioned, OperatorContext.MEMORY_MB, 256);
    dag.getMeta(partitioned).getAttributes().put(OperatorContext.LOCALITY_HOST, "host1");
    dag.addStream("o1_outport1", o1.outport1, partitioned.inport1).setLocality(Locality.NODE_LOCAL);
    StreamingContainerManager scm = new StreamingContainerManager(dag);
    ResourceRequestHandler rr = new ResourceRequestHandler();
    int containerMem = 1000;
    Map<String, NodeReport> nodeReports = Maps.newHashMap();
    NodeReport nr = BuilderUtils.newNodeReport(BuilderUtils.newNodeId("host1", 0), NodeState.RUNNING, "httpAddress", "rackName", BuilderUtils.newResource(0, 0), BuilderUtils.newResource(containerMem * 2, 2), 0, null, 0);
    nodeReports.put(nr.getNodeId().getHost(), nr);
    nr = BuilderUtils.newNodeReport(BuilderUtils.newNodeId("host2", 0), NodeState.RUNNING, "httpAddress", "rackName", BuilderUtils.newResource(0, 0), BuilderUtils.newResource(containerMem * 2, 2), 0, null, 0);
    nodeReports.put(nr.getNodeId().getHost(), nr);
    // set resources
    rr.updateNodeReports(Lists.newArrayList(nodeReports.values()));
    for (ContainerStartRequest csr : scm.containerStartRequests) {
        String host = rr.getHost(csr, true);
        csr.container.host = host;
        Assert.assertEquals("Hosts set to host1", "host1", host);
    }
}
Also used : ContainerStartRequest(com.datatorrent.stram.StreamingContainerAgent.ContainerStartRequest) GenericTestOperator(com.datatorrent.stram.engine.GenericTestOperator) MemoryStorageAgent(com.datatorrent.stram.support.StramTestSupport.MemoryStorageAgent) LogicalPlan(com.datatorrent.stram.plan.logical.LogicalPlan) File(java.io.File) NodeReport(org.apache.hadoop.yarn.api.records.NodeReport) Test(org.junit.Test)
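
The assertion loops in the last two tests also share one shape: resolve the host for each pending container request, record it on the container, and compare it with the expected value. A compact sketch of that check as a reusable helper, using only the calls already shown above; the import paths for StreamingContainerManager and ResourceRequestHandler are assumed from the package layout of the other imports:

import org.junit.Assert;

import com.datatorrent.stram.ResourceRequestHandler;
import com.datatorrent.stram.StreamingContainerAgent.ContainerStartRequest;
import com.datatorrent.stram.StreamingContainerManager;

public class HostAssignmentCheck {

    // Resolves the host for every pending container start request and asserts the placement.
    public static void assertAllContainersOn(StreamingContainerManager scm, ResourceRequestHandler rr,
            String expectedHost) {
        for (ContainerStartRequest csr : scm.containerStartRequests) {
            String host = rr.getHost(csr, true);
            csr.container.host = host;
            Assert.assertEquals("container assigned to " + expectedHost, expectedHost, host);
        }
    }
}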

Aggregations

LogicalPlan (com.datatorrent.stram.plan.logical.LogicalPlan): 99
Test (org.junit.Test): 84
GenericTestOperator (com.datatorrent.stram.engine.GenericTestOperator): 40
TestPlanContext (com.datatorrent.stram.plan.TestPlanContext): 29
PartitioningTest (com.datatorrent.stram.PartitioningTest): 27
File (java.io.File): 23
OperatorMeta (com.datatorrent.stram.plan.logical.LogicalPlan.OperatorMeta): 22
StramLocalCluster (com.datatorrent.stram.StramLocalCluster): 19
Checkpoint (com.datatorrent.stram.api.Checkpoint): 17
MemoryStorageAgent (com.datatorrent.stram.support.StramTestSupport.MemoryStorageAgent): 16
AsyncFSStorageAgent (com.datatorrent.common.util.AsyncFSStorageAgent): 15
StatsListener (com.datatorrent.api.StatsListener): 13
StramTestSupport (com.datatorrent.stram.support.StramTestSupport): 13
Configuration (org.apache.hadoop.conf.Configuration): 13
LogicalPlanConfiguration (com.datatorrent.stram.plan.logical.LogicalPlanConfiguration): 11
NodeReport (org.apache.hadoop.yarn.api.records.NodeReport): 10
ContainerStartRequest (com.datatorrent.stram.StreamingContainerAgent.ContainerStartRequest): 9
PTOperator (com.datatorrent.stram.plan.physical.PTOperator): 9
ArrayList (java.util.ArrayList): 9
ConstraintViolationException (javax.validation.ConstraintViolationException): 9