Search in sources:

Example 11 with InputPortMeta

Use of com.datatorrent.stram.plan.logical.LogicalPlan.InputPortMeta in project apex-core by apache.

From the class PhysicalPlan, method removeLogicalStream.

/**
   * Removes the physical representation of the given stream. Operators affected
   * in the execution layer are added to the undeploy and deploy sets. This method
   * does not automatically remove operators from the plan.
   *
   * @param sm the logical stream whose physical representation should be removed
   */
public void removeLogicalStream(StreamMeta sm) {
    // remove incoming connections for logical stream
    for (InputPortMeta ipm : sm.getSinks()) {
        OperatorMeta om = ipm.getOperatorMeta();
        PMapping m = this.logicalToPTOperator.get(om);
        if (m == null) {
            throw new AssertionError("Unknown operator " + om);
        }
        for (PTOperator oper : m.partitions) {
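            // work against a copy so inputs can be removed while oper.inputs is being iterated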
            List<PTInput> inputsCopy = Lists.newArrayList(oper.inputs);
            for (PTInput input : oper.inputs) {
                if (input.logicalStream == sm) {
                    input.source.sinks.remove(input);
                    inputsCopy.remove(input);
                    undeployOpers.add(oper);
                    deployOpers.add(oper);
                }
            }
            oper.inputs = inputsCopy;
        }
    }
    // remove outgoing connections for logical stream
    PMapping m = this.logicalToPTOperator.get(sm.getSource().getOperatorMeta());
    for (PTOperator oper : m.partitions) {
        List<PTOutput> outputsCopy = Lists.newArrayList(oper.outputs);
        for (PTOutput out : oper.outputs) {
            if (out.logicalStream == sm) {
                for (PTInput input : out.sinks) {
                    PTOperator downstreamOper = input.source.source;
                    downstreamOper.inputs.remove(input);
                    Set<PTOperator> deps = this.getDependents(Collections.singletonList(downstreamOper));
                    undeployOpers.addAll(deps);
                    deployOpers.addAll(deps);
                }
                outputsCopy.remove(out);
                undeployOpers.add(oper);
                deployOpers.add(oper);
            }
        }
        oper.outputs = outputsCopy;
    }
}
Also used: InputPortMeta(com.datatorrent.stram.plan.logical.LogicalPlan.InputPortMeta), OperatorMeta(com.datatorrent.stram.plan.logical.LogicalPlan.OperatorMeta), PTInput(com.datatorrent.stram.plan.physical.PTOperator.PTInput), PTOutput(com.datatorrent.stram.plan.physical.PTOperator.PTOutput)
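
The loops above use a copy-then-swap pattern: they iterate the live oper.inputs list while recording removals in inputsCopy, then assign the copy back, which avoids modifying a list while it is being traversed. A minimal, self-contained sketch of the same pattern with plain lists (illustrative names only, no Apex types):

import java.util.ArrayList;
import java.util.List;

public class CopyOnRemoveSketch {
    // Remove all entries belonging to the given stream without touching the
    // list that is currently being iterated.
    static List<String> removeMatching(List<String> inputs, String streamName) {
        List<String> copy = new ArrayList<>(inputs);   // mutate the copy ...
        for (String input : inputs) {                  // ... while iterating the original
            if (input.startsWith(streamName + ":")) {
                copy.remove(input);
            }
        }
        return copy;                                   // caller swaps this in, like oper.inputs = inputsCopy above
    }

    public static void main(String[] args) {
        List<String> inputs = new ArrayList<>(List.of("s1:portA", "s2:portB", "s1:portC"));
        inputs = removeMatching(inputs, "s1");
        System.out.println(inputs);                    // [s2:portB]
    }
}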

Example 12 with InputPortMeta

Use of com.datatorrent.stram.plan.logical.LogicalPlan.InputPortMeta in project apex-core by apache.

From the class StreamCodecWrapperForPersistance, method shouldCaptureEvent.

public boolean shouldCaptureEvent(T o) {
    for (Entry<InputPortMeta, Collection<PartitionKeys>> entry : inputPortToPartitionMap.entrySet()) {
        StreamCodec<Object> codec = codecsToMerge.get(entry.getKey());
        Collection<PartitionKeys> partitionKeysList = entry.getValue();
        for (PartitionKeys keys : partitionKeysList) {
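            // a tuple is captured when its masked partition hash falls into a partition assigned to the persist operator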
            if (keys.partitions != null && keys.partitions.contains(keys.mask & codec.getPartition(o))) {
                // So send the event to persist operator
                return true;
            }
        }
    }
    return false;
}
Also used: InputPortMeta(com.datatorrent.stram.plan.logical.LogicalPlan.InputPortMeta), Collection(java.util.Collection), PartitionKeys(com.datatorrent.api.Partitioner.PartitionKeys)
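
The check above works because each consumer is described by a bit mask plus the set of masked values (partitions) it owns; the tuple is routed to the persist operator if its masked partition hash lands in that set. A stand-alone sketch of the masking logic with plain ints and sets, standing in for the Apex PartitionKeys class:

import java.util.Set;

public class PartitionMaskSketch {
    // Returns true if the tuple's partition hash, after masking, maps to one of
    // the buckets assigned to this consumer (illustrative stand-in for PartitionKeys).
    static boolean matches(int partitionHash, int mask, Set<Integer> partitions) {
        return partitions != null && partitions.contains(mask & partitionHash);
    }

    public static void main(String[] args) {
        int mask = 0x03;                       // two-bit mask -> four possible buckets
        Set<Integer> assigned = Set.of(0, 2);  // this consumer owns buckets 0 and 2
        System.out.println(matches("tuple-a".hashCode(), mask, assigned));
        System.out.println(matches("tuple-b".hashCode(), mask, assigned));
    }
}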

Example 13 with InputPortMeta

Use of com.datatorrent.stram.plan.logical.LogicalPlan.InputPortMeta in project apex-core by apache.

From the class StreamingContainerManager, method deploy.

@Override
public void deploy(Set<PTContainer> releaseContainers, Collection<PTOperator> undeploy, Set<PTContainer> startContainers, Collection<PTOperator> deploy) {
    try {
        this.deployChangeInProgress.set(true);
        Map<PTContainer, List<PTOperator>> undeployGroups = groupByContainer(undeploy);
        // order does not matter, remove all operators in each container in one sweep
        for (Map.Entry<PTContainer, List<PTOperator>> e : undeployGroups.entrySet()) {
            // container may already be in failed or pending deploy state, notified by RM or timed out
            PTContainer c = e.getKey();
            if (!startContainers.contains(c) && !releaseContainers.contains(c) && c.getState() != PTContainer.State.KILLED) {
                LOG.debug("scheduling undeploy {} {}", e.getKey().getExternalId(), e.getValue());
                for (PTOperator oper : e.getValue()) {
                    oper.setState(PTOperator.State.PENDING_UNDEPLOY);
                }
            }
        }
        // start new containers
        for (PTContainer c : startContainers) {
            requestContainer(c);
        }
        // (re)deploy affected operators
        // can happen in parallel after buffer server for recovered publishers is reset
        Map<PTContainer, List<PTOperator>> deployGroups = groupByContainer(deploy);
        for (Map.Entry<PTContainer, List<PTOperator>> e : deployGroups.entrySet()) {
            if (!startContainers.contains(e.getKey())) {
                // to reset publishers, clean buffer server past checkpoint so subscribers don't read stale data (including end of stream)
                for (PTOperator operator : e.getValue()) {
                    for (PTOperator.PTOutput out : operator.getOutputs()) {
                        if (!out.isDownStreamInline()) {
                            for (InputPortMeta ipm : out.logicalStream.getSinks()) {
                                StreamCodec<?> streamCodec = ipm.getStreamCodec();
                                Integer codecId = plan.getStreamCodecIdentifier(streamCodec);
                                // following needs to match the concat logic in StreamingContainer
                                String sourceIdentifier = Integer.toString(operator.getId()).concat(Component.CONCAT_SEPARATOR).concat(out.portName).concat(Component.CONCAT_SEPARATOR).concat(codecId.toString());
                                if (operator.getContainer().getState() == PTContainer.State.ACTIVE) {
                                    // TODO: unit test - find way to mock this when testing rest of logic
                                    if (operator.getContainer().bufferServerAddress.getPort() != 0) {
                                        BufferServerController bsc = getBufferServerClient(operator);
                                        // ensures new subscriber starting to read from checkpoint will wait until publisher redeploy cycle is complete
                                        try {
                                            bsc.reset(null, sourceIdentifier, 0);
                                        } catch (Exception ex) {
                                            LOG.error("Failed to reset buffer server {} {}", sourceIdentifier, ex);
                                        }
                                    }
                                }
                            }
                        }
                    }
                }
            }
            // add to operators that we expect to deploy
            LOG.debug("scheduling deploy {} {}", e.getKey().getExternalId(), e.getValue());
            for (PTOperator oper : e.getValue()) {
                // operator will be deployed after it has been undeployed, if still referenced by the container
                if (oper.getState() != PTOperator.State.PENDING_UNDEPLOY) {
                    oper.setState(PTOperator.State.PENDING_DEPLOY);
                }
            }
        }
        // stop containers that are no longer used
        for (PTContainer c : releaseContainers) {
            if (c.getExternalId() == null) {
                continue;
            }
            StreamingContainerAgent sca = containers.get(c.getExternalId());
            if (sca != null) {
                LOG.debug("Container marked for shutdown: {}", c);
                // container already removed from plan
                // TODO: monitor soft shutdown
                sca.requestShutDown(ShutdownType.ABORT);
            }
        }
    } finally {
        this.deployChangeCnt++;
        this.deployChangeInProgress.set(false);
    }
}
Also used: PTOperator(com.datatorrent.stram.plan.physical.PTOperator), InputPortMeta(com.datatorrent.stram.plan.logical.LogicalPlan.InputPortMeta), NotFoundException(org.apache.hadoop.yarn.webapp.NotFoundException), IOException(java.io.IOException), JSONException(org.codehaus.jettison.json.JSONException), KryoException(com.esotericsoftware.kryo.KryoException), PTOutput(com.datatorrent.stram.plan.physical.PTOperator.PTOutput), PTContainer(com.datatorrent.stram.plan.physical.PTContainer), ArrayList(java.util.ArrayList), List(java.util.List), LinkedList(java.util.LinkedList), Map(java.util.Map), LinkedHashMap(java.util.LinkedHashMap), ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap), HashMap(java.util.HashMap), ConcurrentMap(java.util.concurrent.ConcurrentMap), ConcurrentSkipListMap(java.util.concurrent.ConcurrentSkipListMap)
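
The buffer server reset above is keyed by a source identifier built from the operator id, the output port name and the stream codec id, joined with Component.CONCAT_SEPARATOR; as the in-code comment notes, this has to match the concatenation done in StreamingContainer. A hypothetical sketch of that construction; the literal "." separator and the helper name are assumptions for illustration, not the Apex constants:

public class SourceIdentifierSketch {
    // Assumed separator value; the real code uses Component.CONCAT_SEPARATOR.
    private static final String SEPARATOR = ".";

    // Builds "<operatorId><sep><portName><sep><codecId>", mirroring the concat
    // chain in deploy() above (illustrative only).
    static String sourceIdentifier(int operatorId, String portName, int codecId) {
        return operatorId + SEPARATOR + portName + SEPARATOR + codecId;
    }

    public static void main(String[] args) {
        System.out.println(sourceIdentifier(3, "output", 1));  // prints 3.output.1
    }
}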

Example 14 with InputPortMeta

Use of com.datatorrent.stram.plan.logical.LogicalPlan.InputPortMeta in project apex-core by apache.

From the class LogicalPlanSerializer, method convertToProperties.

public static PropertiesConfiguration convertToProperties(LogicalPlan dag) {
    PropertiesConfiguration props = new PropertiesConfiguration();
    Collection<OperatorMeta> allOperators = dag.getAllOperators();
    for (OperatorMeta operatorMeta : allOperators) {
        String operatorKey = LogicalPlanConfiguration.OPERATOR_PREFIX + operatorMeta.getName();
        Operator operator = operatorMeta.getOperator();
        props.setProperty(operatorKey + "." + LogicalPlanConfiguration.OPERATOR_CLASSNAME, operator.getClass().getName());
        BeanMap operatorProperties = LogicalPlanConfiguration.getObjectProperties(operator);
        @SuppressWarnings("rawtypes") Iterator entryIterator = operatorProperties.entryIterator();
        while (entryIterator.hasNext()) {
            try {
                @SuppressWarnings("unchecked") Map.Entry<String, Object> entry = (Map.Entry<String, Object>) entryIterator.next();
                if (!entry.getKey().equals("class") && !entry.getKey().equals("name") && entry.getValue() != null) {
                    props.setProperty(operatorKey + "." + entry.getKey(), entry.getValue());
                }
            } catch (Exception ex) {
                LOG.warn("Error trying to get a property of operator {}", operatorMeta.getName(), ex);
            }
        }
    }
    Collection<StreamMeta> allStreams = dag.getAllStreams();
    for (StreamMeta streamMeta : allStreams) {
        String streamKey = LogicalPlanConfiguration.STREAM_PREFIX + streamMeta.getName();
        OutputPortMeta source = streamMeta.getSource();
        Collection<InputPortMeta> sinks = streamMeta.getSinks();
        props.setProperty(streamKey + "." + LogicalPlanConfiguration.STREAM_SOURCE, source.getOperatorMeta().getName() + "." + source.getPortName());
        String sinksValue = "";
        for (InputPortMeta sink : sinks) {
            if (!sinksValue.isEmpty()) {
                sinksValue += ",";
            }
            sinksValue += sink.getOperatorMeta().getName() + "." + sink.getPortName();
        }
        props.setProperty(streamKey + "." + LogicalPlanConfiguration.STREAM_SINKS, sinksValue);
        if (streamMeta.getLocality() != null) {
            props.setProperty(streamKey + "." + LogicalPlanConfiguration.STREAM_LOCALITY, streamMeta.getLocality().name());
        }
    }
    return props;
}
Also used: Operator(com.datatorrent.api.Operator), OperatorMeta(com.datatorrent.stram.plan.logical.LogicalPlan.OperatorMeta), InputPortMeta(com.datatorrent.stram.plan.logical.LogicalPlan.InputPortMeta), ObjectMapperString(com.datatorrent.common.util.ObjectMapperString), PropertiesConfiguration(org.apache.commons.configuration.PropertiesConfiguration), IOException(java.io.IOException), JSONException(org.codehaus.jettison.json.JSONException), BeanMap(org.apache.commons.beanutils.BeanMap), StreamMeta(com.datatorrent.stram.plan.logical.LogicalPlan.StreamMeta), OutputPortMeta(com.datatorrent.stram.plan.logical.LogicalPlan.OutputPortMeta), Iterator(java.util.Iterator), JSONObject(org.codehaus.jettison.json.JSONObject), HashMap(java.util.HashMap), Map(java.util.Map)
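
convertToProperties flattens the DAG into a flat key space: operator entries under an operator prefix (class name plus bean properties) and stream entries under a stream prefix (source port, comma-separated sink ports, optional locality). A rough sketch of that key layout using java.util.Properties; the "dt.operator." and "dt.stream." prefix strings are assumptions standing in for the LogicalPlanConfiguration constants:

import java.util.Properties;

public class DagPropertiesSketch {
    public static void main(String[] args) {
        // Prefixes are assumed stand-ins for LogicalPlanConfiguration.OPERATOR_PREFIX / STREAM_PREFIX.
        String operatorPrefix = "dt.operator.";
        String streamPrefix = "dt.stream.";

        Properties props = new Properties();
        // One operator entry: class name plus a bean property.
        props.setProperty(operatorPrefix + "wordReader.classname", "com.example.WordReader");
        props.setProperty(operatorPrefix + "wordReader.directory", "/tmp/input");
        // One stream entry: source port and comma-separated sink ports.
        props.setProperty(streamPrefix + "words.source", "wordReader.output");
        props.setProperty(streamPrefix + "words.sinks", "wordCounter.input,console.input");

        props.forEach((k, v) -> System.out.println(k + "=" + v));
    }
}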

Example 15 with InputPortMeta

Use of com.datatorrent.stram.plan.logical.LogicalPlan.InputPortMeta in project apex-core by apache.

From the class LogicalPlanSerializer, method convertToMap.

/**
   * Converts the given logical plan into a map of operators, streams and DAG
   * attributes suitable for serialization (for example to JSON).
   *
   * @param dag the logical plan to convert
   * @param includeModules whether module details should be included in the result
   * @return a map representation of the logical plan
   */
public static Map<String, Object> convertToMap(LogicalPlan dag, boolean includeModules) {
    HashMap<String, Object> result = new HashMap<>();
    ArrayList<Object> operatorArray = new ArrayList<>();
    ArrayList<Object> streamMap = new ArrayList<>();
    //result.put("applicationName", appConfig.getName());
    result.put("operators", operatorArray);
    result.put("streams", streamMap);
    //LogicalPlan dag = StramAppLauncher.prepareDAG(appConfig, StreamingApplication.LAUNCHMODE_YARN);
    //
    // should we put the DAGContext info here?
    Map<String, Object> dagAttrs = new HashMap<>();
    for (Map.Entry<Attribute<Object>, Object> e : Attribute.AttributeMap.AttributeInitializer.getAllAttributes(dag, Context.DAGContext.class).entrySet()) {
        dagAttrs.put(e.getKey().getSimpleName(), e.getValue());
    }
    result.put("attributes", dagAttrs);
    Collection<OperatorMeta> allOperators = dag.getAllOperators();
    ObjectMapper propertyObjectMapper = new ObjectMapper();
    propertyObjectMapper.configure(JsonGenerator.Feature.WRITE_NUMBERS_AS_STRINGS, true);
    propertyObjectMapper.configure(SerializationConfig.Feature.FAIL_ON_EMPTY_BEANS, false);
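    // embed each value's concrete class name in the serialized JSON so operator properties keep their types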
    StdTypeResolverBuilder typer = new PropertyTypeResolverBuilder();
    typer.init(JsonTypeInfo.Id.CLASS, null);
    typer = typer.inclusion(JsonTypeInfo.As.PROPERTY);
    propertyObjectMapper.setDefaultTyping(typer);
    for (OperatorMeta operatorMeta : allOperators) {
        HashMap<String, Object> operatorDetailMap = new HashMap<>();
        ArrayList<Map<String, Object>> portList = new ArrayList<>();
        Map<String, Object> attributeMap = new HashMap<>();
        String operatorName = operatorMeta.getName();
        operatorArray.add(operatorDetailMap);
        operatorDetailMap.put("name", operatorName);
        operatorDetailMap.put("ports", portList);
        operatorDetailMap.put("class", operatorMeta.getOperator().getClass().getName());
        operatorDetailMap.put("attributes", attributeMap);
        Map<Attribute<Object>, Object> rawAttributes = Attribute.AttributeMap.AttributeInitializer.getAllAttributes(operatorMeta, Context.OperatorContext.class);
        for (Map.Entry<Attribute<Object>, Object> entry : rawAttributes.entrySet()) {
            attributeMap.put(entry.getKey().getSimpleName(), entry.getValue());
        }
        ObjectMapperString str;
        try {
            str = new ObjectMapperString(propertyObjectMapper.writeValueAsString(operatorMeta.getOperator()));
        } catch (Throwable ex) {
            LOG.error("Got exception when trying to get properties for operator {}", operatorMeta.getName(), ex);
            str = null;
        }
        operatorDetailMap.put("properties", str);
        Operators.PortMappingDescriptor pmd = new Operators.PortMappingDescriptor();
        Operators.describe(operatorMeta.getOperator(), pmd);
        for (Map.Entry<String, PortContextPair<InputPort<?>>> entry : pmd.inputPorts.entrySet()) {
            HashMap<String, Object> portDetailMap = new HashMap<>();
            HashMap<String, Object> portAttributeMap = new HashMap<>();
            InputPortMeta portMeta = operatorMeta.getMeta(entry.getValue().component);
            String portName = portMeta.getPortName();
            portDetailMap.put("name", portName);
            portDetailMap.put("type", "input");
            portDetailMap.put("attributes", portAttributeMap);
            rawAttributes = Attribute.AttributeMap.AttributeInitializer.getAllAttributes(portMeta, Context.PortContext.class);
            for (Map.Entry<Attribute<Object>, Object> attEntry : rawAttributes.entrySet()) {
                portAttributeMap.put(attEntry.getKey().getSimpleName(), attEntry.getValue());
            }
            portList.add(portDetailMap);
        }
        for (Map.Entry<String, PortContextPair<OutputPort<?>>> entry : pmd.outputPorts.entrySet()) {
            HashMap<String, Object> portDetailMap = new HashMap<>();
            HashMap<String, Object> portAttributeMap = new HashMap<>();
            OutputPortMeta portMeta = operatorMeta.getMeta(entry.getValue().component);
            String portName = portMeta.getPortName();
            portDetailMap.put("name", portName);
            portDetailMap.put("type", "output");
            portDetailMap.put("attributes", portAttributeMap);
            rawAttributes = Attribute.AttributeMap.AttributeInitializer.getAllAttributes(portMeta, Context.PortContext.class);
            for (Map.Entry<Attribute<Object>, Object> attEntry : rawAttributes.entrySet()) {
                portAttributeMap.put(attEntry.getKey().getSimpleName(), attEntry.getValue());
            }
            portList.add(portDetailMap);
        }
    }
    Collection<StreamMeta> allStreams = dag.getAllStreams();
    for (StreamMeta streamMeta : allStreams) {
        HashMap<String, Object> streamDetailMap = new HashMap<>();
        String streamName = streamMeta.getName();
        streamMap.add(streamDetailMap);
        String sourcePortName = streamMeta.getSource().getPortName();
        OperatorMeta operatorMeta = streamMeta.getSource().getOperatorMeta();
        HashMap<String, Object> sourcePortDetailMap = new HashMap<>();
        sourcePortDetailMap.put("operatorName", operatorMeta.getName());
        sourcePortDetailMap.put("portName", sourcePortName);
        streamDetailMap.put("name", streamName);
        streamDetailMap.put("source", sourcePortDetailMap);
        Collection<InputPortMeta> sinks = streamMeta.getSinks();
        ArrayList<HashMap<String, Object>> sinkPortList = new ArrayList<>();
        for (InputPortMeta sinkPort : sinks) {
            HashMap<String, Object> sinkPortDetailMap = new HashMap<>();
            sinkPortDetailMap.put("operatorName", sinkPort.getOperatorMeta().getName());
            sinkPortDetailMap.put("portName", sinkPort.getPortName());
            sinkPortList.add(sinkPortDetailMap);
        }
        streamDetailMap.put("sinks", sinkPortList);
        if (streamMeta.getLocality() != null) {
            streamDetailMap.put("locality", streamMeta.getLocality().name());
        }
    }
    if (includeModules) {
        ArrayList<Map<String, Object>> modulesArray = new ArrayList<>();
        result.put("modules", modulesArray);
        for (LogicalPlan.ModuleMeta meta : dag.getAllModules()) {
            modulesArray.add(getLogicalModuleDetails(dag, meta));
        }
    }
    return result;
}
Also used: HashMap(java.util.HashMap), Attribute(com.datatorrent.api.Attribute), StdTypeResolverBuilder(org.codehaus.jackson.map.jsontype.impl.StdTypeResolverBuilder), ArrayList(java.util.ArrayList), ObjectMapperString(com.datatorrent.common.util.ObjectMapperString), PortContextPair(com.datatorrent.stram.plan.logical.Operators.PortContextPair), StreamMeta(com.datatorrent.stram.plan.logical.LogicalPlan.StreamMeta), OutputPortMeta(com.datatorrent.stram.plan.logical.LogicalPlan.OutputPortMeta), ObjectMapper(org.codehaus.jackson.map.ObjectMapper), Context(com.datatorrent.api.Context), Operators(com.datatorrent.stram.plan.logical.Operators), OperatorMeta(com.datatorrent.stram.plan.logical.LogicalPlan.OperatorMeta), InputPortMeta(com.datatorrent.stram.plan.logical.LogicalPlan.InputPortMeta), JSONObject(org.codehaus.jettison.json.JSONObject), LogicalPlan(com.datatorrent.stram.plan.logical.LogicalPlan), Map(java.util.Map), BeanMap(org.apache.commons.beanutils.BeanMap)
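
The returned map has a simple nested shape: top-level "operators", "streams" and "attributes" entries (plus "modules" when includeModules is true), with each operator described by its name, class, ports and attributes, and each stream by its source and sink ports. A small stand-alone sketch that builds the same shape with plain collections, using made-up operator and stream names:

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class PlanMapShapeSketch {
    public static void main(String[] args) {
        Map<String, Object> result = new HashMap<>();
        List<Object> operators = new ArrayList<>();
        List<Object> streams = new ArrayList<>();
        result.put("operators", operators);
        result.put("streams", streams);
        result.put("attributes", new HashMap<String, Object>());

        // One operator entry, mirroring the keys used in convertToMap().
        Map<String, Object> op = new HashMap<>();
        op.put("name", "wordReader");
        op.put("class", "com.example.WordReader");
        op.put("ports", new ArrayList<Map<String, Object>>());
        op.put("attributes", new HashMap<String, Object>());
        operators.add(op);

        // One stream entry with a source port and its sink ports.
        Map<String, Object> stream = new HashMap<>();
        stream.put("name", "words");
        stream.put("source", Map.of("operatorName", "wordReader", "portName", "output"));
        stream.put("sinks", List.of(Map.of("operatorName", "wordCounter", "portName", "input")));
        streams.add(stream);

        System.out.println(result);
    }
}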

Aggregations

InputPortMeta (com.datatorrent.stram.plan.logical.LogicalPlan.InputPortMeta) 18
StreamMeta (com.datatorrent.stram.plan.logical.LogicalPlan.StreamMeta) 10
Map (java.util.Map) 10
OperatorMeta (com.datatorrent.stram.plan.logical.LogicalPlan.OperatorMeta) 9
HashMap (java.util.HashMap) 7
LinkedHashMap (java.util.LinkedHashMap) 7
OutputPortMeta (com.datatorrent.stram.plan.logical.LogicalPlan.OutputPortMeta) 5
ConcurrentMap (java.util.concurrent.ConcurrentMap) 5
PTOperator (com.datatorrent.stram.plan.physical.PTOperator) 4
PTOutput (com.datatorrent.stram.plan.physical.PTOperator.PTOutput) 4
IOException (java.io.IOException) 4
PartitionKeys (com.datatorrent.api.Partitioner.PartitionKeys) 3
StreamCodec (com.datatorrent.api.StreamCodec) 3
LogicalPlan (com.datatorrent.stram.plan.logical.LogicalPlan) 3
StreamCodecWrapperForPersistance (com.datatorrent.stram.plan.logical.StreamCodecWrapperForPersistance) 3
PTContainer (com.datatorrent.stram.plan.physical.PTContainer) 3
PTInput (com.datatorrent.stram.plan.physical.PTOperator.PTInput) 3
ArrayList (java.util.ArrayList) 3
ObjectMapperString (com.datatorrent.common.util.ObjectMapperString) 2
Checkpoint (com.datatorrent.stram.api.Checkpoint) 2