Example 1 with PortMappingDescriptor

Use of com.datatorrent.stram.plan.logical.Operators.PortMappingDescriptor in project apex-core by Apache.

In class StreamMapping, method createSlidingUnifier:

public static PTOperator createSlidingUnifier(StreamMeta streamMeta, PhysicalPlan plan, int operatorApplicationWindowCount, int slidingWindowCount) {
    int gcd = IntMath.gcd(operatorApplicationWindowCount, slidingWindowCount);
    OperatorMeta um = streamMeta.getSource().getSlidingUnifier(operatorApplicationWindowCount / gcd, gcd, slidingWindowCount / gcd);
    PTOperator pu = plan.newOperator(um, um.getName());
    Operator unifier = um.getOperator();
    PortMappingDescriptor mergeDesc = new PortMappingDescriptor();
    Operators.describe(unifier, mergeDesc);
    if (mergeDesc.outputPorts.size() != 1) {
        throw new AssertionError("Unifier must have a single output port, instead found : " + mergeDesc.outputPorts);
    }
    pu.unifiedOperatorMeta = streamMeta.getSource().getOperatorMeta();
    pu.outputs.add(new PTOutput(mergeDesc.outputPorts.keySet().iterator().next(), streamMeta, pu));
    plan.newOpers.put(pu, unifier);
    return pu;
}
Also used: Operator (com.datatorrent.api.Operator), OperatorMeta (com.datatorrent.stram.plan.logical.LogicalPlan.OperatorMeta), PTOutput (com.datatorrent.stram.plan.physical.PTOperator.PTOutput), PortMappingDescriptor (com.datatorrent.stram.plan.logical.Operators.PortMappingDescriptor)
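
Both unifier factory methods in this listing rely on the same step: Operators.describe(...) reflects over an operator instance and fills a PortMappingDescriptor with its input and output ports, keyed by port name. The standalone sketch below shows just that step; the ProbeOperator and PortMappingProbe classes are hypothetical and exist only for illustration, they are not part of apex-core.

import com.datatorrent.api.Context.OperatorContext;
import com.datatorrent.api.DefaultOutputPort;
import com.datatorrent.api.Operator;
import com.datatorrent.stram.plan.logical.Operators;
import com.datatorrent.stram.plan.logical.Operators.PortMappingDescriptor;

public class PortMappingProbe {

    /** Hypothetical operator with one output port; only here to give describe() something to reflect over. */
    public static class ProbeOperator implements Operator {

        public final transient DefaultOutputPort<Object> out = new DefaultOutputPort<>();

        @Override
        public void setup(OperatorContext context) {
        }

        @Override
        public void beginWindow(long windowId) {
        }

        @Override
        public void endWindow() {
        }

        @Override
        public void teardown() {
        }
    }

    public static void main(String[] args) {
        PortMappingDescriptor descriptor = new PortMappingDescriptor();
        // describe() scans the operator's fields and records every input and output port it finds.
        Operators.describe(new ProbeOperator(), descriptor);
        // createUnifier/createSlidingUnifier rely on exactly one entry being present in outputPorts.
        System.out.println("output ports: " + descriptor.outputPorts.keySet());
        System.out.println("input ports: " + descriptor.inputPorts.keySet());
    }
}

With this hypothetical operator the probe should report a single output port and no input ports, which is exactly the condition createSlidingUnifier asserts on before wiring the unifier's PTOutput.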

Example 2 with PortMappingDescriptor

Use of com.datatorrent.stram.plan.logical.Operators.PortMappingDescriptor in project apex-core by Apache.

In class TupleRecorderCollection, method startRecording:

private void startRecording(String id, final Node<?> node, int operatorId, final String portName, long numWindows) {
    PortMappingDescriptor descriptor = node.getPortMappingDescriptor();
    OperatorIdPortNamePair operatorIdPortNamePair = new OperatorIdPortNamePair(operatorId, portName);
    // check any recording conflict
    boolean conflict = false;
    if (containsKey(new OperatorIdPortNamePair(operatorId, null))) {
        conflict = true;
    } else if (portName == null) {
        for (Map.Entry<String, PortContextPair<InputPort<?>>> entry : descriptor.inputPorts.entrySet()) {
            if (containsKey(new OperatorIdPortNamePair(operatorId, entry.getKey()))) {
                conflict = true;
                break;
            }
        }
        for (Map.Entry<String, PortContextPair<OutputPort<?>>> entry : descriptor.outputPorts.entrySet()) {
            if (containsKey(new OperatorIdPortNamePair(operatorId, entry.getKey()))) {
                conflict = true;
                break;
            }
        }
    } else {
        if (containsKey(operatorIdPortNamePair)) {
            conflict = true;
        }
    }
    if (!conflict) {
        logger.debug("Executing start recording request for {}", operatorIdPortNamePair);
        if (wsClient != null) {
            try {
                wsClient.openConnection();
            } catch (Exception e) {
                logger.warn("Cannot establish websocket connection to uri {}", wsClient.getUri(), e);
            }
        }
        TupleRecorder tupleRecorder = new TupleRecorder(id, appId);
        tupleRecorder.setWebSocketClient(wsClient);
        HashMap<String, Sink<Object>> sinkMap = new HashMap<>();
        for (Map.Entry<String, PortContextPair<InputPort<?>>> entry : descriptor.inputPorts.entrySet()) {
            String streamId = getDeclaredStreamId(operatorId, entry.getKey());
            if (streamId == null) {
                streamId = portName + "_implicit_stream";
            }
            if (entry.getValue().context != null && (portName == null || entry.getKey().equals(portName))) {
                logger.debug("Adding recorder sink to input port {}, stream {}", entry.getKey(), streamId);
                tupleRecorder.addInputPortInfo(entry.getKey(), streamId);
                sinkMap.put(entry.getKey(), tupleRecorder.newSink(entry.getKey()));
            }
        }
        for (Map.Entry<String, PortContextPair<OutputPort<?>>> entry : descriptor.outputPorts.entrySet()) {
            String streamId = getDeclaredStreamId(operatorId, entry.getKey());
            if (streamId == null) {
                streamId = portName + "_implicit_stream";
            }
            if (portName == null || entry.getKey().equals(portName)) {
                logger.debug("Adding recorder sink to output port {}, stream {}", entry.getKey(), streamId);
                tupleRecorder.addOutputPortInfo(entry.getKey(), streamId);
                sinkMap.put(entry.getKey(), tupleRecorder.newSink(entry.getKey()));
            }
        }
        if (!sinkMap.isEmpty()) {
            logger.debug("Started recording on {} through {}", operatorIdPortNamePair, System.identityHashCode(this));
            String basePath = appPath + "/recordings/" + operatorId + "/" + tupleRecorder.getId();
            tupleRecorder.getStorage().setBasePath(basePath);
            tupleRecorder.getStorage().setBytesPerPartFile(tupleRecordingPartFileSize);
            tupleRecorder.getStorage().setMillisPerPartFile(tupleRecordingPartFileTimeMillis);
            node.addSinks(sinkMap);
            tupleRecorder.setup(node.getOperator(), codecs);
            put(operatorIdPortNamePair, tupleRecorder);
            if (numWindows > 0) {
                tupleRecorder.setNumWindows(numWindows, new Runnable() {

                    @Override
                    public void run() {
                        node.context.request(new OperatorRequest() {

                            @Override
                            public StatsListener.OperatorResponse execute(Operator operator, int operatorId, long windowId) throws IOException {
                                stopRecording(node, operatorId, portName);
                                return null;
                            }
                        });
                    }
                });
            }
        } else {
            logger.warn("Tuple recording request ignored because operator is not connected on the specified port.");
        }
    } else {
        logger.error("Operator id {} is already being recorded.", operatorId);
    }
}
Also used: OutputPort (com.datatorrent.api.Operator.OutputPort), Operator (com.datatorrent.api.Operator), InputPort (com.datatorrent.api.Operator.InputPort), HashMap (java.util.HashMap), StatsListener (com.datatorrent.api.StatsListener), IOException (java.io.IOException), PortContextPair (com.datatorrent.stram.plan.logical.Operators.PortContextPair), Sink (com.datatorrent.api.Sink), OperatorRequest (com.datatorrent.api.StatsListener.OperatorRequest), Map (java.util.Map), PortMappingDescriptor (com.datatorrent.stram.plan.logical.Operators.PortMappingDescriptor)
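
The conflict check above encodes one rule: a recording registered against the operator alone (null port name) covers every port, so it conflicts with any per-port recording, and a per-port request conflicts with an existing whole-operator recording. The following is a minimal, self-contained sketch of that rule; it uses a plain string key instead of OperatorIdPortNamePair and is illustrative only, not apex-core code.

import java.util.HashSet;
import java.util.Set;

public class RecordingConflictSketch {

    /** Active recordings, keyed by a hypothetical "operatorId/port" string ("*" stands for the whole operator). */
    private final Set<String> active = new HashSet<>();

    private static String key(int operatorId, String portName) {
        return operatorId + "/" + (portName == null ? "*" : portName);
    }

    /** Returns true when a new request for (operatorId, portName) would clash with an active recording. */
    public boolean conflicts(int operatorId, String portName, Iterable<String> allPortNames) {
        if (active.contains(key(operatorId, null))) {
            // the whole operator is already being recorded
            return true;
        }
        if (portName == null) {
            // whole-operator request: any individual port already being recorded is a conflict
            for (String port : allPortNames) {
                if (active.contains(key(operatorId, port))) {
                    return true;
                }
            }
            return false;
        }
        return active.contains(key(operatorId, portName));
    }

    public void start(int operatorId, String portName) {
        active.add(key(operatorId, portName));
    }
}

For example, after start(2, "output"), a whole-operator request conflicts(2, null, ports) returns true, mirroring the second branch of the check in startRecording.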

Example 3 with PortMappingDescriptor

Use of com.datatorrent.stram.plan.logical.Operators.PortMappingDescriptor in project apex-core by Apache.

In class StreamMapping, method createUnifier:

public static PTOperator createUnifier(StreamMeta streamMeta, PhysicalPlan plan) {
    OperatorMeta um = streamMeta.getSource().getUnifierMeta();
    PTOperator pu = plan.newOperator(um, um.getName());
    Operator unifier = um.getOperator();
    PortMappingDescriptor mergeDesc = new PortMappingDescriptor();
    Operators.describe(unifier, mergeDesc);
    if (mergeDesc.outputPorts.size() != 1) {
        throw new AssertionError("Unifier must have a single output port, instead found : " + mergeDesc.outputPorts);
    }
    pu.unifiedOperatorMeta = streamMeta.getSource().getOperatorMeta();
    pu.outputs.add(new PTOutput(mergeDesc.outputPorts.keySet().iterator().next(), streamMeta, pu));
    plan.newOpers.put(pu, unifier);
    return pu;
}
Also used: Operator (com.datatorrent.api.Operator), OperatorMeta (com.datatorrent.stram.plan.logical.LogicalPlan.OperatorMeta), PTOutput (com.datatorrent.stram.plan.physical.PTOperator.PTOutput), PortMappingDescriptor (com.datatorrent.stram.plan.logical.Operators.PortMappingDescriptor)

Example 4 with PortMappingDescriptor

Use of com.datatorrent.stram.plan.logical.Operators.PortMappingDescriptor in project apex-core by Apache.

In class StreamingContainer, method disconnectNode:

private void disconnectNode(int nodeid) {
    Node<?> node = nodes.get(nodeid);
    disconnectWindowGenerator(nodeid, node);
    PortMappingDescriptor portMappingDescriptor = node.getPortMappingDescriptor();
    Iterator<String> outputPorts = portMappingDescriptor.outputPorts.keySet().iterator();
    while (outputPorts.hasNext()) {
        String sourceIdentifier = String.valueOf(nodeid).concat(Component.CONCAT_SEPARATOR).concat(outputPorts.next());
        ComponentContextPair<Stream, StreamContext> pair = streams.remove(sourceIdentifier);
        if (pair != null) {
            if (activeStreams.remove(pair.component) != null) {
                pair.component.deactivate();
                eventBus.publish(new StreamDeactivationEvent(pair));
            }
            if (pair.component instanceof Stream.MultiSinkCapableStream) {
                String sinks = pair.context.getSinkId();
                if (sinks == null) {
                    logger.error("mux sinks found connected at {} with sink id null", sourceIdentifier);
                } else {
                    String[] split = sinks.split(MuxStream.MULTI_SINK_ID_CONCAT_SEPARATOR);
                    for (int i = split.length; i-- > 0; ) {
                        ComponentContextPair<Stream, StreamContext> spair = streams.remove(split[i]);
                        if (spair == null) {
                            logger.error("mux is missing the stream for sink {}", split[i]);
                        } else {
                            if (activeStreams.remove(spair.component) != null) {
                                spair.component.deactivate();
                                eventBus.publish(new StreamDeactivationEvent(spair));
                            }
                            spair.component.teardown();
                        }
                    }
                }
            } else {
                // it's either inline stream or it's bufferserver publisher.
            }
            pair.component.teardown();
        }
    }
    Iterator<String> inputPorts = portMappingDescriptor.inputPorts.keySet().iterator();
    while (inputPorts.hasNext()) {
        String sinkIdentifier = String.valueOf(nodeid).concat(Component.CONCAT_SEPARATOR).concat(inputPorts.next());
        ComponentContextPair<Stream, StreamContext> pair = streams.remove(sinkIdentifier);
        if (pair != null) {
            if (activeStreams.remove(pair.component) != null) {
                pair.component.deactivate();
                eventBus.publish(new StreamDeactivationEvent(pair));
            }
            pair.component.teardown();
            /**
             * we should also make sure that if this stream is connected to mux stream,
             * we deregister it from the mux stream to avoid clogged sink problem.
             */
            ComponentContextPair<Stream, StreamContext> sourcePair = streams.get(pair.context.getSourceId());
            if (sourcePair != null) {
                if (sourcePair == pair) {
                    /* for some reason we had the stream stored against both source and sink identifiers */
                    streams.remove(pair.context.getSourceId());
                } else {
                    /* the stream was one of the many streams sourced by a muxstream */
                    unregisterSinkFromMux(sourcePair, sinkIdentifier);
                }
            }
        }
    }
}
Also used: OiOStream (com.datatorrent.stram.stream.OiOStream), InlineStream (com.datatorrent.stram.stream.InlineStream), MuxStream (com.datatorrent.stram.stream.MuxStream), StreamDeactivationEvent (com.datatorrent.stram.api.ContainerEvent.StreamDeactivationEvent), Checkpoint (com.datatorrent.stram.api.Checkpoint), PortMappingDescriptor (com.datatorrent.stram.plan.logical.Operators.PortMappingDescriptor)
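
disconnectNode works entirely off string identifiers: a port is addressed by joining the node id and port name with Component.CONCAT_SEPARATOR, and a mux stream keeps the ids of its downstream sinks joined by MuxStream.MULTI_SINK_ID_CONCAT_SEPARATOR. The standalone sketch below shows only that identifier handling; the node id, port names, and sink ids are made up for illustration and this class is not part of apex-core.

import com.datatorrent.api.Component;
import com.datatorrent.stram.stream.MuxStream;

public class StreamIdSketch {

    public static void main(String[] args) {
        // Hypothetical node and port; disconnectNode builds its sourceIdentifier/sinkIdentifier the same way.
        int nodeId = 3;
        String portName = "output";
        String sourceIdentifier = String.valueOf(nodeId).concat(Component.CONCAT_SEPARATOR).concat(portName);
        System.out.println("source identifier: " + sourceIdentifier);

        // Hypothetical concatenated sink id, as a mux stream's context would report it;
        // disconnectNode splits it to locate and tear down each downstream stream.
        String sinks = "4" + Component.CONCAT_SEPARATOR + "input"
                + MuxStream.MULTI_SINK_ID_CONCAT_SEPARATOR
                + "5" + Component.CONCAT_SEPARATOR + "input";
        for (String sinkId : sinks.split(MuxStream.MULTI_SINK_ID_CONCAT_SEPARATOR)) {
            System.out.println("mux sink: " + sinkId);
        }
    }
}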

Aggregations

PortMappingDescriptor (com.datatorrent.stram.plan.logical.Operators.PortMappingDescriptor): 4
Operator (com.datatorrent.api.Operator): 3
OperatorMeta (com.datatorrent.stram.plan.logical.LogicalPlan.OperatorMeta): 2
PTOutput (com.datatorrent.stram.plan.physical.PTOperator.PTOutput): 2
InputPort (com.datatorrent.api.Operator.InputPort): 1
OutputPort (com.datatorrent.api.Operator.OutputPort): 1
Sink (com.datatorrent.api.Sink): 1
StatsListener (com.datatorrent.api.StatsListener): 1
OperatorRequest (com.datatorrent.api.StatsListener.OperatorRequest): 1
Checkpoint (com.datatorrent.stram.api.Checkpoint): 1
StreamDeactivationEvent (com.datatorrent.stram.api.ContainerEvent.StreamDeactivationEvent): 1
PortContextPair (com.datatorrent.stram.plan.logical.Operators.PortContextPair): 1
InlineStream (com.datatorrent.stram.stream.InlineStream): 1
MuxStream (com.datatorrent.stram.stream.MuxStream): 1
OiOStream (com.datatorrent.stram.stream.OiOStream): 1
IOException (java.io.IOException): 1
HashMap (java.util.HashMap): 1
Map (java.util.Map): 1