Use of org.ballerinalang.siddhi.core.query.input.stream.single.SingleStreamRuntime in project ballerina by ballerina-lang.
The class AggregationParser, method parse:
public static AggregationRuntime parse(AggregationDefinition aggregationDefinition,
                                       SiddhiAppContext siddhiAppContext,
                                       Map<String, AbstractDefinition> streamDefinitionMap,
                                       Map<String, AbstractDefinition> tableDefinitionMap,
                                       Map<String, AbstractDefinition> windowDefinitionMap,
                                       Map<String, AbstractDefinition> aggregationDefinitionMap,
                                       Map<String, Table> tableMap, Map<String, Window> windowMap,
                                       Map<String, AggregationRuntime> aggregationMap,
                                       SiddhiAppRuntimeBuilder siddhiAppRuntimeBuilder) {
    if (aggregationDefinition == null) {
        throw new SiddhiAppCreationException("AggregationDefinition instance is null. "
                + "Hence, can't create the siddhi app '" + siddhiAppContext.getName() + "'");
    }
    if (aggregationDefinition.getTimePeriod() == null) {
        throw new SiddhiAppCreationException("AggregationDefinition '" + aggregationDefinition.getId()
                + "'s timePeriod is null. " + "Hence, can't create the siddhi app '"
                + siddhiAppContext.getName() + "'",
                aggregationDefinition.getQueryContextStartIndex(),
                aggregationDefinition.getQueryContextEndIndex());
    }
    if (aggregationDefinition.getSelector() == null) {
        throw new SiddhiAppCreationException("AggregationDefinition '" + aggregationDefinition.getId()
                + "'s selection is not defined. " + "Hence, can't create the siddhi app '"
                + siddhiAppContext.getName() + "'",
                aggregationDefinition.getQueryContextStartIndex(),
                aggregationDefinition.getQueryContextEndIndex());
    }
    if (streamDefinitionMap.get(aggregationDefinition.getBasicSingleInputStream().getStreamId()) == null) {
        throw new SiddhiAppCreationException("Stream "
                + aggregationDefinition.getBasicSingleInputStream().getStreamId() + " has not been defined");
    }
    try {
        List<VariableExpressionExecutor> incomingVariableExpressionExecutors = new ArrayList<>();
        String aggregatorName = aggregationDefinition.getId();
        StreamRuntime streamRuntime = InputStreamParser.parse(aggregationDefinition.getBasicSingleInputStream(),
                siddhiAppContext, streamDefinitionMap, tableDefinitionMap, windowDefinitionMap,
                aggregationDefinitionMap, tableMap, windowMap, aggregationMap,
                incomingVariableExpressionExecutors, null, false, aggregatorName);

        // Get original meta for later use.
        MetaStreamEvent incomingMetaStreamEvent = (MetaStreamEvent) streamRuntime.getMetaComplexEvent();

        // Create new meta stream event.
        // This must hold the timestamp, group by attributes (if given) and the incremental attributes, in
        // the onAfterWindowData array.
        // Example format: AGG_TIMESTAMP, groupByAttribute1, groupByAttribute2, AGG_incAttribute1, AGG_incAttribute2
        // AGG_incAttribute1, AGG_incAttribute2 would have the same attribute names as in
        // finalListOfIncrementalAttributes.
        // To enter data as onAfterWindowData.
        incomingMetaStreamEvent.initializeAfterWindowData();

        List<ExpressionExecutor> incomingExpressionExecutors = new ArrayList<>();
        List<IncrementalAttributeAggregator> incrementalAttributeAggregators = new ArrayList<>();
        List<Variable> groupByVariableList = aggregationDefinition.getSelector().getGroupByList();
        boolean isProcessingOnExternalTime = aggregationDefinition.getAggregateAttribute() != null;

        // Expressions to get final aggregate outputs. e.g avg = sum/count
        List<Expression> outputExpressions = new ArrayList<>();
        // Expression executors to get final aggregate outputs. e.g avg = sum/count
        List<ExpressionExecutor> outputExpressionExecutors = new ArrayList<>();

        populateIncomingAggregatorsAndExecutors(aggregationDefinition, siddhiAppContext, tableMap,
                incomingVariableExpressionExecutors, aggregatorName, incomingMetaStreamEvent,
                incomingExpressionExecutors, incrementalAttributeAggregators, groupByVariableList,
                outputExpressions);
        int baseAggregatorBeginIndex = incomingMetaStreamEvent.getOutputData().size();
        List<Expression> finalBaseAggregators = getFinalBaseAggregators(siddhiAppContext, tableMap,
                incomingVariableExpressionExecutors, aggregatorName, incomingMetaStreamEvent,
                incomingExpressionExecutors, incrementalAttributeAggregators);

        StreamDefinition incomingOutputStreamDefinition = StreamDefinition.id("");
        incomingOutputStreamDefinition.setQueryContextStartIndex(aggregationDefinition.getQueryContextStartIndex());
        incomingOutputStreamDefinition.setQueryContextEndIndex(aggregationDefinition.getQueryContextEndIndex());
        MetaStreamEvent processedMetaStreamEvent = new MetaStreamEvent();
        for (Attribute attribute : incomingMetaStreamEvent.getOutputData()) {
            incomingOutputStreamDefinition.attribute(attribute.getName(), attribute.getType());
            processedMetaStreamEvent.addOutputData(attribute);
        }
        incomingMetaStreamEvent.setOutputDefinition(incomingOutputStreamDefinition);
        processedMetaStreamEvent.addInputDefinition(incomingOutputStreamDefinition);
        processedMetaStreamEvent.setOutputDefinition(incomingOutputStreamDefinition);

        // Executors of processing meta
        List<VariableExpressionExecutor> processVariableExpressionExecutors = new ArrayList<>();
        boolean groupBy = aggregationDefinition.getSelector().getGroupByList().size() != 0;
        List<ExpressionExecutor> processExpressionExecutors = constructProcessExpressionExecutors(siddhiAppContext,
                tableMap, aggregatorName, baseAggregatorBeginIndex, finalBaseAggregators,
                incomingOutputStreamDefinition, processedMetaStreamEvent, processVariableExpressionExecutors,
                groupBy);
        outputExpressionExecutors.addAll(outputExpressions.stream()
                .map(expression -> ExpressionParser.parseExpression(expression, processedMetaStreamEvent, 0,
                        tableMap, processVariableExpressionExecutors, siddhiAppContext, groupBy, 0, aggregatorName))
                .collect(Collectors.toList()));

        // Create group by key generator
        GroupByKeyGenerator groupByKeyGenerator = null;
        if (groupBy) {
            groupByKeyGenerator = new GroupByKeyGenerator(groupByVariableList, processedMetaStreamEvent,
                    SiddhiConstants.UNKNOWN_STATE, tableMap, processVariableExpressionExecutors,
                    siddhiAppContext, aggregatorName);
        }

        // Create new scheduler
        EntryValveExecutor entryValveExecutor = new EntryValveExecutor(siddhiAppContext);
        LockWrapper lockWrapper = new LockWrapper(aggregatorName);
        lockWrapper.setLock(new ReentrantLock());
        Scheduler scheduler = SchedulerParser.parse(siddhiAppContext.getScheduledExecutorService(),
                entryValveExecutor, siddhiAppContext);
        scheduler.init(lockWrapper, aggregatorName);
        scheduler.setStreamEventPool(new StreamEventPool(processedMetaStreamEvent, 10));

        QueryParserHelper.reduceMetaComplexEvent(incomingMetaStreamEvent);
        QueryParserHelper.reduceMetaComplexEvent(processedMetaStreamEvent);
        QueryParserHelper.updateVariablePosition(incomingMetaStreamEvent, incomingVariableExpressionExecutors);
        QueryParserHelper.updateVariablePosition(processedMetaStreamEvent, processVariableExpressionExecutors);

        List<TimePeriod.Duration> incrementalDurations = getSortedPeriods(aggregationDefinition.getTimePeriod());
        Map<TimePeriod.Duration, Table> aggregationTables = initDefaultTables(aggregatorName, incrementalDurations,
                processedMetaStreamEvent.getOutputStreamDefinition(), siddhiAppRuntimeBuilder,
                aggregationDefinition.getAnnotations(), groupByVariableList);

        int bufferSize = 0;
        Element element = AnnotationHelper.getAnnotationElement(SiddhiConstants.ANNOTATION_BUFFER_SIZE, null,
                aggregationDefinition.getAnnotations());
        if (element != null) {
            try {
                bufferSize = Integer.parseInt(element.getValue());
            } catch (NumberFormatException e) {
                throw new SiddhiAppCreationException(e.getMessage() + ": BufferSize must be an integer");
            }
        }
        if (bufferSize > 0) {
            TimePeriod.Duration rootDuration = incrementalDurations.get(0);
            if (rootDuration == TimePeriod.Duration.MONTHS || rootDuration == TimePeriod.Duration.YEARS) {
                throw new SiddhiAppCreationException("A buffer size greater than 0 can be provided, only when the "
                        + "first duration value is seconds, minutes, hours or days");
            }
            if (!isProcessingOnExternalTime) {
                // Buffer size is used to process out of order events. However, events would never be out of
                // order if they are processed based on event arrival time.
                throw new SiddhiAppCreationException("Buffer size cannot be specified when events are aggregated "
                        + "based on event arrival time.");
            }
        } else if (bufferSize < 0) {
            throw new SiddhiAppCreationException("Expected a positive integer as the buffer size, but found "
                    + bufferSize + " as the provided value");
        }

        boolean ignoreEventsOlderThanBuffer = false;
        element = AnnotationHelper.getAnnotationElement(SiddhiConstants.ANNOTATION_IGNORE_EVENTS_OLDER_THAN_BUFFER,
                null, aggregationDefinition.getAnnotations());
        if (element != null) {
            if (element.getValue().equalsIgnoreCase("true")) {
                ignoreEventsOlderThanBuffer = true;
            } else if (!element.getValue().equalsIgnoreCase("false")) {
                throw new SiddhiAppCreationException("IgnoreEventsOlderThanBuffer value must " + "be true or false");
            }
        }

        Map<TimePeriod.Duration, IncrementalExecutor> incrementalExecutorMap = buildIncrementalExecutors(
                isProcessingOnExternalTime, processedMetaStreamEvent, processExpressionExecutors,
                groupByKeyGenerator, bufferSize, ignoreEventsOlderThanBuffer, incrementalDurations,
                aggregationTables, siddhiAppContext, aggregatorName);

        // Recreate in-memory data from tables
        RecreateInMemoryData recreateInMemoryData = new RecreateInMemoryData(incrementalDurations,
                aggregationTables, incrementalExecutorMap, siddhiAppContext, processedMetaStreamEvent, tableMap,
                windowMap, aggregationMap);

        IncrementalExecutor rootIncrementalExecutor = incrementalExecutorMap.get(incrementalDurations.get(0));
        rootIncrementalExecutor.setScheduler(scheduler);
        // Connect entry valve to root incremental executor
        entryValveExecutor.setNextExecutor(rootIncrementalExecutor);

        QueryParserHelper.initStreamRuntime(streamRuntime, incomingMetaStreamEvent, lockWrapper, aggregatorName);

        LatencyTracker latencyTrackerFind = null;
        LatencyTracker latencyTrackerInsert = null;
        ThroughputTracker throughputTrackerFind = null;
        ThroughputTracker throughputTrackerInsert = null;
        if (siddhiAppContext.getStatisticsManager() != null) {
            latencyTrackerFind = QueryParserHelper.createLatencyTracker(siddhiAppContext,
                    aggregationDefinition.getId(), SiddhiConstants.METRIC_INFIX_WINDOWS,
                    SiddhiConstants.METRIC_TYPE_FIND);
            latencyTrackerInsert = QueryParserHelper.createLatencyTracker(siddhiAppContext,
                    aggregationDefinition.getId(), SiddhiConstants.METRIC_INFIX_WINDOWS,
                    SiddhiConstants.METRIC_TYPE_INSERT);
            throughputTrackerFind = QueryParserHelper.createThroughputTracker(siddhiAppContext,
                    aggregationDefinition.getId(), SiddhiConstants.METRIC_INFIX_WINDOWS,
                    SiddhiConstants.METRIC_TYPE_FIND);
            throughputTrackerInsert = QueryParserHelper.createThroughputTracker(siddhiAppContext,
                    aggregationDefinition.getId(), SiddhiConstants.METRIC_INFIX_WINDOWS,
                    SiddhiConstants.METRIC_TYPE_INSERT);
        }

        streamRuntime.setCommonProcessor(new IncrementalAggregationProcessor(rootIncrementalExecutor,
                incomingExpressionExecutors, processedMetaStreamEvent, latencyTrackerInsert,
                throughputTrackerInsert, siddhiAppContext));

        List<ExpressionExecutor> baseExecutors = cloneExpressionExecutors(processExpressionExecutors);
        ExpressionExecutor timestampExecutor = baseExecutors.remove(0);
        return new AggregationRuntime(aggregationDefinition, incrementalExecutorMap, aggregationTables,
                ((SingleStreamRuntime) streamRuntime), entryValveExecutor, incrementalDurations, siddhiAppContext,
                baseExecutors, timestampExecutor, processedMetaStreamEvent, outputExpressionExecutors,
                latencyTrackerFind, throughputTrackerFind, recreateInMemoryData);
    } catch (Throwable t) {
        ExceptionUtil.populateQueryContext(t, aggregationDefinition, siddhiAppContext);
        throw t;
    }
}
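For context, the parser above is driven by a SiddhiQL aggregation definition. The following is a hedged sketch, not taken from the ballerina-lang sources: stream, attribute, and aggregation names are invented, and it assumes the fork retains Siddhi's standard SiddhiManager entry point.

import org.ballerinalang.siddhi.core.SiddhiAppRuntime;
import org.ballerinalang.siddhi.core.SiddhiManager;

public class AggregationDefinitionSketch {
    public static void main(String[] args) {
        String app = "define stream TradeStream (symbol string, price double, volume long, ts long); "
                // 'aggregate by ts' makes isProcessingOnExternalTime true in the parser above;
                // 'group by symbol' populates groupByVariableList.
                + "define aggregation TradeAggregation "
                + "from TradeStream "
                + "select symbol, avg(price) as avgPrice, sum(volume) as totalVolume "
                + "group by symbol "
                + "aggregate by ts every sec ... year;";

        SiddhiManager siddhiManager = new SiddhiManager();
        SiddhiAppRuntime runtime = siddhiManager.createSiddhiAppRuntime(app);
        runtime.start();
        // ... send events to TradeStream and query the aggregation with 'within'/'per', then:
        runtime.shutdown();
    }
}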
Use of org.ballerinalang.siddhi.core.query.input.stream.single.SingleStreamRuntime in project ballerina by ballerina-lang.
The class JoinInputStreamParser, method parseInputStream:
public static StreamRuntime parseInputStream(JoinInputStream joinInputStream, SiddhiAppContext siddhiAppContext,
                                             Map<String, AbstractDefinition> streamDefinitionMap,
                                             Map<String, AbstractDefinition> tableDefinitionMap,
                                             Map<String, AbstractDefinition> windowDefinitionMap,
                                             Map<String, AbstractDefinition> aggregationDefinitionMap,
                                             Map<String, Table> tableMap, Map<String, Window> windowMap,
                                             Map<String, AggregationRuntime> aggregationMap,
                                             List<VariableExpressionExecutor> executors,
                                             LatencyTracker latencyTracker, boolean outputExpectsExpiredEvents,
                                             String queryName) {
    try {
        ProcessStreamReceiver leftProcessStreamReceiver;
        ProcessStreamReceiver rightProcessStreamReceiver;
        MetaStreamEvent leftMetaStreamEvent = new MetaStreamEvent();
        MetaStreamEvent rightMetaStreamEvent = new MetaStreamEvent();
        String leftInputStreamId = ((SingleInputStream) joinInputStream.getLeftInputStream()).getStreamId();
        String rightInputStreamId = ((SingleInputStream) joinInputStream.getRightInputStream()).getStreamId();
        boolean leftOuterJoinProcessor = false;
        boolean rightOuterJoinProcessor = false;
        if (joinInputStream.getAllStreamIds().size() == 2) {
            setEventType(streamDefinitionMap, tableDefinitionMap, windowDefinitionMap, aggregationDefinitionMap,
                    leftMetaStreamEvent, leftInputStreamId);
            setEventType(streamDefinitionMap, tableDefinitionMap, windowDefinitionMap, aggregationDefinitionMap,
                    rightMetaStreamEvent, rightInputStreamId);
            leftProcessStreamReceiver = new ProcessStreamReceiver(leftInputStreamId, latencyTracker, queryName,
                    siddhiAppContext);
            leftProcessStreamReceiver.setBatchProcessingAllowed(leftMetaStreamEvent.getEventType() == WINDOW);
            rightProcessStreamReceiver = new ProcessStreamReceiver(rightInputStreamId, latencyTracker, queryName,
                    siddhiAppContext);
            rightProcessStreamReceiver.setBatchProcessingAllowed(rightMetaStreamEvent.getEventType() == WINDOW);
            if ((leftMetaStreamEvent.getEventType() == TABLE || leftMetaStreamEvent.getEventType() == AGGREGATE)
                    && (rightMetaStreamEvent.getEventType() == TABLE
                    || rightMetaStreamEvent.getEventType() == AGGREGATE)) {
                throw new SiddhiAppCreationException("Both inputs of join " + leftInputStreamId + " and "
                        + rightInputStreamId + " are from static sources");
            }
            if (leftMetaStreamEvent.getEventType() != AGGREGATE
                    && rightMetaStreamEvent.getEventType() != AGGREGATE) {
                if (joinInputStream.getPer() != null) {
                    throw new SiddhiAppCreationException("When joining " + leftInputStreamId + " and "
                            + rightInputStreamId + " 'per' cannot be used as neither of them is an aggregation ");
                } else if (joinInputStream.getWithin() != null) {
                    throw new SiddhiAppCreationException("When joining " + leftInputStreamId + " and "
                            + rightInputStreamId + " 'within' cannot be used as neither of them is an aggregation ");
                }
            }
        } else {
            if (windowDefinitionMap.containsKey(joinInputStream.getAllStreamIds().get(0))) {
                leftMetaStreamEvent.setEventType(WINDOW);
                rightMetaStreamEvent.setEventType(WINDOW);
                rightProcessStreamReceiver = new MultiProcessStreamReceiver(joinInputStream.getAllStreamIds().get(0),
                        1, latencyTracker, queryName, siddhiAppContext);
                rightProcessStreamReceiver.setBatchProcessingAllowed(true);
                leftProcessStreamReceiver = rightProcessStreamReceiver;
            } else if (streamDefinitionMap.containsKey(joinInputStream.getAllStreamIds().get(0))) {
                rightProcessStreamReceiver = new MultiProcessStreamReceiver(joinInputStream.getAllStreamIds().get(0),
                        2, latencyTracker, queryName, siddhiAppContext);
                leftProcessStreamReceiver = rightProcessStreamReceiver;
            } else {
                throw new SiddhiAppCreationException("Input of join is from static source " + leftInputStreamId
                        + " and " + rightInputStreamId);
            }
        }
        SingleStreamRuntime leftStreamRuntime = SingleInputStreamParser.parseInputStream(
                (SingleInputStream) joinInputStream.getLeftInputStream(), siddhiAppContext, executors,
                streamDefinitionMap,
                leftMetaStreamEvent.getEventType() != TABLE ? null : tableDefinitionMap,
                leftMetaStreamEvent.getEventType() != WINDOW ? null : windowDefinitionMap,
                leftMetaStreamEvent.getEventType() != AGGREGATE ? null : aggregationDefinitionMap,
                tableMap, leftMetaStreamEvent, leftProcessStreamReceiver, true, outputExpectsExpiredEvents,
                queryName);
        for (VariableExpressionExecutor variableExpressionExecutor : executors) {
            variableExpressionExecutor.getPosition()[SiddhiConstants.STREAM_EVENT_CHAIN_INDEX] = 0;
        }
        int size = executors.size();
        SingleStreamRuntime rightStreamRuntime = SingleInputStreamParser.parseInputStream(
                (SingleInputStream) joinInputStream.getRightInputStream(), siddhiAppContext, executors,
                streamDefinitionMap,
                rightMetaStreamEvent.getEventType() != TABLE ? null : tableDefinitionMap,
                rightMetaStreamEvent.getEventType() != WINDOW ? null : windowDefinitionMap,
                rightMetaStreamEvent.getEventType() != AGGREGATE ? null : aggregationDefinitionMap,
                tableMap, rightMetaStreamEvent, rightProcessStreamReceiver, true, outputExpectsExpiredEvents,
                queryName);
        for (int i = size; i < executors.size(); i++) {
            VariableExpressionExecutor variableExpressionExecutor = executors.get(i);
            variableExpressionExecutor.getPosition()[SiddhiConstants.STREAM_EVENT_CHAIN_INDEX] = 1;
        }
        setStreamRuntimeProcessorChain(leftMetaStreamEvent, leftStreamRuntime, leftInputStreamId, tableMap,
                windowMap, aggregationMap, executors, outputExpectsExpiredEvents, queryName,
                joinInputStream.getWithin(), joinInputStream.getPer(), siddhiAppContext,
                joinInputStream.getLeftInputStream());
        setStreamRuntimeProcessorChain(rightMetaStreamEvent, rightStreamRuntime, rightInputStreamId, tableMap,
                windowMap, aggregationMap, executors, outputExpectsExpiredEvents, queryName,
                joinInputStream.getWithin(), joinInputStream.getPer(), siddhiAppContext,
                joinInputStream.getRightInputStream());
        MetaStateEvent metaStateEvent = new MetaStateEvent(2);
        metaStateEvent.addEvent(leftMetaStreamEvent);
        metaStateEvent.addEvent(rightMetaStreamEvent);
        switch (joinInputStream.getType()) {
            case FULL_OUTER_JOIN:
                leftOuterJoinProcessor = true;
                rightOuterJoinProcessor = true;
                break;
            case RIGHT_OUTER_JOIN:
                rightOuterJoinProcessor = true;
                break;
            case LEFT_OUTER_JOIN:
                leftOuterJoinProcessor = true;
                break;
        }
        JoinProcessor leftPreJoinProcessor = new JoinProcessor(true, true, leftOuterJoinProcessor, 0);
        JoinProcessor leftPostJoinProcessor = new JoinProcessor(true, false, leftOuterJoinProcessor, 0);
        FindableProcessor leftFindableProcessor = insertJoinProcessorsAndGetFindable(leftPreJoinProcessor,
                leftPostJoinProcessor, leftStreamRuntime, siddhiAppContext, outputExpectsExpiredEvents, queryName,
                joinInputStream.getLeftInputStream());
        JoinProcessor rightPreJoinProcessor = new JoinProcessor(false, true, rightOuterJoinProcessor, 1);
        JoinProcessor rightPostJoinProcessor = new JoinProcessor(false, false, rightOuterJoinProcessor, 1);
        FindableProcessor rightFindableProcessor = insertJoinProcessorsAndGetFindable(rightPreJoinProcessor,
                rightPostJoinProcessor, rightStreamRuntime, siddhiAppContext, outputExpectsExpiredEvents, queryName,
                joinInputStream.getRightInputStream());
        leftPreJoinProcessor.setFindableProcessor(rightFindableProcessor);
        leftPostJoinProcessor.setFindableProcessor(rightFindableProcessor);
        rightPreJoinProcessor.setFindableProcessor(leftFindableProcessor);
        rightPostJoinProcessor.setFindableProcessor(leftFindableProcessor);
        Expression compareCondition = joinInputStream.getOnCompare();
        if (compareCondition == null) {
            compareCondition = Expression.value(true);
        }
        MatchingMetaInfoHolder rightMatchingMetaInfoHolder = MatcherParser.constructMatchingMetaStateHolder(
                metaStateEvent, 0, rightMetaStreamEvent.getLastInputDefinition(), UNKNOWN_STATE);
        CompiledCondition leftCompiledCondition = rightFindableProcessor.compileCondition(compareCondition,
                rightMatchingMetaInfoHolder, siddhiAppContext, executors, tableMap, queryName);
        MatchingMetaInfoHolder leftMatchingMetaInfoHolder = MatcherParser.constructMatchingMetaStateHolder(
                metaStateEvent, 1, leftMetaStreamEvent.getLastInputDefinition(), UNKNOWN_STATE);
        CompiledCondition rightCompiledCondition = leftFindableProcessor.compileCondition(compareCondition,
                leftMatchingMetaInfoHolder, siddhiAppContext, executors, tableMap, queryName);
        if (joinInputStream.getTrigger() != JoinInputStream.EventTrigger.LEFT) {
            populateJoinProcessors(rightMetaStreamEvent, rightInputStreamId, rightPreJoinProcessor,
                    rightPostJoinProcessor, rightCompiledCondition);
        }
        if (joinInputStream.getTrigger() != JoinInputStream.EventTrigger.RIGHT) {
            populateJoinProcessors(leftMetaStreamEvent, leftInputStreamId, leftPreJoinProcessor,
                    leftPostJoinProcessor, leftCompiledCondition);
        }
        JoinStreamRuntime joinStreamRuntime = new JoinStreamRuntime(siddhiAppContext, metaStateEvent);
        joinStreamRuntime.addRuntime(leftStreamRuntime);
        joinStreamRuntime.addRuntime(rightStreamRuntime);
        return joinStreamRuntime;
    } catch (Throwable t) {
        ExceptionUtil.populateQueryContext(t, joinInputStream, siddhiAppContext);
        throw t;
    }
}
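The following is a hedged sketch of the kind of join query this parser turns into a JoinStreamRuntime wrapping two SingleStreamRuntime instances. It is illustrative only: stream and query names are invented, and the entry point assumes the fork keeps Siddhi's standard SiddhiManager API.

import org.ballerinalang.siddhi.core.SiddhiAppRuntime;
import org.ballerinalang.siddhi.core.SiddhiManager;

public class JoinQuerySketch {
    public static void main(String[] args) {
        String app = "define stream OrderStream (symbol string, quantity int); "
                + "define stream PriceStream (symbol string, price double); "
                + "@info(name = 'joinQuery') "
                // A plain 'join' leaves both outer-join flags false in the parser above;
                // the 'on' condition is compiled once against each side of the join.
                + "from OrderStream#window.time(1 min) as o "
                + "join PriceStream#window.length(100) as p "
                + "on o.symbol == p.symbol "
                + "select o.symbol, o.quantity * p.price as orderValue "
                + "insert into OrderValueStream;";

        SiddhiManager siddhiManager = new SiddhiManager();
        SiddhiAppRuntime runtime = siddhiManager.createSiddhiAppRuntime(app);
        runtime.start();
    }
}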
Use of org.ballerinalang.siddhi.core.query.input.stream.single.SingleStreamRuntime in project ballerina by ballerina-lang.
The class QueryParserHelper, method initStreamRuntime:
public static void initStreamRuntime(StreamRuntime runtime, MetaComplexEvent metaComplexEvent,
                                     LockWrapper lockWrapper, String queryName) {
    if (runtime instanceof SingleStreamRuntime) {
        initSingleStreamRuntime((SingleStreamRuntime) runtime, 0, metaComplexEvent, null, lockWrapper, queryName);
    } else {
        MetaStateEvent metaStateEvent = (MetaStateEvent) metaComplexEvent;
        StateEventPool stateEventPool = new StateEventPool(metaStateEvent, 5);
        MetaStreamEvent[] metaStreamEvents = metaStateEvent.getMetaStreamEvents();
        for (int i = 0, metaStreamEventsLength = metaStreamEvents.length; i < metaStreamEventsLength; i++) {
            initSingleStreamRuntime(runtime.getSingleStreamRuntimes().get(i), i, metaStateEvent, stateEventPool,
                    lockWrapper, queryName);
        }
    }
}
Use of org.ballerinalang.siddhi.core.query.input.stream.single.SingleStreamRuntime in project ballerina by ballerina-lang.
The class SiddhiAppRuntime, method debug:
public synchronized SiddhiDebugger debug() {
    siddhiDebugger = new SiddhiDebugger(siddhiAppContext);
    List<StreamRuntime> streamRuntime = new ArrayList<>();
    List<OutputCallback> streamCallbacks = new ArrayList<>();
    for (QueryRuntime queryRuntime : queryProcessorMap.values()) {
        streamRuntime.add(queryRuntime.getStreamRuntime());
        streamCallbacks.add(queryRuntime.getOutputCallback());
    }
    for (StreamRuntime streamRuntime1 : streamRuntime) {
        for (SingleStreamRuntime singleStreamRuntime : streamRuntime1.getSingleStreamRuntimes()) {
            singleStreamRuntime.getProcessStreamReceiver().setSiddhiDebugger(siddhiDebugger);
        }
    }
    for (OutputCallback callback : streamCallbacks) {
        callback.setSiddhiDebugger(siddhiDebugger);
    }
    start();
    running = true;
    return siddhiDebugger;
}
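Below is a hedged usage sketch of how application code typically reaches this debug() method. The SiddhiDebugger calls (acquireBreakPoint, setDebuggerCallback, next) follow the standard Siddhi 4.x debugger API, which this fork is assumed to retain; query and stream names are illustrative.

import org.ballerinalang.siddhi.core.SiddhiAppRuntime;
import org.ballerinalang.siddhi.core.SiddhiManager;
import org.ballerinalang.siddhi.core.debugger.SiddhiDebugger;

public class DebuggerSketch {
    public static void main(String[] args) throws InterruptedException {
        SiddhiManager siddhiManager = new SiddhiManager();
        SiddhiAppRuntime runtime = siddhiManager.createSiddhiAppRuntime(
                "define stream StockStream (symbol string, price double); "
                        + "@info(name = 'query1') "
                        + "from StockStream select symbol, price insert into OutputStream;");

        // debug() attaches the debugger to every SingleStreamRuntime's ProcessStreamReceiver
        // and to the output callbacks, then starts the app.
        SiddhiDebugger debugger = runtime.debug();
        debugger.acquireBreakPoint("query1", SiddhiDebugger.QueryTerminal.IN);
        debugger.setDebuggerCallback((event, queryName, queryTerminal, siddhiDebugger) -> {
            // Inspect the event halted at the breakpoint, then step to the next checkpoint.
            siddhiDebugger.next();
        });

        runtime.getInputHandler("StockStream").send(new Object[]{"WSO2", 55.6});
        Thread.sleep(100);
        runtime.shutdown();
    }
}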
Use of org.ballerinalang.siddhi.core.query.input.stream.single.SingleStreamRuntime in project ballerina by ballerina-lang.
The class StateStreamRuntime, method clone:
@Override
public StreamRuntime clone(String key) {
    StateStreamRuntime stateStreamRuntime = new StateStreamRuntime(siddhiAppContext, metaStateEvent);
    stateStreamRuntime.innerStateRuntime = this.innerStateRuntime.clone(key);
    for (SingleStreamRuntime singleStreamRuntime : stateStreamRuntime.getSingleStreamRuntimes()) {
        ProcessStreamReceiver processStreamReceiver = singleStreamRuntime.getProcessStreamReceiver();
        if (processStreamReceiver instanceof SequenceMultiProcessStreamReceiver) {
            ((SequenceMultiProcessStreamReceiver) processStreamReceiver).setStateStreamRuntime(stateStreamRuntime);
        } else if (processStreamReceiver instanceof SequenceSingleProcessStreamReceiver) {
            ((SequenceSingleProcessStreamReceiver) processStreamReceiver).setStateStreamRuntime(stateStreamRuntime);
        }
    }
    ((StreamPreStateProcessor) stateStreamRuntime.innerStateRuntime.getFirstProcessor())
            .setThisLastProcessor((StreamPostStateProcessor) stateStreamRuntime.innerStateRuntime.getLastProcessor());
    return stateStreamRuntime;
}
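For illustration, a hedged sketch of a query whose state machine would be cloned this way: in a partitioned pattern query, each new partition key results in the enclosing query runtime being cloned, which is assumed to call StateStreamRuntime.clone(key) to duplicate the pattern runtime and re-point its sequence receivers at the new StateStreamRuntime. Names are invented and the SiddhiQL is not taken from the ballerina-lang sources.

import org.ballerinalang.siddhi.core.SiddhiAppRuntime;
import org.ballerinalang.siddhi.core.SiddhiManager;

public class PartitionedPatternSketch {
    public static void main(String[] args) {
        String app = "define stream LoginStream (userId string, status string); "
                + "partition with (userId of LoginStream) begin "
                + "  @info(name = 'failedLoginPattern') "
                + "  from every e1=LoginStream[status == 'fail'] -> e2=LoginStream[status == 'fail'] "
                + "  within 1 min "
                + "  select e1.userId as userId "
                + "  insert into AlertStream; "
                + "end;";

        SiddhiManager siddhiManager = new SiddhiManager();
        SiddhiAppRuntime runtime = siddhiManager.createSiddhiAppRuntime(app);
        runtime.start();
    }
}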