Search in sources:

Example 1 with AggregationRuntime

Use of org.ballerinalang.siddhi.core.aggregation.AggregationRuntime in project ballerina by ballerina-lang.

The class AggregationParser, method parse.

public static AggregationRuntime parse(AggregationDefinition aggregationDefinition, SiddhiAppContext siddhiAppContext,
        Map<String, AbstractDefinition> streamDefinitionMap, Map<String, AbstractDefinition> tableDefinitionMap,
        Map<String, AbstractDefinition> windowDefinitionMap, Map<String, AbstractDefinition> aggregationDefinitionMap,
        Map<String, Table> tableMap, Map<String, Window> windowMap, Map<String, AggregationRuntime> aggregationMap,
        SiddhiAppRuntimeBuilder siddhiAppRuntimeBuilder) {
    if (aggregationDefinition == null) {
        throw new SiddhiAppCreationException("AggregationDefinition instance is null. " + "Hence, can't create the siddhi app '" + siddhiAppContext.getName() + "'");
    }
    if (aggregationDefinition.getTimePeriod() == null) {
        throw new SiddhiAppCreationException("AggregationDefinition '" + aggregationDefinition.getId() + "'s timePeriod is null. " + "Hence, can't create the siddhi app '" + siddhiAppContext.getName() + "'", aggregationDefinition.getQueryContextStartIndex(), aggregationDefinition.getQueryContextEndIndex());
    }
    if (aggregationDefinition.getSelector() == null) {
        throw new SiddhiAppCreationException("AggregationDefinition '" + aggregationDefinition.getId() + "'s selection is not defined. " + "Hence, can't create the siddhi app '" + siddhiAppContext.getName() + "'", aggregationDefinition.getQueryContextStartIndex(), aggregationDefinition.getQueryContextEndIndex());
    }
    if (streamDefinitionMap.get(aggregationDefinition.getBasicSingleInputStream().getStreamId()) == null) {
        throw new SiddhiAppCreationException("Stream " + aggregationDefinition.getBasicSingleInputStream().getStreamId() + " has not been defined");
    }
    try {
        List<VariableExpressionExecutor> incomingVariableExpressionExecutors = new ArrayList<>();
        String aggregatorName = aggregationDefinition.getId();
        StreamRuntime streamRuntime = InputStreamParser.parse(aggregationDefinition.getBasicSingleInputStream(), siddhiAppContext, streamDefinitionMap, tableDefinitionMap, windowDefinitionMap, aggregationDefinitionMap, tableMap, windowMap, aggregationMap, incomingVariableExpressionExecutors, null, false, aggregatorName);
        // Get original meta for later use.
        MetaStreamEvent incomingMetaStreamEvent = (MetaStreamEvent) streamRuntime.getMetaComplexEvent();
        // Create new meta stream event.
        // This must hold the timestamp, group by attributes (if given) and the incremental attributes, in
        // onAfterWindowData array
        // Example format: AGG_TIMESTAMP, groupByAttribute1, groupByAttribute2, AGG_incAttribute1, AGG_incAttribute2
        // AGG_incAttribute1, AGG_incAttribute2 would have the same attribute names as in
        // finalListOfIncrementalAttributes
        // To enter data as onAfterWindowData
        incomingMetaStreamEvent.initializeAfterWindowData();
        List<ExpressionExecutor> incomingExpressionExecutors = new ArrayList<>();
        List<IncrementalAttributeAggregator> incrementalAttributeAggregators = new ArrayList<>();
        List<Variable> groupByVariableList = aggregationDefinition.getSelector().getGroupByList();
        boolean isProcessingOnExternalTime = aggregationDefinition.getAggregateAttribute() != null;
        // Expressions to get final aggregate outputs, e.g. avg = sum / count
        List<Expression> outputExpressions = new ArrayList<>();
        // Expression executors to get final aggregate outputs, e.g. avg = sum / count
        List<ExpressionExecutor> outputExpressionExecutors = new ArrayList<>();
        populateIncomingAggregatorsAndExecutors(aggregationDefinition, siddhiAppContext, tableMap, incomingVariableExpressionExecutors, aggregatorName, incomingMetaStreamEvent, incomingExpressionExecutors, incrementalAttributeAggregators, groupByVariableList, outputExpressions);
        int baseAggregatorBeginIndex = incomingMetaStreamEvent.getOutputData().size();
        List<Expression> finalBaseAggregators = getFinalBaseAggregators(siddhiAppContext, tableMap, incomingVariableExpressionExecutors, aggregatorName, incomingMetaStreamEvent, incomingExpressionExecutors, incrementalAttributeAggregators);
        StreamDefinition incomingOutputStreamDefinition = StreamDefinition.id("");
        incomingOutputStreamDefinition.setQueryContextStartIndex(aggregationDefinition.getQueryContextStartIndex());
        incomingOutputStreamDefinition.setQueryContextEndIndex(aggregationDefinition.getQueryContextEndIndex());
        MetaStreamEvent processedMetaStreamEvent = new MetaStreamEvent();
        for (Attribute attribute : incomingMetaStreamEvent.getOutputData()) {
            incomingOutputStreamDefinition.attribute(attribute.getName(), attribute.getType());
            processedMetaStreamEvent.addOutputData(attribute);
        }
        incomingMetaStreamEvent.setOutputDefinition(incomingOutputStreamDefinition);
        processedMetaStreamEvent.addInputDefinition(incomingOutputStreamDefinition);
        processedMetaStreamEvent.setOutputDefinition(incomingOutputStreamDefinition);
        // Executors of processing meta
        List<VariableExpressionExecutor> processVariableExpressionExecutors = new ArrayList<>();
        boolean groupBy = aggregationDefinition.getSelector().getGroupByList().size() != 0;
        List<ExpressionExecutor> processExpressionExecutors = constructProcessExpressionExecutors(siddhiAppContext, tableMap, aggregatorName, baseAggregatorBeginIndex, finalBaseAggregators, incomingOutputStreamDefinition, processedMetaStreamEvent, processVariableExpressionExecutors, groupBy);
        outputExpressionExecutors.addAll(outputExpressions.stream().map(expression -> ExpressionParser.parseExpression(expression, processedMetaStreamEvent, 0, tableMap, processVariableExpressionExecutors, siddhiAppContext, groupBy, 0, aggregatorName)).collect(Collectors.toList()));
        // Create group by key generator
        GroupByKeyGenerator groupByKeyGenerator = null;
        if (groupBy) {
            groupByKeyGenerator = new GroupByKeyGenerator(groupByVariableList, processedMetaStreamEvent, SiddhiConstants.UNKNOWN_STATE, tableMap, processVariableExpressionExecutors, siddhiAppContext, aggregatorName);
        }
        // Create new scheduler
        EntryValveExecutor entryValveExecutor = new EntryValveExecutor(siddhiAppContext);
        LockWrapper lockWrapper = new LockWrapper(aggregatorName);
        lockWrapper.setLock(new ReentrantLock());
        Scheduler scheduler = SchedulerParser.parse(siddhiAppContext.getScheduledExecutorService(), entryValveExecutor, siddhiAppContext);
        scheduler.init(lockWrapper, aggregatorName);
        scheduler.setStreamEventPool(new StreamEventPool(processedMetaStreamEvent, 10));
        QueryParserHelper.reduceMetaComplexEvent(incomingMetaStreamEvent);
        QueryParserHelper.reduceMetaComplexEvent(processedMetaStreamEvent);
        QueryParserHelper.updateVariablePosition(incomingMetaStreamEvent, incomingVariableExpressionExecutors);
        QueryParserHelper.updateVariablePosition(processedMetaStreamEvent, processVariableExpressionExecutors);
        List<TimePeriod.Duration> incrementalDurations = getSortedPeriods(aggregationDefinition.getTimePeriod());
        Map<TimePeriod.Duration, Table> aggregationTables = initDefaultTables(aggregatorName, incrementalDurations, processedMetaStreamEvent.getOutputStreamDefinition(), siddhiAppRuntimeBuilder, aggregationDefinition.getAnnotations(), groupByVariableList);
        int bufferSize = 0;
        Element element = AnnotationHelper.getAnnotationElement(SiddhiConstants.ANNOTATION_BUFFER_SIZE, null, aggregationDefinition.getAnnotations());
        if (element != null) {
            try {
                bufferSize = Integer.parseInt(element.getValue());
            } catch (NumberFormatException e) {
                throw new SiddhiAppCreationException(e.getMessage() + ": BufferSize must be an integer");
            }
        }
        if (bufferSize > 0) {
            TimePeriod.Duration rootDuration = incrementalDurations.get(0);
            if (rootDuration == TimePeriod.Duration.MONTHS || rootDuration == TimePeriod.Duration.YEARS) {
                throw new SiddhiAppCreationException("A buffer size greater than 0 can be provided, only when the " + "first duration value is seconds, minutes, hours or days");
            }
            if (!isProcessingOnExternalTime) {
                // Buffer size is used to process out of order events. However, events would never be out of
                // order if they are processed based on event arrival time.
                throw new SiddhiAppCreationException("Buffer size cannot be specified when events are aggregated " + "based on event arrival time.");
            }
        } else if (bufferSize < 0) {
            throw new SiddhiAppCreationException("Expected a positive integer as the buffer size, but found " + bufferSize + " as the provided value");
        }
        boolean ignoreEventsOlderThanBuffer = false;
        element = AnnotationHelper.getAnnotationElement(SiddhiConstants.ANNOTATION_IGNORE_EVENTS_OLDER_THAN_BUFFER, null, aggregationDefinition.getAnnotations());
        if (element != null) {
            if (element.getValue().equalsIgnoreCase("true")) {
                ignoreEventsOlderThanBuffer = true;
            } else if (!element.getValue().equalsIgnoreCase("false")) {
                throw new SiddhiAppCreationException("IgnoreEventsOlderThanBuffer value must " + "be true or false");
            }
        }
        Map<TimePeriod.Duration, IncrementalExecutor> incrementalExecutorMap = buildIncrementalExecutors(isProcessingOnExternalTime, processedMetaStreamEvent, processExpressionExecutors, groupByKeyGenerator, bufferSize, ignoreEventsOlderThanBuffer, incrementalDurations, aggregationTables, siddhiAppContext, aggregatorName);
        // Recreate in-memory data from tables
        RecreateInMemoryData recreateInMemoryData = new RecreateInMemoryData(incrementalDurations, aggregationTables, incrementalExecutorMap, siddhiAppContext, processedMetaStreamEvent, tableMap, windowMap, aggregationMap);
        IncrementalExecutor rootIncrementalExecutor = incrementalExecutorMap.get(incrementalDurations.get(0));
        rootIncrementalExecutor.setScheduler(scheduler);
        // Connect entry valve to root incremental executor
        entryValveExecutor.setNextExecutor(rootIncrementalExecutor);
        QueryParserHelper.initStreamRuntime(streamRuntime, incomingMetaStreamEvent, lockWrapper, aggregatorName);
        LatencyTracker latencyTrackerFind = null;
        LatencyTracker latencyTrackerInsert = null;
        ThroughputTracker throughputTrackerFind = null;
        ThroughputTracker throughputTrackerInsert = null;
        if (siddhiAppContext.getStatisticsManager() != null) {
            latencyTrackerFind = QueryParserHelper.createLatencyTracker(siddhiAppContext, aggregationDefinition.getId(), SiddhiConstants.METRIC_INFIX_WINDOWS, SiddhiConstants.METRIC_TYPE_FIND);
            latencyTrackerInsert = QueryParserHelper.createLatencyTracker(siddhiAppContext, aggregationDefinition.getId(), SiddhiConstants.METRIC_INFIX_WINDOWS, SiddhiConstants.METRIC_TYPE_INSERT);
            throughputTrackerFind = QueryParserHelper.createThroughputTracker(siddhiAppContext, aggregationDefinition.getId(), SiddhiConstants.METRIC_INFIX_WINDOWS, SiddhiConstants.METRIC_TYPE_FIND);
            throughputTrackerInsert = QueryParserHelper.createThroughputTracker(siddhiAppContext, aggregationDefinition.getId(), SiddhiConstants.METRIC_INFIX_WINDOWS, SiddhiConstants.METRIC_TYPE_INSERT);
        }
        streamRuntime.setCommonProcessor(new IncrementalAggregationProcessor(rootIncrementalExecutor, incomingExpressionExecutors, processedMetaStreamEvent, latencyTrackerInsert, throughputTrackerInsert, siddhiAppContext));
        List<ExpressionExecutor> baseExecutors = cloneExpressionExecutors(processExpressionExecutors);
        ExpressionExecutor timestampExecutor = baseExecutors.remove(0);
        return new AggregationRuntime(aggregationDefinition, incrementalExecutorMap, aggregationTables, ((SingleStreamRuntime) streamRuntime), entryValveExecutor, incrementalDurations, siddhiAppContext, baseExecutors, timestampExecutor, processedMetaStreamEvent, outputExpressionExecutors, latencyTrackerFind, throughputTrackerFind, recreateInMemoryData);
    } catch (Throwable t) {
        ExceptionUtil.populateQueryContext(t, aggregationDefinition, siddhiAppContext);
        throw t;
    }
}
Also used : Variable(org.ballerinalang.siddhi.query.api.expression.Variable) Attribute(org.ballerinalang.siddhi.query.api.definition.Attribute) OutputAttribute(org.ballerinalang.siddhi.query.api.execution.query.selection.OutputAttribute) Scheduler(org.ballerinalang.siddhi.core.util.Scheduler) GroupByKeyGenerator(org.ballerinalang.siddhi.core.query.selector.GroupByKeyGenerator) Element(org.ballerinalang.siddhi.query.api.annotation.Element) ArrayList(java.util.ArrayList) IncrementalExecutor(org.ballerinalang.siddhi.core.aggregation.IncrementalExecutor) StreamEventPool(org.ballerinalang.siddhi.core.event.stream.StreamEventPool) StreamRuntime(org.ballerinalang.siddhi.core.query.input.stream.StreamRuntime) SingleStreamRuntime(org.ballerinalang.siddhi.core.query.input.stream.single.SingleStreamRuntime) IncrementalAggregationProcessor(org.ballerinalang.siddhi.core.aggregation.IncrementalAggregationProcessor) AggregationRuntime(org.ballerinalang.siddhi.core.aggregation.AggregationRuntime) ReentrantLock(java.util.concurrent.locks.ReentrantLock) ThroughputTracker(org.ballerinalang.siddhi.core.util.statistics.ThroughputTracker) Table(org.ballerinalang.siddhi.core.table.Table) ExpressionExecutor(org.ballerinalang.siddhi.core.executor.ExpressionExecutor) VariableExpressionExecutor(org.ballerinalang.siddhi.core.executor.VariableExpressionExecutor) StreamDefinition(org.ballerinalang.siddhi.query.api.definition.StreamDefinition) SiddhiAppCreationException(org.ballerinalang.siddhi.core.exception.SiddhiAppCreationException) TimePeriod(org.ballerinalang.siddhi.query.api.aggregation.TimePeriod) SingleStreamRuntime(org.ballerinalang.siddhi.core.query.input.stream.single.SingleStreamRuntime) VariableExpressionExecutor(org.ballerinalang.siddhi.core.executor.VariableExpressionExecutor) EntryValveExecutor(org.ballerinalang.siddhi.core.query.input.stream.single.EntryValveExecutor) LockWrapper(org.ballerinalang.siddhi.core.util.lock.LockWrapper) Expression(org.ballerinalang.siddhi.query.api.expression.Expression) IncrementalAttributeAggregator(org.ballerinalang.siddhi.core.query.selector.attribute.aggregator.incremental.IncrementalAttributeAggregator) RecreateInMemoryData(org.ballerinalang.siddhi.core.aggregation.RecreateInMemoryData) LatencyTracker(org.ballerinalang.siddhi.core.util.statistics.LatencyTracker) MetaStreamEvent(org.ballerinalang.siddhi.core.event.stream.MetaStreamEvent)
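For orientation, AggregationParser.parse is normally reached indirectly, when a Siddhi app containing a define aggregation statement is built. The sketch below is illustrative only: the app string, stream, and attribute names are invented, and it assumes the ballerina fork exposes the same SiddhiManager/SiddhiAppRuntime entry points as upstream Siddhi 4.x.

import org.ballerinalang.siddhi.core.SiddhiAppRuntime;
import org.ballerinalang.siddhi.core.SiddhiManager;

public class AggregationDefinitionSketch {
    public static void main(String[] args) {
        // Hypothetical app: the 'define aggregation' below is what eventually gets
        // handed to AggregationParser.parse(...) while the runtime is assembled.
        String siddhiApp =
                "define stream TradeStream (symbol string, price double, volume long); " +
                "define aggregation TradeAggregation " +
                "from TradeStream " +
                "select symbol, avg(price) as avgPrice, sum(volume) as totalVolume " +
                "group by symbol " +
                "aggregate every sec ... year;";

        SiddhiManager siddhiManager = new SiddhiManager();
        SiddhiAppRuntime runtime = siddhiManager.createSiddhiAppRuntime(siddhiApp);
        runtime.start();
        // ... send events into TradeStream, issue store queries, etc.
        runtime.shutdown();
        siddhiManager.shutdown();
    }
}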

Example 2 with AggregationRuntime

Use of org.ballerinalang.siddhi.core.aggregation.AggregationRuntime in project ballerina by ballerina-lang.

The class JoinInputStreamParser, method setStreamRuntimeProcessorChain.

private static void setStreamRuntimeProcessorChain(MetaStreamEvent metaStreamEvent, SingleStreamRuntime streamRuntime,
        String inputStreamId, Map<String, Table> tableMap, Map<String, Window> windowMap,
        Map<String, AggregationRuntime> aggregationMap, List<VariableExpressionExecutor> variableExpressionExecutors,
        boolean outputExpectsExpiredEvents, String queryName, Within within, Expression per,
        SiddhiAppContext siddhiAppContext, InputStream inputStream) {
    switch(metaStreamEvent.getEventType()) {
        case TABLE:
            TableWindowProcessor tableWindowProcessor = new TableWindowProcessor(tableMap.get(inputStreamId));
            tableWindowProcessor.initProcessor(metaStreamEvent.getLastInputDefinition(), new ExpressionExecutor[0], null, siddhiAppContext, outputExpectsExpiredEvents, queryName, inputStream);
            streamRuntime.setProcessorChain(tableWindowProcessor);
            break;
        case WINDOW:
            WindowWindowProcessor windowWindowProcessor = new WindowWindowProcessor(windowMap.get(inputStreamId));
            windowWindowProcessor.initProcessor(metaStreamEvent.getLastInputDefinition(), variableExpressionExecutors.toArray(new ExpressionExecutor[0]), null, siddhiAppContext, outputExpectsExpiredEvents, queryName, inputStream);
            streamRuntime.setProcessorChain(windowWindowProcessor);
            break;
        case AGGREGATE:
            AggregationRuntime aggregationRuntime = aggregationMap.get(inputStreamId);
            AggregateWindowProcessor aggregateWindowProcessor = new AggregateWindowProcessor(aggregationRuntime, within, per);
            aggregateWindowProcessor.initProcessor(metaStreamEvent.getLastInputDefinition(), variableExpressionExecutors.toArray(new ExpressionExecutor[0]), null, siddhiAppContext, outputExpectsExpiredEvents, queryName, inputStream);
            streamRuntime.setProcessorChain(aggregateWindowProcessor);
            break;
        case DEFAULT:
            break;
    }
}
Also used : AggregateWindowProcessor(org.ballerinalang.siddhi.core.query.processor.stream.window.AggregateWindowProcessor) TableWindowProcessor(org.ballerinalang.siddhi.core.query.processor.stream.window.TableWindowProcessor) ConstantExpressionExecutor(org.ballerinalang.siddhi.core.executor.ConstantExpressionExecutor) ExpressionExecutor(org.ballerinalang.siddhi.core.executor.ExpressionExecutor) VariableExpressionExecutor(org.ballerinalang.siddhi.core.executor.VariableExpressionExecutor) WindowWindowProcessor(org.ballerinalang.siddhi.core.query.processor.stream.window.WindowWindowProcessor) AggregationRuntime(org.ballerinalang.siddhi.core.aggregation.AggregationRuntime)
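The AGGREGATE case above is taken when a query joins a stream against a defined aggregation; its within and per clauses become the Within and Expression arguments handed to AggregateWindowProcessor. A minimal sketch of such a join query, with invented stream and attribute names:

public class AggregationJoinSketch {
    // Hypothetical join query that would route through the AGGREGATE branch.
    // The 'within' pattern and the 'per' duration map onto the Within and
    // Expression ('per') parameters passed to AggregateWindowProcessor above.
    static final String JOIN_QUERY =
            "from TradeRequestStream as r " +
            "join TradeAggregation as a " +
            "on r.symbol == a.symbol " +
            "within \"2017-**-** **:**:**\" " +
            "per \"days\" " +
            "select a.symbol, a.avgPrice, a.totalVolume " +
            "insert into TradeSummaryStream;";
}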

Example 3 with AggregationRuntime

Use of org.ballerinalang.siddhi.core.aggregation.AggregationRuntime in project ballerina by ballerina-lang.

The class StoreQueryParser, method parse.

/**
 * Parses a store query and returns the corresponding StoreQueryRuntime.
 *
 * @param storeQuery       store query to be parsed.
 * @param siddhiAppContext associated Siddhi app context.
 * @param tableMap         map of tables, keyed by name.
 * @param windowMap        map of windows, keyed by name.
 * @param aggregationMap   map of aggregation runtimes, keyed by name.
 * @return the constructed StoreQueryRuntime.
 */
public static StoreQueryRuntime parse(StoreQuery storeQuery, SiddhiAppContext siddhiAppContext,
        Map<String, Table> tableMap, Map<String, Window> windowMap,
        Map<String, AggregationRuntime> aggregationMap) {
    String queryName = "store_query_" + storeQuery.getInputStore().getStoreId();
    InputStore inputStore = storeQuery.getInputStore();
    int metaPosition = SiddhiConstants.UNKNOWN_STATE;
    Within within = null;
    Expression per = null;
    try {
        SnapshotService.getSkipSnapshotableThreadLocal().set(true);
        Expression onCondition = Expression.value(true);
        MetaStreamEvent metaStreamEvent = new MetaStreamEvent();
        metaStreamEvent.setInputReferenceId(inputStore.getStoreReferenceId());
        if (inputStore instanceof AggregationInputStore) {
            AggregationInputStore aggregationInputStore = (AggregationInputStore) inputStore;
            if (aggregationMap.get(inputStore.getStoreId()) == null) {
                throw new StoreQueryCreationException("Aggregation \"" + inputStore.getStoreId() + "\" has not been defined");
            }
            if (aggregationInputStore.getPer() != null && aggregationInputStore.getWithin() != null) {
                within = aggregationInputStore.getWithin();
                per = aggregationInputStore.getPer();
            } else if (aggregationInputStore.getPer() != null || aggregationInputStore.getWithin() != null) {
                throw new StoreQueryCreationException(inputStore.getStoreId() + " should either have both 'within' and 'per' defined or none.");
            }
            if (((AggregationInputStore) inputStore).getOnCondition() != null) {
                onCondition = ((AggregationInputStore) inputStore).getOnCondition();
            }
        } else if (inputStore instanceof ConditionInputStore) {
            if (((ConditionInputStore) inputStore).getOnCondition() != null) {
                onCondition = ((ConditionInputStore) inputStore).getOnCondition();
            }
        }
        List<VariableExpressionExecutor> variableExpressionExecutors = new ArrayList<>();
        Table table = tableMap.get(inputStore.getStoreId());
        if (table != null) {
            return constructStoreQueryRuntime(table, storeQuery, siddhiAppContext, tableMap, queryName, metaPosition, onCondition, metaStreamEvent, variableExpressionExecutors);
        } else {
            AggregationRuntime aggregation = aggregationMap.get(inputStore.getStoreId());
            if (aggregation != null) {
                return constructStoreQueryRuntime(aggregation, storeQuery, siddhiAppContext, tableMap, queryName, within, per, onCondition, metaStreamEvent, variableExpressionExecutors);
            } else {
                Window window = windowMap.get(inputStore.getStoreId());
                if (window != null) {
                    return constructStoreQueryRuntime(window, storeQuery, siddhiAppContext, tableMap, queryName, metaPosition, onCondition, metaStreamEvent, variableExpressionExecutors);
                } else {
                    throw new StoreQueryCreationException(inputStore.getStoreId() + " is neither a table, aggregation or window");
                }
            }
        }
    } finally {
        SnapshotService.getSkipSnapshotableThreadLocal().set(null);
    }
}
Also used : Window(org.ballerinalang.siddhi.core.window.Window) Table(org.ballerinalang.siddhi.core.table.Table) VariableExpressionExecutor(org.ballerinalang.siddhi.core.executor.VariableExpressionExecutor) ArrayList(java.util.ArrayList) AggregationInputStore(org.ballerinalang.siddhi.query.api.execution.query.input.store.AggregationInputStore) Expression(org.ballerinalang.siddhi.query.api.expression.Expression) ConditionInputStore(org.ballerinalang.siddhi.query.api.execution.query.input.store.ConditionInputStore) AggregationInputStore(org.ballerinalang.siddhi.query.api.execution.query.input.store.AggregationInputStore) InputStore(org.ballerinalang.siddhi.query.api.execution.query.input.store.InputStore) ConditionInputStore(org.ballerinalang.siddhi.query.api.execution.query.input.store.ConditionInputStore) Within(org.ballerinalang.siddhi.query.api.aggregation.Within) StoreQueryCreationException(org.ballerinalang.siddhi.core.exception.StoreQueryCreationException) MetaStreamEvent(org.ballerinalang.siddhi.core.event.stream.MetaStreamEvent) AggregationRuntime(org.ballerinalang.siddhi.core.aggregation.AggregationRuntime)
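StoreQueryRuntime instances built here back the on-demand query API. Assuming the fork keeps Siddhi's SiddhiAppRuntime.query(String) entry point, a store query against the aggregation might look like the sketch below; the aggregation and attribute names are illustrative, not taken from the code above.

import org.ballerinalang.siddhi.core.SiddhiAppRuntime;
import org.ballerinalang.siddhi.core.event.Event;

public class StoreQuerySketch {
    // Hypothetical store query: StoreQueryParser.parse(...) resolves
    // "TradeAggregation" via aggregationMap and builds the matching
    // StoreQueryRuntime, applying the 'within'/'per' constraints.
    static Event[] readHourlyAggregates(SiddhiAppRuntime runtime) {
        return runtime.query(
                "from TradeAggregation " +
                "within \"2017-06-** **:**:**\" " +
                "per \"hours\" " +
                "select symbol, avgPrice, totalVolume;");
    }
}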

Example 4 with AggregationRuntime

Use of org.ballerinalang.siddhi.core.aggregation.AggregationRuntime in project ballerina by ballerina-lang.

The class EventTestCase, method testQueryParser.

@Test
public void testQueryParser() {
    StreamDefinition streamDefinition = StreamDefinition.id("cseEventStream").attribute("symbol", Attribute.Type.STRING).attribute("price", Attribute.Type.FLOAT).attribute("volume", Attribute.Type.INT);
    StreamDefinition outStreamDefinition = StreamDefinition.id("outputStream").attribute("symbol", Attribute.Type.STRING).attribute("price", Attribute.Type.FLOAT);
    Query query = new Query();
    query.annotation(Annotation.annotation("info").element("name", "query1"));
    query.from(InputStream.stream("cseEventStream").filter(Expression.compare(Expression.variable("volume"), Compare.Operator.NOT_EQUAL, Expression.value(50))));
    query.select(Selector.selector().select("symbol", Expression.variable("symbol")).select("price", Expression.variable("price")));
    query.insertInto("outputStream");
    Map<String, AbstractDefinition> tableDefinitionMap = new HashMap<>();
    Map<String, AbstractDefinition> windowDefinitionMap = new HashMap<>();
    Map<String, AbstractDefinition> aggregationDefinitionMap = new HashMap<>();
    Map<String, Table> tableMap = new HashMap<String, Table>();
    Map<String, Window> eventWindowMap = new HashMap<String, Window>();
    Map<String, AggregationRuntime> aggregationMap = new HashMap<String, AggregationRuntime>();
    Map<String, List<Source>> eventSourceMap = new HashMap<String, List<Source>>();
    Map<String, List<Sink>> eventSinkMap = new HashMap<String, List<Sink>>();
    Map<String, AbstractDefinition> streamDefinitionMap = new HashMap<String, AbstractDefinition>();
    LockSynchronizer lockSynchronizer = new LockSynchronizer();
    streamDefinitionMap.put("cseEventStream", streamDefinition);
    streamDefinitionMap.put("outputStream", outStreamDefinition);
    SiddhiContext siddhicontext = new SiddhiContext();
    SiddhiAppContext context = new SiddhiAppContext();
    context.setSiddhiContext(siddhicontext);
    context.setElementIdGenerator(new ElementIdGenerator(context.getName()));
    context.setSnapshotService(new SnapshotService(context));
    QueryRuntime runtime = QueryParser.parse(query, context, streamDefinitionMap, tableDefinitionMap, windowDefinitionMap, aggregationDefinitionMap, tableMap, aggregationMap, eventWindowMap, lockSynchronizer, "1");
    AssertJUnit.assertNotNull(runtime);
    AssertJUnit.assertTrue(runtime.getStreamRuntime() instanceof SingleStreamRuntime);
    AssertJUnit.assertNotNull(runtime.getSelector());
    AssertJUnit.assertTrue(runtime.getMetaComplexEvent() instanceof MetaStreamEvent);
}
Also used : Query(org.ballerinalang.siddhi.query.api.execution.query.Query) HashMap(java.util.HashMap) ElementIdGenerator(org.ballerinalang.siddhi.core.util.ElementIdGenerator) Source(org.ballerinalang.siddhi.core.stream.input.source.Source) SiddhiContext(org.ballerinalang.siddhi.core.config.SiddhiContext) Sink(org.ballerinalang.siddhi.core.stream.output.sink.Sink) QueryRuntime(org.ballerinalang.siddhi.core.query.QueryRuntime) LockSynchronizer(org.ballerinalang.siddhi.core.util.lock.LockSynchronizer) List(java.util.List) AggregationRuntime(org.ballerinalang.siddhi.core.aggregation.AggregationRuntime) Window(org.ballerinalang.siddhi.core.window.Window) SnapshotService(org.ballerinalang.siddhi.core.util.snapshot.SnapshotService) Table(org.ballerinalang.siddhi.core.table.Table) StreamDefinition(org.ballerinalang.siddhi.query.api.definition.StreamDefinition) SingleStreamRuntime(org.ballerinalang.siddhi.core.query.input.stream.single.SingleStreamRuntime) AbstractDefinition(org.ballerinalang.siddhi.query.api.definition.AbstractDefinition) SiddhiAppContext(org.ballerinalang.siddhi.core.config.SiddhiAppContext) MetaStreamEvent(org.ballerinalang.siddhi.core.event.stream.MetaStreamEvent) Test(org.testng.annotations.Test)

Example 5 with AggregationRuntime

Use of org.ballerinalang.siddhi.core.aggregation.AggregationRuntime in project ballerina by ballerina-lang.

The class SiddhiAppRuntimeBuilder, method defineAggregation.

public void defineAggregation(AggregationDefinition aggregationDefinition) {
    AggregationRuntime aggregationRuntime = AggregationParser.parse(aggregationDefinition, siddhiAppContext, streamDefinitionMap, tableDefinitionMap, windowDefinitionMap, aggregationDefinitionMap, tableMap, windowMap, aggregationMap, this);
    DefinitionParserHelper.validateDefinition(aggregationDefinition, streamDefinitionMap, tableDefinitionMap, windowDefinitionMap, aggregationDefinitionMap);
    aggregationDefinitionMap.putIfAbsent(aggregationDefinition.getId(), aggregationDefinition);
    ProcessStreamReceiver processStreamReceiver = aggregationRuntime.getSingleStreamRuntime().getProcessStreamReceiver();
    streamJunctionMap.get(processStreamReceiver.getStreamId()).subscribe(processStreamReceiver);
    aggregationMap.putIfAbsent(aggregationDefinition.getId(), aggregationRuntime);
}
Also used : ProcessStreamReceiver(org.ballerinalang.siddhi.core.query.input.ProcessStreamReceiver) AggregationRuntime(org.ballerinalang.siddhi.core.aggregation.AggregationRuntime)
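defineAggregation is invoked once per parsed AggregationDefinition while the app runtime is assembled. A rough sketch of that driving loop, assuming the query API exposes the parsed definitions as a map (simplified, not the literal builder code):

// Assumed flow: for every aggregation parsed out of the Siddhi app, the builder
// creates the AggregationRuntime and subscribes its receiver to the stream junction.
for (AggregationDefinition definition : siddhiApp.getAggregationDefinitionMap().values()) {
    siddhiAppRuntimeBuilder.defineAggregation(definition);
}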

Aggregations

AggregationRuntime (org.ballerinalang.siddhi.core.aggregation.AggregationRuntime): 5
MetaStreamEvent (org.ballerinalang.siddhi.core.event.stream.MetaStreamEvent): 3
VariableExpressionExecutor (org.ballerinalang.siddhi.core.executor.VariableExpressionExecutor): 3
Table (org.ballerinalang.siddhi.core.table.Table): 3
ArrayList (java.util.ArrayList): 2
ExpressionExecutor (org.ballerinalang.siddhi.core.executor.ExpressionExecutor): 2
SingleStreamRuntime (org.ballerinalang.siddhi.core.query.input.stream.single.SingleStreamRuntime): 2
Window (org.ballerinalang.siddhi.core.window.Window): 2
StreamDefinition (org.ballerinalang.siddhi.query.api.definition.StreamDefinition): 2
Expression (org.ballerinalang.siddhi.query.api.expression.Expression): 2
HashMap (java.util.HashMap): 1
List (java.util.List): 1
ReentrantLock (java.util.concurrent.locks.ReentrantLock): 1
IncrementalAggregationProcessor (org.ballerinalang.siddhi.core.aggregation.IncrementalAggregationProcessor): 1
IncrementalExecutor (org.ballerinalang.siddhi.core.aggregation.IncrementalExecutor): 1
RecreateInMemoryData (org.ballerinalang.siddhi.core.aggregation.RecreateInMemoryData): 1
SiddhiAppContext (org.ballerinalang.siddhi.core.config.SiddhiAppContext): 1
SiddhiContext (org.ballerinalang.siddhi.core.config.SiddhiContext): 1
StreamEventPool (org.ballerinalang.siddhi.core.event.stream.StreamEventPool): 1
SiddhiAppCreationException (org.ballerinalang.siddhi.core.exception.SiddhiAppCreationException): 1