use of org.wso2.siddhi.query.api.definition.AggregationDefinition in project siddhi by wso2.
the class AggregationParser method parse.
public static AggregationRuntime parse(AggregationDefinition aggregationDefinition, SiddhiAppContext siddhiAppContext, Map<String, AbstractDefinition> streamDefinitionMap, Map<String, AbstractDefinition> tableDefinitionMap, Map<String, AbstractDefinition> windowDefinitionMap, Map<String, AbstractDefinition> aggregationDefinitionMap, Map<String, Table> tableMap, Map<String, Window> windowMap, Map<String, AggregationRuntime> aggregationMap, SiddhiAppRuntimeBuilder siddhiAppRuntimeBuilder) {
if (aggregationDefinition == null) {
throw new SiddhiAppCreationException("AggregationDefinition instance is null. " + "Hence, can't create the siddhi app '" + siddhiAppContext.getName() + "'");
}
if (aggregationDefinition.getTimePeriod() == null) {
throw new SiddhiAppCreationException("AggregationDefinition '" + aggregationDefinition.getId() + "'s timePeriod is null. " + "Hence, can't create the siddhi app '" + siddhiAppContext.getName() + "'", aggregationDefinition.getQueryContextStartIndex(), aggregationDefinition.getQueryContextEndIndex());
}
if (aggregationDefinition.getSelector() == null) {
throw new SiddhiAppCreationException("AggregationDefinition '" + aggregationDefinition.getId() + "'s selection is not defined. " + "Hence, can't create the siddhi app '" + siddhiAppContext.getName() + "'", aggregationDefinition.getQueryContextStartIndex(), aggregationDefinition.getQueryContextEndIndex());
}
if (streamDefinitionMap.get(aggregationDefinition.getBasicSingleInputStream().getStreamId()) == null) {
throw new SiddhiAppCreationException("Stream " + aggregationDefinition.getBasicSingleInputStream().getStreamId() + " has not been defined");
}
try {
List<VariableExpressionExecutor> incomingVariableExpressionExecutors = new ArrayList<>();
String aggregatorName = aggregationDefinition.getId();
StreamRuntime streamRuntime = InputStreamParser.parse(aggregationDefinition.getBasicSingleInputStream(), siddhiAppContext, streamDefinitionMap, tableDefinitionMap, windowDefinitionMap, aggregationDefinitionMap, tableMap, windowMap, aggregationMap, incomingVariableExpressionExecutors, null, false, aggregatorName);
// Get original meta for later use.
MetaStreamEvent incomingMetaStreamEvent = (MetaStreamEvent) streamRuntime.getMetaComplexEvent();
// Create a new meta stream event.
// This must hold the timestamp, the group by attributes (if given) and the incremental attributes
// in the onAfterWindowData array.
// Example format: AGG_TIMESTAMP, groupByAttribute1, groupByAttribute2, AGG_incAttribute1, AGG_incAttribute2
// AGG_incAttribute1 and AGG_incAttribute2 would have the same attribute names as in
// finalListOfIncrementalAttributes.
// Initialize onAfterWindowData so that data can be entered into it.
incomingMetaStreamEvent.initializeAfterWindowData();
List<ExpressionExecutor> incomingExpressionExecutors = new ArrayList<>();
List<IncrementalAttributeAggregator> incrementalAttributeAggregators = new ArrayList<>();
List<Variable> groupByVariableList = aggregationDefinition.getSelector().getGroupByList();
boolean isProcessingOnExternalTime = aggregationDefinition.getAggregateAttribute() != null;
// Expressions to get the final aggregate outputs, e.g. avg = sum / count
List<Expression> outputExpressions = new ArrayList<>();
// Expression executors to get the final aggregate outputs, e.g. avg = sum / count
List<ExpressionExecutor> outputExpressionExecutors = new ArrayList<>();
populateIncomingAggregatorsAndExecutors(aggregationDefinition, siddhiAppContext, tableMap, incomingVariableExpressionExecutors, aggregatorName, incomingMetaStreamEvent, incomingExpressionExecutors, incrementalAttributeAggregators, groupByVariableList, outputExpressions);
int baseAggregatorBeginIndex = incomingMetaStreamEvent.getOutputData().size();
List<Expression> finalBaseAggregators = getFinalBaseAggregators(siddhiAppContext, tableMap, incomingVariableExpressionExecutors, aggregatorName, incomingMetaStreamEvent, incomingExpressionExecutors, incrementalAttributeAggregators);
StreamDefinition incomingOutputStreamDefinition = StreamDefinition.id("");
incomingOutputStreamDefinition.setQueryContextStartIndex(aggregationDefinition.getQueryContextStartIndex());
incomingOutputStreamDefinition.setQueryContextEndIndex(aggregationDefinition.getQueryContextEndIndex());
MetaStreamEvent processedMetaStreamEvent = new MetaStreamEvent();
for (Attribute attribute : incomingMetaStreamEvent.getOutputData()) {
incomingOutputStreamDefinition.attribute(attribute.getName(), attribute.getType());
processedMetaStreamEvent.addOutputData(attribute);
}
incomingMetaStreamEvent.setOutputDefinition(incomingOutputStreamDefinition);
processedMetaStreamEvent.addInputDefinition(incomingOutputStreamDefinition);
processedMetaStreamEvent.setOutputDefinition(incomingOutputStreamDefinition);
// Executors of processing meta
List<VariableExpressionExecutor> processVariableExpressionExecutors = new ArrayList<>();
boolean groupBy = aggregationDefinition.getSelector().getGroupByList().size() != 0;
List<ExpressionExecutor> processExpressionExecutors = constructProcessExpressionExecutors(siddhiAppContext, tableMap, aggregatorName, baseAggregatorBeginIndex, finalBaseAggregators, incomingOutputStreamDefinition, processedMetaStreamEvent, processVariableExpressionExecutors, groupBy);
outputExpressionExecutors.addAll(outputExpressions.stream().map(expression -> ExpressionParser.parseExpression(expression, processedMetaStreamEvent, 0, tableMap, processVariableExpressionExecutors, siddhiAppContext, groupBy, 0, aggregatorName)).collect(Collectors.toList()));
// Create group by key generator
GroupByKeyGenerator groupByKeyGenerator = null;
if (groupBy) {
groupByKeyGenerator = new GroupByKeyGenerator(groupByVariableList, processedMetaStreamEvent, SiddhiConstants.UNKNOWN_STATE, tableMap, processVariableExpressionExecutors, siddhiAppContext, aggregatorName);
}
// Create new scheduler
EntryValveExecutor entryValveExecutor = new EntryValveExecutor(siddhiAppContext);
LockWrapper lockWrapper = new LockWrapper(aggregatorName);
lockWrapper.setLock(new ReentrantLock());
Scheduler scheduler = SchedulerParser.parse(siddhiAppContext.getScheduledExecutorService(), entryValveExecutor, siddhiAppContext);
scheduler.init(lockWrapper, aggregatorName);
scheduler.setStreamEventPool(new StreamEventPool(processedMetaStreamEvent, 10));
QueryParserHelper.reduceMetaComplexEvent(incomingMetaStreamEvent);
QueryParserHelper.reduceMetaComplexEvent(processedMetaStreamEvent);
QueryParserHelper.updateVariablePosition(incomingMetaStreamEvent, incomingVariableExpressionExecutors);
QueryParserHelper.updateVariablePosition(processedMetaStreamEvent, processVariableExpressionExecutors);
List<TimePeriod.Duration> incrementalDurations = getSortedPeriods(aggregationDefinition.getTimePeriod());
Map<TimePeriod.Duration, Table> aggregationTables = initDefaultTables(aggregatorName, incrementalDurations, processedMetaStreamEvent.getOutputStreamDefinition(), siddhiAppRuntimeBuilder, aggregationDefinition.getAnnotations(), groupByVariableList);
int bufferSize = 0;
Element element = AnnotationHelper.getAnnotationElement(SiddhiConstants.ANNOTATION_BUFFER_SIZE, null, aggregationDefinition.getAnnotations());
if (element != null) {
try {
bufferSize = Integer.parseInt(element.getValue());
} catch (NumberFormatException e) {
throw new SiddhiAppCreationException(e.getMessage() + ": BufferSize must be an integer");
}
}
if (bufferSize > 0) {
TimePeriod.Duration rootDuration = incrementalDurations.get(0);
if (rootDuration == TimePeriod.Duration.MONTHS || rootDuration == TimePeriod.Duration.YEARS) {
throw new SiddhiAppCreationException("A buffer size greater than 0 can be provided, only when the " + "first duration value is seconds, minutes, hours or days");
}
if (!isProcessingOnExternalTime) {
// Buffer size is used to process out of order events. However, events would never be out of
// order if they are processed based on event arrival time.
throw new SiddhiAppCreationException("Buffer size cannot be specified when events are aggregated " + "based on event arrival time.");
}
} else if (bufferSize < 0) {
throw new SiddhiAppCreationException("Expected a positive integer as the buffer size, but found " + bufferSize + " as the provided value");
}
boolean ignoreEventsOlderThanBuffer = false;
element = AnnotationHelper.getAnnotationElement(SiddhiConstants.ANNOTATION_IGNORE_EVENTS_OLDER_THAN_BUFFER, null, aggregationDefinition.getAnnotations());
if (element != null) {
if (element.getValue().equalsIgnoreCase("true")) {
ignoreEventsOlderThanBuffer = true;
} else if (!element.getValue().equalsIgnoreCase("false")) {
throw new SiddhiAppCreationException("IgnoreEventsOlderThanBuffer value must " + "be true or false");
}
}
Map<TimePeriod.Duration, IncrementalExecutor> incrementalExecutorMap = buildIncrementalExecutors(isProcessingOnExternalTime, processedMetaStreamEvent, processExpressionExecutors, groupByKeyGenerator, bufferSize, ignoreEventsOlderThanBuffer, incrementalDurations, aggregationTables, siddhiAppContext, aggregatorName);
// Recreate in-memory data from tables
RecreateInMemoryData recreateInMemoryData = new RecreateInMemoryData(incrementalDurations, aggregationTables, incrementalExecutorMap, siddhiAppContext, processedMetaStreamEvent, tableMap, windowMap, aggregationMap);
IncrementalExecutor rootIncrementalExecutor = incrementalExecutorMap.get(incrementalDurations.get(0));
rootIncrementalExecutor.setScheduler(scheduler);
// Connect entry valve to root incremental executor
entryValveExecutor.setNextExecutor(rootIncrementalExecutor);
QueryParserHelper.initStreamRuntime(streamRuntime, incomingMetaStreamEvent, lockWrapper, aggregatorName);
LatencyTracker latencyTrackerFind = null;
LatencyTracker latencyTrackerInsert = null;
ThroughputTracker throughputTrackerFind = null;
ThroughputTracker throughputTrackerInsert = null;
if (siddhiAppContext.getStatisticsManager() != null) {
latencyTrackerFind = QueryParserHelper.createLatencyTracker(siddhiAppContext, aggregationDefinition.getId(), SiddhiConstants.METRIC_INFIX_WINDOWS, SiddhiConstants.METRIC_TYPE_FIND);
latencyTrackerInsert = QueryParserHelper.createLatencyTracker(siddhiAppContext, aggregationDefinition.getId(), SiddhiConstants.METRIC_INFIX_WINDOWS, SiddhiConstants.METRIC_TYPE_INSERT);
throughputTrackerFind = QueryParserHelper.createThroughputTracker(siddhiAppContext, aggregationDefinition.getId(), SiddhiConstants.METRIC_INFIX_WINDOWS, SiddhiConstants.METRIC_TYPE_FIND);
throughputTrackerInsert = QueryParserHelper.createThroughputTracker(siddhiAppContext, aggregationDefinition.getId(), SiddhiConstants.METRIC_INFIX_WINDOWS, SiddhiConstants.METRIC_TYPE_INSERT);
}
streamRuntime.setCommonProcessor(new IncrementalAggregationProcessor(rootIncrementalExecutor, incomingExpressionExecutors, processedMetaStreamEvent, latencyTrackerInsert, throughputTrackerInsert, siddhiAppContext));
List<ExpressionExecutor> baseExecutors = cloneExpressionExecutors(processExpressionExecutors);
ExpressionExecutor timestampExecutor = baseExecutors.remove(0);
return new AggregationRuntime(aggregationDefinition, incrementalExecutorMap, aggregationTables, ((SingleStreamRuntime) streamRuntime), entryValveExecutor, incrementalDurations, siddhiAppContext, baseExecutors, timestampExecutor, processedMetaStreamEvent, outputExpressionExecutors, latencyTrackerFind, throughputTrackerFind, recreateInMemoryData);
} catch (Throwable t) {
ExceptionUtil.populateQueryContext(t, aggregationDefinition, siddhiAppContext);
throw t;
}
}
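For context, here is a minimal sketch of how an AggregationDefinition consumed by this parse method is typically obtained from Siddhi QL, using SiddhiCompiler.parseAggregationDefinition as in the test case further down. The stream, attribute and aggregation names are hypothetical, not taken from the source above.
import org.wso2.siddhi.query.api.definition.AggregationDefinition;
import org.wso2.siddhi.query.compiler.SiddhiCompiler;

public class ParseAggregationSketch {
    public static void main(String[] args) {
        // Hypothetical stream/attribute names. Parsing only builds the definition;
        // AggregationParser.parse later turns it into an AggregationRuntime when
        // the enclosing Siddhi app runtime is constructed.
        AggregationDefinition definition = SiddhiCompiler.parseAggregationDefinition(
                "define aggregation TradeAggregation " +
                "from TradeStream " +
                "select symbol, avg(price) as avgPrice, sum(price) as totalPrice " +
                "group by symbol " +
                "aggregate by tradeTimestamp " +
                "every seconds, minutes, hours;");
        System.out.println(definition.getId()); // TradeAggregation
    }
}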
use of org.wso2.siddhi.query.api.definition.AggregationDefinition in project siddhi by wso2.
the class SingleInputStreamParser method initMetaStreamEvent.
/**
* Method to generate a MetaStreamEvent relevant to the given input stream. An empty definition will be created,
* and the definition and reference will be set accordingly in this method.
*
* @param inputStream InputStream
* @param streamDefinitionMap StreamDefinition Map
* @param tableDefinitionMap TableDefinition Map
* @param windowDefinitionMap WindowDefinition Map
* @param aggregationDefinitionMap AggregationDefinition Map
* @param metaStreamEvent MetaStreamEvent
*/
private static void initMetaStreamEvent(SingleInputStream inputStream, Map<String, AbstractDefinition> streamDefinitionMap, Map<String, AbstractDefinition> tableDefinitionMap, Map<String, AbstractDefinition> windowDefinitionMap, Map<String, AbstractDefinition> aggregationDefinitionMap, MetaStreamEvent metaStreamEvent) {
String streamId = inputStream.getStreamId();
if (!inputStream.isInnerStream() && windowDefinitionMap != null && windowDefinitionMap.containsKey(streamId)) {
AbstractDefinition inputDefinition = windowDefinitionMap.get(streamId);
if (!metaStreamEvent.getInputDefinitions().contains(inputDefinition)) {
metaStreamEvent.addInputDefinition(inputDefinition);
}
} else if (streamDefinitionMap != null && streamDefinitionMap.containsKey(streamId)) {
AbstractDefinition inputDefinition = streamDefinitionMap.get(streamId);
metaStreamEvent.addInputDefinition(inputDefinition);
} else if (!inputStream.isInnerStream() && tableDefinitionMap != null && tableDefinitionMap.containsKey(streamId)) {
AbstractDefinition inputDefinition = tableDefinitionMap.get(streamId);
metaStreamEvent.addInputDefinition(inputDefinition);
} else if (!inputStream.isInnerStream() && aggregationDefinitionMap != null && aggregationDefinitionMap.containsKey(streamId)) {
AbstractDefinition inputDefinition = aggregationDefinitionMap.get(streamId);
metaStreamEvent.addInputDefinition(inputDefinition);
} else {
throw new SiddhiAppCreationException("Stream/table/window/aggregation definition with ID '" + inputStream.getStreamId() + "' has not been defined", inputStream.getQueryContextStartIndex(), inputStream.getQueryContextEndIndex());
}
if ((inputStream.getStreamReferenceId() != null) && !(inputStream.getStreamId()).equals(inputStream.getStreamReferenceId())) {
// if ref id is provided
metaStreamEvent.setInputReferenceId(inputStream.getStreamReferenceId());
}
}
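A brief hedged illustration of the reference-id handling at the end of this method, assuming the InputStream.stream(referenceId, streamId) factory of the query API; the stream and reference names are hypothetical.
import org.wso2.siddhi.query.api.execution.query.input.stream.InputStream;
import org.wso2.siddhi.query.api.execution.query.input.stream.SingleInputStream;

public class ReferenceIdSketch {
    public static void main(String[] args) {
        // Equivalent of "from StockStream as S": the stream id and the reference id
        // differ, so initMetaStreamEvent would set the meta stream event's input
        // reference id to "S".
        SingleInputStream inputStream = InputStream.stream("S", "StockStream");
        System.out.println(inputStream.getStreamId());          // StockStream
        System.out.println(inputStream.getStreamReferenceId()); // S
    }
}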
use of org.wso2.siddhi.query.api.definition.AggregationDefinition in project siddhi by wso2.
the class DefinitionParserHelper method validateDefinition.
public static void validateDefinition(AbstractDefinition definition, ConcurrentMap<String, AbstractDefinition> streamDefinitionMap, ConcurrentMap<String, AbstractDefinition> tableDefinitionMap, ConcurrentMap<String, AbstractDefinition> windowDefinitionMap, ConcurrentMap<String, AbstractDefinition> aggregationDefinitionMap) {
AbstractDefinition existingTableDefinition = tableDefinitionMap.get(definition.getId());
if (existingTableDefinition != null && (!existingTableDefinition.equals(definition) || definition instanceof StreamDefinition)) {
throw new DuplicateDefinitionException("Table Definition with same Stream Id '" + definition.getId() + "' already exist : " + existingTableDefinition + ", hence cannot add " + definition, definition.getQueryContextStartIndex(), definition.getQueryContextEndIndex());
}
AbstractDefinition existingStreamDefinition = streamDefinitionMap.get(definition.getId());
if (existingStreamDefinition != null && (!existingStreamDefinition.equals(definition) || definition instanceof TableDefinition)) {
throw new DuplicateDefinitionException("Stream Definition with same Stream Id '" + definition.getId() + "' already exist : " + existingStreamDefinition + ", hence cannot add " + definition, definition.getQueryContextStartIndex(), definition.getQueryContextEndIndex());
}
AbstractDefinition existingWindowDefinition = windowDefinitionMap.get(definition.getId());
if (existingWindowDefinition != null && (!existingWindowDefinition.equals(definition) || definition instanceof WindowDefinition)) {
throw new DuplicateDefinitionException("Window Definition with same Window Id '" + definition.getId() + "' already exist : " + existingWindowDefinition + ", hence cannot add " + definition, definition.getQueryContextStartIndex(), definition.getQueryContextEndIndex());
}
AbstractDefinition existingAggregationDefinition = aggregationDefinitionMap.get(definition.getId());
if (existingAggregationDefinition != null && (!existingAggregationDefinition.equals(definition) || definition instanceof AggregationDefinition)) {
throw new DuplicateDefinitionException("Aggregation Definition with same Aggregation Id '" + definition.getId() + "' already exist : " + existingWindowDefinition + ", hence cannot add " + definition, definition.getQueryContextStartIndex(), definition.getQueryContextEndIndex());
}
}
use of org.wso2.siddhi.query.api.definition.AggregationDefinition in project siddhi by wso2.
the class DefineAggregationTestCase method test2.
@Test
public void test2() throws SiddhiParserException {
AggregationDefinition aggregationDefinitionQuery = SiddhiCompiler.parseAggregationDefinition("define aggregation StockAggregationDefinition " + "from StockStream " + "select StockStream.timestamp, StockStream.symbol as symbol, " + " StockStream.price as price " + " group by StockStream.symbol " + "aggregate by timestamp " + "every seconds, minutes, hours ;");
AggregationDefinition aggregationDefinition = AggregationDefinition.id("StockAggregationDefinition").from(InputStream.stream("StockStream")).select(Selector.basicSelector().select(Expression.variable("timestamp").ofStream("StockStream")).select("symbol", Expression.variable("symbol").ofStream("StockStream")).select("price", Expression.variable("price").ofStream("StockStream")).groupBy(Expression.variable("symbol").ofStream("StockStream"))).aggregateBy(Expression.variable("timestamp")).every(TimePeriod.interval(TimePeriod.Duration.SECONDS, TimePeriod.Duration.MINUTES, TimePeriod.Duration.HOURS));
AssertJUnit.assertEquals(aggregationDefinition, aggregationDefinitionQuery);
}
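As a possible follow-on to this test (a sketch, not taken from the test class itself): the parsed definition can be attached to a SiddhiApp via defineAggregation, which is how AggregationParser.parse, shown above, eventually gets to process it when the runtime is built. The app name is hypothetical.
import org.wso2.siddhi.query.api.SiddhiApp;
import org.wso2.siddhi.query.api.definition.AggregationDefinition;
import org.wso2.siddhi.query.compiler.SiddhiCompiler;

public class DefineAggregationSketch {
    public static void main(String[] args) {
        AggregationDefinition aggregation = SiddhiCompiler.parseAggregationDefinition(
                "define aggregation StockAggregationDefinition " +
                "from StockStream " +
                "select StockStream.timestamp, StockStream.symbol as symbol, StockStream.price as price " +
                "group by StockStream.symbol " +
                "aggregate by timestamp " +
                "every seconds, minutes, hours;");
        // The StockStream definition would also have to be added for the app to be
        // runnable; this only shows where the aggregation definition is registered.
        SiddhiApp siddhiApp = SiddhiApp.siddhiApp("StockApp").defineAggregation(aggregation);
    }
}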
use of org.wso2.siddhi.query.api.definition.AggregationDefinition in project siddhi by wso2.
the class SiddhiApp method checkDuplicateDefinition.
private void checkDuplicateDefinition(AbstractDefinition definition) {
TableDefinition existingTableDefinition = tableDefinitionMap.get(definition.getId());
if (existingTableDefinition != null && (!existingTableDefinition.equals(definition) || definition instanceof StreamDefinition)) {
throw new DuplicateDefinitionException("Table Definition with same Stream Id '" + definition.getId() + "' already exist : " + existingTableDefinition + ", hence cannot add " + definition, definition.getQueryContextStartIndex(), definition.getQueryContextEndIndex());
}
StreamDefinition existingStreamDefinition = streamDefinitionMap.get(definition.getId());
if (existingStreamDefinition != null && (!existingStreamDefinition.equals(definition) || definition instanceof TableDefinition)) {
throw new DuplicateDefinitionException("Stream Definition with same Stream Id '" + definition.getId() + "' already exist : " + existingStreamDefinition + ", hence cannot add " + definition, definition.getQueryContextStartIndex(), definition.getQueryContextEndIndex());
}
WindowDefinition existingWindowDefinition = windowDefinitionMap.get(definition.getId());
if (existingWindowDefinition != null && (!existingWindowDefinition.equals(definition) || definition instanceof WindowDefinition)) {
throw new DuplicateDefinitionException("Stream Definition with same Window Id '" + definition.getId() + "' already exist : " + existingWindowDefinition + ", hence cannot add " + definition, definition.getQueryContextStartIndex(), definition.getQueryContextEndIndex());
}
AggregationDefinition existingAggregationDefinition = aggregationDefinitionMap.get(definition.getId());
if (existingAggregationDefinition != null && (!existingAggregationDefinition.equals(definition) || definition instanceof AggregationDefinition)) {
throw new DuplicateDefinitionException("Aggregate Definition with same Aggregate Id '" + definition.getId() + "' already exist : " + existingAggregationDefinition + ", hence cannot add " + definition, definition.getQueryContextStartIndex(), definition.getQueryContextEndIndex());
}
}
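A small hedged sketch of how this check surfaces to users of the query API: reusing an id across a stream and a table should raise a DuplicateDefinitionException. The app, stream and attribute names here are hypothetical.
import org.wso2.siddhi.query.api.SiddhiApp;
import org.wso2.siddhi.query.api.definition.Attribute;
import org.wso2.siddhi.query.api.definition.StreamDefinition;
import org.wso2.siddhi.query.api.definition.TableDefinition;
import org.wso2.siddhi.query.api.exception.DuplicateDefinitionException;

public class DuplicateDefinitionSketch {
    public static void main(String[] args) {
        SiddhiApp siddhiApp = SiddhiApp.siddhiApp("DuplicateIdApp")
                .defineStream(StreamDefinition.id("StockStream")
                        .attribute("symbol", Attribute.Type.STRING));
        try {
            // Same id reused for a table: checkDuplicateDefinition finds the
            // existing stream definition and rejects the table definition.
            siddhiApp.defineTable(TableDefinition.id("StockStream")
                    .attribute("symbol", Attribute.Type.STRING));
        } catch (DuplicateDefinitionException e) {
            System.out.println("Rejected: " + e.getMessage());
        }
    }
}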