use of io.siddhi.core.query.processor.Processor in project siddhi by siddhi-io.
the class AggregationParser method buildIncrementalExecutors.
private static Map<TimePeriod.Duration, Executor> buildIncrementalExecutors(
        MetaStreamEvent processedMetaStreamEvent,
        Map<TimePeriod.Duration, List<ExpressionExecutor>> processExpressionExecutorsMap,
        Map<TimePeriod.Duration, GroupByKeyGenerator> groupByKeyGeneratorList,
        List<TimePeriod.Duration> incrementalDurations,
        Map<TimePeriod.Duration, Table> aggregationTables,
        SiddhiQueryContext siddhiQueryContext, String aggregatorName,
        ExpressionExecutor shouldUpdateTimestamp, String timeZone, boolean isPersistedAggregation,
        StreamDefinition incomingOutputStreamDefinition, boolean isDistributed, String shardId,
        boolean isProcessingOnExternalTime, AggregationDefinition aggregationDefinition,
        ConfigManager configManager, List<Variable> groupByVariableList, boolean isReadOnly) {
    Map<TimePeriod.Duration, Executor> incrementalExecutorMap = new HashMap<>();
    Map<TimePeriod.Duration, Processor> cudProcessors = new HashMap<>();
    // Create incremental executors
    Executor child;
    Executor root = null;
    if (isPersistedAggregation) {
        if (!isReadOnly) {
            cudProcessors = initAggregateQueryExecutor(incrementalDurations,
                    processExpressionExecutorsMap, incomingOutputStreamDefinition, isDistributed,
                    shardId, isProcessingOnExternalTime, siddhiQueryContext, aggregationDefinition,
                    configManager, aggregationTables, groupByVariableList);
        }
        CudStreamProcessorQueueManager queueManager = new CudStreamProcessorQueueManager();
        // Initialize the CUD stream processor queue
        LinkedBlockingQueue<QueuedCudStreamProcessor> cudStreamProcessorQueue =
                queueManager.initializeAndGetCudStreamProcessorQueue();
        siddhiQueryContext.getSiddhiAppContext().getExecutorService().execute(queueManager);
        for (int i = incrementalDurations.size() - 1; i >= 0; i--) {
            // Base incremental expression executors are created using the new meta.
            // Add an entry to the aggregationTables map in order to fill in the missing durations.
            aggregationTables.putIfAbsent(incrementalDurations.get(i), null);
            boolean isRoot = i == 0;
            child = root;
            TimePeriod.Duration duration = incrementalDurations.get(i);
            Executor incrementalExecutor;
            if (duration == TimePeriod.Duration.SECONDS || duration == TimePeriod.Duration.MINUTES
                    || duration == TimePeriod.Duration.HOURS) {
                incrementalExecutor = new IncrementalExecutor(aggregatorName, duration,
                        processExpressionExecutorsMap.get(duration), shouldUpdateTimestamp,
                        groupByKeyGeneratorList.get(duration), isRoot, aggregationTables.get(duration),
                        child, siddhiQueryContext, processedMetaStreamEvent, timeZone,
                        duration.equals(TimePeriod.Duration.HOURS));
            } else {
                incrementalExecutor = new PersistedIncrementalExecutor(aggregatorName, duration,
                        processExpressionExecutorsMap.get(duration), child, siddhiQueryContext,
                        generateCUDMetaStreamEvent(isProcessingOnExternalTime), timeZone,
                        cudProcessors.get(duration), cudStreamProcessorQueue);
            }
            incrementalExecutorMap.put(duration, incrementalExecutor);
            root = incrementalExecutor;
        }
    } else {
        for (int i = incrementalDurations.size() - 1; i >= 0; i--) {
            // Base incremental expression executors are created using the new meta
            boolean isRoot = i == 0;
            child = root;
            TimePeriod.Duration duration = incrementalDurations.get(i);
            IncrementalExecutor incrementalExecutor = new IncrementalExecutor(aggregatorName, duration,
                    processExpressionExecutorsMap.get(duration), shouldUpdateTimestamp,
                    groupByKeyGeneratorList.get(duration), isRoot, aggregationTables.get(duration),
                    child, siddhiQueryContext, processedMetaStreamEvent, timeZone, false);
            incrementalExecutorMap.put(duration, incrementalExecutor);
            root = incrementalExecutor;
        }
    }
    return incrementalExecutorMap;
}
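The reverse iteration above is what links the executor chain: durations are walked from the coarsest (last) to the finest (first), so each new executor receives the previously built, coarser executor as its child, and the finest-granularity executor ends up as the root that receives incoming events. A minimal, self-contained sketch of that linking pattern, using a hypothetical Exec type rather than Siddhi's IncrementalExecutor:

import java.util.Arrays;
import java.util.List;

public class ChainSketch {

    // Stand-in for an incremental executor: a duration label plus the next (coarser) link.
    static final class Exec {
        final String duration;
        final Exec child;

        Exec(String duration, Exec child) {
            this.duration = duration;
            this.child = child;
        }
    }

    public static void main(String[] args) {
        List<String> durations = Arrays.asList("SECONDS", "MINUTES", "HOURS", "DAYS");
        Exec root = null;
        for (int i = durations.size() - 1; i >= 0; i--) {
            // The executor built in the previous iteration (coarser) becomes the child.
            root = new Exec(durations.get(i), root);
        }
        // Prints: SECONDS -> MINUTES -> HOURS -> DAYS
        for (Exec e = root; e != null; e = e.child) {
            System.out.print(e.duration + (e.child != null ? " -> " : "\n"));
        }
    }
}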
use of io.siddhi.core.query.processor.Processor in project siddhi by siddhi-io.
the class SingleInputStreamParser method parseInputStream.
/**
 * Parse a single InputStream and return a SingleStreamRuntime.
 *
 * @param inputStream                 single input stream to be parsed
 * @param variableExpressionExecutors list to hold VariableExpressionExecutors to update after query parsing
 * @param streamDefinitionMap         stream definition map
 * @param tableDefinitionMap          table definition map
 * @param windowDefinitionMap         window definition map
 * @param aggregationDefinitionMap    aggregation definition map
 * @param tableMap                    table map
 * @param metaComplexEvent            MetaComplexEvent
 * @param processStreamReceiver       ProcessStreamReceiver
 * @param supportsBatchProcessing     whether batch processing is supported
 * @param outputExpectsExpiredEvents  whether expired events are sent as output
 * @param findToBeExecuted            whether find will be executed on the stream stores
 * @param multiValue                  whether the event can produce multiple values
 * @param siddhiQueryContext          Siddhi query context
 * @return SingleStreamRuntime
 */
public static SingleStreamRuntime parseInputStream(SingleInputStream inputStream,
        List<VariableExpressionExecutor> variableExpressionExecutors,
        Map<String, AbstractDefinition> streamDefinitionMap,
        Map<String, AbstractDefinition> tableDefinitionMap,
        Map<String, AbstractDefinition> windowDefinitionMap,
        Map<String, AbstractDefinition> aggregationDefinitionMap,
        Map<String, Table> tableMap, MetaComplexEvent metaComplexEvent,
        ProcessStreamReceiver processStreamReceiver, boolean supportsBatchProcessing,
        boolean outputExpectsExpiredEvents, boolean findToBeExecuted, boolean multiValue,
        SiddhiQueryContext siddhiQueryContext) {
    Processor processor = null;
    EntryValveProcessor entryValveProcessor = null;
    ProcessingMode processingMode = ProcessingMode.BATCH;
    boolean first = true;
    MetaStreamEvent metaStreamEvent;
    if (metaComplexEvent instanceof MetaStateEvent) {
        metaStreamEvent = new MetaStreamEvent();
        ((MetaStateEvent) metaComplexEvent).addEvent(metaStreamEvent);
        initMetaStreamEvent(inputStream, streamDefinitionMap, tableDefinitionMap,
                windowDefinitionMap, aggregationDefinitionMap, multiValue, metaStreamEvent);
    } else {
        metaStreamEvent = (MetaStreamEvent) metaComplexEvent;
        initMetaStreamEvent(inputStream, streamDefinitionMap, tableDefinitionMap,
                windowDefinitionMap, aggregationDefinitionMap, multiValue, metaStreamEvent);
    }
    // A window cannot be defined for a window stream
    if (!inputStream.getStreamHandlers().isEmpty() && windowDefinitionMap != null
            && windowDefinitionMap.containsKey(inputStream.getStreamId())) {
        for (StreamHandler handler : inputStream.getStreamHandlers()) {
            if (handler instanceof Window) {
                throw new OperationNotSupportedException("Cannot create "
                        + ((Window) handler).getName() + " window for the window stream "
                        + inputStream.getStreamId());
            }
        }
    }
    if (!inputStream.getStreamHandlers().isEmpty()) {
        for (StreamHandler handler : inputStream.getStreamHandlers()) {
            Processor currentProcessor = generateProcessor(handler, metaComplexEvent,
                    variableExpressionExecutors, tableMap, supportsBatchProcessing,
                    outputExpectsExpiredEvents, findToBeExecuted, siddhiQueryContext);
            if (currentProcessor instanceof SchedulingProcessor) {
                if (entryValveProcessor == null) {
                    entryValveProcessor = new EntryValveProcessor(siddhiQueryContext.getSiddhiAppContext());
                    if (first) {
                        processor = entryValveProcessor;
                        first = false;
                    } else {
                        processor.setToLast(entryValveProcessor);
                    }
                }
                Scheduler scheduler = SchedulerParser.parse(entryValveProcessor, siddhiQueryContext);
                ((SchedulingProcessor) currentProcessor).setScheduler(scheduler);
            }
            if (currentProcessor instanceof AbstractStreamProcessor) {
                processingMode = ProcessingMode.findUpdatedProcessingMode(processingMode,
                        ((AbstractStreamProcessor) currentProcessor).getProcessingMode());
            }
            if (first) {
                processor = currentProcessor;
                first = false;
            } else {
                processor.setToLast(currentProcessor);
            }
        }
    }
    metaStreamEvent.initializeOnAfterWindowData();
    return new SingleStreamRuntime(processStreamReceiver, processor, processingMode, metaComplexEvent);
}
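Each generated processor is appended with processor.setToLast(currentProcessor), which keeps processor pointing at the head of the chain while new handlers are attached at the tail. A minimal sketch of that append pattern, assuming a simplified SimpleProcessor type (the real io.siddhi.core.query.processor.Processor interface carries more methods than shown here):

public class ProcessorChainSketch {

    static class SimpleProcessor {
        final String name;
        SimpleProcessor next;

        SimpleProcessor(String name) {
            this.name = name;
        }

        SimpleProcessor getNextProcessor() {
            return next;
        }

        // Walk to the tail of the chain and append the given processor there.
        void setToLast(SimpleProcessor processor) {
            if (next == null) {
                next = processor;
            } else {
                next.setToLast(processor);
            }
        }
    }

    public static void main(String[] args) {
        SimpleProcessor head = new SimpleProcessor("filter");
        head.setToLast(new SimpleProcessor("window"));
        head.setToLast(new SimpleProcessor("select"));
        // Prints: filter -> window -> select
        for (SimpleProcessor p = head; p != null; p = p.getNextProcessor()) {
            System.out.print(p.name + (p.getNextProcessor() != null ? " -> " : "\n"));
        }
    }
}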
use of io.siddhi.core.query.processor.Processor in project siddhi by siddhi-io.
the class AggregationParser method initAggregateQueryExecutor.
private static Map<TimePeriod.Duration, Processor> initAggregateQueryExecutor(
        List<TimePeriod.Duration> aggregationDurations,
        Map<TimePeriod.Duration, List<ExpressionExecutor>> processExpressionExecutorsMap,
        StreamDefinition incomingOutputStreamDefinition, boolean isDistributed, String shardID,
        boolean isProcessingOnExternalTime, SiddhiQueryContext siddhiQueryContext,
        AggregationDefinition aggregationDefinition, ConfigManager configManager,
        Map<TimePeriod.Duration, Table> aggregationTables, List<Variable> groupByVariableList) {
    Map<TimePeriod.Duration, Processor> cudProcessors = new LinkedHashMap<>();
    String datasourceName = AnnotationHelper.getAnnotationElement(SiddhiConstants.NAMESPACE_STORE,
            "datasource", aggregationDefinition.getAnnotations()).getValue();
    if (datasourceName == null || datasourceName.isEmpty()) {
        throw new SiddhiAppCreationException("Datasource configuration must be provided "
                + "in order to use persisted aggregation mode");
    }
    Database databaseType = getDatabaseType(configManager, datasourceName);
    if (log.isDebugEnabled()) {
        log.debug("Database type " + databaseType);
    }
    SiddhiAppContext cudSiddhiAppContext = new SiddhiAppContext();
    SiddhiContext context = new SiddhiContext();
    context.setConfigManager(configManager);
    cudSiddhiAppContext.setSiddhiContext(context);
    StringConstant datasource = new StringConstant(datasourceName);
    ConstantExpressionExecutor datasourceExecutor =
            new ConstantExpressionExecutor(datasource.getValue(), Attribute.Type.STRING);
    Expression[] streamHandler;
    ExpressionExecutor[] cudStreamProcessorInputVariables;
    if (isProcessingOnExternalTime) {
        streamHandler = new Expression[7];
    } else {
        streamHandler = new Expression[5];
    }
    try {
        DBAggregationQueryConfigurationEntry dbAggregationQueryConfigurationEntry =
                DBAggregationQueryUtil.lookupCurrentQueryConfigurationEntry(databaseType);
        if (log.isDebugEnabled()) {
            log.debug("CUD queries for aggregation " + aggregationDefinition.getId());
        }
        for (int i = aggregationDurations.size() - 1; i > 0; i--) {
            if (aggregationDurations.get(i).ordinal() >= 3) {
                if (log.isDebugEnabled()) {
                    log.debug("Initializing cudProcessors for duration " + aggregationDurations.get(i));
                }
                String databaseSelectQuery = generateDatabaseQuery(
                        processExpressionExecutorsMap.get(aggregationDurations.get(i)),
                        dbAggregationQueryConfigurationEntry, incomingOutputStreamDefinition,
                        isDistributed, shardID, isProcessingOnExternalTime,
                        aggregationTables.get(aggregationDurations.get(i)),
                        aggregationTables.get(aggregationDurations.get(i - 1)),
                        groupByVariableList, aggregationDurations.get(i));
                StringConstant selectQuery = new StringConstant(databaseSelectQuery);
                if (log.isDebugEnabled()) {
                    log.debug(selectQuery);
                }
                ConstantExpressionExecutor selectExecutor =
                        new ConstantExpressionExecutor(selectQuery.getValue(), Attribute.Type.STRING);
                Map<Attribute, int[]> cudInputStreamAttributesMap =
                        generateCUDInputStreamAttributes(isProcessingOnExternalTime);
                if (isProcessingOnExternalTime) {
                    cudStreamProcessorInputVariables = new ExpressionExecutor[7];
                } else {
                    cudStreamProcessorInputVariables = new ExpressionExecutor[5];
                }
                cudStreamProcessorInputVariables[0] = datasourceExecutor;
                cudStreamProcessorInputVariables[1] = selectExecutor;
                streamHandler[0] = datasource;
                streamHandler[1] = selectQuery;
                MetaStreamEvent metaStreamEvent = generateCUDMetaStreamEvent(isProcessingOnExternalTime);
                StreamDefinition outputStream = new StreamDefinition();
                VariableExpressionExecutor variableExpressionExecutor;
                int j = 0;
                for (Map.Entry<Attribute, int[]> entry : cudInputStreamAttributesMap.entrySet()) {
                    Attribute attribute = entry.getKey();
                    Variable timestampVariable = new Variable(attribute.getName());
                    for (int position : entry.getValue()) {
                        streamHandler[position + 2] = timestampVariable;
                        variableExpressionExecutor = new VariableExpressionExecutor(attribute, 0, 0);
                        variableExpressionExecutor.setPosition(new int[] { 2, j });
                        cudStreamProcessorInputVariables[position + 2] = variableExpressionExecutor;
                    }
                    outputStream.attribute(attribute.getName(), attribute.getType());
                    j++;
                }
                StreamFunction cudStreamFunction =
                        new StreamFunction(NAMESPACE_RDBMS, FUNCTION_NAME_CUD, streamHandler);
                cudProcessors.put(aggregationDurations.get(i), getCudProcessor(cudStreamFunction,
                        siddhiQueryContext, metaStreamEvent, cudStreamProcessorInputVariables,
                        aggregationDurations.get(i)));
            }
        }
        return cudProcessors;
    } catch (CannotLoadConfigurationException e) {
        throw new SiddhiAppCreationException("Error occurred while initializing the persisted "
                + "incremental aggregation. Could not load the db queries for database type "
                + databaseType);
    }
}
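The ordinal() >= 3 guard mirrors the branch in buildIncrementalExecutors: assuming TimePeriod.Duration declares its constants in the order SECONDS, MINUTES, HOURS, DAYS, MONTHS, YEARS, durations of DAYS and coarser (ordinal 3 and above) get persisted CUD processors, while finer durations stay with the in-memory IncrementalExecutor. A standalone sketch with a local enum standing in for TimePeriod.Duration:

public class DurationFilterSketch {

    // Local stand-in declared in the assumed order of the Siddhi query API enum.
    enum Duration { SECONDS, MINUTES, HOURS, DAYS, MONTHS, YEARS }

    public static void main(String[] args) {
        for (Duration d : Duration.values()) {
            // DAYS has ordinal 3, so the guard selects DAYS, MONTHS, and YEARS,
            // the durations handled by PersistedIncrementalExecutor above.
            System.out.println(d + " -> persisted CUD processor: " + (d.ordinal() >= 3));
        }
    }
}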
use of io.siddhi.core.query.processor.Processor in project siddhi by wso2.
the class QueryParserHelper method initSingleStreamRuntime.
private static void initSingleStreamRuntime(SingleStreamRuntime singleStreamRuntime,
        int streamEventChainIndex, MetaComplexEvent metaComplexEvent,
        StateEventFactory stateEventFactory, LockWrapper lockWrapper, String queryName) {
    MetaStreamEvent metaStreamEvent;
    if (metaComplexEvent instanceof MetaStateEvent) {
        metaStreamEvent = ((MetaStateEvent) metaComplexEvent).getMetaStreamEvent(streamEventChainIndex);
    } else {
        metaStreamEvent = (MetaStreamEvent) metaComplexEvent;
    }
    StreamEventFactory streamEventFactory = new StreamEventFactory(metaStreamEvent);
    ProcessStreamReceiver processStreamReceiver = singleStreamRuntime.getProcessStreamReceiver();
    processStreamReceiver.setMetaStreamEvent(metaStreamEvent);
    processStreamReceiver.setStreamEventFactory(streamEventFactory);
    processStreamReceiver.setLockWrapper(lockWrapper);
    processStreamReceiver.init();
    Processor processor = singleStreamRuntime.getProcessorChain();
    while (processor != null) {
        if (processor instanceof SchedulingProcessor) {
            ((SchedulingProcessor) processor).getScheduler().setStreamEventFactory(streamEventFactory);
            ((SchedulingProcessor) processor).getScheduler().init(lockWrapper, queryName);
        }
        if (processor instanceof AbstractStreamProcessor) {
            ((AbstractStreamProcessor) processor).setStreamEventCloner(
                    new StreamEventCloner(metaStreamEvent, streamEventFactory));
            ((AbstractStreamProcessor) processor).constructStreamEventPopulater(
                    metaStreamEvent, streamEventChainIndex);
        }
        if (stateEventFactory != null && processor instanceof JoinProcessor) {
            if (((JoinProcessor) processor).getCompiledCondition()
                    instanceof IncrementalAggregateCompileCondition) {
                IncrementalAggregateCompileCondition compiledCondition =
                        (IncrementalAggregateCompileCondition) ((JoinProcessor) processor).getCompiledCondition();
                compiledCondition.init();
                ComplexEventPopulater complexEventPopulater = StreamEventPopulaterFactory
                        .constructEventPopulator(metaStreamEvent, 0, compiledCondition.getAdditionalAttributes());
                compiledCondition.setComplexEventPopulater(complexEventPopulater);
            }
            ((JoinProcessor) processor).setStateEventFactory(stateEventFactory);
        }
        if (stateEventFactory != null && processor instanceof StreamPreStateProcessor) {
            ((StreamPreStateProcessor) processor).setStateEventFactory(stateEventFactory);
            ((StreamPreStateProcessor) processor).setStreamEventFactory(streamEventFactory);
            ((StreamPreStateProcessor) processor).setStreamEventCloner(
                    new StreamEventCloner(metaStreamEvent, streamEventFactory));
            if (metaComplexEvent instanceof MetaStateEvent) {
                ((StreamPreStateProcessor) processor).setStateEventCloner(
                        new StateEventCloner(((MetaStateEvent) metaComplexEvent), stateEventFactory));
            }
        }
        processor = processor.getNextProcessor();
    }
}
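The while loop above visits every node of the singly linked processor chain via getNextProcessor and applies type-specific wiring through instanceof checks. A minimal, self-contained sketch of that traversal pattern, using hypothetical Proc and SchedulableProc types rather than the Siddhi API:

public class ChainInitSketch {

    static class Proc {
        Proc next;

        Proc getNextProcessor() {
            return next;
        }
    }

    static class SchedulableProc extends Proc {
        void initScheduler(String queryName) {
            System.out.println("scheduler initialized for " + queryName);
        }
    }

    // Walk the chain once; only processors that need extra wiring receive it,
    // mirroring the SchedulingProcessor branch above.
    static void initChain(Proc head, String queryName) {
        for (Proc p = head; p != null; p = p.getNextProcessor()) {
            if (p instanceof SchedulableProc) {
                ((SchedulableProc) p).initScheduler(queryName);
            }
        }
    }

    public static void main(String[] args) {
        Proc plain = new Proc();
        plain.next = new SchedulableProc();
        initChain(plain, "query1"); // prints one line for the schedulable processor
    }
}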