Use of io.siddhi.core.query.processor.ProcessingMode in project siddhi by wso2.
The class SelectorParser, method parse.
/**
* Parse Selector portion of a query and return corresponding QuerySelector.
*
* @param selector selector to be parsed
* @param outputStream output stream
* @param metaComplexEvent Meta event used to collect execution info of the stream associated with the query
* @param tableMap Table Map
* @param variableExpressionExecutors variable expression executors
* @param metaPosition helps to identify the meta position of aggregates
* @param processingMode processing mode of the query
* @param outputExpectsExpiredEvents whether expired events are sent as output
* @param siddhiQueryContext current siddhi query context
* @return QuerySelector
*/
public static QuerySelector parse(Selector selector, OutputStream outputStream, MetaComplexEvent metaComplexEvent,
                                  Map<String, Table> tableMap,
                                  List<VariableExpressionExecutor> variableExpressionExecutors, int metaPosition,
                                  ProcessingMode processingMode, boolean outputExpectsExpiredEvents,
                                  SiddhiQueryContext siddhiQueryContext) {
    boolean currentOn = false;
    boolean expiredOn = false;
    String id = null;
    // Decide which event types (current, expired, or both) the output stream emits.
    if (outputStream.getOutputEventType() == OutputStream.OutputEventType.CURRENT_EVENTS ||
            outputStream.getOutputEventType() == OutputStream.OutputEventType.ALL_EVENTS) {
        currentOn = true;
    }
    if (outputStream.getOutputEventType() == OutputStream.OutputEventType.EXPIRED_EVENTS ||
            outputStream.getOutputEventType() == OutputStream.OutputEventType.ALL_EVENTS) {
        expiredOn = true;
    }
    boolean groupBy = !selector.getGroupByList().isEmpty();
    id = outputStream.getId();
    containsAggregatorThreadLocal.remove();
    QuerySelector querySelector = new QuerySelector(id, selector, currentOn, expiredOn, siddhiQueryContext);
    // Build the attribute processors for the select clause and record whether any of them aggregate.
    List<AttributeProcessor> attributeProcessors = getAttributeProcessors(selector, id, metaComplexEvent, tableMap,
            variableExpressionExecutors, outputStream, metaPosition, processingMode, outputExpectsExpiredEvents,
            groupBy, siddhiQueryContext);
    querySelector.setAttributeProcessorList(attributeProcessors,
            "true".equals(containsAggregatorThreadLocal.get()));
    containsAggregatorThreadLocal.remove();
    ConditionExpressionExecutor havingCondition = generateHavingExecutor(selector.getHavingExpression(),
            metaComplexEvent, tableMap, variableExpressionExecutors, siddhiQueryContext);
    querySelector.setHavingConditionExecutor(havingCondition,
            "true".equals(containsAggregatorThreadLocal.get()));
    containsAggregatorThreadLocal.remove();
    if (!selector.getGroupByList().isEmpty()) {
        List<Expression> groupByExpressionList = selector.getGroupByList().stream()
                .map(groupByVariable -> (Expression) groupByVariable)
                .collect(Collectors.toList());
        querySelector.setGroupByKeyGenerator(new GroupByKeyGenerator(groupByExpressionList, metaComplexEvent,
                SiddhiConstants.UNKNOWN_STATE, null, variableExpressionExecutors, siddhiQueryContext));
    }
    if (!selector.getOrderByList().isEmpty()) {
        querySelector.setOrderByEventComparator(new OrderByEventComparator(selector.getOrderByList(),
                metaComplexEvent, SiddhiConstants.HAVING_STATE, null, variableExpressionExecutors,
                siddhiQueryContext));
    }
    if (selector.getLimit() != null) {
        ExpressionExecutor expressionExecutor = ExpressionParser.parseExpression((Expression) selector.getLimit(),
                metaComplexEvent, SiddhiConstants.HAVING_STATE, tableMap, variableExpressionExecutors, false, 0,
                ProcessingMode.BATCH, false, siddhiQueryContext);
        containsAggregatorThreadLocal.remove();
        querySelector.setLimit(((Number) (((ConstantExpressionExecutor) expressionExecutor).getValue())).longValue());
    }
    if (selector.getOffset() != null) {
        ExpressionExecutor expressionExecutor = ExpressionParser.parseExpression((Expression) selector.getOffset(),
                metaComplexEvent, SiddhiConstants.HAVING_STATE, tableMap, variableExpressionExecutors, false, 0,
                ProcessingMode.BATCH, false, siddhiQueryContext);
        containsAggregatorThreadLocal.remove();
        querySelector.setOffset(((Number) (((ConstantExpressionExecutor) expressionExecutor).getValue())).longValue());
    }
    return querySelector;
}
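For context, a minimal sketch of how this parser is reached at runtime. It assumes a Siddhi 5.x dependency (io.siddhi.core) where limit and offset are supported; the stream, query, and class names below are illustrative only. The selector of query1 uses group by, having, order by, limit, and offset, so each of the optional branches in SelectorParser.parse above would be exercised while the app runtime is built.

import io.siddhi.core.SiddhiAppRuntime;
import io.siddhi.core.SiddhiManager;

public class SelectorParseExample {
    public static void main(String[] args) {
        // Hypothetical app: the selector carries group by, having, order by, limit and offset.
        String app = "define stream StockStream (symbol string, price float); " +
                "@info(name = 'query1') " +
                "from StockStream#window.lengthBatch(100) " +
                "select symbol, avg(price) as avgPrice " +
                "group by symbol " +
                "having avgPrice > 50 " +
                "order by avgPrice desc " +
                "limit 10 offset 2 " +
                "insert into OutputStream;";

        SiddhiManager siddhiManager = new SiddhiManager();
        // SelectorParser.parse is invoked internally while the app runtime is created.
        SiddhiAppRuntime runtime = siddhiManager.createSiddhiAppRuntime(app);
        runtime.start();
        runtime.shutdown();
        siddhiManager.shutdown();
    }
}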
Use of io.siddhi.core.query.processor.ProcessingMode in project siddhi by wso2.
The class SingleInputStreamParser, method parseInputStream.
/**
* Parse single InputStream and return SingleStreamRuntime
*
* @param inputStream single input stream to be parsed
* @param variableExpressionExecutors List to hold VariableExpressionExecutors to update after query parsing
* @param streamDefinitionMap Stream Definition Map
* @param tableDefinitionMap Table Definition Map
* @param windowDefinitionMap window definition map
* @param aggregationDefinitionMap aggregation definition map
* @param tableMap Table Map
* @param metaComplexEvent MetaComplexEvent
* @param processStreamReceiver ProcessStreamReceiver
* @param supportsBatchProcessing supports batch processing
* @param outputExpectsExpiredEvents whether expired events are sent as output
* @param findToBeExecuted whether find will be executed on the stream stores
* @param multiValue whether the event may produce multiple values
* @param siddhiQueryContext current siddhi query context
* @return SingleStreamRuntime
*/
public static SingleStreamRuntime parseInputStream(SingleInputStream inputStream,
                                                   List<VariableExpressionExecutor> variableExpressionExecutors,
                                                   Map<String, AbstractDefinition> streamDefinitionMap,
                                                   Map<String, AbstractDefinition> tableDefinitionMap,
                                                   Map<String, AbstractDefinition> windowDefinitionMap,
                                                   Map<String, AbstractDefinition> aggregationDefinitionMap,
                                                   Map<String, Table> tableMap, MetaComplexEvent metaComplexEvent,
                                                   ProcessStreamReceiver processStreamReceiver,
                                                   boolean supportsBatchProcessing,
                                                   boolean outputExpectsExpiredEvents, boolean findToBeExecuted,
                                                   boolean multiValue, SiddhiQueryContext siddhiQueryContext) {
    Processor processor = null;
    EntryValveProcessor entryValveProcessor = null;
    ProcessingMode processingMode = ProcessingMode.BATCH;
    boolean first = true;
    MetaStreamEvent metaStreamEvent;
    if (metaComplexEvent instanceof MetaStateEvent) {
        metaStreamEvent = new MetaStreamEvent();
        ((MetaStateEvent) metaComplexEvent).addEvent(metaStreamEvent);
        initMetaStreamEvent(inputStream, streamDefinitionMap, tableDefinitionMap, windowDefinitionMap,
                aggregationDefinitionMap, multiValue, metaStreamEvent);
    } else {
        metaStreamEvent = (MetaStreamEvent) metaComplexEvent;
        initMetaStreamEvent(inputStream, streamDefinitionMap, tableDefinitionMap, windowDefinitionMap,
                aggregationDefinitionMap, multiValue, metaStreamEvent);
    }
    // A window cannot be defined for a window stream
    if (!inputStream.getStreamHandlers().isEmpty() && windowDefinitionMap != null &&
            windowDefinitionMap.containsKey(inputStream.getStreamId())) {
        for (StreamHandler handler : inputStream.getStreamHandlers()) {
            if (handler instanceof Window) {
                throw new OperationNotSupportedException("Cannot create " + ((Window) handler).getName() + " " +
                        "window for the window stream " + inputStream.getStreamId());
            }
        }
    }
    if (!inputStream.getStreamHandlers().isEmpty()) {
        // Chain the stream handlers (filters, stream functions, windows) into a processor chain.
        for (StreamHandler handler : inputStream.getStreamHandlers()) {
            Processor currentProcessor = generateProcessor(handler, metaComplexEvent, variableExpressionExecutors,
                    tableMap, supportsBatchProcessing, outputExpectsExpiredEvents, findToBeExecuted,
                    siddhiQueryContext);
            if (currentProcessor instanceof SchedulingProcessor) {
                // Scheduling processors need an entry valve and a scheduler wired into the chain.
                if (entryValveProcessor == null) {
                    entryValveProcessor = new EntryValveProcessor(siddhiQueryContext.getSiddhiAppContext());
                    if (first) {
                        processor = entryValveProcessor;
                        first = false;
                    } else {
                        processor.setToLast(entryValveProcessor);
                    }
                }
                Scheduler scheduler = SchedulerParser.parse(entryValveProcessor, siddhiQueryContext);
                ((SchedulingProcessor) currentProcessor).setScheduler(scheduler);
            }
            if (currentProcessor instanceof AbstractStreamProcessor) {
                // Combine the running processing mode with each stream processor's own mode.
                processingMode = ProcessingMode.findUpdatedProcessingMode(processingMode,
                        ((AbstractStreamProcessor) currentProcessor).getProcessingMode());
            }
            if (first) {
                processor = currentProcessor;
                first = false;
            } else {
                processor.setToLast(currentProcessor);
            }
        }
    }
    metaStreamEvent.initializeOnAfterWindowData();
    return new SingleStreamRuntime(processStreamReceiver, processor, processingMode, metaComplexEvent);
}
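Similarly, a minimal sketch of a query whose single input stream carries a chain of stream handlers, so the handler loop above builds a processor chain and resolves the resulting ProcessingMode. This again assumes a Siddhi 5.x dependency; the stream, query, and class names are illustrative, and the note about the time window using a scheduling processor is an assumption based on the branch shown above rather than a documented guarantee.

import io.siddhi.core.SiddhiAppRuntime;
import io.siddhi.core.SiddhiManager;

public class SingleInputStreamExample {
    public static void main(String[] args) {
        // Hypothetical app: the single input stream has two stream handlers,
        // a filter and a time window, which parseInputStream chains into processors.
        String app = "define stream TempStream (roomNo string, temp double); " +
                "@info(name = 'query1') " +
                "from TempStream[temp > 30.0]#window.time(1 min) " +
                "select roomNo, avg(temp) as avgTemp " +
                "insert into HighTempStream;";

        SiddhiManager siddhiManager = new SiddhiManager();
        // parseInputStream runs while the runtime is built; if the time window's processor
        // is a SchedulingProcessor, an EntryValveProcessor and Scheduler are wired in front of it.
        SiddhiAppRuntime runtime = siddhiManager.createSiddhiAppRuntime(app);
        runtime.start();
        runtime.shutdown();
        siddhiManager.shutdown();
    }
}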