Use of io.siddhi.query.api.expression.Expression in project siddhi by wso2.
The class ExpressionParser, method parseExpression.
/**
* Parse the given expression and create the appropriate Executor by recursively traversing the expression
*
* @param expression Expression to be parsed
* @param metaEvent Meta Event
* @param currentState Current state number
* @param tableMap Event Table Map
* @param executorList List to hold VariableExpressionExecutors to update after query parsing
* @param groupBy true if the expression is evaluated within a group-by context
* @param defaultStreamEventIndex Default StreamEvent Index
* @param processingMode processing mode of the query
* @param outputExpectsExpiredEvents whether expired events are sent as output
* @param siddhiQueryContext current siddhi query context
* @return ExpressionExecutor
*/
public static ExpressionExecutor parseExpression(Expression expression, MetaComplexEvent metaEvent, int currentState, Map<String, Table> tableMap, List<VariableExpressionExecutor> executorList, boolean groupBy, int defaultStreamEventIndex, ProcessingMode processingMode, boolean outputExpectsExpiredEvents, SiddhiQueryContext siddhiQueryContext) {
try {
if (expression instanceof And) {
return new AndConditionExpressionExecutor(parseExpression(((And) expression).getLeftExpression(), metaEvent, currentState, tableMap, executorList, groupBy, defaultStreamEventIndex, processingMode, outputExpectsExpiredEvents, siddhiQueryContext), parseExpression(((And) expression).getRightExpression(), metaEvent, currentState, tableMap, executorList, groupBy, defaultStreamEventIndex, processingMode, outputExpectsExpiredEvents, siddhiQueryContext));
} else if (expression instanceof Or) {
return new OrConditionExpressionExecutor(parseExpression(((Or) expression).getLeftExpression(), metaEvent, currentState, tableMap, executorList, groupBy, defaultStreamEventIndex, processingMode, outputExpectsExpiredEvents, siddhiQueryContext), parseExpression(((Or) expression).getRightExpression(), metaEvent, currentState, tableMap, executorList, groupBy, defaultStreamEventIndex, processingMode, outputExpectsExpiredEvents, siddhiQueryContext));
} else if (expression instanceof Not) {
return new NotConditionExpressionExecutor(parseExpression(((Not) expression).getExpression(), metaEvent, currentState, tableMap, executorList, groupBy, defaultStreamEventIndex, processingMode, outputExpectsExpiredEvents, siddhiQueryContext));
} else if (expression instanceof Compare) {
if (((Compare) expression).getOperator() == Compare.Operator.EQUAL) {
Expression leftExpression = ((Compare) expression).getLeftExpression();
Expression rightExpression = ((Compare) expression).getRightExpression();
ExpressionExecutor leftExpressionExecutor = parseExpression(leftExpression, metaEvent, currentState, tableMap, executorList, groupBy, defaultStreamEventIndex, processingMode, outputExpectsExpiredEvents, siddhiQueryContext);
ExpressionExecutor rightExpressionExecutor = parseExpression(rightExpression, metaEvent, currentState, tableMap, executorList, groupBy, defaultStreamEventIndex, processingMode, outputExpectsExpiredEvents, siddhiQueryContext);
return parseEqualCompare(leftExpressionExecutor, rightExpressionExecutor);
} else if (((Compare) expression).getOperator() == Compare.Operator.NOT_EQUAL) {
return parseNotEqualCompare(parseExpression(((Compare) expression).getLeftExpression(), metaEvent, currentState, tableMap, executorList, groupBy, defaultStreamEventIndex, processingMode, outputExpectsExpiredEvents, siddhiQueryContext), parseExpression(((Compare) expression).getRightExpression(), metaEvent, currentState, tableMap, executorList, groupBy, defaultStreamEventIndex, processingMode, outputExpectsExpiredEvents, siddhiQueryContext));
} else if (((Compare) expression).getOperator() == Compare.Operator.GREATER_THAN) {
return parseGreaterThanCompare(parseExpression(((Compare) expression).getLeftExpression(), metaEvent, currentState, tableMap, executorList, groupBy, defaultStreamEventIndex, processingMode, outputExpectsExpiredEvents, siddhiQueryContext), parseExpression(((Compare) expression).getRightExpression(), metaEvent, currentState, tableMap, executorList, groupBy, defaultStreamEventIndex, processingMode, outputExpectsExpiredEvents, siddhiQueryContext));
} else if (((Compare) expression).getOperator() == Compare.Operator.GREATER_THAN_EQUAL) {
return parseGreaterThanEqualCompare(parseExpression(((Compare) expression).getLeftExpression(), metaEvent, currentState, tableMap, executorList, groupBy, defaultStreamEventIndex, processingMode, outputExpectsExpiredEvents, siddhiQueryContext), parseExpression(((Compare) expression).getRightExpression(), metaEvent, currentState, tableMap, executorList, groupBy, defaultStreamEventIndex, processingMode, outputExpectsExpiredEvents, siddhiQueryContext));
} else if (((Compare) expression).getOperator() == Compare.Operator.LESS_THAN) {
return parseLessThanCompare(parseExpression(((Compare) expression).getLeftExpression(), metaEvent, currentState, tableMap, executorList, groupBy, defaultStreamEventIndex, processingMode, outputExpectsExpiredEvents, siddhiQueryContext), parseExpression(((Compare) expression).getRightExpression(), metaEvent, currentState, tableMap, executorList, groupBy, defaultStreamEventIndex, processingMode, outputExpectsExpiredEvents, siddhiQueryContext));
} else if (((Compare) expression).getOperator() == Compare.Operator.LESS_THAN_EQUAL) {
return parseLessThanEqualCompare(parseExpression(((Compare) expression).getLeftExpression(), metaEvent, currentState, tableMap, executorList, groupBy, defaultStreamEventIndex, processingMode, outputExpectsExpiredEvents, siddhiQueryContext), parseExpression(((Compare) expression).getRightExpression(), metaEvent, currentState, tableMap, executorList, groupBy, defaultStreamEventIndex, processingMode, outputExpectsExpiredEvents, siddhiQueryContext));
}
} else if (expression instanceof Constant) {
if (expression instanceof BoolConstant) {
return new ConstantExpressionExecutor(((BoolConstant) expression).getValue(), Attribute.Type.BOOL);
} else if (expression instanceof StringConstant) {
return new ConstantExpressionExecutor(((StringConstant) expression).getValue(), Attribute.Type.STRING);
} else if (expression instanceof IntConstant) {
return new ConstantExpressionExecutor(((IntConstant) expression).getValue(), Attribute.Type.INT);
} else if (expression instanceof LongConstant) {
return new ConstantExpressionExecutor(((LongConstant) expression).getValue(), Attribute.Type.LONG);
} else if (expression instanceof FloatConstant) {
return new ConstantExpressionExecutor(((FloatConstant) expression).getValue(), Attribute.Type.FLOAT);
} else if (expression instanceof DoubleConstant) {
return new ConstantExpressionExecutor(((DoubleConstant) expression).getValue(), Attribute.Type.DOUBLE);
}
} else if (expression instanceof Variable) {
return parseVariable((Variable) expression, metaEvent, currentState, executorList, defaultStreamEventIndex, siddhiQueryContext);
} else if (expression instanceof Multiply) {
ExpressionExecutor left = parseExpression(((Multiply) expression).getLeftValue(), metaEvent, currentState, tableMap, executorList, groupBy, defaultStreamEventIndex, processingMode, outputExpectsExpiredEvents, siddhiQueryContext);
ExpressionExecutor right = parseExpression(((Multiply) expression).getRightValue(), metaEvent, currentState, tableMap, executorList, groupBy, defaultStreamEventIndex, processingMode, outputExpectsExpiredEvents, siddhiQueryContext);
Attribute.Type type = parseArithmeticOperationResultType(left, right);
switch(type) {
case INT:
return new MultiplyExpressionExecutorInt(left, right);
case LONG:
return new MultiplyExpressionExecutorLong(left, right);
case FLOAT:
return new MultiplyExpressionExecutorFloat(left, right);
case DOUBLE:
return new MultiplyExpressionExecutorDouble(left, right);
// Will not happen. Handled in parseArithmeticOperationResultType()
default:
}
} else if (expression instanceof Add) {
ExpressionExecutor left = parseExpression(((Add) expression).getLeftValue(), metaEvent, currentState, tableMap, executorList, groupBy, defaultStreamEventIndex, processingMode, outputExpectsExpiredEvents, siddhiQueryContext);
ExpressionExecutor right = parseExpression(((Add) expression).getRightValue(), metaEvent, currentState, tableMap, executorList, groupBy, defaultStreamEventIndex, processingMode, outputExpectsExpiredEvents, siddhiQueryContext);
Attribute.Type type = parseArithmeticOperationResultType(left, right);
switch(type) {
case INT:
return new AddExpressionExecutorInt(left, right);
case LONG:
return new AddExpressionExecutorLong(left, right);
case FLOAT:
return new AddExpressionExecutorFloat(left, right);
case DOUBLE:
return new AddExpressionExecutorDouble(left, right);
// Will not happen. Handled in parseArithmeticOperationResultType()
default:
}
} else if (expression instanceof Subtract) {
ExpressionExecutor left = parseExpression(((Subtract) expression).getLeftValue(), metaEvent, currentState, tableMap, executorList, groupBy, defaultStreamEventIndex, processingMode, outputExpectsExpiredEvents, siddhiQueryContext);
ExpressionExecutor right = parseExpression(((Subtract) expression).getRightValue(), metaEvent, currentState, tableMap, executorList, groupBy, defaultStreamEventIndex, processingMode, outputExpectsExpiredEvents, siddhiQueryContext);
Attribute.Type type = parseArithmeticOperationResultType(left, right);
switch(type) {
case INT:
return new SubtractExpressionExecutorInt(left, right);
case LONG:
return new SubtractExpressionExecutorLong(left, right);
case FLOAT:
return new SubtractExpressionExecutorFloat(left, right);
case DOUBLE:
return new SubtractExpressionExecutorDouble(left, right);
// Will not happen. Handled in parseArithmeticOperationResultType()
default:
}
} else if (expression instanceof Mod) {
ExpressionExecutor left = parseExpression(((Mod) expression).getLeftValue(), metaEvent, currentState, tableMap, executorList, groupBy, defaultStreamEventIndex, processingMode, outputExpectsExpiredEvents, siddhiQueryContext);
ExpressionExecutor right = parseExpression(((Mod) expression).getRightValue(), metaEvent, currentState, tableMap, executorList, groupBy, defaultStreamEventIndex, processingMode, outputExpectsExpiredEvents, siddhiQueryContext);
Attribute.Type type = parseArithmeticOperationResultType(left, right);
switch(type) {
case INT:
return new ModExpressionExecutorInt(left, right);
case LONG:
return new ModExpressionExecutorLong(left, right);
case FLOAT:
return new ModExpressionExecutorFloat(left, right);
case DOUBLE:
return new ModExpressionExecutorDouble(left, right);
// Will not happen. Handled in parseArithmeticOperationResultType()
default:
}
} else if (expression instanceof Divide) {
ExpressionExecutor left = parseExpression(((Divide) expression).getLeftValue(), metaEvent, currentState, tableMap, executorList, groupBy, defaultStreamEventIndex, processingMode, outputExpectsExpiredEvents, siddhiQueryContext);
ExpressionExecutor right = parseExpression(((Divide) expression).getRightValue(), metaEvent, currentState, tableMap, executorList, groupBy, defaultStreamEventIndex, processingMode, outputExpectsExpiredEvents, siddhiQueryContext);
Attribute.Type type = parseArithmeticOperationResultType(left, right);
switch(type) {
case INT:
return new DivideExpressionExecutorInt(left, right);
case LONG:
return new DivideExpressionExecutorLong(left, right);
case FLOAT:
return new DivideExpressionExecutorFloat(left, right);
case DOUBLE:
return new DivideExpressionExecutorDouble(left, right);
// Will not happen. Handled in parseArithmeticOperationResultType()
default:
}
} else if (expression instanceof AttributeFunction) {
// extensions
Object executor;
try {
if ((siddhiQueryContext.getSiddhiAppContext().isFunctionExist(((AttributeFunction) expression).getName())) && (((AttributeFunction) expression).getNamespace()).isEmpty()) {
executor = new ScriptFunctionExecutor(((AttributeFunction) expression).getName());
} else {
executor = SiddhiClassLoader.loadExtensionImplementation((AttributeFunction) expression, FunctionExecutorExtensionHolder.getInstance(siddhiQueryContext.getSiddhiAppContext()));
}
} catch (SiddhiAppCreationException ex) {
try {
executor = SiddhiClassLoader.loadExtensionImplementation((AttributeFunction) expression, AttributeAggregatorExtensionHolder.getInstance(siddhiQueryContext.getSiddhiAppContext()));
} catch (SiddhiAppCreationException e) {
throw new ExtensionNotFoundException("'" + ((AttributeFunction) expression).getName() + "' is" + " neither a function extension nor an aggregated attribute extension", expression.getQueryContextStartIndex(), expression.getQueryContextEndIndex());
}
}
ConfigReader configReader = siddhiQueryContext.getSiddhiContext().getConfigManager().generateConfigReader(((AttributeFunction) expression).getNamespace(), ((AttributeFunction) expression).getName());
if (executor instanceof FunctionExecutor) {
FunctionExecutor expressionExecutor = (FunctionExecutor) executor;
Expression[] innerExpressions = ((AttributeFunction) expression).getParameters();
ExpressionExecutor[] innerExpressionExecutors = parseInnerExpression(innerExpressions, metaEvent, currentState, tableMap, executorList, groupBy, defaultStreamEventIndex, processingMode, outputExpectsExpiredEvents, siddhiQueryContext);
expressionExecutor.initExecutor(innerExpressionExecutors, processingMode, configReader, groupBy, siddhiQueryContext);
if (expressionExecutor.getReturnType() == Attribute.Type.BOOL) {
return new BoolConditionExpressionExecutor(expressionExecutor);
}
return expressionExecutor;
} else {
AttributeAggregatorExecutor attributeAggregatorExecutor = (AttributeAggregatorExecutor) executor;
Expression[] innerExpressions = ((AttributeFunction) expression).getParameters();
ExpressionExecutor[] innerExpressionExecutors = parseInnerExpression(innerExpressions, metaEvent, currentState, tableMap, executorList, groupBy, defaultStreamEventIndex, processingMode, outputExpectsExpiredEvents, siddhiQueryContext);
attributeAggregatorExecutor.initAggregator(innerExpressionExecutors, processingMode, outputExpectsExpiredEvents, configReader, groupBy, siddhiQueryContext);
// AbstractAggregationAttributeExecutor aggregationAttributeProcessor;
// if (groupBy) {
// aggregationAttributeProcessor = new GroupByAggregationAttributeExecutor(attributeAggregatorExecutor,
// innerExpressionExecutors, configReader, siddhiQueryContext);
// } else {
// aggregationAttributeProcessor = new AggregationAttributeExecutor(attributeAggregatorExecutor,
// innerExpressionExecutors, siddhiQueryContext);
// }
SelectorParser.getContainsAggregatorThreadLocal().set("true");
return attributeAggregatorExecutor;
}
} else if (expression instanceof In) {
Table table = tableMap.get(((In) expression).getSourceId());
MatchingMetaInfoHolder matchingMetaInfoHolder = MatcherParser.constructMatchingMetaStateHolder(metaEvent, defaultStreamEventIndex, table.getTableDefinition(), defaultStreamEventIndex);
CompiledCondition compiledCondition = table.compileCondition(((In) expression).getExpression(), matchingMetaInfoHolder, executorList, tableMap, siddhiQueryContext);
return new InConditionExpressionExecutor(table, compiledCondition, matchingMetaInfoHolder.getMetaStateEvent().getMetaStreamEvents().length, metaEvent instanceof StateEvent, 0);
} else if (expression instanceof IsNull) {
IsNull isNull = (IsNull) expression;
if (isNull.getExpression() != null) {
ExpressionExecutor innerExpressionExecutor = parseExpression(isNull.getExpression(), metaEvent, currentState, tableMap, executorList, groupBy, defaultStreamEventIndex, processingMode, outputExpectsExpiredEvents, siddhiQueryContext);
return new IsNullConditionExpressionExecutor(innerExpressionExecutor);
} else {
String streamId = isNull.getStreamId();
Integer streamIndex = isNull.getStreamIndex();
if (metaEvent instanceof MetaStateEvent) {
int[] eventPosition = new int[2];
if (streamIndex != null) {
if (streamIndex <= SiddhiConstants.LAST) {
eventPosition[SiddhiConstants.STREAM_EVENT_INDEX_IN_CHAIN] = streamIndex + 1;
} else {
eventPosition[SiddhiConstants.STREAM_EVENT_INDEX_IN_CHAIN] = streamIndex;
}
} else {
eventPosition[SiddhiConstants.STREAM_EVENT_INDEX_IN_CHAIN] = defaultStreamEventIndex;
}
eventPosition[SiddhiConstants.STREAM_EVENT_CHAIN_INDEX] = SiddhiConstants.UNKNOWN_STATE;
MetaStateEvent metaStateEvent = (MetaStateEvent) metaEvent;
if (streamId == null) {
throw new SiddhiAppCreationException("IsNull does not support streamId being null", expression.getQueryContextStartIndex(), expression.getQueryContextEndIndex());
} else {
MetaStreamEvent[] metaStreamEvents = metaStateEvent.getMetaStreamEvents();
for (int i = 0, metaStreamEventsLength = metaStreamEvents.length; i < metaStreamEventsLength; i++) {
MetaStreamEvent metaStreamEvent = metaStreamEvents[i];
AbstractDefinition definition = metaStreamEvent.getLastInputDefinition();
if (metaStreamEvent.getInputReferenceId() == null) {
if (definition.getId().equals(streamId)) {
eventPosition[SiddhiConstants.STREAM_EVENT_CHAIN_INDEX] = i;
break;
}
} else {
if (metaStreamEvent.getInputReferenceId().equals(streamId)) {
eventPosition[SiddhiConstants.STREAM_EVENT_CHAIN_INDEX] = i;
if (currentState > -1 && metaStreamEvents[currentState].getInputReferenceId() != null && streamIndex != null && streamIndex <= SiddhiConstants.LAST) {
if (streamId.equals(metaStreamEvents[currentState].getInputReferenceId())) {
eventPosition[SiddhiConstants.STREAM_EVENT_INDEX_IN_CHAIN] = streamIndex;
}
}
break;
}
}
}
}
return new IsNullStreamConditionExpressionExecutor(eventPosition);
} else {
return new IsNullStreamConditionExpressionExecutor(null);
}
}
}
throw new UnsupportedOperationException(expression.toString() + " not supported!");
} catch (Throwable t) {
ExceptionUtil.populateQueryContext(t, expression, siddhiQueryContext.getSiddhiAppContext(), siddhiQueryContext);
throw t;
}
}
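For orientation, a minimal sketch (assumed usage, not part of the parser itself) of the kind of Expression tree parseExpression consumes: the query API's static factories build the And/Compare/Variable/Constant nodes that the branches above dispatch on. The exact factory overloads shown (Expression.and, Expression.compare, Expression.variable) are assumptions about the io.siddhi.query.api Expression class; Expression.value and Compare.Operator appear in the code above.

import io.siddhi.query.api.expression.Expression;
import io.siddhi.query.api.expression.condition.Compare;

public class ExpressionTreeSketch {
    public static void main(String[] args) {
        // "price > 100.0 and volume < 50" as an Expression tree: the And node maps to an
        // AndConditionExpressionExecutor, each Compare node to a parse*Compare(...) call above.
        Expression condition = Expression.and(
                Expression.compare(Expression.variable("price"),
                        Compare.Operator.GREATER_THAN, Expression.value(100.0)),
                Expression.compare(Expression.variable("volume"),
                        Compare.Operator.LESS_THAN, Expression.value(50L)));
        System.out.println(condition);
    }
}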
Use of io.siddhi.query.api.expression.Expression in project siddhi by wso2.
The class ExpressionParser, method parseInnerExpression.
/**
* Parse the set of inner expressions of AttributeFunction extensions, handling the all-attributes (*) case
*
* @param innerExpressions InnerExpressions to be parsed
* @param metaEvent Meta Event
* @param currentState Current state number
* @param tableMap Event Table Map
* @param executorList List to hold VariableExpressionExecutors to update after query parsing
* @param groupBy true if the expression is evaluated within a group-by context
* @param defaultStreamEventIndex Default StreamEvent Index
* @param processingMode processing mode of the query
* @param outputExpectsExpiredEvents whether expired events are sent as output
* @param siddhiQueryContext current siddhi query context
* @return Array of ExpressionExecutors
*/
private static ExpressionExecutor[] parseInnerExpression(Expression[] innerExpressions, MetaComplexEvent metaEvent, int currentState, Map<String, Table> tableMap, List<VariableExpressionExecutor> executorList, boolean groupBy, int defaultStreamEventIndex, ProcessingMode processingMode, boolean outputExpectsExpiredEvents, SiddhiQueryContext siddhiQueryContext) {
ExpressionExecutor[] innerExpressionExecutors;
if (innerExpressions != null) {
if (innerExpressions.length > 0) {
innerExpressionExecutors = new ExpressionExecutor[innerExpressions.length];
for (int i = 0, innerExpressionsLength = innerExpressions.length; i < innerExpressionsLength; i++) {
innerExpressionExecutors[i] = parseExpression(innerExpressions[i], metaEvent, currentState, tableMap, executorList, groupBy, defaultStreamEventIndex, processingMode, outputExpectsExpiredEvents, siddhiQueryContext);
}
} else {
List<Expression> outputAttributes = new ArrayList<Expression>();
if (metaEvent instanceof MetaStreamEvent) {
List<Attribute> attributeList = ((MetaStreamEvent) metaEvent).getLastInputDefinition().getAttributeList();
for (Attribute attribute : attributeList) {
outputAttributes.add(new Variable(attribute.getName()));
}
} else {
for (MetaStreamEvent metaStreamEvent : ((MetaStateEvent) metaEvent).getMetaStreamEvents()) {
List<Attribute> attributeList = metaStreamEvent.getLastInputDefinition().getAttributeList();
for (Attribute attribute : attributeList) {
Expression outputAttribute = new Variable(attribute.getName());
if (!outputAttributes.contains(outputAttribute)) {
outputAttributes.add(outputAttribute);
} else {
List<AbstractDefinition> definitions = new ArrayList<AbstractDefinition>();
for (MetaStreamEvent aMetaStreamEvent : ((MetaStateEvent) metaEvent).getMetaStreamEvents()) {
definitions.add(aMetaStreamEvent.getLastInputDefinition());
}
throw new DuplicateAttributeException("Duplicate attribute exist in streams " + definitions, attribute.getQueryContextStartIndex(), attribute.getQueryContextEndIndex());
}
}
}
}
innerExpressionExecutors = new ExpressionExecutor[outputAttributes.size()];
for (int i = 0, innerExpressionsLength = outputAttributes.size(); i < innerExpressionsLength; i++) {
innerExpressionExecutors[i] = parseExpression(outputAttributes.get(i), metaEvent, currentState, tableMap, executorList, groupBy, defaultStreamEventIndex, processingMode, outputExpectsExpiredEvents, siddhiQueryContext);
}
}
} else {
innerExpressionExecutors = new ExpressionExecutor[0];
}
return innerExpressionExecutors;
}
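The zero-parameter branch above (innerExpressions.length == 0) is what backs aggregators written without arguments, such as count(). Below is a hedged, self-contained sketch using the public Siddhi runtime API; the stream, query, and attribute names are illustrative.

import io.siddhi.core.SiddhiAppRuntime;
import io.siddhi.core.SiddhiManager;
import io.siddhi.core.event.Event;
import io.siddhi.core.stream.output.StreamCallback;
import io.siddhi.core.util.EventPrinter;

public class CountAggregatorSketch {
    public static void main(String[] args) throws InterruptedException {
        SiddhiManager siddhiManager = new SiddhiManager();
        // count() has no parameters, so its inner executors are expanded from
        // TradeStream's attribute list by parseInnerExpression.
        String app = "define stream TradeStream (symbol string, price double); " +
                "@info(name = 'countQuery') " +
                "from TradeStream#window.lengthBatch(2) " +
                "select symbol, count() as tradeCount " +
                "group by symbol insert into CountStream;";
        SiddhiAppRuntime runtime = siddhiManager.createSiddhiAppRuntime(app);
        runtime.addCallback("CountStream", new StreamCallback() {
            @Override
            public void receive(Event[] events) {
                EventPrinter.print(events);
            }
        });
        runtime.start();
        runtime.getInputHandler("TradeStream").send(new Object[]{"WSO2", 55.6});
        runtime.getInputHandler("TradeStream").send(new Object[]{"WSO2", 57.1});
        Thread.sleep(100);
        runtime.shutdown();
    }
}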
Use of io.siddhi.query.api.expression.Expression in project siddhi by wso2.
The class OnDemandQueryParser, method parse.
public static OnDemandQueryRuntime parse(OnDemandQuery onDemandQuery, String onDemandQueryString, SiddhiAppContext siddhiAppContext, Map<String, Table> tableMap, Map<String, Window> windowMap, Map<String, AggregationRuntime> aggregationMap) {
final LockWrapper lockWrapper = new LockWrapper("OnDemandQueryLock");
lockWrapper.setLock(new ReentrantLock());
MetaStreamEvent metaStreamEvent = new MetaStreamEvent();
int metaPosition = SiddhiConstants.UNKNOWN_STATE;
String queryName;
Table table;
SiddhiQueryContext siddhiQueryContext;
Expression onCondition;
SnapshotService.getSkipStateStorageThreadLocal().set(true);
switch(onDemandQuery.getType()) {
case FIND:
Within within = null;
Expression per = null;
queryName = "store_select_query_" + onDemandQuery.getInputStore().getStoreId();
siddhiQueryContext = new SiddhiOnDemandQueryContext(siddhiAppContext, queryName, onDemandQueryString);
InputStore inputStore = onDemandQuery.getInputStore();
try {
onCondition = Expression.value(true);
metaStreamEvent.setInputReferenceId(inputStore.getStoreReferenceId());
if (inputStore instanceof AggregationInputStore) {
AggregationInputStore aggregationInputStore = (AggregationInputStore) inputStore;
if (aggregationMap.get(inputStore.getStoreId()) == null) {
throw new OnDemandQueryCreationException("Aggregation \"" + inputStore.getStoreId() + "\" has not been defined");
}
if (aggregationInputStore.getPer() != null && aggregationInputStore.getWithin() != null) {
within = aggregationInputStore.getWithin();
per = aggregationInputStore.getPer();
} else if (aggregationInputStore.getPer() != null || aggregationInputStore.getWithin() != null) {
throw new OnDemandQueryCreationException(inputStore.getStoreId() + " should either have both 'within' and 'per' " + "defined or none.");
}
if (((AggregationInputStore) inputStore).getOnCondition() != null) {
onCondition = ((AggregationInputStore) inputStore).getOnCondition();
}
} else if (inputStore instanceof ConditionInputStore) {
if (((ConditionInputStore) inputStore).getOnCondition() != null) {
onCondition = ((ConditionInputStore) inputStore).getOnCondition();
}
}
List<VariableExpressionExecutor> variableExpressionExecutors = new ArrayList<>();
table = tableMap.get(inputStore.getStoreId());
if (table != null) {
return constructOnDemandQueryRuntime(table, onDemandQuery, tableMap, windowMap, metaPosition, onCondition, metaStreamEvent, variableExpressionExecutors, lockWrapper, siddhiQueryContext);
} else {
AggregationRuntime aggregation = aggregationMap.get(inputStore.getStoreId());
if (aggregation != null) {
return constructOnDemandQueryRuntime(aggregation, onDemandQuery, tableMap, windowMap, within, per, onCondition, metaStreamEvent, variableExpressionExecutors, lockWrapper, siddhiQueryContext);
} else {
Window window = windowMap.get(inputStore.getStoreId());
if (window != null) {
return constructOnDemandQueryRuntime(window, onDemandQuery, tableMap, windowMap, metaPosition, onCondition, metaStreamEvent, variableExpressionExecutors, lockWrapper, siddhiQueryContext);
} else {
throw new OnDemandQueryCreationException(inputStore.getStoreId() + " is neither a table, an aggregation, nor a window");
}
}
}
} finally {
SnapshotService.getSkipStateStorageThreadLocal().set(null);
}
case INSERT:
InsertIntoStream insertIntoStream = (InsertIntoStream) onDemandQuery.getOutputStream();
queryName = "store_insert_query_" + insertIntoStream.getId();
siddhiQueryContext = new SiddhiOnDemandQueryContext(siddhiAppContext, queryName, onDemandQueryString);
onCondition = Expression.value(true);
return getOnDemandQueryRuntime(onDemandQuery, tableMap, windowMap, metaPosition, lockWrapper, metaStreamEvent, insertIntoStream, onCondition, siddhiQueryContext);
case DELETE:
DeleteStream deleteStream = (DeleteStream) onDemandQuery.getOutputStream();
queryName = "store_delete_query_" + deleteStream.getId();
siddhiQueryContext = new SiddhiOnDemandQueryContext(siddhiAppContext, queryName, onDemandQueryString);
onCondition = deleteStream.getOnDeleteExpression();
return getOnDemandQueryRuntime(onDemandQuery, tableMap, windowMap, metaPosition, lockWrapper, metaStreamEvent, deleteStream, onCondition, siddhiQueryContext);
case UPDATE:
UpdateStream outputStream = (UpdateStream) onDemandQuery.getOutputStream();
queryName = "store_update_query_" + outputStream.getId();
siddhiQueryContext = new SiddhiOnDemandQueryContext(siddhiAppContext, queryName, onDemandQueryString);
onCondition = outputStream.getOnUpdateExpression();
return getOnDemandQueryRuntime(onDemandQuery, tableMap, windowMap, metaPosition, lockWrapper, metaStreamEvent, outputStream, onCondition, siddhiQueryContext);
case UPDATE_OR_INSERT:
UpdateOrInsertStream onDemandQueryOutputStream = (UpdateOrInsertStream) onDemandQuery.getOutputStream();
queryName = "store_update_or_insert_query_" + onDemandQueryOutputStream.getId();
siddhiQueryContext = new SiddhiOnDemandQueryContext(siddhiAppContext, queryName, onDemandQueryString);
onCondition = onDemandQueryOutputStream.getOnUpdateExpression();
return getOnDemandQueryRuntime(onDemandQuery, tableMap, windowMap, metaPosition, lockWrapper, metaStreamEvent, onDemandQueryOutputStream, onCondition, siddhiQueryContext);
default:
return null;
}
}
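From the caller's side, the FIND branch above backs on-demand (store) queries issued through SiddhiAppRuntime.query(). A hedged sketch follows; the table name, data, and condition are illustrative, and the "on" clause corresponds to the ConditionInputStore case.

import io.siddhi.core.SiddhiAppRuntime;
import io.siddhi.core.SiddhiManager;
import io.siddhi.core.event.Event;
import io.siddhi.core.util.EventPrinter;

public class OnDemandQuerySketch {
    public static void main(String[] args) throws InterruptedException {
        SiddhiManager siddhiManager = new SiddhiManager();
        String app = "define stream StockStream (symbol string, price double); " +
                "define table StockTable (symbol string, price double); " +
                "from StockStream insert into StockTable;";
        SiddhiAppRuntime runtime = siddhiManager.createSiddhiAppRuntime(app);
        runtime.start();
        runtime.getInputHandler("StockStream").send(new Object[]{"WSO2", 150.0});
        Thread.sleep(100);
        // "on price > 100.0" becomes the onCondition expression of the FIND branch.
        Event[] events = runtime.query("from StockTable on price > 100.0 select symbol, price");
        EventPrinter.print(events);
        runtime.shutdown();
    }
}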
Use of io.siddhi.query.api.expression.Expression in project siddhi by wso2.
The class AggregationParser, method populateFinalBaseAggregators.
private static boolean populateFinalBaseAggregators(Map<String, Table> tableMap, List<VariableExpressionExecutor> incomingVariableExpressionExecutors, MetaStreamEvent incomingMetaStreamEvent, List<ExpressionExecutor> incomingExpressionExecutors, List<IncrementalAttributeAggregator> incrementalAttributeAggregators, SiddhiQueryContext siddhiQueryContext, List<Expression> finalBaseAggregators) {
boolean isOptimisedLookup = true;
List<Attribute> finalBaseAttributes = new ArrayList<>();
for (IncrementalAttributeAggregator incrementalAttributeAggregator : incrementalAttributeAggregators) {
Attribute[] baseAttributes = incrementalAttributeAggregator.getBaseAttributes();
Expression[] baseAttributeInitialValues = incrementalAttributeAggregator.getBaseAttributeInitialValues();
Expression[] baseAggregators = incrementalAttributeAggregator.getBaseAggregators();
if (baseAggregators.length > 1) {
isOptimisedLookup = false;
}
for (int i = 0; i < baseAttributes.length; i++) {
validateBaseAggregators(incrementalAttributeAggregators, incrementalAttributeAggregator, baseAttributes, baseAttributeInitialValues, baseAggregators, i);
if (!finalBaseAttributes.contains(baseAttributes[i])) {
finalBaseAttributes.add(baseAttributes[i]);
finalBaseAggregators.add(baseAggregators[i]);
incomingMetaStreamEvent.addOutputData(baseAttributes[i]);
incomingExpressionExecutors.add(ExpressionParser.parseExpression(baseAttributeInitialValues[i], incomingMetaStreamEvent, 0, tableMap, incomingVariableExpressionExecutors, false, 0, ProcessingMode.BATCH, false, siddhiQueryContext));
}
}
}
return isOptimisedLookup;
}
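To make the decomposition concrete: avg() is an IncrementalAttributeAggregator whose base aggregators are a sum and a count, which is why baseAggregators.length > 1 switches isOptimisedLookup off. A hedged sketch of the resulting final output expression follows; the AGG_SUM / AGG_COUNT names follow the comment in the parse method below and are illustrative rather than the exact generated column names.

import io.siddhi.query.api.expression.Expression;

public class AvgDecompositionSketch {
    public static void main(String[] args) {
        // avg(price) is stored as two base attributes (a running sum and a count);
        // the final output is later rebuilt as their division.
        Expression finalAvg = Expression.divide(
                Expression.variable("AGG_SUM"),
                Expression.variable("AGG_COUNT"));
        System.out.println(finalAvg);
    }
}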
Use of io.siddhi.query.api.expression.Expression in project siddhi by wso2.
The class AggregationParser, method parse.
public static AggregationRuntime parse(AggregationDefinition aggregationDefinition, SiddhiAppContext siddhiAppContext, Map<String, AbstractDefinition> streamDefinitionMap, Map<String, AbstractDefinition> tableDefinitionMap, Map<String, AbstractDefinition> windowDefinitionMap, Map<String, AbstractDefinition> aggregationDefinitionMap, Map<String, Table> tableMap, Map<String, Window> windowMap, Map<String, AggregationRuntime> aggregationMap, SiddhiAppRuntimeBuilder siddhiAppRuntimeBuilder) {
// set timeZone for aggregation
String timeZone = getTimeZone(siddhiAppContext);
boolean isDebugEnabled = log.isDebugEnabled();
boolean isPersistedAggregation = false;
boolean isReadOnly = false;
if (!validateTimeZone(timeZone)) {
throw new SiddhiAppCreationException("Given timeZone '" + timeZone + "' for aggregations is invalid. Please provide a valid time zone.");
}
// get aggregation name
String aggregatorName = aggregationDefinition.getId();
if (isDebugEnabled) {
log.debug("Incremental aggregation initialization process started for aggregation " + aggregatorName);
}
Annotation aggregationProperties = AnnotationHelper.getAnnotation(ANNOTATION_PERSISTED_AGGREGATION, aggregationDefinition.getAnnotations());
if (aggregationProperties != null) {
String persistedAggregationMode = aggregationProperties.getElement(ANNOTATION_ELEMENT_ENABLE);
isPersistedAggregation = persistedAggregationMode == null || Boolean.parseBoolean(persistedAggregationMode);
String readOnlyMode = aggregationProperties.getElement(ANNOTATION_ELEMENT_IS_READ_ONLY);
isReadOnly = Boolean.parseBoolean(readOnlyMode);
}
if (isPersistedAggregation) {
aggregationDefinition.getSelector().getSelectionList().stream().forEach(outputAttribute -> {
if (outputAttribute.getExpression() instanceof AttributeFunction && ((AttributeFunction) outputAttribute.getExpression()).getName().equals("distinctCount")) {
throw new SiddhiAppCreationException("Aggregation function 'distinctCount' does not supported " + "with persisted aggregation type please use default incremental aggregation");
}
});
}
if (isDebugEnabled) {
log.debug("Aggregation mode is defined as " + (isPersistedAggregation ? "persisted" : "inMemory") + " for aggregation " + aggregatorName);
}
if (aggregationDefinition.getTimePeriod() == null) {
throw new SiddhiAppCreationException("Aggregation Definition '" + aggregationDefinition.getId() + "'s timePeriod is null. " + "Hence, can't create the siddhi app '" + siddhiAppContext.getName() + "'", aggregationDefinition.getQueryContextStartIndex(), aggregationDefinition.getQueryContextEndIndex());
}
if (aggregationDefinition.getSelector() == null) {
throw new SiddhiAppCreationException("Aggregation Definition '" + aggregationDefinition.getId() + "'s selection is not defined. " + "Hence, can't create the siddhi app '" + siddhiAppContext.getName() + "'", aggregationDefinition.getQueryContextStartIndex(), aggregationDefinition.getQueryContextEndIndex());
}
if (streamDefinitionMap.get(aggregationDefinition.getBasicSingleInputStream().getStreamId()) == null) {
throw new SiddhiAppCreationException("Stream " + aggregationDefinition.getBasicSingleInputStream().getStreamId() + " has not been defined");
}
// Check if user-defined primary keys exist for the @store annotation on the aggregation; if so, an error is thrown
Element userDefinedPrimaryKey = AnnotationHelper.getAnnotationElement(SiddhiConstants.ANNOTATION_PRIMARY_KEY, null, aggregationDefinition.getAnnotations());
if (userDefinedPrimaryKey != null) {
throw new SiddhiAppCreationException("Aggregation Tables have predefined primary key, but found '" + userDefinedPrimaryKey.getValue() + "' primary key defined though annotation.");
}
try {
List<VariableExpressionExecutor> incomingVariableExpressionExecutors = new ArrayList<>();
SiddhiQueryContext siddhiQueryContext = new SiddhiQueryContext(siddhiAppContext, aggregatorName);
StreamRuntime streamRuntime = InputStreamParser.parse(aggregationDefinition.getBasicSingleInputStream(), null, streamDefinitionMap, tableDefinitionMap, windowDefinitionMap, aggregationDefinitionMap, tableMap, windowMap, aggregationMap, incomingVariableExpressionExecutors, false, siddhiQueryContext);
// Get original meta for later use.
MetaStreamEvent incomingMetaStreamEvent = (MetaStreamEvent) streamRuntime.getMetaComplexEvent();
// Create new meta stream event.
// This must hold the timestamp, group by attributes (if given) and the incremental attributes, in
// onAfterWindowData array
// Example format: AGG_TIMESTAMP, groupByAttribute1, groupByAttribute2, AGG_incAttribute1, AGG_incAttribute2
// AGG_incAttribute1, AGG_incAttribute2 would have the same attribute names as in
// finalListOfIncrementalAttributes
// To enter data as onAfterWindowData
incomingMetaStreamEvent.initializeOnAfterWindowData();
// List of all aggregationDurations
List<TimePeriod.Duration> aggregationDurations = getSortedPeriods(aggregationDefinition.getTimePeriod(), isPersistedAggregation);
// Incoming executors will be executors for timestamp, externalTimestamp (if used),
// group by attributes (if given) and the incremental attributes expression executors
List<ExpressionExecutor> incomingExpressionExecutors = new ArrayList<>();
List<IncrementalAttributeAggregator> incrementalAttributeAggregators = new ArrayList<>();
List<Variable> groupByVariableList = aggregationDefinition.getSelector().getGroupByList();
// Expressions to get final aggregate outputs, e.g. for avg the expression is a Divide expression with AGG_SUM / AGG_COUNT
List<Expression> outputExpressions = new ArrayList<>();
boolean isProcessingOnExternalTime = aggregationDefinition.getAggregateAttribute() != null;
boolean isGroupBy = aggregationDefinition.getSelector().getGroupByList().size() != 0;
final boolean isDistributed;
ConfigManager configManager = siddhiAppContext.getSiddhiContext().getConfigManager();
final String shardId = configManager.extractProperty("shardId");
boolean enablePartitioning = false;
// Check if the setup is Active-Active (distributed deployment) by checking availability of the partitionById config
Annotation partitionById = AnnotationHelper.getAnnotation(ANNOTATION_PARTITION_BY_ID, aggregationDefinition.getAnnotations());
if (partitionById != null) {
String enableElement = partitionById.getElement(ANNOTATION_ELEMENT_ENABLE);
enablePartitioning = enableElement == null || Boolean.parseBoolean(enableElement);
}
boolean shouldPartitionById = Boolean.parseBoolean(configManager.extractProperty("partitionById"));
if (enablePartitioning || shouldPartitionById) {
if (shardId == null) {
throw new SiddhiAppCreationException("Configuration 'shardId' not provided for @partitionById " + "annotation");
}
isDistributed = true;
} else {
isDistributed = false;
}
if (isDebugEnabled) {
log.debug("Distributed aggregation processing is " + (isDistributed ? "enabled" : "disabled") + " in " + aggregatorName + " aggregation ");
}
populateIncomingAggregatorsAndExecutors(aggregationDefinition, siddhiQueryContext, tableMap, incomingVariableExpressionExecutors, incomingMetaStreamEvent, incomingExpressionExecutors, incrementalAttributeAggregators, groupByVariableList, outputExpressions, isProcessingOnExternalTime, isDistributed, shardId);
// check if the populateIncomingAggregatorsAndExecutors process has been completed successfully
boolean isLatestEventColAdded = incomingMetaStreamEvent.getOutputData().get(incomingMetaStreamEvent.getOutputData().size() - 1).getName().equals(AGG_LAST_TIMESTAMP_COL);
int baseAggregatorBeginIndex = incomingMetaStreamEvent.getOutputData().size();
List<Expression> finalBaseExpressions = new ArrayList<>();
boolean isOptimisedLookup = populateFinalBaseAggregators(tableMap, incomingVariableExpressionExecutors, incomingMetaStreamEvent, incomingExpressionExecutors, incrementalAttributeAggregators, siddhiQueryContext, finalBaseExpressions);
if (isDebugEnabled) {
log.debug("Optimised lookup mode is " + (isOptimisedLookup ? "enabled" : "disabled") + " for " + "aggregation " + aggregatorName);
}
// Creating an intermediate stream with aggregated stream and above extracted output variables
StreamDefinition incomingOutputStreamDefinition = StreamDefinition.id(aggregatorName + "_intermediate");
incomingOutputStreamDefinition.setQueryContextStartIndex(aggregationDefinition.getQueryContextStartIndex());
incomingOutputStreamDefinition.setQueryContextEndIndex(aggregationDefinition.getQueryContextEndIndex());
MetaStreamEvent processedMetaStreamEvent = new MetaStreamEvent();
for (Attribute attribute : incomingMetaStreamEvent.getOutputData()) {
incomingOutputStreamDefinition.attribute(attribute.getName(), attribute.getType());
processedMetaStreamEvent.addOutputData(attribute);
}
incomingMetaStreamEvent.setOutputDefinition(incomingOutputStreamDefinition);
processedMetaStreamEvent.addInputDefinition(incomingOutputStreamDefinition);
processedMetaStreamEvent.setOutputDefinition(incomingOutputStreamDefinition);
// Executors of processing meta
List<VariableExpressionExecutor> processVariableExpressionExecutors = new ArrayList<>();
Map<TimePeriod.Duration, List<ExpressionExecutor>> processExpressionExecutorsMap = new HashMap<>();
Map<TimePeriod.Duration, List<ExpressionExecutor>> processExpressionExecutorsMapForFind = new HashMap<>();
aggregationDurations.forEach(incrementalDuration -> {
processExpressionExecutorsMap.put(incrementalDuration, constructProcessExpressionExecutors(siddhiQueryContext, tableMap, baseAggregatorBeginIndex, finalBaseExpressions, incomingOutputStreamDefinition, processedMetaStreamEvent, processVariableExpressionExecutors, isProcessingOnExternalTime, incrementalDuration, isDistributed, shardId, isLatestEventColAdded));
processExpressionExecutorsMapForFind.put(incrementalDuration, constructProcessExpressionExecutors(siddhiQueryContext, tableMap, baseAggregatorBeginIndex, finalBaseExpressions, incomingOutputStreamDefinition, processedMetaStreamEvent, processVariableExpressionExecutors, isProcessingOnExternalTime, incrementalDuration, isDistributed, shardId, isLatestEventColAdded));
});
ExpressionExecutor shouldUpdateTimestamp = null;
if (isLatestEventColAdded) {
Expression shouldUpdateTimestampExp = new Variable(AGG_LAST_TIMESTAMP_COL);
shouldUpdateTimestamp = ExpressionParser.parseExpression(shouldUpdateTimestampExp, processedMetaStreamEvent, 0, tableMap, processVariableExpressionExecutors, false, 0, ProcessingMode.BATCH, false, siddhiQueryContext);
}
List<ExpressionExecutor> outputExpressionExecutors = outputExpressions.stream().map(expression -> ExpressionParser.parseExpression(expression, processedMetaStreamEvent, 0, tableMap, processVariableExpressionExecutors, isGroupBy, 0, ProcessingMode.BATCH, false, siddhiQueryContext)).collect(Collectors.toList());
// Create group by key generator
Map<TimePeriod.Duration, GroupByKeyGenerator> groupByKeyGeneratorMap = new HashMap<>();
aggregationDurations.forEach(incrementalDuration -> {
GroupByKeyGenerator groupByKeyGenerator = null;
if (isProcessingOnExternalTime || isGroupBy) {
List<Expression> groupByExpressionList = new ArrayList<>();
if (isProcessingOnExternalTime) {
Expression externalTimestampExpression = AttributeFunction.function("incrementalAggregator", "getAggregationStartTime", new Variable(AGG_EXTERNAL_TIMESTAMP_COL), new StringConstant(incrementalDuration.name()));
groupByExpressionList.add(externalTimestampExpression);
}
groupByExpressionList.addAll(groupByVariableList.stream().map(groupByVariable -> (Expression) groupByVariable).collect(Collectors.toList()));
groupByKeyGenerator = new GroupByKeyGenerator(groupByExpressionList, processedMetaStreamEvent, SiddhiConstants.UNKNOWN_STATE, tableMap, processVariableExpressionExecutors, siddhiQueryContext);
}
groupByKeyGeneratorMap.put(incrementalDuration, groupByKeyGenerator);
});
// GroupBy for reading
Map<TimePeriod.Duration, GroupByKeyGenerator> groupByKeyGeneratorMapForReading = new HashMap<>();
if (isDistributed && !isProcessingOnExternalTime) {
aggregationDurations.forEach(incrementalDuration -> {
List<Expression> groupByExpressionList = new ArrayList<>();
Expression timestampExpression = AttributeFunction.function("incrementalAggregator", "getAggregationStartTime", new Variable(AGG_START_TIMESTAMP_COL), new StringConstant(incrementalDuration.name()));
groupByExpressionList.add(timestampExpression);
if (isGroupBy) {
groupByExpressionList.addAll(groupByVariableList.stream().map(groupByVariable -> (Expression) groupByVariable).collect(Collectors.toList()));
}
GroupByKeyGenerator groupByKeyGenerator = new GroupByKeyGenerator(groupByExpressionList, processedMetaStreamEvent, SiddhiConstants.UNKNOWN_STATE, tableMap, processVariableExpressionExecutors, siddhiQueryContext);
groupByKeyGeneratorMapForReading.put(incrementalDuration, groupByKeyGenerator);
});
} else {
groupByKeyGeneratorMapForReading.putAll(groupByKeyGeneratorMap);
}
// Create new scheduler
EntryValveExecutor entryValveExecutor = new EntryValveExecutor(siddhiAppContext);
LockWrapper lockWrapper = new LockWrapper(aggregatorName);
lockWrapper.setLock(new ReentrantLock());
Scheduler scheduler = SchedulerParser.parse(entryValveExecutor, siddhiQueryContext);
scheduler.init(lockWrapper, aggregatorName);
scheduler.setStreamEventFactory(new StreamEventFactory(processedMetaStreamEvent));
QueryParserHelper.reduceMetaComplexEvent(incomingMetaStreamEvent);
QueryParserHelper.reduceMetaComplexEvent(processedMetaStreamEvent);
QueryParserHelper.updateVariablePosition(incomingMetaStreamEvent, incomingVariableExpressionExecutors);
QueryParserHelper.updateVariablePosition(processedMetaStreamEvent, processVariableExpressionExecutors);
Map<TimePeriod.Duration, Table> aggregationTables = initDefaultTables(aggregatorName, aggregationDurations, processedMetaStreamEvent.getOutputStreamDefinition(), siddhiAppRuntimeBuilder, aggregationDefinition.getAnnotations(), groupByVariableList, isProcessingOnExternalTime, isDistributed);
Map<TimePeriod.Duration, Executor> incrementalExecutorMap = buildIncrementalExecutors(processedMetaStreamEvent, processExpressionExecutorsMap, groupByKeyGeneratorMap, aggregationDurations, aggregationTables, siddhiQueryContext, aggregatorName, shouldUpdateTimestamp, timeZone, isPersistedAggregation, incomingOutputStreamDefinition, isDistributed, shardId, isProcessingOnExternalTime, aggregationDefinition, configManager, groupByVariableList, isReadOnly);
isOptimisedLookup = isOptimisedLookup && aggregationTables.get(aggregationDurations.get(0)) instanceof QueryableProcessor;
List<String> groupByVariablesList = groupByVariableList.stream().map(Variable::getAttributeName).collect(Collectors.toList());
List<OutputAttribute> defaultSelectorList = new ArrayList<>();
if (isOptimisedLookup) {
defaultSelectorList = incomingOutputStreamDefinition.getAttributeList().stream().map((attribute) -> new OutputAttribute(new Variable(attribute.getName()))).collect(Collectors.toList());
}
IncrementalDataPurger incrementalDataPurger = new IncrementalDataPurger();
incrementalDataPurger.init(aggregationDefinition, new StreamEventFactory(processedMetaStreamEvent), aggregationTables, isProcessingOnExternalTime, siddhiQueryContext, aggregationDurations, timeZone, windowMap, aggregationMap);
// Recreate in-memory data from tables
IncrementalExecutorsInitialiser incrementalExecutorsInitialiser = new IncrementalExecutorsInitialiser(aggregationDurations, aggregationTables, incrementalExecutorMap, isDistributed, shardId, siddhiAppContext, processedMetaStreamEvent, tableMap, windowMap, aggregationMap, timeZone, isReadOnly, isPersistedAggregation);
IncrementalExecutor rootIncrementalExecutor = (IncrementalExecutor) incrementalExecutorMap.get(aggregationDurations.get(0));
rootIncrementalExecutor.setScheduler(scheduler);
// Connect entry valve to root incremental executor
entryValveExecutor.setNextExecutor(rootIncrementalExecutor);
QueryParserHelper.initStreamRuntime(streamRuntime, incomingMetaStreamEvent, lockWrapper, aggregatorName);
LatencyTracker latencyTrackerFind = null;
LatencyTracker latencyTrackerInsert = null;
ThroughputTracker throughputTrackerFind = null;
ThroughputTracker throughputTrackerInsert = null;
if (siddhiAppContext.getStatisticsManager() != null) {
latencyTrackerFind = QueryParserHelper.createLatencyTracker(siddhiAppContext, aggregationDefinition.getId(), METRIC_INFIX_AGGREGATIONS, METRIC_TYPE_FIND);
latencyTrackerInsert = QueryParserHelper.createLatencyTracker(siddhiAppContext, aggregationDefinition.getId(), METRIC_INFIX_AGGREGATIONS, METRIC_TYPE_INSERT);
throughputTrackerFind = QueryParserHelper.createThroughputTracker(siddhiAppContext, aggregationDefinition.getId(), METRIC_INFIX_AGGREGATIONS, METRIC_TYPE_FIND);
throughputTrackerInsert = QueryParserHelper.createThroughputTracker(siddhiAppContext, aggregationDefinition.getId(), METRIC_INFIX_AGGREGATIONS, METRIC_TYPE_INSERT);
}
AggregationRuntime aggregationRuntime = new AggregationRuntime(aggregationDefinition, isProcessingOnExternalTime, isDistributed, aggregationDurations, incrementalExecutorMap, aggregationTables, outputExpressionExecutors, processExpressionExecutorsMapForFind, shouldUpdateTimestamp, groupByKeyGeneratorMapForReading, isOptimisedLookup, defaultSelectorList, groupByVariablesList, isLatestEventColAdded, baseAggregatorBeginIndex, finalBaseExpressions, incrementalDataPurger, incrementalExecutorsInitialiser, ((SingleStreamRuntime) streamRuntime), processedMetaStreamEvent, latencyTrackerFind, throughputTrackerFind, timeZone);
streamRuntime.setCommonProcessor(new IncrementalAggregationProcessor(aggregationRuntime, incomingExpressionExecutors, processedMetaStreamEvent, latencyTrackerInsert, throughputTrackerInsert, siddhiAppContext));
return aggregationRuntime;
} catch (Throwable t) {
ExceptionUtil.populateQueryContext(t, aggregationDefinition, siddhiAppContext);
throw t;
}
}
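Finally, a hedged end-to-end sketch of what this parse method wires up: an incremental aggregation fed from a stream and read back with a within/per on-demand query. All names, the within range, and the "days" granularity are illustrative; the read goes through the FIND branch of OnDemandQueryParser shown earlier.

import io.siddhi.core.SiddhiAppRuntime;
import io.siddhi.core.SiddhiManager;
import io.siddhi.core.event.Event;
import io.siddhi.core.util.EventPrinter;

public class AggregationRuntimeSketch {
    public static void main(String[] args) throws InterruptedException {
        SiddhiManager siddhiManager = new SiddhiManager();
        String app = "define stream TradeStream (symbol string, price double, volume long); " +
                "define aggregation TradeAggregation " +
                "from TradeStream " +
                "select symbol, avg(price) as avgPrice, sum(volume) as totalVolume " +
                "group by symbol " +
                "aggregate every sec ... year;";
        SiddhiAppRuntime runtime = siddhiManager.createSiddhiAppRuntime(app);
        runtime.start();
        runtime.getInputHandler("TradeStream").send(new Object[]{"WSO2", 55.6, 100L});
        runtime.getInputHandler("TradeStream").send(new Object[]{"WSO2", 57.1, 200L});
        Thread.sleep(100);
        // Read the per-day buckets back; the within range must cover the event timestamps.
        Event[] events = runtime.query("from TradeAggregation " +
                "within \"1970-01-01 00:00:00 +00:00\", \"2070-01-01 00:00:00 +00:00\" " +
                "per \"days\" select symbol, avgPrice, totalVolume");
        EventPrinter.print(events);
        runtime.shutdown();
    }
}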