Use of io.siddhi.core.executor.ConstantExpressionExecutor in project siddhi by wso2.
In class AggregationRuntime, method compileExpression:
public CompiledCondition compileExpression(Expression expression, Within within, Expression per, List<Variable> queryGroupByList, MatchingMetaInfoHolder matchingMetaInfoHolder, List<VariableExpressionExecutor> variableExpressionExecutors, Map<String, Table> tableMap, SiddhiQueryContext siddhiQueryContext) {
String aggregationName = aggregationDefinition.getId();
boolean isOptimisedTableLookup = isOptimisedLookup;
Map<TimePeriod.Duration, CompiledCondition> withinTableCompiledConditions = new HashMap<>();
CompiledCondition withinInMemoryCompileCondition;
CompiledCondition onCompiledCondition;
List<Attribute> additionalAttributes = new ArrayList<>();
// Define additional attribute list
additionalAttributes.add(new Attribute("_START", Attribute.Type.LONG));
additionalAttributes.add(new Attribute("_END", Attribute.Type.LONG));
int lowerGranularitySize = this.activeIncrementalDurations.size() - 1;
List<String> lowerGranularityAttributes = new ArrayList<>();
if (isDistributed) {
// for values calculated in-memory in the shards
for (int i = 0; i < lowerGranularitySize; i++) {
String attributeName = "_AGG_TIMESTAMP_FILTER_" + i;
additionalAttributes.add(new Attribute(attributeName, Attribute.Type.LONG));
lowerGranularityAttributes.add(attributeName);
}
}
// Get table definition. Table definitions for all the tables used to persist aggregates are similar.
// Therefore it's enough to get the definition from one table.
AbstractDefinition tableDefinition = aggregationTables.get(activeIncrementalDurations.get(0)).getTableDefinition();
boolean isOnDemandQuery = matchingMetaInfoHolder.getMetaStateEvent().getMetaStreamEvents().length == 1;
// Alter existing meta stream event or create new one if a meta stream doesn't exist
// After calling this method the original MatchingMetaInfoHolder's meta stream event would be altered
// Alter the meta info holder to contain both the stream event and the aggregate when it's an on-demand query
MetaStreamEvent metaStreamEventForTableLookups;
if (isOnDemandQuery) {
metaStreamEventForTableLookups = alterMetaStreamEvent(true, new MetaStreamEvent(), additionalAttributes);
matchingMetaInfoHolder = alterMetaInfoHolderForOnDemandQuery(metaStreamEventForTableLookups, matchingMetaInfoHolder);
} else {
metaStreamEventForTableLookups = alterMetaStreamEvent(false, matchingMetaInfoHolder.getMetaStateEvent().getMetaStreamEvent(0), additionalAttributes);
}
// Create new MatchingMetaInfoHolder containing newMetaStreamEventWithStartEnd and table meta event
String aggReferenceId = matchingMetaInfoHolder.getMetaStateEvent().getMetaStreamEvent(1).getInputReferenceId();
String referenceName = aggReferenceId == null ? aggregationName : aggReferenceId;
MetaStreamEvent metaStoreEventForTableLookups = createMetaStoreEvent(tableDefinition, referenceName);
// Create new MatchingMetaInfoHolder containing metaStreamEventForTableLookups and table meta event
MatchingMetaInfoHolder metaInfoHolderForTableLookups = createNewStreamTableMetaInfoHolder(metaStreamEventForTableLookups, metaStoreEventForTableLookups);
// Create per expression executor
ExpressionExecutor perExpressionExecutor;
if (per != null) {
perExpressionExecutor = ExpressionParser.parseExpression(per, matchingMetaInfoHolder.getMetaStateEvent(), matchingMetaInfoHolder.getCurrentState(), tableMap, variableExpressionExecutors, false, 0, ProcessingMode.BATCH, false, siddhiQueryContext);
if (perExpressionExecutor.getReturnType() != Attribute.Type.STRING) {
throw new SiddhiAppCreationException("Query " + siddhiQueryContext.getName() + "'s per value expected a string but found " + perExpressionExecutor.getReturnType(), per.getQueryContextStartIndex(), per.getQueryContextEndIndex());
}
// Additionally verify the 'per' time function at compile time if it is a constant
if (perExpressionExecutor instanceof ConstantExpressionExecutor) {
String perValue = ((ConstantExpressionExecutor) perExpressionExecutor).getValue().toString();
try {
normalizeDuration(perValue);
} catch (SiddhiAppValidationException e) {
throw new SiddhiAppValidationException("Aggregation Query's per value is expected to be of a valid time function of the " + "following " + TimePeriod.Duration.SECONDS + ", " + TimePeriod.Duration.MINUTES + ", " + TimePeriod.Duration.HOURS + ", " + TimePeriod.Duration.DAYS + ", " + TimePeriod.Duration.MONTHS + ", " + TimePeriod.Duration.YEARS + ".");
}
}
} else {
throw new SiddhiAppCreationException("Syntax Error: Aggregation join query must contain a `per` " + "definition for granularity");
}
// Create start and end time expression
Expression startEndTimeExpression;
ExpressionExecutor startTimeEndTimeExpressionExecutor;
if (within != null) {
if (within.getTimeRange().size() == 1) {
startEndTimeExpression = new AttributeFunction("incrementalAggregator", "startTimeEndTime", within.getTimeRange().get(0));
} else {
// within.getTimeRange().size() == 2
startEndTimeExpression = new AttributeFunction("incrementalAggregator", "startTimeEndTime", within.getTimeRange().get(0), within.getTimeRange().get(1));
}
startTimeEndTimeExpressionExecutor = ExpressionParser.parseExpression(startEndTimeExpression, matchingMetaInfoHolder.getMetaStateEvent(), matchingMetaInfoHolder.getCurrentState(), tableMap, variableExpressionExecutors, false, 0, ProcessingMode.BATCH, false, siddhiQueryContext);
} else {
throw new SiddhiAppCreationException("Syntax Error : Aggregation read query must contain a `within` " + "definition for filtering of aggregation data.");
}
// Create within expression
Expression timeFilterExpression;
if (isProcessingOnExternalTime) {
timeFilterExpression = Expression.variable(AGG_EXTERNAL_TIMESTAMP_COL);
} else {
timeFilterExpression = Expression.variable(AGG_START_TIMESTAMP_COL);
}
Expression withinExpression;
Expression start = Expression.variable(additionalAttributes.get(0).getName());
Expression end = Expression.variable(additionalAttributes.get(1).getName());
Expression compareWithStartTime = Compare.compare(start, Compare.Operator.LESS_THAN_EQUAL, timeFilterExpression);
Expression compareWithEndTime = Compare.compare(timeFilterExpression, Compare.Operator.LESS_THAN, end);
withinExpression = Expression.and(compareWithStartTime, compareWithEndTime);
List<ExpressionExecutor> timestampFilterExecutors = new ArrayList<>();
if (isDistributed) {
for (int i = 0; i < lowerGranularitySize; i++) {
Expression[] expressionArray = new Expression[] { new AttributeFunction("", "currentTimeMillis", null), Expression.value(this.activeIncrementalDurations.get(i + 1).toString()) };
Expression filterExpression = new AttributeFunction("incrementalAggregator", "getAggregationStartTime", expressionArray);
timestampFilterExecutors.add(ExpressionParser.parseExpression(filterExpression, matchingMetaInfoHolder.getMetaStateEvent(), matchingMetaInfoHolder.getCurrentState(), tableMap, variableExpressionExecutors, false, 0, ProcessingMode.BATCH, false, siddhiQueryContext));
}
}
// Create compile condition per each table used to persist aggregates.
// These compile conditions are used to check whether the aggregates in tables are within the given duration.
// Combine with and on condition for table query
boolean shouldApplyReducedCondition = false;
Expression reducedExpression = null;
// Check whether an on condition exists (the expression is not just a boolean constant)
if (!(expression instanceof BoolConstant)) {
// For abstract queryable table
AggregationExpressionBuilder aggregationExpressionBuilder = new AggregationExpressionBuilder(expression);
AggregationExpressionVisitor expressionVisitor = new AggregationExpressionVisitor(metaStreamEventForTableLookups.getInputReferenceId(), metaStreamEventForTableLookups.getLastInputDefinition().getAttributeList(), this.tableAttributesNameList);
aggregationExpressionBuilder.build(expressionVisitor);
shouldApplyReducedCondition = expressionVisitor.applyReducedExpression();
reducedExpression = expressionVisitor.getReducedExpression();
}
Expression withinExpressionTable;
if (shouldApplyReducedCondition) {
withinExpressionTable = Expression.and(withinExpression, reducedExpression);
} else {
withinExpressionTable = withinExpression;
}
List<Variable> queryGroupByListCopy = new ArrayList<>(queryGroupByList);
Variable timestampVariable = new Variable(AGG_START_TIMESTAMP_COL);
List<String> queryGroupByNamesList = queryGroupByListCopy.stream().map(Variable::getAttributeName).collect(Collectors.toList());
boolean queryGroupByContainsTimestamp = queryGroupByNamesList.remove(AGG_START_TIMESTAMP_COL);
boolean isQueryGroupBySameAsAggGroupBy = queryGroupByListCopy.isEmpty() || (queryGroupByListCopy.contains(timestampVariable) && queryGroupByNamesList.equals(groupByVariablesList));
List<VariableExpressionExecutor> variableExpExecutorsForTableLookups = new ArrayList<>();
Map<TimePeriod.Duration, CompiledSelection> withinTableCompiledSelection = new HashMap<>();
if (isOptimisedTableLookup) {
Selector selector = Selector.selector();
List<Variable> groupByList = new ArrayList<>();
if (!isQueryGroupBySameAsAggGroupBy) {
if (queryGroupByContainsTimestamp) {
if (isProcessingOnExternalTime) {
groupByList.add(new Variable(AGG_EXTERNAL_TIMESTAMP_COL));
} else {
groupByList.add(new Variable(AGG_START_TIMESTAMP_COL));
}
// Remove timestamp to process the rest
queryGroupByListCopy.remove(timestampVariable);
}
for (Variable queryGroupBy : queryGroupByListCopy) {
String referenceId = queryGroupBy.getStreamId();
if (referenceId == null) {
if (tableAttributesNameList.contains(queryGroupBy.getAttributeName())) {
groupByList.add(queryGroupBy);
}
} else if (referenceId.equalsIgnoreCase(referenceName)) {
groupByList.add(queryGroupBy);
}
}
// If the query group-bys are based only on the joining stream
if (groupByList.isEmpty()) {
isQueryGroupBySameAsAggGroupBy = true;
}
}
groupByList.forEach((groupBy) -> groupBy.setStreamId(referenceName));
selector.addGroupByList(groupByList);
List<OutputAttribute> selectorList;
if (!isQueryGroupBySameAsAggGroupBy) {
selectorList = constructSelectorList(isProcessingOnExternalTime, isDistributed, isLatestEventColAdded, baseAggregatorBeginIndex, groupByVariablesList.size(), finalBaseExpressionsList, tableDefinition, groupByList);
} else {
selectorList = defaultSelectorList;
}
for (OutputAttribute outputAttribute : selectorList) {
if (outputAttribute.getExpression() instanceof Variable) {
((Variable) outputAttribute.getExpression()).setStreamId(referenceName);
} else {
for (Expression parameter : ((AttributeFunction) outputAttribute.getExpression()).getParameters()) {
((Variable) parameter).setStreamId(referenceName);
}
}
}
selector.addSelectionList(selectorList);
try {
aggregationTables.entrySet().forEach((durationTableEntry -> {
CompiledSelection compiledSelection = ((QueryableProcessor) durationTableEntry.getValue()).compileSelection(selector, tableDefinition.getAttributeList(), metaInfoHolderForTableLookups, variableExpExecutorsForTableLookups, tableMap, siddhiQueryContext);
withinTableCompiledSelection.put(durationTableEntry.getKey(), compiledSelection);
}));
} catch (SiddhiAppCreationException | SiddhiAppValidationException | QueryableRecordTableException e) {
if (LOG.isDebugEnabled()) {
LOG.debug("Aggregation Query optimization failed for aggregation: '" + aggregationName + "'. " + "Creating table lookup query in normal mode. Reason for failure: " + e.getMessage(), e);
}
isOptimisedTableLookup = false;
}
}
for (Map.Entry<TimePeriod.Duration, Table> entry : aggregationTables.entrySet()) {
CompiledCondition withinTableCompileCondition = entry.getValue().compileCondition(withinExpressionTable, metaInfoHolderForTableLookups, variableExpExecutorsForTableLookups, tableMap, siddhiQueryContext);
withinTableCompiledConditions.put(entry.getKey(), withinTableCompileCondition);
}
// Create compile condition for in-memory data.
// This compile condition is used to check whether the running aggregates (in-memory data)
// are within the given duration
withinInMemoryCompileCondition = OperatorParser.constructOperator(new ComplexEventChunk<>(), withinExpression, metaInfoHolderForTableLookups, variableExpExecutorsForTableLookups, tableMap, siddhiQueryContext);
// Create compile condition for in-memory data, in case of distributed
// Look at the lower level granularities
Map<TimePeriod.Duration, CompiledCondition> withinTableLowerGranularityCompileCondition = new HashMap<>();
Expression lowerGranularity;
if (isDistributed) {
for (int i = 0; i < lowerGranularitySize; i++) {
if (isProcessingOnExternalTime) {
lowerGranularity = Expression.and(Expression.compare(Expression.variable("AGG_TIMESTAMP"), Compare.Operator.GREATER_THAN_EQUAL, Expression.variable(lowerGranularityAttributes.get(i))), withinExpressionTable);
} else {
if (shouldApplyReducedCondition) {
lowerGranularity = Expression.and(Expression.compare(Expression.variable("AGG_TIMESTAMP"), Compare.Operator.GREATER_THAN_EQUAL, Expression.variable(lowerGranularityAttributes.get(i))), reducedExpression);
} else {
lowerGranularity = Expression.compare(Expression.variable("AGG_TIMESTAMP"), Compare.Operator.GREATER_THAN_EQUAL, Expression.variable(lowerGranularityAttributes.get(i)));
}
}
TimePeriod.Duration duration = this.activeIncrementalDurations.get(i);
String tableName = aggregationName + "_" + duration.toString();
CompiledCondition compiledCondition = tableMap.get(tableName).compileCondition(lowerGranularity, metaInfoHolderForTableLookups, variableExpExecutorsForTableLookups, tableMap, siddhiQueryContext);
withinTableLowerGranularityCompileCondition.put(duration, compiledCondition);
}
}
QueryParserHelper.reduceMetaComplexEvent(metaInfoHolderForTableLookups.getMetaStateEvent());
// On compile condition.
// After finding all the aggregates belonging to within duration, the final on condition (given as
// "on stream1.name == aggregator.nickName ..." in the join query) must be executed on that data.
// This condition is used for that purpose.
onCompiledCondition = OperatorParser.constructOperator(new ComplexEventChunk<>(), expression, matchingMetaInfoHolder, variableExpressionExecutors, tableMap, siddhiQueryContext);
return new IncrementalAggregateCompileCondition(isOnDemandQuery, aggregationName, isProcessingOnExternalTime, isDistributed, activeIncrementalDurations, aggregationTables, outputExpressionExecutors, isOptimisedTableLookup, withinTableCompiledSelection, withinTableCompiledConditions, withinInMemoryCompileCondition, withinTableLowerGranularityCompileCondition, onCompiledCondition, additionalAttributes, perExpressionExecutor, startTimeEndTimeExpressionExecutor, timestampFilterExecutors, aggregateMetaSteamEvent, matchingMetaInfoHolder, metaInfoHolderForTableLookups, variableExpExecutorsForTableLookups);
}
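The 'per' handling above is the core ConstantExpressionExecutor pattern in this method: when the parsed executor turns out to be a constant, its value can be validated once at compile time instead of on every event. Below is a minimal standalone sketch of that check; validatePerGranularity is a hypothetical helper, normalizeDuration stands for the same statically imported utility called in compileExpression above, and the imports assume the Siddhi 5.x package layout.

import io.siddhi.core.exception.SiddhiAppCreationException;
import io.siddhi.core.executor.ConstantExpressionExecutor;
import io.siddhi.core.executor.ExpressionExecutor;
import io.siddhi.query.api.definition.Attribute;

// Sketch: compile-time validation of a constant 'per' granularity value.
static void validatePerGranularity(ExpressionExecutor perExecutor) {
    if (perExecutor.getReturnType() != Attribute.Type.STRING) {
        throw new SiddhiAppCreationException("'per' must evaluate to a string granularity");
    }
    if (perExecutor instanceof ConstantExpressionExecutor) {
        // The constant value is available without a runtime event, so it can be checked here;
        // normalizeDuration mirrors the static call used in compileExpression above.
        String perValue = ((ConstantExpressionExecutor) perExecutor).getValue().toString();
        normalizeDuration(perValue);
    }
    // A non-constant 'per' can only be evaluated per event at runtime.
}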
Use of io.siddhi.core.executor.ConstantExpressionExecutor in project siddhi by wso2.
In class SelectorParser, method parse:
/**
* Parse Selector portion of a query and return corresponding QuerySelector.
*
* @param selector selector to be parsed
* @param outputStream output stream
* @param metaComplexEvent Meta event used to collect execution info of stream associated with query
* @param tableMap Table Map
* @param variableExpressionExecutors variable expression executors
* @param metaPosition helps to identify the meta position of aggregates
* @param processingMode processing mode of the query
* @param outputExpectsExpiredEvents whether expired events are sent as output
* @param siddhiQueryContext current siddhi query context
* @return QuerySelector
*/
public static QuerySelector parse(Selector selector, OutputStream outputStream, MetaComplexEvent metaComplexEvent, Map<String, Table> tableMap, List<VariableExpressionExecutor> variableExpressionExecutors, int metaPosition, ProcessingMode processingMode, boolean outputExpectsExpiredEvents, SiddhiQueryContext siddhiQueryContext) {
boolean currentOn = false;
boolean expiredOn = false;
String id = null;
if (outputStream.getOutputEventType() == OutputStream.OutputEventType.CURRENT_EVENTS || outputStream.getOutputEventType() == OutputStream.OutputEventType.ALL_EVENTS) {
currentOn = true;
}
if (outputStream.getOutputEventType() == OutputStream.OutputEventType.EXPIRED_EVENTS || outputStream.getOutputEventType() == OutputStream.OutputEventType.ALL_EVENTS) {
expiredOn = true;
}
boolean groupBy = !selector.getGroupByList().isEmpty();
id = outputStream.getId();
containsAggregatorThreadLocal.remove();
QuerySelector querySelector = new QuerySelector(id, selector, currentOn, expiredOn, siddhiQueryContext);
List<AttributeProcessor> attributeProcessors = getAttributeProcessors(selector, id, metaComplexEvent, tableMap, variableExpressionExecutors, outputStream, metaPosition, processingMode, outputExpectsExpiredEvents, groupBy, siddhiQueryContext);
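// containsAggregatorThreadLocal is set to "true" while the attribute processors are parsed if any
// selected attribute uses an attribute aggregator; it is cleared before and after each read.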
querySelector.setAttributeProcessorList(attributeProcessors, "true".equals(containsAggregatorThreadLocal.get()));
containsAggregatorThreadLocal.remove();
ConditionExpressionExecutor havingCondition = generateHavingExecutor(selector.getHavingExpression(), metaComplexEvent, tableMap, variableExpressionExecutors, siddhiQueryContext);
querySelector.setHavingConditionExecutor(havingCondition, "true".equals(containsAggregatorThreadLocal.get()));
containsAggregatorThreadLocal.remove();
if (!selector.getGroupByList().isEmpty()) {
List<Expression> groupByExpressionList = selector.getGroupByList().stream().map(groupByVariable -> (Expression) groupByVariable).collect(Collectors.toList());
querySelector.setGroupByKeyGenerator(new GroupByKeyGenerator(groupByExpressionList, metaComplexEvent, SiddhiConstants.UNKNOWN_STATE, null, variableExpressionExecutors, siddhiQueryContext));
}
if (!selector.getOrderByList().isEmpty()) {
querySelector.setOrderByEventComparator(new OrderByEventComparator(selector.getOrderByList(), metaComplexEvent, SiddhiConstants.HAVING_STATE, null, variableExpressionExecutors, siddhiQueryContext));
}
if (selector.getLimit() != null) {
ExpressionExecutor expressionExecutor = ExpressionParser.parseExpression((Expression) selector.getLimit(), metaComplexEvent, SiddhiConstants.HAVING_STATE, tableMap, variableExpressionExecutors, false, 0, ProcessingMode.BATCH, false, siddhiQueryContext);
containsAggregatorThreadLocal.remove();
querySelector.setLimit(((Number) (((ConstantExpressionExecutor) expressionExecutor).getValue())).longValue());
}
if (selector.getOffset() != null) {
ExpressionExecutor expressionExecutor = ExpressionParser.parseExpression((Expression) selector.getOffset(), metaComplexEvent, SiddhiConstants.HAVING_STATE, tableMap, variableExpressionExecutors, false, 0, ProcessingMode.BATCH, false, siddhiQueryContext);
containsAggregatorThreadLocal.remove();
querySelector.setOffset(((Number) (((ConstantExpressionExecutor) expressionExecutor).getValue())).longValue());
}
return querySelector;
}
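Here ConstantExpressionExecutor is read eagerly: limit and offset must be literals, so their values are extracted once while the query is parsed rather than evaluated per event. Below is a minimal sketch of that extraction; readConstantLong is a hypothetical helper and the imports assume the Siddhi 5.x package layout.

import io.siddhi.core.executor.ConstantExpressionExecutor;
import io.siddhi.core.executor.ExpressionExecutor;

// Sketch: extract a long from an executor that is expected to be constant,
// mirroring how setLimit and setOffset are fed above.
static long readConstantLong(ExpressionExecutor executor) {
    if (!(executor instanceof ConstantExpressionExecutor)) {
        // Hypothetical guard; the parser above relies on limit/offset being literal values.
        throw new IllegalArgumentException("limit/offset must be a constant expression");
    }
    return ((Number) ((ConstantExpressionExecutor) executor).getValue()).longValue();
}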
Use of io.siddhi.core.executor.ConstantExpressionExecutor in project siddhi by wso2.
In class JoinInputStreamParser, method insertJoinProcessorsAndGetFindable:
private static FindableProcessor insertJoinProcessorsAndGetFindable(JoinProcessor preJoinProcessor, JoinProcessor postJoinProcessor, SingleStreamRuntime streamRuntime, boolean outputExpectsExpiredEvents, InputStream inputStream, SiddhiQueryContext siddhiQueryContext) {
Processor lastProcessor = streamRuntime.getProcessorChain();
Processor prevLastProcessor = null;
boolean containFindable = false;
if (lastProcessor != null) {
containFindable = lastProcessor instanceof FindableProcessor;
while (lastProcessor.getNextProcessor() != null) {
prevLastProcessor = lastProcessor;
lastProcessor = lastProcessor.getNextProcessor();
if (!containFindable) {
containFindable = lastProcessor instanceof FindableProcessor;
}
}
}
if (!containFindable) {
try {
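// No FindableProcessor in the chain: add an implicit empty window so the join has something to look up.
// The constant executor below supplies its single INT parameter.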
WindowProcessor windowProcessor = new EmptyWindowProcessor();
ExpressionExecutor[] expressionExecutors = new ExpressionExecutor[1];
expressionExecutors[0] = new ConstantExpressionExecutor(0, Attribute.Type.INT);
ConfigReader configReader = siddhiQueryContext.getSiddhiContext().getConfigManager().generateConfigReader("", "lengthBatch");
windowProcessor.initProcessor(((MetaStreamEvent) streamRuntime.getMetaComplexEvent()), expressionExecutors, configReader, outputExpectsExpiredEvents, true, false, inputStream, siddhiQueryContext);
if (lastProcessor != null) {
prevLastProcessor = lastProcessor;
prevLastProcessor.setNextProcessor(windowProcessor);
lastProcessor = windowProcessor;
} else {
lastProcessor = windowProcessor;
}
} catch (Throwable t) {
throw new SiddhiAppCreationException(t);
}
}
if (lastProcessor instanceof FindableProcessor) {
if (prevLastProcessor != null) {
prevLastProcessor.setNextProcessor(preJoinProcessor);
} else {
streamRuntime.setProcessorChain(preJoinProcessor);
}
preJoinProcessor.setNextProcessor(lastProcessor);
lastProcessor.setNextProcessor(postJoinProcessor);
return (FindableProcessor) lastProcessor;
} else {
throw new OperationNotSupportedException("Stream " + ((MetaStreamEvent) streamRuntime.getMetaComplexEvent()).getLastInputDefinition().getId() + "'s last processor " + lastProcessor.getClass().getCanonicalName() + " is not an instance of " + FindableProcessor.class.getCanonicalName() + " hence join cannot be proceed");
}
}
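The join parser builds the implicit window's parameter list by hand: a single ConstantExpressionExecutor holding 0 stands in for the length argument that a user-written window would normally supply as a literal. Below is a minimal sketch of that construction; the imports assume the Siddhi 5.x package layout.

import io.siddhi.core.executor.ConstantExpressionExecutor;
import io.siddhi.core.executor.ExpressionExecutor;
import io.siddhi.query.api.definition.Attribute;

// Sketch: a pre-evaluated parameter array for a window that takes one INT argument.
// Wrapping fixed values as constant executors lets the runtime skip expression parsing for them.
static ExpressionExecutor[] zeroLengthWindowParameters() {
    ExpressionExecutor[] expressionExecutors = new ExpressionExecutor[1];
    expressionExecutors[0] = new ConstantExpressionExecutor(0, Attribute.Type.INT);
    return expressionExecutors;
}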
Use of io.siddhi.core.executor.ConstantExpressionExecutor in project siddhi by wso2.
In class AggregationParser, method initAggregateQueryExecutor:
private static Map<TimePeriod.Duration, Processor> initAggregateQueryExecutor(List<TimePeriod.Duration> aggregationDurations, Map<TimePeriod.Duration, List<ExpressionExecutor>> processExpressionExecutorsMap, StreamDefinition incomingOutputStreamDefinition, boolean isDistributed, String shardID, boolean isProcessingOnExternalTime, SiddhiQueryContext siddhiQueryContext, AggregationDefinition aggregationDefinition, ConfigManager configManager, Map<TimePeriod.Duration, Table> aggregationTables, List<Variable> groupByVariableList) {
Map<TimePeriod.Duration, Processor> cudProcessors = new LinkedHashMap<>();
String datasourceName = AnnotationHelper.getAnnotationElement(SiddhiConstants.NAMESPACE_STORE, "datasource", aggregationDefinition.getAnnotations()).getValue();
if (datasourceName == null || datasourceName.isEmpty()) {
throw new SiddhiAppCreationException("Datasource configuration must be provided inorder to use persisted " + "aggregation mode");
}
Database databaseType = getDatabaseType(configManager, datasourceName);
if (log.isDebugEnabled()) {
log.debug("Database type " + databaseType);
}
SiddhiAppContext cudSiddhiAppContext = new SiddhiAppContext();
SiddhiContext context = new SiddhiContext();
context.setConfigManager(configManager);
cudSiddhiAppContext.setSiddhiContext(context);
StringConstant datasource = new StringConstant(datasourceName);
ConstantExpressionExecutor datasourceExecutor = new ConstantExpressionExecutor(datasource.getValue(), Attribute.Type.STRING);
Expression[] streamHandler;
ExpressionExecutor[] cudStreamProcessorInputVariables;
if (isProcessingOnExternalTime) {
streamHandler = new Expression[7];
} else {
streamHandler = new Expression[5];
}
try {
DBAggregationQueryConfigurationEntry dbAggregationQueryConfigurationEntry = DBAggregationQueryUtil.lookupCurrentQueryConfigurationEntry(databaseType);
if (log.isDebugEnabled()) {
log.debug("CUD queries for aggregation " + aggregationDefinition.getId());
}
for (int i = aggregationDurations.size() - 1; i > 0; i--) {
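// Assumption: TimePeriod.Duration declares SECONDS through YEARS in order, so ordinal() >= 3 selects
// DAYS and coarser granularities, which are the ones handled through persisted CUD queries.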
if (aggregationDurations.get(i).ordinal() >= 3) {
if (log.isDebugEnabled()) {
log.debug(" Initializing cudProcessors for duration " + aggregationDurations.get(i));
}
String databaseSelectQuery = generateDatabaseQuery(processExpressionExecutorsMap.get(aggregationDurations.get(i)), dbAggregationQueryConfigurationEntry, incomingOutputStreamDefinition, isDistributed, shardID, isProcessingOnExternalTime, aggregationTables.get(aggregationDurations.get(i)), aggregationTables.get(aggregationDurations.get(i - 1)), groupByVariableList, aggregationDurations.get(i));
StringConstant selectQuery = new StringConstant(databaseSelectQuery);
if (log.isDebugEnabled()) {
log.debug(selectQuery);
}
ConstantExpressionExecutor selectExecutor = new ConstantExpressionExecutor(selectQuery.getValue(), Attribute.Type.STRING);
Map<Attribute, int[]> cudInputStreamAttributesMap = generateCUDInputStreamAttributes(isProcessingOnExternalTime);
if (isProcessingOnExternalTime) {
cudStreamProcessorInputVariables = new ExpressionExecutor[7];
} else {
cudStreamProcessorInputVariables = new ExpressionExecutor[5];
}
cudStreamProcessorInputVariables[0] = datasourceExecutor;
cudStreamProcessorInputVariables[1] = selectExecutor;
streamHandler[0] = datasource;
streamHandler[1] = selectQuery;
MetaStreamEvent metaStreamEvent = generateCUDMetaStreamEvent(isProcessingOnExternalTime);
StreamDefinition outputStream = new StreamDefinition();
VariableExpressionExecutor variableExpressionExecutor;
int j = 0;
for (Map.Entry<Attribute, int[]> entry : cudInputStreamAttributesMap.entrySet()) {
Attribute attribute = entry.getKey();
Variable timestampVariable = new Variable(attribute.getName());
for (int position : entry.getValue()) {
streamHandler[position + 2] = timestampVariable;
variableExpressionExecutor = new VariableExpressionExecutor(attribute, 0, 0);
variableExpressionExecutor.setPosition(new int[] { 2, j });
cudStreamProcessorInputVariables[position + 2] = variableExpressionExecutor;
}
outputStream.attribute(attribute.getName(), attribute.getType());
j++;
}
StreamFunction cudStreamFunction = new StreamFunction(NAMESPACE_RDBMS, FUNCTION_NAME_CUD, streamHandler);
cudProcessors.put(aggregationDurations.get(i), getCudProcessor(cudStreamFunction, siddhiQueryContext, metaStreamEvent, cudStreamProcessorInputVariables, aggregationDurations.get(i)));
}
}
return cudProcessors;
} catch (CannotLoadConfigurationException e) {
throw new SiddhiAppCreationException("Error occurred while initializing the persisted incremental " + "aggregation. Could not load the db quires for database type " + databaseType);
}
}
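The persisted-aggregation setup mixes two executor kinds in one parameter array: constants for the datasource name and the generated query, and variable executors for the event attributes. Below is a minimal sketch of the constant half; setCudConstants is a hypothetical helper and the imports assume the Siddhi 5.x package layout.

import io.siddhi.core.executor.ConstantExpressionExecutor;
import io.siddhi.core.executor.ExpressionExecutor;
import io.siddhi.query.api.definition.Attribute;

// Sketch: the first two parameters of the CUD stream processor are fixed strings,
// so they are wrapped as constant executors once and reused for every duration.
static void setCudConstants(ExpressionExecutor[] cudParameters, String datasourceName, String selectQuery) {
    cudParameters[0] = new ConstantExpressionExecutor(datasourceName, Attribute.Type.STRING);
    cudParameters[1] = new ConstantExpressionExecutor(selectQuery, Attribute.Type.STRING);
}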
Use of io.siddhi.core.executor.ConstantExpressionExecutor in project siddhi by wso2.
In class InputParameterValidator, method validateExpressionExecutors:
/**
* Validates the extension-specific parameters of a Siddhi app against the pattern specified in the
* {@link ParameterOverload} annotation of the extension class.
*
* @param objectHavingAnnotation the object which has Extension annotation
* @param attributeExpressionExecutors the executors of each function parameters
* @throws SiddhiAppValidationException if the given parameters do not match the declared parameter overloads
*/
public static void validateExpressionExecutors(Object objectHavingAnnotation, ExpressionExecutor[] attributeExpressionExecutors) throws SiddhiAppValidationException {
Extension annotation = objectHavingAnnotation.getClass().getAnnotation(Extension.class);
if (annotation == null) {
return;
}
ParameterOverload[] parameterOverloads = annotation.parameterOverloads();
Parameter[] parameters = annotation.parameters();
String key = AnnotationHelper.createAnnotationKey(annotation);
// Count the number of mandatory parameters specified in @Extension
int mandatoryCount = 0;
Map<String, Parameter> parameterMap = new HashMap<>();
for (Parameter parameter : parameters) {
if (!parameter.optional()) {
mandatoryCount++;
}
parameterMap.put(parameter.name(), parameter);
}
// Find the matching parameter overload
ParameterOverload parameterOverload = null;
for (ParameterOverload aParameterOverload : parameterOverloads) {
String[] overloadParameterNames = aParameterOverload.parameterNames();
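// Exact-arity overload (no trailing '...'): the argument count must match and every argument's
// return type must be accepted by the corresponding declared parameter.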
if (overloadParameterNames.length == attributeExpressionExecutors.length && (overloadParameterNames.length == 0 || !overloadParameterNames[overloadParameterNames.length - 1].equals(REPETITIVE_PARAMETER_NOTATION))) {
boolean isExpectedParameterOverload = true;
for (int i = 0; i < overloadParameterNames.length; i++) {
String overloadParameterName = overloadParameterNames[i];
Parameter parameter = parameterMap.get(overloadParameterName);
boolean supportedReturnType = false;
for (DataType type : parameter.type()) {
if (attributeExpressionExecutors[i].getReturnType().toString().equalsIgnoreCase(type.toString())) {
supportedReturnType = true;
break;
}
}
if (!supportedReturnType) {
isExpectedParameterOverload = false;
break;
}
}
if (isExpectedParameterOverload) {
parameterOverload = aParameterOverload;
break;
}
} else if (overloadParameterNames.length - 1 <= attributeExpressionExecutors.length && overloadParameterNames.length > 0 && overloadParameterNames[overloadParameterNames.length - 1].equals(REPETITIVE_PARAMETER_NOTATION)) {
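// Overload ending with '...': extra trailing arguments are validated against the parameter
// declared just before the '...' marker.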
if (attributeExpressionExecutors.length > 0) {
boolean isExpectedParameterOverload = true;
for (int i = 0; i < attributeExpressionExecutors.length; i++) {
Parameter parameter = null;
String overloadParameterName = null;
if (i < overloadParameterNames.length - 1) {
overloadParameterName = overloadParameterNames[i];
} else {
overloadParameterName = overloadParameterNames[overloadParameterNames.length - 2];
}
parameter = parameterMap.get(overloadParameterName);
boolean supportedReturnType = false;
for (DataType type : parameter.type()) {
if (attributeExpressionExecutors[i].getReturnType().toString().equalsIgnoreCase(type.toString())) {
supportedReturnType = true;
break;
}
}
if (!supportedReturnType) {
isExpectedParameterOverload = false;
break;
}
}
if (isExpectedParameterOverload) {
parameterOverload = aParameterOverload;
break;
}
}
}
}
if (parameterOverload == null) {
if (parameterOverloads.length > 0) {
List<Attribute.Type> returnTypes = new ArrayList<>();
for (ExpressionExecutor expressionExecutor : attributeExpressionExecutors) {
returnTypes.add(expressionExecutor.getReturnType());
}
String formattedParamOverloadString = getSupportedParamOverloads(parameterMap, parameterOverloads);
throw new SiddhiAppValidationException("There is no parameterOverload for '" + key + "' that matches attribute types '" + returnTypes.stream().map(String::valueOf).collect(Collectors.joining(", ", "<", ">")) + "'. Supported parameter overloads are " + formattedParamOverloadString + ".");
} else {
if (mandatoryCount > attributeExpressionExecutors.length) {
throw new SiddhiAppValidationException("The '" + key + "' expects at least " + mandatoryCount + " parameters, but found only " + attributeExpressionExecutors.length + " input parameters.");
}
}
} else {
String[] overloadParameterNames = parameterOverload.parameterNames();
for (int i = 0; i < overloadParameterNames.length; i++) {
String overloadParameterName = overloadParameterNames[i];
Parameter parameter = parameterMap.get(overloadParameterName);
if (parameter != null && !parameter.dynamic() && !(attributeExpressionExecutors[i] instanceof ConstantExpressionExecutor)) {
throw new SiddhiAppValidationException("The '" + key + "' expects input parameter '" + parameter.name() + "' at position '" + i + "' to be static," + " but found a dynamic attribute.");
}
}
}
}
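The validator's final loop captures the contract extension authors rely on: a parameter declared as non-dynamic must arrive as a ConstantExpressionExecutor, so its value can be read during initialization without any event. Below is a minimal sketch of that check from the extension's side; isStatic and readStatic are hypothetical helpers, and the imports assume the Siddhi 5.x package layout.

import io.siddhi.core.executor.ConstantExpressionExecutor;
import io.siddhi.core.executor.ExpressionExecutor;

// Sketch: confirm a parameter is static before reading its value at initialization time.
static boolean isStatic(ExpressionExecutor executor) {
    return executor instanceof ConstantExpressionExecutor;
}

static Object readStatic(ExpressionExecutor executor) {
    // Safe only after isStatic() has confirmed the executor is a constant.
    return ((ConstantExpressionExecutor) executor).getValue();
}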