use of org.ballerinalang.siddhi.core.exception.SiddhiAppCreationException in project ballerina by ballerina-lang.
the class SingleInputStreamParser method generateProcessor.
public static Processor generateProcessor(StreamHandler streamHandler, MetaComplexEvent metaEvent,
                                          List<VariableExpressionExecutor> variableExpressionExecutors,
                                          SiddhiAppContext siddhiAppContext, Map<String, Table> tableMap,
                                          boolean supportsBatchProcessing, boolean outputExpectsExpiredEvents,
                                          String queryName) {
    Expression[] parameters = streamHandler.getParameters();
    MetaStreamEvent metaStreamEvent;
    int stateIndex = SiddhiConstants.UNKNOWN_STATE;
    if (metaEvent instanceof MetaStateEvent) {
        stateIndex = ((MetaStateEvent) metaEvent).getStreamEventCount() - 1;
        metaStreamEvent = ((MetaStateEvent) metaEvent).getMetaStreamEvent(stateIndex);
    } else {
        metaStreamEvent = (MetaStreamEvent) metaEvent;
    }
    ExpressionExecutor[] attributeExpressionExecutors;
    if (parameters != null) {
        if (parameters.length > 0) {
            attributeExpressionExecutors = new ExpressionExecutor[parameters.length];
            for (int i = 0, parametersLength = parameters.length; i < parametersLength; i++) {
                attributeExpressionExecutors[i] = ExpressionParser.parseExpression(parameters[i], metaEvent,
                        stateIndex, tableMap, variableExpressionExecutors, siddhiAppContext, false,
                        SiddhiConstants.CURRENT, queryName);
            }
        } else {
            List<Attribute> attributeList = metaStreamEvent.getLastInputDefinition().getAttributeList();
            int parameterSize = attributeList.size();
            attributeExpressionExecutors = new ExpressionExecutor[parameterSize];
            for (int i = 0; i < parameterSize; i++) {
                attributeExpressionExecutors[i] = ExpressionParser.parseExpression(
                        new Variable(attributeList.get(i).getName()), metaEvent, stateIndex, tableMap,
                        variableExpressionExecutors, siddhiAppContext, false, SiddhiConstants.CURRENT, queryName);
            }
        }
    } else {
        attributeExpressionExecutors = new ExpressionExecutor[0];
    }
    ConfigReader configReader;
    if (streamHandler instanceof Filter) {
        return new FilterProcessor(attributeExpressionExecutors[0]);
    } else if (streamHandler instanceof Window) {
        WindowProcessor windowProcessor = (WindowProcessor) SiddhiClassLoader.loadExtensionImplementation(
                (Extension) streamHandler, WindowProcessorExtensionHolder.getInstance(siddhiAppContext));
        configReader = siddhiAppContext.getSiddhiContext().getConfigManager()
                .generateConfigReader(((Window) streamHandler).getNamespace(), ((Window) streamHandler).getName());
        windowProcessor.initProcessor(metaStreamEvent.getLastInputDefinition(), attributeExpressionExecutors,
                configReader, siddhiAppContext, outputExpectsExpiredEvents, queryName, streamHandler);
        return windowProcessor;
    } else if (streamHandler instanceof StreamFunction) {
        AbstractStreamProcessor abstractStreamProcessor;
        configReader = siddhiAppContext.getSiddhiContext().getConfigManager()
                .generateConfigReader(((StreamFunction) streamHandler).getNamespace(),
                        ((StreamFunction) streamHandler).getName());
        if (supportsBatchProcessing) {
            try {
                abstractStreamProcessor = (StreamProcessor) SiddhiClassLoader.loadExtensionImplementation(
                        (Extension) streamHandler, StreamProcessorExtensionHolder.getInstance(siddhiAppContext));
                metaStreamEvent.addInputDefinition(abstractStreamProcessor.initProcessor(
                        metaStreamEvent.getLastInputDefinition(), attributeExpressionExecutors, configReader,
                        siddhiAppContext, outputExpectsExpiredEvents, queryName, streamHandler));
                return abstractStreamProcessor;
            } catch (SiddhiAppCreationException e) {
                if (!e.isClassLoadingIssue()) {
                    ExceptionUtil.populateQueryContext(e, streamHandler, siddhiAppContext);
                    throw e;
                }
            }
        }
        abstractStreamProcessor = (StreamFunctionProcessor) SiddhiClassLoader.loadExtensionImplementation(
                (Extension) streamHandler, StreamFunctionProcessorExtensionHolder.getInstance(siddhiAppContext));
        metaStreamEvent.addInputDefinition(abstractStreamProcessor.initProcessor(
                metaStreamEvent.getLastInputDefinition(), attributeExpressionExecutors, configReader,
                siddhiAppContext, outputExpectsExpiredEvents, queryName, streamHandler));
        return abstractStreamProcessor;
    } else {
        throw new SiddhiAppCreationException(streamHandler.getClass().getName() + " is not supported",
                streamHandler.getQueryContextStartIndex(), streamHandler.getQueryContextEndIndex());
    }
}
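For context, here is a minimal sketch of a query whose single input stream exercises two of these branches: the filter predicate is parsed into a FilterProcessor and the length window is loaded as a WindowProcessor extension. It assumes this fork keeps the standard Siddhi 4.x runtime entry points (SiddhiManager, createSiddhiAppRuntime, getInputHandler); the stream and query names are purely illustrative.

import org.ballerinalang.siddhi.core.SiddhiAppRuntime;
import org.ballerinalang.siddhi.core.SiddhiManager;

public class GenerateProcessorSketch {
    public static void main(String[] args) throws InterruptedException {
        // [price > 100] is a Filter stream handler -> FilterProcessor branch;
        // #window.length(5) is a Window stream handler -> WindowProcessor branch.
        String siddhiApp = "define stream StockStream (symbol string, price double, volume long); "
                + "@info(name = 'query1') "
                + "from StockStream[price > 100]#window.length(5) "
                + "select symbol, price "
                + "insert into OutStream;";
        SiddhiManager siddhiManager = new SiddhiManager();
        SiddhiAppRuntime runtime = siddhiManager.createSiddhiAppRuntime(siddhiApp);
        runtime.start();
        runtime.getInputHandler("StockStream").send(new Object[]{"WSO2", 105.0, 50L});
        runtime.shutdown();
        siddhiManager.shutdown();
    }
}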
use of org.ballerinalang.siddhi.core.exception.SiddhiAppCreationException in project ballerina by ballerina-lang.
the class DefinitionParserHelper method constructOptionHolder.
private static OptionHolder constructOptionHolder(StreamDefinition streamDefinition, Annotation annotation,
                                                  org.ballerinalang.siddhi.annotation.Extension extension,
                                                  String[] supportedDynamicOptions) {
    List<String> supportedDynamicOptionList = new ArrayList<>();
    if (supportedDynamicOptions != null) {
        supportedDynamicOptionList = Arrays.asList(supportedDynamicOptions);
    }
    Map<String, String> options = new HashMap<String, String>();
    Map<String, String> dynamicOptions = new HashMap<String, String>();
    for (Element element : annotation.getElements()) {
        if (Pattern.matches("(.*?)\\{\\{.*?\\}\\}(.*?)", element.getValue())) {
            if (supportedDynamicOptionList.contains(element.getKey())) {
                dynamicOptions.put(element.getKey(), element.getValue());
            } else {
                throw new SiddhiAppCreationException("'" + element.getKey() + "' is not a supported "
                        + "DynamicOption " + "for the Extension '" + extension.namespace() + ":" + extension.name()
                        + "', it only supports following as its DynamicOptions: " + supportedDynamicOptionList,
                        annotation.getQueryContextStartIndex(), annotation.getQueryContextEndIndex());
            }
        } else {
            options.put(element.getKey(), element.getValue());
        }
    }
    return new OptionHolder(streamDefinition, options, dynamicOptions, extension);
}
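The {{...}} template syntax is what marks an option value as dynamic. A standalone sketch of the same Pattern.matches check, with made-up option values:

import java.util.regex.Pattern;

public class DynamicOptionCheckSketch {
    public static void main(String[] args) {
        String dynamicOptionRegex = "(.*?)\\{\\{.*?\\}\\}(.*?)";
        // Values containing a {{...}} placeholder are routed to the dynamicOptions map
        // (and must appear in supportedDynamicOptions).
        System.out.println(Pattern.matches(dynamicOptionRegex, "{{symbol}}"));            // true
        System.out.println(Pattern.matches(dynamicOptionRegex, "topic-{{symbol}}-out"));  // true
        // Plain values go to the static options map.
        System.out.println(Pattern.matches(dynamicOptionRegex, "localhost:9092"));        // false
    }
}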
use of org.ballerinalang.siddhi.core.exception.SiddhiAppCreationException in project ballerina by ballerina-lang.
the class DefinitionParserHelper method getAttributeMappings.
private static AttributesHolder getAttributeMappings(Annotation mapAnnotation, String mapType,
                                                     StreamDefinition streamDefinition) {
    List<Annotation> attributeAnnotations = mapAnnotation.getAnnotations(SiddhiConstants.ANNOTATION_ATTRIBUTES);
    DefinitionParserHelper.AttributesHolder attributesHolder = new DefinitionParserHelper.AttributesHolder();
    if (attributeAnnotations.size() > 0) {
        Map<String, String> elementMap = new HashMap<>();
        List<String> elementList = new ArrayList<>();
        Boolean attributesNameDefined = null;
        for (Element element : attributeAnnotations.get(0).getElements()) {
            if (element.getKey() == null) {
                if (attributesNameDefined != null && attributesNameDefined) {
throw new SiddhiAppCreationException("Error at '" + mapType + "' defined at stream'" + streamDefinition.getId() + "', some attributes are defined and some are not defined.", element.getQueryContextStartIndex(), element.getQueryContextEndIndex());
                }
                attributesNameDefined = false;
                elementList.add(element.getValue());
            } else {
                if (attributesNameDefined != null && !attributesNameDefined) {
                    throw new SiddhiAppCreationException("Error at '" + mapType + "' defined at stream '"
                            + streamDefinition.getId() + "', some attributes are defined and some are not defined.",
                            element.getQueryContextStartIndex(), element.getQueryContextEndIndex());
                }
                attributesNameDefined = true;
                elementMap.put(element.getKey(), element.getValue());
            }
        }
        if (elementMap.size() > 0) {
            List<Attribute> attributeList = streamDefinition.getAttributeList();
            for (int i = 0, attributeListSize = attributeList.size(); i < attributeListSize; i++) {
                Attribute attribute = attributeList.get(i);
                String value = elementMap.get(attribute.getName());
                if (value == null) {
                    throw new SiddhiAppCreationException("Error at '" + mapType + "' defined at stream '"
                            + streamDefinition.getId() + "', attribute '" + attribute.getName()
                            + "' is not mapped.", mapAnnotation.getQueryContextStartIndex(),
                            mapAnnotation.getQueryContextEndIndex());
                }
                assignMapping(attributesHolder, elementMap, i, attribute);
            }
        } else {
            List<Attribute> attributeList = streamDefinition.getAttributeList();
            if (elementList.size() != attributeList.size()) {
                throw new SiddhiAppCreationException("Error at '" + mapType + "' defined at stream '"
                        + streamDefinition.getId() + "', '" + elementList.size() + "' mapping attributes are "
                        + "provided but expected attributes are '" + attributeList.size() + "'.",
                        mapAnnotation.getQueryContextStartIndex(), mapAnnotation.getQueryContextEndIndex());
            }
            for (int i = 0; i < attributeList.size(); i++) {
                Attribute attribute = attributeList.get(i);
                assignMapping(attributesHolder, elementMap, i, attribute);
            }
        }
    }
    return attributesHolder;
}
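For illustration, the two @attributes styles this method accepts; the JSON-path values are hypothetical and only meant to show named versus positional mapping:

public class AttributeMappingStyles {
    // Named mappings: every element has a key, so elementMap is populated and each
    // attribute of the stream definition must be mapped by name.
    static final String NAMED_MAPPING =
            "@map(type = 'json', @attributes(symbol = '$.sym', price = '$.p', volume = '$.vol'))";

    // Positional mappings: no keys, so elementList is populated and its size must equal
    // the number of attributes in the stream definition.
    static final String POSITIONAL_MAPPING =
            "@map(type = 'json', @attributes('$.sym', '$.p', '$.vol'))";

    // Mixing keyed and unkeyed elements triggers the
    // "some attributes are defined and some are not defined" error above.
}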
use of org.ballerinalang.siddhi.core.exception.SiddhiAppCreationException in project ballerina by ballerina-lang.
the class DefinitionParserHelper method addEventSource.
public static void addEventSource(StreamDefinition streamDefinition,
                                  ConcurrentMap<String, List<Source>> eventSourceMap,
                                  SiddhiAppContext siddhiAppContext) {
    for (Annotation sourceAnnotation : streamDefinition.getAnnotations()) {
        if (SiddhiConstants.ANNOTATION_SOURCE.equalsIgnoreCase(sourceAnnotation.getName())) {
            sourceAnnotation = updateAnnotationRef(sourceAnnotation, SiddhiConstants.NAMESPACE_SOURCE,
                    siddhiAppContext);
            Annotation mapAnnotation = AnnotationHelper.getAnnotation(SiddhiConstants.ANNOTATION_MAP,
                    sourceAnnotation.getAnnotations());
            if (mapAnnotation == null) {
                mapAnnotation = Annotation.annotation(SiddhiConstants.ANNOTATION_MAP)
                        .element(SiddhiConstants.ANNOTATION_ELEMENT_TYPE, "passThrough");
            }
            final String sourceType = sourceAnnotation.getElement(SiddhiConstants.ANNOTATION_ELEMENT_TYPE);
            final String mapType = mapAnnotation.getElement(SiddhiConstants.ANNOTATION_ELEMENT_TYPE);
            if (sourceType != null && mapType != null) {
                SourceHandlerManager sourceHandlerManager = siddhiAppContext.getSiddhiContext()
                        .getSourceHandlerManager();
                SourceHandler sourceHandler = null;
                if (sourceHandlerManager != null) {
                    sourceHandler = sourceHandlerManager.generateSourceHandler();
                }
                // load input transport extension
                Extension sourceExtension = constructExtension(streamDefinition, SiddhiConstants.ANNOTATION_SOURCE,
                        sourceType, sourceAnnotation, SiddhiConstants.NAMESPACE_SOURCE);
                Source source = (Source) SiddhiClassLoader.loadExtensionImplementation(sourceExtension,
                        SourceExecutorExtensionHolder.getInstance(siddhiAppContext));
                ConfigReader configReader = siddhiAppContext.getSiddhiContext().getConfigManager()
                        .generateConfigReader(sourceExtension.getNamespace(), sourceExtension.getName());
                // load input mapper extension
                Extension mapperExtension = constructExtension(streamDefinition, SiddhiConstants.ANNOTATION_MAP,
                        mapType, sourceAnnotation, SiddhiConstants.NAMESPACE_SOURCE_MAPPER);
                SourceMapper sourceMapper = (SourceMapper) SiddhiClassLoader.loadExtensionImplementation(
                        mapperExtension, SourceMapperExecutorExtensionHolder.getInstance(siddhiAppContext));
                ConfigReader mapperConfigReader = siddhiAppContext.getSiddhiContext().getConfigManager()
                        .generateConfigReader(mapperExtension.getNamespace(), mapperExtension.getName());
                validateSourceMapperCompatibility(streamDefinition, sourceType, mapType, source, sourceMapper,
                        sourceAnnotation);
                OptionHolder sourceOptionHolder = constructOptionHolder(streamDefinition, sourceAnnotation,
                        source.getClass().getAnnotation(org.ballerinalang.siddhi.annotation.Extension.class), null);
                OptionHolder mapOptionHolder = constructOptionHolder(streamDefinition, mapAnnotation,
                        sourceMapper.getClass().getAnnotation(org.ballerinalang.siddhi.annotation.Extension.class),
                        null);
                AttributesHolder attributesHolder = getAttributeMappings(mapAnnotation, mapType, streamDefinition);
                String[] transportPropertyNames = getTransportPropertyNames(attributesHolder);
                try {
                    source.init(sourceType, sourceOptionHolder, sourceMapper, transportPropertyNames, configReader,
                            mapType, mapOptionHolder, attributesHolder.payloadMappings,
                            attributesHolder.transportMappings, mapperConfigReader, sourceHandler, streamDefinition,
                            siddhiAppContext);
                } catch (Throwable t) {
                    ExceptionUtil.populateQueryContext(t, sourceAnnotation, siddhiAppContext);
                    throw t;
                }
                siddhiAppContext.getSnapshotService().addSnapshotable(source.getStreamDefinition().getId(), source);
                if (sourceHandlerManager != null) {
                    sourceHandlerManager.registerSourceHandler(sourceHandler.getElementId(), sourceHandler);
                    siddhiAppContext.getSnapshotService().addSnapshotable(streamDefinition.getId(), sourceHandler);
                }
                List<Source> eventSources = eventSourceMap.get(streamDefinition.getId());
                if (eventSources == null) {
                    eventSources = new ArrayList<>();
                    eventSources.add(source);
                    eventSourceMap.put(streamDefinition.getId(), eventSources);
                } else {
                    eventSources.add(source);
                }
            } else {
throw new SiddhiAppCreationException("Both @Sink(type=) and @map(type=) are required.", sourceAnnotation.getQueryContextStartIndex(), sourceAnnotation.getQueryContextEndIndex());
            }
        }
    }
}
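A hedged illustration of the annotations this method consumes; the 'http' source type and its receiver.url option belong to an external Siddhi extension and may not be bundled in this fork, so treat the strings as shape only:

public class SourceAnnotationSketch {
    // Both the @source type and the @map type resolve to non-null values,
    // so the source and mapper extensions are loaded.
    static final String WITH_MAP =
            "@source(type = 'http', receiver.url = 'http://localhost:8080/foo', @map(type = 'json')) "
            + "define stream FooStream (symbol string, price double);";

    // With no @map annotation at all, a passThrough mapping is assumed. A @source or @map
    // annotation whose 'type' element is missing fails with
    // "Both @source(type=) and @map(type=) are required."
    static final String WITHOUT_MAP =
            "@source(type = 'http', receiver.url = 'http://localhost:8080/bar') "
            + "define stream BarStream (symbol string, price double);";
}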
use of org.ballerinalang.siddhi.core.exception.SiddhiAppCreationException in project ballerina by ballerina-lang.
the class AggregationRuntime method compileExpression.
public CompiledCondition compileExpression(Expression expression, Within within, Expression per,
                                           MatchingMetaInfoHolder matchingMetaInfoHolder,
                                           List<VariableExpressionExecutor> variableExpressionExecutors,
                                           Map<String, Table> tableMap, String queryName,
                                           SiddhiAppContext siddhiAppContext) {
    Map<TimePeriod.Duration, CompiledCondition> withinTableCompiledConditions = new HashMap<>();
    CompiledCondition withinInMemoryCompileCondition;
    CompiledCondition onCompiledCondition;
    List<Attribute> additionalAttributes = new ArrayList<>();
    // Define additional attribute list
    additionalAttributes.add(new Attribute("_START", Attribute.Type.LONG));
    additionalAttributes.add(new Attribute("_END", Attribute.Type.LONG));
    // Get table definition. Table definitions for all the tables used to persist aggregates are similar.
    // Therefore it's enough to get the definition from one table.
    AbstractDefinition tableDefinition = ((Table) aggregationTables.values().toArray()[0]).getTableDefinition();
    // Alter the existing meta stream event or create a new one if a meta stream event doesn't exist.
    // After calling this method, the original MatchingMetaInfoHolder's meta stream event is altered.
    MetaStreamEvent newMetaStreamEventWithStartEnd = createNewMetaStreamEventWithStartEnd(matchingMetaInfoHolder,
            additionalAttributes);
    MatchingMetaInfoHolder alteredMatchingMetaInfoHolder = null;
    // Alter the meta info holder to contain both the stream event and the aggregate when it's a store query
    if (matchingMetaInfoHolder.getMetaStateEvent().getMetaStreamEvents().length == 1) {
        matchingMetaInfoHolder = alterMetaInfoHolderForStoreQuery(newMetaStreamEventWithStartEnd,
                matchingMetaInfoHolder);
        alteredMatchingMetaInfoHolder = matchingMetaInfoHolder;
    }
    // Create a new MatchingMetaInfoHolder containing newMetaStreamEventWithStartEnd and the table meta event
    MatchingMetaInfoHolder streamTableMetaInfoHolderWithStartEnd = createNewStreamTableMetaInfoHolder(
            newMetaStreamEventWithStartEnd, tableDefinition);
    // Create per expression executor
    ExpressionExecutor perExpressionExecutor = ExpressionParser.parseExpression(per,
            matchingMetaInfoHolder.getMetaStateEvent(), matchingMetaInfoHolder.getCurrentState(), tableMap,
            variableExpressionExecutors, siddhiAppContext, false, 0, queryName);
    if (perExpressionExecutor.getReturnType() != Attribute.Type.STRING) {
        throw new SiddhiAppCreationException("Query " + queryName + "'s per value expected a string but found "
                + perExpressionExecutor.getReturnType(), per.getQueryContextStartIndex(),
                per.getQueryContextEndIndex());
    }
    // Create within expression
    Expression withinExpression;
    Expression start = Expression.variable(additionalAttributes.get(0).getName());
    Expression end = Expression.variable(additionalAttributes.get(1).getName());
    Expression compareWithStartTime = Compare.compare(start, Compare.Operator.LESS_THAN_EQUAL,
            Expression.variable("AGG_TIMESTAMP"));
    Expression compareWithEndTime = Compare.compare(Expression.variable("AGG_TIMESTAMP"),
            Compare.Operator.LESS_THAN, end);
    withinExpression = Expression.and(compareWithStartTime, compareWithEndTime);
    // Create start and end time expression
    Expression startEndTimeExpression;
    if (within.getTimeRange().size() == 1) {
        startEndTimeExpression = new AttributeFunction("incrementalAggregator", "startTimeEndTime",
                within.getTimeRange().get(0));
    } else {
        // within.getTimeRange().size() == 2
        startEndTimeExpression = new AttributeFunction("incrementalAggregator", "startTimeEndTime",
                within.getTimeRange().get(0), within.getTimeRange().get(1));
    }
    ExpressionExecutor startTimeEndTimeExpressionExecutor = ExpressionParser.parseExpression(startEndTimeExpression,
            matchingMetaInfoHolder.getMetaStateEvent(), matchingMetaInfoHolder.getCurrentState(), tableMap,
            variableExpressionExecutors, siddhiAppContext, false, 0, queryName);
    // These compiled conditions are used to check whether the aggregates in tables are within the given duration.
    for (Map.Entry<TimePeriod.Duration, Table> entry : aggregationTables.entrySet()) {
        CompiledCondition withinTableCompileCondition = entry.getValue().compileCondition(withinExpression,
                streamTableMetaInfoHolderWithStartEnd, siddhiAppContext, variableExpressionExecutors, tableMap,
                queryName);
        withinTableCompiledConditions.put(entry.getKey(), withinTableCompileCondition);
    }
    // Create compile condition for in-memory data.
    // This compile condition is used to check whether the running aggregates (in-memory data)
    // are within the given duration.
    withinInMemoryCompileCondition = OperatorParser.constructOperator(new ComplexEventChunk<>(true),
            withinExpression, streamTableMetaInfoHolderWithStartEnd, siddhiAppContext, variableExpressionExecutors,
            tableMap, queryName);
    // On compile condition.
    // After finding all the aggregates belonging to the within duration, the final on condition (given as
    // "on stream1.name == aggregator.nickName ..." in the join query) must be executed on that data.
    // This condition is used for that purpose.
    onCompiledCondition = OperatorParser.constructOperator(new ComplexEventChunk<>(true), expression,
            matchingMetaInfoHolder, siddhiAppContext, variableExpressionExecutors, tableMap, queryName);
    return new IncrementalAggregateCompileCondition(withinTableCompiledConditions, withinInMemoryCompileCondition,
            onCompiledCondition, tableMetaStreamEvent, aggregateMetaSteamEvent, additionalAttributes,
            alteredMatchingMetaInfoHolder, perExpressionExecutor, startTimeEndTimeExpressionExecutor);
}
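For orientation, a sketch of the kind of join this method compiles; the aggregation, stream, and attribute names are hypothetical. The per value must evaluate to a string (hence the Attribute.Type.STRING check), and the within range becomes the _START <= AGG_TIMESTAMP < _END condition compiled against each duration table and against the in-memory running aggregates:

public class AggregationJoinSketch {
    static final String JOIN_QUERY =
            "from StockStream as S join TradeAggregation as T "
            + "on S.symbol == T.symbol "
            + "within '2014-02-15 00:00:00 +05:30', '2014-03-16 00:00:00 +05:30' "
            + "per 'days' "
            + "select T.symbol, T.total, T.avgPrice "
            + "insert into OutputStream;";
}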