Use of org.ballerinalang.siddhi.core.exception.SiddhiAppCreationException in project ballerina by ballerina-lang.
The class AggregationParser, method validateBaseAggregators.
private static void validateBaseAggregators(List<IncrementalAttributeAggregator> incrementalAttributeAggregators, IncrementalAttributeAggregator incrementalAttributeAggregator, Attribute[] baseAttributes, Expression[] baseAttributeInitialValues, Expression[] baseAggregators, int i) {
for (int i1 = i; i1 < incrementalAttributeAggregators.size(); i1++) {
IncrementalAttributeAggregator otherAttributeAggregator = incrementalAttributeAggregators.get(i1);
if (otherAttributeAggregator != incrementalAttributeAggregator) {
Attribute[] otherBaseAttributes = otherAttributeAggregator.getBaseAttributes();
Expression[] otherBaseAttributeInitialValues = otherAttributeAggregator.getBaseAttributeInitialValues();
Expression[] otherBaseAggregators = otherAttributeAggregator.getBaseAggregators();
for (int j = 0; j < otherBaseAttributes.length; j++) {
if (baseAttributes[i].equals(otherBaseAttributes[j])) {
if (!baseAttributeInitialValues[i].equals(otherBaseAttributeInitialValues[j])) {
throw new SiddhiAppCreationException("BaseAttributes having same name should " + "be defined with same initial values, but baseAttribute '" + baseAttributes[i] + "' is defined in '" + incrementalAttributeAggregator.getClass().getName() + "' and '" + otherAttributeAggregator.getClass().getName() + "' with different initial values.");
}
if (!baseAggregators[i].equals(otherBaseAggregators[j])) {
throw new SiddhiAppCreationException("BaseAttributes having same name should " + "be defined with same baseAggregators, but baseAttribute '" + baseAttributes[i] + "' is defined in '" + incrementalAttributeAggregator.getClass().getName() + "' and '" + otherAttributeAggregator.getClass().getName() + "' with different baseAggregators.");
}
}
}
}
}
}
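A minimal sketch of the case this validation guards (an assumption for illustration; the stream and aggregation names are hypothetical): when a single aggregation selects both avg(price) and sum(price), both incremental aggregators derive a shared base sum attribute, and validateBaseAggregators rejects the definition if a base attribute of the same name were wired with different initial values or base aggregators.
// Hypothetical SiddhiQL in which avg(price) and sum(price) share a base sum attribute.
String app = "define stream TradeStream (symbol string, price double, volume long); " +
        "define aggregation TradeAggregation " +
        "from TradeStream " +
        "select symbol, avg(price) as avgPrice, sum(price) as totalPrice " +
        "group by symbol " +
        "aggregate every sec ... year;";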
Use of org.ballerinalang.siddhi.core.exception.SiddhiAppCreationException in project ballerina by ballerina-lang.
The class AggregationParser, method setTimeStampTimeZoneExecutors.
private static ExpressionExecutor[] setTimeStampTimeZoneExecutors(AggregationDefinition aggregationDefinition, SiddhiAppContext siddhiAppContext, Map<String, Table> tableMap, List<VariableExpressionExecutor> variableExpressionExecutors, String aggregatorName, MetaStreamEvent metaStreamEvent) {
Expression timestampExpression = aggregationDefinition.getAggregateAttribute();
Expression timeZoneExpression;
ExpressionExecutor timestampExecutor;
ExpressionExecutor timeZoneExecutor;
boolean isSystemTimeBased = false;
// When execution is based on system time, the system's time zone would be used.
if (timestampExpression == null) {
isSystemTimeBased = true;
timestampExpression = AttributeFunction.function("currentTimeMillis", null);
}
timestampExecutor = ExpressionParser.parseExpression(timestampExpression, metaStreamEvent, 0, tableMap, variableExpressionExecutors, siddhiAppContext, false, 0, aggregatorName);
if (timestampExecutor.getReturnType() == Attribute.Type.STRING) {
Expression expression = AttributeFunction.function("incrementalAggregator", "timestampInMilliseconds", timestampExpression);
timestampExecutor = ExpressionParser.parseExpression(expression, metaStreamEvent, 0, tableMap, variableExpressionExecutors, siddhiAppContext, false, 0, aggregatorName);
timeZoneExpression = AttributeFunction.function("incrementalAggregator", "getTimeZone", timestampExpression);
timeZoneExecutor = ExpressionParser.parseExpression(timeZoneExpression, metaStreamEvent, 0, tableMap, variableExpressionExecutors, siddhiAppContext, false, 0, aggregatorName);
} else if (timestampExecutor.getReturnType() == Attribute.Type.LONG) {
if (isSystemTimeBased) {
timeZoneExpression = AttributeFunction.function("incrementalAggregator", "getTimeZone", null);
timeZoneExecutor = ExpressionParser.parseExpression(timeZoneExpression, metaStreamEvent, 0, tableMap, variableExpressionExecutors, siddhiAppContext, false, 0, aggregatorName);
} else {
// If a long value is given, it is assumed that the time zone is GMT (+00:00).
timeZoneExpression = Expression.value("+00:00");
timeZoneExecutor = ExpressionParser.parseExpression(timeZoneExpression, metaStreamEvent, 0, tableMap, variableExpressionExecutors, siddhiAppContext, false, 0, aggregatorName);
}
} else {
throw new SiddhiAppCreationException("AggregationDefinition '" + aggregationDefinition.getId() + "'s aggregateAttribute expects " + "long or string, but found " + timestampExecutor.getReturnType() + ". " + "Hence, can't create the siddhi app '" + siddhiAppContext.getName() + "'", timestampExpression.getQueryContextStartIndex(), timestampExpression.getQueryContextEndIndex());
}
return new ExpressionExecutor[] { timestampExecutor, timeZoneExecutor };
}
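A short sketch of the inputs the branches above handle (assumed for illustration; the attribute names timestampMillis and timestampText are hypothetical): a long aggregate-by attribute is treated as epoch milliseconds in GMT, a string attribute such as "2017-06-01 04:05:06 +05:00" is converted with the incremental timestampInMilliseconds and getTimeZone functions, and with no aggregate-by attribute currentTimeMillis and the system time zone are used.
// Long timestamp: time zone assumed to be GMT (+00:00).
String byLongTimestamp = "define aggregation TradeAggLong from TradeStream " +
        "select symbol, sum(price) as total " +
        "aggregate by timestampMillis every sec ... year;";
// String timestamp: converted to milliseconds, with the time zone extracted from the value.
String byStringTimestamp = "define aggregation TradeAggString from TradeStream " +
        "select symbol, sum(price) as total " +
        "aggregate by timestampText every sec ... year;";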
Use of org.ballerinalang.siddhi.core.exception.SiddhiAppCreationException in project ballerina by ballerina-lang.
The class AggregationParser, method parse.
public static AggregationRuntime parse(AggregationDefinition aggregationDefinition, SiddhiAppContext siddhiAppContext, Map<String, AbstractDefinition> streamDefinitionMap, Map<String, AbstractDefinition> tableDefinitionMap, Map<String, AbstractDefinition> windowDefinitionMap, Map<String, AbstractDefinition> aggregationDefinitionMap, Map<String, Table> tableMap, Map<String, Window> windowMap, Map<String, AggregationRuntime> aggregationMap, SiddhiAppRuntimeBuilder siddhiAppRuntimeBuilder) {
if (aggregationDefinition == null) {
throw new SiddhiAppCreationException("AggregationDefinition instance is null. " + "Hence, can't create the siddhi app '" + siddhiAppContext.getName() + "'");
}
if (aggregationDefinition.getTimePeriod() == null) {
throw new SiddhiAppCreationException("AggregationDefinition '" + aggregationDefinition.getId() + "'s timePeriod is null. " + "Hence, can't create the siddhi app '" + siddhiAppContext.getName() + "'", aggregationDefinition.getQueryContextStartIndex(), aggregationDefinition.getQueryContextEndIndex());
}
if (aggregationDefinition.getSelector() == null) {
throw new SiddhiAppCreationException("AggregationDefinition '" + aggregationDefinition.getId() + "'s selection is not defined. " + "Hence, can't create the siddhi app '" + siddhiAppContext.getName() + "'", aggregationDefinition.getQueryContextStartIndex(), aggregationDefinition.getQueryContextEndIndex());
}
if (streamDefinitionMap.get(aggregationDefinition.getBasicSingleInputStream().getStreamId()) == null) {
throw new SiddhiAppCreationException("Stream " + aggregationDefinition.getBasicSingleInputStream().getStreamId() + " has not been defined");
}
try {
List<VariableExpressionExecutor> incomingVariableExpressionExecutors = new ArrayList<>();
String aggregatorName = aggregationDefinition.getId();
StreamRuntime streamRuntime = InputStreamParser.parse(aggregationDefinition.getBasicSingleInputStream(), siddhiAppContext, streamDefinitionMap, tableDefinitionMap, windowDefinitionMap, aggregationDefinitionMap, tableMap, windowMap, aggregationMap, incomingVariableExpressionExecutors, null, false, aggregatorName);
// Get original meta for later use.
MetaStreamEvent incomingMetaStreamEvent = (MetaStreamEvent) streamRuntime.getMetaComplexEvent();
// Create a new meta stream event. It must hold the timestamp, the group-by attributes (if given) and
// the incremental attributes in the onAfterWindowData array.
// Example format: AGG_TIMESTAMP, groupByAttribute1, groupByAttribute2, AGG_incAttribute1, AGG_incAttribute2
// AGG_incAttribute1, AGG_incAttribute2 have the same attribute names as in finalListOfIncrementalAttributes.
// To enter data as onAfterWindowData:
incomingMetaStreamEvent.initializeAfterWindowData();
List<ExpressionExecutor> incomingExpressionExecutors = new ArrayList<>();
List<IncrementalAttributeAggregator> incrementalAttributeAggregators = new ArrayList<>();
List<Variable> groupByVariableList = aggregationDefinition.getSelector().getGroupByList();
boolean isProcessingOnExternalTime = aggregationDefinition.getAggregateAttribute() != null;
// Expressions to get the final aggregate outputs, e.g. avg = sum / count
List<Expression> outputExpressions = new ArrayList<>();
// Expression executors to get the final aggregate outputs, e.g. avg = sum / count
List<ExpressionExecutor> outputExpressionExecutors = new ArrayList<>();
populateIncomingAggregatorsAndExecutors(aggregationDefinition, siddhiAppContext, tableMap, incomingVariableExpressionExecutors, aggregatorName, incomingMetaStreamEvent, incomingExpressionExecutors, incrementalAttributeAggregators, groupByVariableList, outputExpressions);
int baseAggregatorBeginIndex = incomingMetaStreamEvent.getOutputData().size();
List<Expression> finalBaseAggregators = getFinalBaseAggregators(siddhiAppContext, tableMap, incomingVariableExpressionExecutors, aggregatorName, incomingMetaStreamEvent, incomingExpressionExecutors, incrementalAttributeAggregators);
StreamDefinition incomingOutputStreamDefinition = StreamDefinition.id("");
incomingOutputStreamDefinition.setQueryContextStartIndex(aggregationDefinition.getQueryContextStartIndex());
incomingOutputStreamDefinition.setQueryContextEndIndex(aggregationDefinition.getQueryContextEndIndex());
MetaStreamEvent processedMetaStreamEvent = new MetaStreamEvent();
for (Attribute attribute : incomingMetaStreamEvent.getOutputData()) {
incomingOutputStreamDefinition.attribute(attribute.getName(), attribute.getType());
processedMetaStreamEvent.addOutputData(attribute);
}
incomingMetaStreamEvent.setOutputDefinition(incomingOutputStreamDefinition);
processedMetaStreamEvent.addInputDefinition(incomingOutputStreamDefinition);
processedMetaStreamEvent.setOutputDefinition(incomingOutputStreamDefinition);
// Executors of processing meta
List<VariableExpressionExecutor> processVariableExpressionExecutors = new ArrayList<>();
boolean groupBy = aggregationDefinition.getSelector().getGroupByList().size() != 0;
List<ExpressionExecutor> processExpressionExecutors = constructProcessExpressionExecutors(siddhiAppContext, tableMap, aggregatorName, baseAggregatorBeginIndex, finalBaseAggregators, incomingOutputStreamDefinition, processedMetaStreamEvent, processVariableExpressionExecutors, groupBy);
outputExpressionExecutors.addAll(outputExpressions.stream().map(expression -> ExpressionParser.parseExpression(expression, processedMetaStreamEvent, 0, tableMap, processVariableExpressionExecutors, siddhiAppContext, groupBy, 0, aggregatorName)).collect(Collectors.toList()));
// Create group by key generator
GroupByKeyGenerator groupByKeyGenerator = null;
if (groupBy) {
groupByKeyGenerator = new GroupByKeyGenerator(groupByVariableList, processedMetaStreamEvent, SiddhiConstants.UNKNOWN_STATE, tableMap, processVariableExpressionExecutors, siddhiAppContext, aggregatorName);
}
// Create new scheduler
EntryValveExecutor entryValveExecutor = new EntryValveExecutor(siddhiAppContext);
LockWrapper lockWrapper = new LockWrapper(aggregatorName);
lockWrapper.setLock(new ReentrantLock());
Scheduler scheduler = SchedulerParser.parse(siddhiAppContext.getScheduledExecutorService(), entryValveExecutor, siddhiAppContext);
scheduler.init(lockWrapper, aggregatorName);
scheduler.setStreamEventPool(new StreamEventPool(processedMetaStreamEvent, 10));
QueryParserHelper.reduceMetaComplexEvent(incomingMetaStreamEvent);
QueryParserHelper.reduceMetaComplexEvent(processedMetaStreamEvent);
QueryParserHelper.updateVariablePosition(incomingMetaStreamEvent, incomingVariableExpressionExecutors);
QueryParserHelper.updateVariablePosition(processedMetaStreamEvent, processVariableExpressionExecutors);
List<TimePeriod.Duration> incrementalDurations = getSortedPeriods(aggregationDefinition.getTimePeriod());
Map<TimePeriod.Duration, Table> aggregationTables = initDefaultTables(aggregatorName, incrementalDurations, processedMetaStreamEvent.getOutputStreamDefinition(), siddhiAppRuntimeBuilder, aggregationDefinition.getAnnotations(), groupByVariableList);
int bufferSize = 0;
Element element = AnnotationHelper.getAnnotationElement(SiddhiConstants.ANNOTATION_BUFFER_SIZE, null, aggregationDefinition.getAnnotations());
if (element != null) {
try {
bufferSize = Integer.parseInt(element.getValue());
} catch (NumberFormatException e) {
throw new SiddhiAppCreationException(e.getMessage() + ": BufferSize must be an integer");
}
}
if (bufferSize > 0) {
TimePeriod.Duration rootDuration = incrementalDurations.get(0);
if (rootDuration == TimePeriod.Duration.MONTHS || rootDuration == TimePeriod.Duration.YEARS) {
throw new SiddhiAppCreationException("A buffer size greater than 0 can be provided, only when the " + "first duration value is seconds, minutes, hours or days");
}
if (!isProcessingOnExternalTime) {
// Buffer size is used to process out-of-order events. However, events would never be out of
// order if they are processed based on event arrival time.
throw new SiddhiAppCreationException("Buffer size cannot be specified when events are aggregated " + "based on event arrival time.");
}
} else if (bufferSize < 0) {
throw new SiddhiAppCreationException("Expected a positive integer as the buffer size, but found " + bufferSize + " as the provided value");
}
boolean ignoreEventsOlderThanBuffer = false;
element = AnnotationHelper.getAnnotationElement(SiddhiConstants.ANNOTATION_IGNORE_EVENTS_OLDER_THAN_BUFFER, null, aggregationDefinition.getAnnotations());
if (element != null) {
if (element.getValue().equalsIgnoreCase("true")) {
ignoreEventsOlderThanBuffer = true;
} else if (!element.getValue().equalsIgnoreCase("false")) {
throw new SiddhiAppCreationException("IgnoreEventsOlderThanBuffer value must " + "be true or false");
}
}
Map<TimePeriod.Duration, IncrementalExecutor> incrementalExecutorMap = buildIncrementalExecutors(isProcessingOnExternalTime, processedMetaStreamEvent, processExpressionExecutors, groupByKeyGenerator, bufferSize, ignoreEventsOlderThanBuffer, incrementalDurations, aggregationTables, siddhiAppContext, aggregatorName);
// Recreate in-memory data from tables
RecreateInMemoryData recreateInMemoryData = new RecreateInMemoryData(incrementalDurations, aggregationTables, incrementalExecutorMap, siddhiAppContext, processedMetaStreamEvent, tableMap, windowMap, aggregationMap);
IncrementalExecutor rootIncrementalExecutor = incrementalExecutorMap.get(incrementalDurations.get(0));
rootIncrementalExecutor.setScheduler(scheduler);
// Connect entry valve to root incremental executor
entryValveExecutor.setNextExecutor(rootIncrementalExecutor);
QueryParserHelper.initStreamRuntime(streamRuntime, incomingMetaStreamEvent, lockWrapper, aggregatorName);
LatencyTracker latencyTrackerFind = null;
LatencyTracker latencyTrackerInsert = null;
ThroughputTracker throughputTrackerFind = null;
ThroughputTracker throughputTrackerInsert = null;
if (siddhiAppContext.getStatisticsManager() != null) {
latencyTrackerFind = QueryParserHelper.createLatencyTracker(siddhiAppContext, aggregationDefinition.getId(), SiddhiConstants.METRIC_INFIX_WINDOWS, SiddhiConstants.METRIC_TYPE_FIND);
latencyTrackerInsert = QueryParserHelper.createLatencyTracker(siddhiAppContext, aggregationDefinition.getId(), SiddhiConstants.METRIC_INFIX_WINDOWS, SiddhiConstants.METRIC_TYPE_INSERT);
throughputTrackerFind = QueryParserHelper.createThroughputTracker(siddhiAppContext, aggregationDefinition.getId(), SiddhiConstants.METRIC_INFIX_WINDOWS, SiddhiConstants.METRIC_TYPE_FIND);
throughputTrackerInsert = QueryParserHelper.createThroughputTracker(siddhiAppContext, aggregationDefinition.getId(), SiddhiConstants.METRIC_INFIX_WINDOWS, SiddhiConstants.METRIC_TYPE_INSERT);
}
streamRuntime.setCommonProcessor(new IncrementalAggregationProcessor(rootIncrementalExecutor, incomingExpressionExecutors, processedMetaStreamEvent, latencyTrackerInsert, throughputTrackerInsert, siddhiAppContext));
List<ExpressionExecutor> baseExecutors = cloneExpressionExecutors(processExpressionExecutors);
ExpressionExecutor timestampExecutor = baseExecutors.remove(0);
return new AggregationRuntime(aggregationDefinition, incrementalExecutorMap, aggregationTables, ((SingleStreamRuntime) streamRuntime), entryValveExecutor, incrementalDurations, siddhiAppContext, baseExecutors, timestampExecutor, processedMetaStreamEvent, outputExpressionExecutors, latencyTrackerFind, throughputTrackerFind, recreateInMemoryData);
} catch (Throwable t) {
ExceptionUtil.populateQueryContext(t, aggregationDefinition, siddhiAppContext);
throw t;
}
}
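A minimal usage sketch (an assumption, not taken from the source; app stands for a hypothetical SiddhiQL string containing an aggregation definition): parse() runs indirectly when such an app is compiled, and the validation failures above reach the caller as SiddhiAppCreationException.
SiddhiManager siddhiManager = new SiddhiManager();
try {
    SiddhiAppRuntime runtime = siddhiManager.createSiddhiAppRuntime(app);
    runtime.start();
} catch (SiddhiAppCreationException e) {
    // Messages include the aggregation id and, where available, the query context indexes set above.
    System.err.println("Aggregation could not be created: " + e.getMessage());
}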
Use of org.ballerinalang.siddhi.core.exception.SiddhiAppCreationException in project ballerina by ballerina-lang.
The class CollectionExpressionParser, method buildMultiPrimaryKeyExpressionExecutors.
private static Map<String, ExpressionExecutor> buildMultiPrimaryKeyExpressionExecutors(CollectionExpression collectionExpression, MatchingMetaInfoHolder matchingMetaInfoHolder, List<VariableExpressionExecutor> variableExpressionExecutors, Map<String, Table> tableMap, SiddhiAppContext siddhiAppContext, String queryName) {
if (collectionExpression instanceof AndMultiPrimaryKeyCollectionExpression) {
CollectionExpression leftCollectionExpression = ((AndMultiPrimaryKeyCollectionExpression) collectionExpression).getLeftCollectionExpression();
CollectionExpression rightCollectionExpression = ((AndMultiPrimaryKeyCollectionExpression) collectionExpression).getRightCollectionExpression();
Map<String, ExpressionExecutor> expressionExecutors = buildMultiPrimaryKeyExpressionExecutors(leftCollectionExpression, matchingMetaInfoHolder, variableExpressionExecutors, tableMap, siddhiAppContext, queryName);
expressionExecutors.putAll(buildMultiPrimaryKeyExpressionExecutors(rightCollectionExpression, matchingMetaInfoHolder, variableExpressionExecutors, tableMap, siddhiAppContext, queryName));
return expressionExecutors;
} else if (collectionExpression instanceof AndCollectionExpression) {
CollectionExpression leftCollectionExpression = ((AndCollectionExpression) collectionExpression).getLeftCollectionExpression();
CollectionExpression rightCollectionExpression = ((AndCollectionExpression) collectionExpression).getRightCollectionExpression();
Map<String, ExpressionExecutor> expressionExecutors = buildMultiPrimaryKeyExpressionExecutors(leftCollectionExpression, matchingMetaInfoHolder, variableExpressionExecutors, tableMap, siddhiAppContext, queryName);
expressionExecutors.putAll(buildMultiPrimaryKeyExpressionExecutors(rightCollectionExpression, matchingMetaInfoHolder, variableExpressionExecutors, tableMap, siddhiAppContext, queryName));
return expressionExecutors;
} else if (collectionExpression instanceof CompareCollectionExpression) {
if (((CompareCollectionExpression) collectionExpression).getOperator() == Compare.Operator.EQUAL) {
CollectionExpression attributeCollectionExpression = ((CompareCollectionExpression) collectionExpression).getAttributeCollectionExpression();
if (attributeCollectionExpression instanceof AttributeCollectionExpression) {
String attribute = ((AttributeCollectionExpression) attributeCollectionExpression).getAttribute();
CollectionExpression valueCollectionExpression = ((CompareCollectionExpression) collectionExpression).getValueCollectionExpression();
ExpressionExecutor valueExpressionExecutor = ExpressionParser.parseExpression(valueCollectionExpression.getExpression(), matchingMetaInfoHolder.getMetaStateEvent(), matchingMetaInfoHolder.getCurrentState(), tableMap, variableExpressionExecutors, siddhiAppContext, false, 0, queryName);
Map<String, ExpressionExecutor> expressionExecutors = new HashMap<String, ExpressionExecutor>();
expressionExecutors.put(attribute, valueExpressionExecutor);
return expressionExecutors;
} else {
throw new SiddhiAppCreationException("Only attribute EQUAL " + "comparision supported for multiple primary key optimization, " + "but found '" + attributeCollectionExpression.getClass() + "'", collectionExpression.getExpression().getQueryContextStartIndex(), collectionExpression.getExpression().getQueryContextEndIndex());
}
} else {
throw new SiddhiAppCreationException("Only '" + Compare.Operator.EQUAL + "' supported for multiple " + "primary key for multiple primary key optimization, but found '" + ((CompareCollectionExpression) collectionExpression).getOperator() + "'", collectionExpression.getExpression().getQueryContextStartIndex(), collectionExpression.getExpression().getQueryContextEndIndex());
}
} else {
// Attribute Collection
throw new SiddhiAppCreationException("Only 'AND' and '" + Compare.Operator.EQUAL + "' operators are " + "supported for multiple primary key optimization, but found '" + ((CompareCollectionExpression) collectionExpression).getOperator() + "'", collectionExpression.getExpression().getQueryContextStartIndex(), collectionExpression.getExpression().getQueryContextEndIndex());
}
}
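A sketch of the lookup pattern this method optimizes (assumed for illustration; the table, stream, and attribute names are hypothetical): a table with a composite @PrimaryKey and an on-condition that is an AND of attribute EQUAL comparisons. Any other operator or expression shape falls through to the exception branches above.
String app = "@PrimaryKey('symbol', 'exchange') " +
        "define table StockTable (symbol string, exchange string, price double); " +
        "define stream RequestStream (reqSymbol string, reqExchange string); " +
        "from RequestStream join StockTable " +
        "on StockTable.symbol == RequestStream.reqSymbol and StockTable.exchange == RequestStream.reqExchange " +
        "select StockTable.symbol, StockTable.price " +
        "insert into ResultStream;";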
Use of org.ballerinalang.siddhi.core.exception.SiddhiAppCreationException in project ballerina by ballerina-lang.
The class EventHolderPasser, method parse.
public static EventHolder parse(AbstractDefinition tableDefinition, StreamEventPool tableStreamEventPool, SiddhiAppContext siddhiAppContext) {
ZeroStreamEventConverter eventConverter = new ZeroStreamEventConverter();
PrimaryKeyReferenceHolder[] primaryKeyReferenceHolders = null;
Map<String, Integer> indexMetaData = new HashMap<String, Integer>();
// primaryKey.
Annotation primaryKeyAnnotation = AnnotationHelper.getAnnotation(SiddhiConstants.ANNOTATION_PRIMARY_KEY, tableDefinition.getAnnotations());
if (primaryKeyAnnotation != null) {
if (primaryKeyAnnotation.getElements().size() == 0) {
throw new SiddhiAppValidationException(SiddhiConstants.ANNOTATION_PRIMARY_KEY + " annotation " + "contains " + primaryKeyAnnotation.getElements().size() + " element, at '" + tableDefinition.getId() + "'");
}
primaryKeyReferenceHolders = primaryKeyAnnotation.getElements().stream().map(element -> element.getValue().trim()).map(key -> new PrimaryKeyReferenceHolder(key, tableDefinition.getAttributePosition(key))).toArray(PrimaryKeyReferenceHolder[]::new);
}
// indexes.
Annotation indexAnnotation = AnnotationHelper.getAnnotation(SiddhiConstants.ANNOTATION_INDEX, tableDefinition.getAnnotations());
if (indexAnnotation != null) {
if (indexAnnotation.getElements().size() == 0) {
throw new SiddhiAppValidationException(SiddhiConstants.ANNOTATION_INDEX + " annotation contains " + indexAnnotation.getElements().size() + " element");
}
for (Element element : indexAnnotation.getElements()) {
Integer previousValue = indexMetaData.put(element.getValue().trim(), tableDefinition.getAttributePosition(element.getValue().trim()));
if (previousValue != null) {
throw new SiddhiAppCreationException("Multiple " + SiddhiConstants.ANNOTATION_INDEX + " " + "annotations defined with same attribute '" + element.getValue().trim() + "', at '" + tableDefinition.getId() + "'", indexAnnotation.getQueryContextStartIndex(), indexAnnotation.getQueryContextEndIndex());
}
}
}
// not support indexBy.
Annotation indexByAnnotation = AnnotationHelper.getAnnotation(SiddhiConstants.ANNOTATION_INDEX_BY, tableDefinition.getAnnotations());
if (indexByAnnotation != null) {
throw new OperationNotSupportedException(SiddhiConstants.ANNOTATION_INDEX_BY + " annotation is not " + "supported anymore, please use @PrimaryKey or @Index annotations instead," + " at '" + tableDefinition.getId() + "'");
}
if (primaryKeyReferenceHolders != null || indexMetaData.size() > 0) {
boolean isNumeric = false;
if (primaryKeyReferenceHolders != null) {
if (primaryKeyReferenceHolders.length == 1) {
Attribute.Type type = tableDefinition.getAttributeType(primaryKeyReferenceHolders[0].getPrimaryKeyAttribute());
if (type == Attribute.Type.DOUBLE || type == Attribute.Type.FLOAT || type == Attribute.Type.INT || type == Attribute.Type.LONG) {
isNumeric = true;
}
}
}
return new IndexEventHolder(tableStreamEventPool, eventConverter, primaryKeyReferenceHolders, isNumeric, indexMetaData, tableDefinition, siddhiAppContext);
} else {
return new ListEventHolder(tableStreamEventPool, eventConverter);
}
}
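A sketch of the table annotations the branches above react to (assumed names for illustration): @PrimaryKey and/or @Index produce an IndexEventHolder (with numeric key handling when a single primary key has a numeric type), a table with neither annotation falls back to a ListEventHolder, and @IndexBy is rejected with an OperationNotSupportedException.
// Backed by an IndexEventHolder: one primary key plus one index.
String indexedTable = "@PrimaryKey('symbol') @Index('exchange') " +
        "define table StockTable (symbol string, exchange string, price double);";
// Backed by a ListEventHolder: no @PrimaryKey or @Index annotation.
String plainTable = "define table EventLogTable (id long, payload string);";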