Example 6 with SiddhiAppRuntimeException

Use of io.siddhi.core.exception.SiddhiAppRuntimeException in project siddhi by wso2.

From the class IncrementalAggregateCompileCondition, method find.

public StreamEvent find(StateEvent matchingEvent, Map<TimePeriod.Duration, Executor> incrementalExecutorMap, Map<TimePeriod.Duration, List<ExpressionExecutor>> aggregateProcessingExecutorsMap, Map<TimePeriod.Duration, GroupByKeyGenerator> groupByKeyGeneratorMap, ExpressionExecutor shouldUpdateTimestamp, String timeZone) {
    ComplexEventChunk<StreamEvent> complexEventChunkToHoldWithinMatches = new ComplexEventChunk<>();
    // Create matching event if it is on-demand query
    int additionTimestampAttributesSize = this.timestampFilterExecutors.size() + 2;
    Long[] timestampFilters = new Long[additionTimestampAttributesSize];
    if (matchingEvent.getStreamEvent(0) == null) {
        StreamEvent streamEvent = new StreamEvent(0, additionTimestampAttributesSize, 0);
        matchingEvent.addEvent(0, streamEvent);
    }
    Long[] startTimeEndTime = (Long[]) startTimeEndTimeExpressionExecutor.execute(matchingEvent);
    if (startTimeEndTime == null) {
        throw new SiddhiAppRuntimeException("Start and end times for within duration cannot be retrieved");
    }
    timestampFilters[0] = startTimeEndTime[0];
    timestampFilters[1] = startTimeEndTime[1];
    if (isDistributed) {
        for (int i = 0; i < additionTimestampAttributesSize - 2; i++) {
            timestampFilters[i + 2] = ((Long) this.timestampFilterExecutors.get(i).execute(matchingEvent));
        }
    }
    complexEventPopulater.populateComplexEvent(matchingEvent.getStreamEvent(0), timestampFilters);
    // Get all the aggregates within the given duration, from table corresponding to "per" duration
    // Retrieve per value
    String perValueAsString = perExpressionExecutor.execute(matchingEvent).toString();
    TimePeriod.Duration perValue;
    try {
        // Per time function verification
        perValue = normalizeDuration(perValueAsString);
    } catch (SiddhiAppValidationException e) {
        throw new SiddhiAppRuntimeException("Aggregation Query's per value is expected to be of a valid time function of the " + "following " + TimePeriod.Duration.SECONDS + ", " + TimePeriod.Duration.MINUTES + ", " + TimePeriod.Duration.HOURS + ", " + TimePeriod.Duration.DAYS + ", " + TimePeriod.Duration.MONTHS + ", " + TimePeriod.Duration.YEARS + ".");
    }
    if (!incrementalExecutorMap.keySet().contains(perValue)) {
        throw new SiddhiAppRuntimeException("The aggregate values for " + perValue.toString() + " granularity cannot be provided since aggregation definition " + aggregationName + " does not contain " + perValue.toString() + " duration");
    }
    Table tableForPerDuration = aggregationTableMap.get(perValue);
    StreamEvent withinMatchFromPersistedEvents;
    if (isOptimisedLookup) {
        withinMatchFromPersistedEvents = query(tableForPerDuration, matchingEvent, withinTableCompiledConditions.get(perValue), withinTableCompiledSelection.get(perValue), tableMetaStreamEvent.getLastInputDefinition().getAttributeList().toArray(new Attribute[0]));
    } else {
        withinMatchFromPersistedEvents = tableForPerDuration.find(matchingEvent, withinTableCompiledConditions.get(perValue));
    }
    complexEventChunkToHoldWithinMatches.add(withinMatchFromPersistedEvents);
    // Optimization step.
    long oldestInMemoryEventTimestamp = getOldestInMemoryEventTimestamp(incrementalExecutorMap, activeIncrementalDurations, perValue);
    // If processing on external time, the in-memory data also needs to be queried
    if (isProcessingOnExternalTime || requiresAggregatingInMemoryData(oldestInMemoryEventTimestamp, startTimeEndTime)) {
        if (isDistributed) {
            int perValueIndex = this.activeIncrementalDurations.indexOf(perValue);
            if (perValueIndex != 0) {
                Map<TimePeriod.Duration, CompiledCondition> lowerGranularityLookups = new HashMap<>();
                for (int i = 0; i < perValueIndex; i++) {
                    TimePeriod.Duration key = this.activeIncrementalDurations.get(i);
                    lowerGranularityLookups.put(key, withinTableLowerGranularityCompileCondition.get(key));
                }
                List<StreamEvent> eventChunks = lowerGranularityLookups.entrySet().stream().map((entry) -> {
                    Table table = aggregationTableMap.get(entry.getKey());
                    if (isOptimisedLookup) {
                        return query(table, matchingEvent, entry.getValue(), withinTableCompiledSelection.get(entry.getKey()), tableMetaStreamEvent.getLastInputDefinition().getAttributeList().toArray(new Attribute[0]));
                    } else {
                        return table.find(matchingEvent, entry.getValue());
                    }
                }).filter(Objects::nonNull).collect(Collectors.toList());
                eventChunks.forEach(complexEventChunkToHoldWithinMatches::add);
            }
        } else {
            TimePeriod.Duration rootDuration = activeIncrementalDurations.get(0);
            IncrementalDataAggregator incrementalDataAggregator = new IncrementalDataAggregator(activeIncrementalDurations, perValue, oldestInMemoryEventTimestamp, aggregateProcessingExecutorsMap.get(rootDuration), shouldUpdateTimestamp, groupByKeyGeneratorMap.get(rootDuration) != null, tableMetaStreamEvent, timeZone);
            ComplexEventChunk<StreamEvent> aggregatedInMemoryEventChunk;
            // Aggregate in-memory data and create an event chunk out of it
            aggregatedInMemoryEventChunk = incrementalDataAggregator.aggregateInMemoryData(incrementalExecutorMap);
            // Get the in-memory aggregate data, which is within given duration
            StreamEvent withinMatchFromInMemory = ((Operator) inMemoryStoreCompileCondition).find(matchingEvent, aggregatedInMemoryEventChunk, tableEventCloner);
            complexEventChunkToHoldWithinMatches.add(withinMatchFromInMemory);
        }
    }
    ComplexEventChunk<StreamEvent> processedEvents;
    if (isDistributed || isProcessingOnExternalTime) {
        List<ExpressionExecutor> expressionExecutors = aggregateProcessingExecutorsMap.get(perValue);
        GroupByKeyGenerator groupByKeyGenerator = groupByKeyGeneratorMap.get(perValue);
        OutOfOrderEventsDataAggregator outOfOrderEventsDataAggregator = new OutOfOrderEventsDataAggregator(expressionExecutors, shouldUpdateTimestamp, groupByKeyGenerator, tableMetaStreamEvent);
        processedEvents = outOfOrderEventsDataAggregator.aggregateData(complexEventChunkToHoldWithinMatches);
    } else {
        processedEvents = complexEventChunkToHoldWithinMatches;
    }
    // Get the final event chunk from the data which is within given duration. This event chunk contains the values
    // in the select clause of an aggregate definition.
    ComplexEventChunk<StreamEvent> aggregateSelectionComplexEventChunk = createAggregateSelectionEventChunk(processedEvents, outputExpressionExecutors);
    // Execute the on compile condition
    return ((Operator) onCompiledCondition).find(matchingEvent, aggregateSelectionComplexEventChunk, aggregateEventCloner);
}
Also used: ComplexEventChunk (io.siddhi.core.event.ComplexEventChunk), HashMap (java.util.HashMap), Attribute (io.siddhi.query.api.definition.Attribute), GroupByKeyGenerator (io.siddhi.core.query.selector.GroupByKeyGenerator), SiddhiAppRuntimeException (io.siddhi.core.exception.SiddhiAppRuntimeException), OutOfOrderEventsDataAggregator (io.siddhi.core.aggregation.OutOfOrderEventsDataAggregator), Table (io.siddhi.core.table.Table), VariableExpressionExecutor (io.siddhi.core.executor.VariableExpressionExecutor), ExpressionExecutor (io.siddhi.core.executor.ExpressionExecutor), TimePeriod (io.siddhi.query.api.aggregation.TimePeriod), StreamEvent (io.siddhi.core.event.stream.StreamEvent), MetaStreamEvent (io.siddhi.core.event.stream.MetaStreamEvent), SiddhiAppValidationException (io.siddhi.query.api.exception.SiddhiAppValidationException), Time.normalizeDuration (io.siddhi.query.api.expression.Expression.Time.normalizeDuration), IncrementalDataAggregator (io.siddhi.core.aggregation.IncrementalDataAggregator)
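
The key SiddhiAppRuntimeException usage in this find() method is translating a SiddhiAppValidationException from normalizeDuration() into a runtime failure, and rejecting "per" granularities that the aggregation does not maintain. Below is a minimal sketch of that guard on its own; the PerValueResolver helper name is hypothetical and the message wording is illustrative only.

import io.siddhi.core.exception.SiddhiAppRuntimeException;
import io.siddhi.query.api.aggregation.TimePeriod;
import io.siddhi.query.api.exception.SiddhiAppValidationException;

import java.util.Set;

import static io.siddhi.query.api.expression.Expression.Time.normalizeDuration;

// Hypothetical helper: resolve a user supplied "per" value into a duration the
// aggregation actually maintains, failing fast with SiddhiAppRuntimeException otherwise.
public final class PerValueResolver {

    private PerValueResolver() {
    }

    public static TimePeriod.Duration resolve(String perValueAsString,
                                              Set<TimePeriod.Duration> availableDurations,
                                              String aggregationName) {
        TimePeriod.Duration perValue;
        try {
            // normalizeDuration() rejects strings that are not valid time granularities.
            perValue = normalizeDuration(perValueAsString);
        } catch (SiddhiAppValidationException e) {
            throw new SiddhiAppRuntimeException("Aggregation Query's per value '" + perValueAsString
                    + "' is not one of SECONDS, MINUTES, HOURS, DAYS, MONTHS, YEARS");
        }
        if (!availableDurations.contains(perValue)) {
            throw new SiddhiAppRuntimeException("Aggregation definition " + aggregationName
                    + " does not maintain the " + perValue + " granularity");
        }
        return perValue;
    }
}

The original method then uses the resolved duration to pick the matching aggregation table and compiled condition.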

Example 7 with SiddhiAppRuntimeException

Use of io.siddhi.core.exception.SiddhiAppRuntimeException in project siddhi by wso2.

From the class AbstractQueryableRecordTable, method handleCacheExpiry.

public void handleCacheExpiry(CompiledCondition cacheExpiryCompiledCondition, ComplexEventChunk<StateEvent> deleteEventChunk) {
    if (log.isDebugEnabled()) {
        log.debug(siddhiAppContext.getName() + ": CacheExpirer started");
    }
    StateEvent stateEventForCaching = new StateEvent(1, 0);
    StreamEvent loadedDataFromStore;
    readWriteLock.writeLock().lock();
    try {
        if (storeTableSize != -1 && storeSizeLastCheckedTime > siddhiAppContext.getTimestampGenerator().currentTime() - retentionPeriod * 10) {
            if (log.isDebugEnabled()) {
                log.debug(siddhiAppContext.getName() + ": checking size of store table");
            }
            try {
                if (storeTableSize <= maxCacheSize) {
                    AbstractQueryableRecordTable.queryStoreWithoutCheckingCache.set(Boolean.TRUE);
                    try {
                        if (cacheLastReloadTime < siddhiAppContext.getTimestampGenerator().currentTime() + retentionPeriod) {
                            loadedDataFromStore = query(stateEventForCaching, compiledConditionForCaching, compiledSelectionForCaching, outputAttributesForCaching);
                            clearCacheAndReload(loadedDataFromStore);
                            cacheLastReloadTime = siddhiAppContext.getTimestampGenerator().currentTime();
                        }
                    } finally {
                        AbstractQueryableRecordTable.queryStoreWithoutCheckingCache.set(Boolean.FALSE);
                    }
                } else {
                    cacheTable.delete(deleteEventChunk, cacheExpiryCompiledCondition);
                }
            } catch (ConnectionUnavailableException e) {
                throw new SiddhiAppRuntimeException(siddhiAppContext.getName() + ": " + e.getMessage());
            }
        } else {
            try {
                AbstractQueryableRecordTable.queryStoreWithoutCheckingCache.set(Boolean.TRUE);
                try {
                    loadedDataFromStore = query(stateEventForCaching, compiledConditionForCaching, compiledSelectionForCaching, outputAttributesForCaching);
                    storeTableSize = findEventChunkSize(loadedDataFromStore);
                    storeSizeLastCheckedTime = siddhiAppContext.getTimestampGenerator().currentTime();
                } finally {
                    AbstractQueryableRecordTable.queryStoreWithoutCheckingCache.set(Boolean.FALSE);
                }
                if (storeTableSize <= maxCacheSize) {
                    if (cacheLastReloadTime < siddhiAppContext.getTimestampGenerator().currentTime() + retentionPeriod) {
                        clearCacheAndReload(loadedDataFromStore);
                        cacheLastReloadTime = siddhiAppContext.getTimestampGenerator().currentTime();
                    }
                } else {
                    cacheTable.delete(deleteEventChunk, cacheExpiryCompiledCondition);
                }
            } catch (Exception e) {
                throw new SiddhiAppRuntimeException(siddhiAppContext.getName() + ": " + e.getMessage());
            }
        }
        if (log.isDebugEnabled()) {
            log.debug(siddhiAppContext.getName() + ": CacheExpirer ended");
        }
    } finally {
        readWriteLock.writeLock().unlock();
    }
}
Also used: OnDemandQueryRuntimeUtil.executeSelectorAndReturnStreamEvent (io.siddhi.core.util.OnDemandQueryRuntimeUtil.executeSelectorAndReturnStreamEvent), StreamEvent (io.siddhi.core.event.stream.StreamEvent), MetaStreamEvent (io.siddhi.core.event.stream.MetaStreamEvent), SiddhiAppRuntimeException (io.siddhi.core.exception.SiddhiAppRuntimeException), MetaStateEvent (io.siddhi.core.event.state.MetaStateEvent), StateEvent (io.siddhi.core.event.state.StateEvent), ConnectionUnavailableException (io.siddhi.core.exception.ConnectionUnavailableException), SiddhiAppCreationException (io.siddhi.core.exception.SiddhiAppCreationException)
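
handleCacheExpiry repeats one shape throughout: take the table's write lock, query the store, and rethrow any failure as a SiddhiAppRuntimeException prefixed with the Siddhi app name, always releasing the lock in a finally block. A minimal sketch of that shape follows; the CacheRefresher class and its loadFromStore/reloadCache helpers are hypothetical stand-ins for the real query and clearCacheAndReload calls.

import java.util.concurrent.locks.ReentrantReadWriteLock;

import io.siddhi.core.event.stream.StreamEvent;
import io.siddhi.core.exception.ConnectionUnavailableException;
import io.siddhi.core.exception.SiddhiAppRuntimeException;

// Hypothetical sketch of the lock-query-wrap pattern used by handleCacheExpiry().
public class CacheRefresher {

    private final ReentrantReadWriteLock readWriteLock = new ReentrantReadWriteLock();
    private final String siddhiAppName;

    public CacheRefresher(String siddhiAppName) {
        this.siddhiAppName = siddhiAppName;
    }

    public void refresh() {
        readWriteLock.writeLock().lock();
        try {
            StreamEvent loadedDataFromStore = loadFromStore(); // stand-in for query(...)
            reloadCache(loadedDataFromStore);                  // stand-in for clearCacheAndReload(...)
        } catch (ConnectionUnavailableException e) {
            // The checked store failure is surfaced as an unchecked SiddhiAppRuntimeException,
            // prefixed with the Siddhi app name, mirroring the original method.
            throw new SiddhiAppRuntimeException(siddhiAppName + ": " + e.getMessage());
        } finally {
            // The write lock is always released, even when the store query fails.
            readWriteLock.writeLock().unlock();
        }
    }

    private StreamEvent loadFromStore() throws ConnectionUnavailableException {
        return null; // placeholder for the actual store query
    }

    private void reloadCache(StreamEvent loadedDataFromStore) {
        // placeholder for rebuilding the cache table from the loaded events
    }
}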

Example 8 with SiddhiAppRuntimeException

Use of io.siddhi.core.exception.SiddhiAppRuntimeException in project siddhi by wso2.

From the class MaxIncrementalAttributeAggregator, method init.

@Override
public void init(String attributeName, Attribute.Type attributeType) {
    if (attributeName == null) {
        throw new SiddhiAppCreationException("Max incremental attribute aggregation cannot be executed " + "when no parameters are given");
    }
    if (attributeType.equals(Attribute.Type.INT) || attributeType.equals(Attribute.Type.LONG) || attributeType.equals(Attribute.Type.DOUBLE) || attributeType.equals(Attribute.Type.FLOAT)) {
        this.baseAttributes = new Attribute[] { new Attribute("AGG_MAX_".concat(attributeName), attributeType) };
        this.baseAttributesInitialValues = new Expression[] { Expression.variable(attributeName) };
        this.returnType = attributeType;
    } else {
        throw new SiddhiAppRuntimeException("Max aggregation cannot be executed on attribute type " + attributeType.toString());
    }
}
Also used: ReturnAttribute (io.siddhi.annotation.ReturnAttribute), Attribute (io.siddhi.query.api.definition.Attribute), SiddhiAppCreationException (io.siddhi.core.exception.SiddhiAppCreationException), SiddhiAppRuntimeException (io.siddhi.core.exception.SiddhiAppRuntimeException)
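
init() separates two failure modes: a missing parameter is a definition problem and throws SiddhiAppCreationException, while an unsupported attribute type is reported as a SiddhiAppRuntimeException. A minimal sketch of that check, pulled into a hypothetical NumericAttributeCheck helper that both the max and min aggregators could share:

import java.util.EnumSet;

import io.siddhi.core.exception.SiddhiAppCreationException;
import io.siddhi.core.exception.SiddhiAppRuntimeException;
import io.siddhi.query.api.definition.Attribute;

// Hypothetical helper mirroring the parameter checks of the min/max incremental aggregators.
public final class NumericAttributeCheck {

    private static final EnumSet<Attribute.Type> NUMERIC_TYPES = EnumSet.of(
            Attribute.Type.INT, Attribute.Type.LONG,
            Attribute.Type.DOUBLE, Attribute.Type.FLOAT);

    private NumericAttributeCheck() {
    }

    public static void require(String aggregatorName, String attributeName, Attribute.Type attributeType) {
        if (attributeName == null) {
            // Missing parameter: the Siddhi app definition itself is invalid.
            throw new SiddhiAppCreationException(aggregatorName
                    + " incremental attribute aggregation cannot be executed when no parameters are given");
        }
        if (!NUMERIC_TYPES.contains(attributeType)) {
            // Unsupported type: reported as a runtime failure, as in the original init().
            throw new SiddhiAppRuntimeException(aggregatorName
                    + " aggregation cannot be executed on attribute type " + attributeType);
        }
    }
}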

Example 9 with SiddhiAppRuntimeException

Use of io.siddhi.core.exception.SiddhiAppRuntimeException in project siddhi by wso2.

From the class MinIncrementalAttributeAggregator, method init.

@Override
public void init(String attributeName, Attribute.Type attributeType) {
    if (attributeName == null) {
        throw new SiddhiAppCreationException("Min incremental attribute aggregation cannot be executed " + "when no parameters are given");
    }
    if (attributeType.equals(Attribute.Type.INT) || attributeType.equals(Attribute.Type.LONG) || attributeType.equals(Attribute.Type.DOUBLE) || attributeType.equals(Attribute.Type.FLOAT)) {
        this.baseAttributes = new Attribute[] { new Attribute("AGG_MIN_".concat(attributeName), attributeType) };
        this.baseAttributesInitialValues = new Expression[] { Expression.variable(attributeName) };
        this.returnType = attributeType;
    } else {
        throw new SiddhiAppRuntimeException("Min aggregation cannot be executed on attribute type " + attributeType.toString());
    }
}
Also used: ReturnAttribute (io.siddhi.annotation.ReturnAttribute), Attribute (io.siddhi.query.api.definition.Attribute), SiddhiAppCreationException (io.siddhi.core.exception.SiddhiAppCreationException), SiddhiAppRuntimeException (io.siddhi.core.exception.SiddhiAppRuntimeException)
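
The min aggregator is identical to the max aggregator above except for the AGG_MIN_ prefix and its messages. The part worth isolating is how the stored base attribute and its initial value are derived from the input attribute; a minimal sketch, with the BaseAttributeFactory name being hypothetical:

import io.siddhi.query.api.definition.Attribute;
import io.siddhi.query.api.expression.Expression;

// Hypothetical helper showing how min/max incremental aggregators derive their base attribute.
public final class BaseAttributeFactory {

    private BaseAttributeFactory() {
    }

    public static Attribute baseAttribute(String prefix, String attributeName, Attribute.Type attributeType) {
        // e.g. prefix "AGG_MIN_" and attribute "price" become the stored column AGG_MIN_price.
        return new Attribute(prefix.concat(attributeName), attributeType);
    }

    public static Expression initialValue(String attributeName) {
        // The base attribute starts from the incoming attribute's own value.
        return Expression.variable(attributeName);
    }
}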

Example 10 with SiddhiAppRuntimeException

Use of io.siddhi.core.exception.SiddhiAppRuntimeException in project siddhi by wso2.

From the class JoinProcessor, method execute.

private void execute(ComplexEventChunk complexEventChunk, List<ComplexEventChunk> returnEventChunkList) {
    StateEvent joinStateEvent = new StateEvent(2, 0);
    StreamEvent nextEvent = (StreamEvent) complexEventChunk.getFirst();
    complexEventChunk.clear();
    while (nextEvent != null) {
        StreamEvent streamEvent = nextEvent;
        nextEvent = streamEvent.getNext();
        streamEvent.setNext(null);
        ComplexEvent.Type eventType = streamEvent.getType();
        if (eventType == ComplexEvent.Type.TIMER) {
            continue;
        } else if (eventType == ComplexEvent.Type.RESET) {
            if (!leftJoinProcessor) {
                StateEvent outputStateEvent = joinEventBuilder(null, streamEvent, eventType);
                returnEventChunkList.add(new SelectorTypeComplexEventChunk(new ComplexEventChunk<>(outputStateEvent, outputStateEvent), false));
            } else {
                StateEvent outputStateEvent = joinEventBuilder(streamEvent, null, eventType);
                returnEventChunkList.add(new SelectorTypeComplexEventChunk(new ComplexEventChunk<>(outputStateEvent, outputStateEvent), false));
            }
        } else {
            joinStateEvent.setEvent(matchingStreamIndex, streamEvent);
            StreamEvent foundStreamEvent;
            if (this.isOptimisedQuery) {
                try {
                    foundStreamEvent = query(joinStateEvent);
                } catch (SiddhiAppRuntimeException e) {
                    log.warn("Performing select clause in databases failed due to '" + e.getMessage() + " in query '" + queryName + "' within Siddhi app '" + siddhiAppName + "' hence reverting back to querying only with where clause.", e);
                    this.isOptimisedQuery = false;
                    foundStreamEvent = findableProcessor.find(joinStateEvent, compiledCondition);
                }
            } else {
                foundStreamEvent = findableProcessor.find(joinStateEvent, compiledCondition);
            }
            joinStateEvent.setEvent(matchingStreamIndex, null);
            if (foundStreamEvent == null) {
                if (outerJoinProcessor && !leftJoinProcessor) {
                    StateEvent outputStateEvent = joinEventBuilder(null, streamEvent, eventType);
                    returnEventChunkList.add(new SelectorTypeComplexEventChunk(new ComplexEventChunk<>(outputStateEvent, outputStateEvent), false));
                } else if (outerJoinProcessor && leftJoinProcessor) {
                    StateEvent outputStateEvent = joinEventBuilder(streamEvent, null, eventType);
                    returnEventChunkList.add(new SelectorTypeComplexEventChunk(new ComplexEventChunk<>(outputStateEvent, outputStateEvent), false));
                }
            } else if (!isOptimisedQuery) {
                ComplexEventChunk<ComplexEvent> returnEventChunk = new ComplexEventChunk<>();
                while (foundStreamEvent != null) {
                    StreamEvent nextFoundStreamEvent = foundStreamEvent.getNext();
                    foundStreamEvent.setNext(null);
                    if (!leftJoinProcessor) {
                        returnEventChunk.add(joinEventBuilder(foundStreamEvent, streamEvent, eventType));
                    } else {
                        returnEventChunk.add(joinEventBuilder(streamEvent, foundStreamEvent, eventType));
                    }
                    foundStreamEvent = nextFoundStreamEvent;
                }
                returnEventChunkList.add(new SelectorTypeComplexEventChunk(returnEventChunk, false));
            } else {
                ComplexEventChunk<ComplexEvent> returnEventChunk = new ComplexEventChunk<>();
                while (foundStreamEvent != null) {
                    StreamEvent nextFoundStreamEvent = foundStreamEvent.getNext();
                    StateEvent returnEvent = stateEventFactory.newInstance();
                    returnEvent.setType(eventType);
                    returnEvent.setTimestamp(foundStreamEvent.getTimestamp());
                    Object[] outputData = foundStreamEvent.getOutputData();
                    for (int i = 0; i < outputData.length; i++) {
                        Object data = outputData[i];
                        returnEvent.setOutputData(data, i);
                    }
                    returnEventChunk.add(returnEvent);
                    foundStreamEvent = nextFoundStreamEvent;
                }
                returnEventChunkList.add(new SelectorTypeComplexEventChunk(returnEventChunk, true));
            }
        }
    }
}
Also used: ComplexEvent (io.siddhi.core.event.ComplexEvent), ComplexEventChunk (io.siddhi.core.event.ComplexEventChunk), SelectorTypeComplexEventChunk (io.siddhi.core.query.selector.SelectorTypeComplexEventChunk), StreamEvent (io.siddhi.core.event.stream.StreamEvent), SiddhiAppRuntimeException (io.siddhi.core.exception.SiddhiAppRuntimeException), StateEvent (io.siddhi.core.event.state.StateEvent)
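
In execute(), a SiddhiAppRuntimeException from the optimised store query is not fatal: it is logged as a warning, the isOptimisedQuery flag is cleared, and the join falls back to a plain find() on the compiled condition. A minimal sketch of that fallback, with queryWithSelection() and findWithCompiledCondition() as hypothetical stand-ins for the real query() and findableProcessor.find() calls:

import io.siddhi.core.event.state.StateEvent;
import io.siddhi.core.event.stream.StreamEvent;
import io.siddhi.core.exception.SiddhiAppRuntimeException;

// Hypothetical sketch of the "optimised query with fallback" pattern from JoinProcessor.execute().
public class OptimisedLookupWithFallback {

    private volatile boolean optimisedQueryEnabled = true;

    public StreamEvent lookup(StateEvent joinStateEvent) {
        if (optimisedQueryEnabled) {
            try {
                return queryWithSelection(joinStateEvent);
            } catch (SiddhiAppRuntimeException e) {
                // Warn, permanently disable the optimised path, then retry below.
                System.err.println("Optimised lookup failed, reverting to where-clause-only lookup: "
                        + e.getMessage());
                optimisedQueryEnabled = false;
            }
        }
        return findWithCompiledCondition(joinStateEvent);
    }

    private StreamEvent queryWithSelection(StateEvent joinStateEvent) {
        // Stand-in for query(joinStateEvent); stores that cannot run the compiled
        // selection throw SiddhiAppRuntimeException.
        throw new SiddhiAppRuntimeException("store does not support the compiled selection");
    }

    private StreamEvent findWithCompiledCondition(StateEvent joinStateEvent) {
        return null; // stand-in for findableProcessor.find(joinStateEvent, compiledCondition)
    }
}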

Aggregations

SiddhiAppRuntimeException (io.siddhi.core.exception.SiddhiAppRuntimeException): 19 usages
HashMap (java.util.HashMap): 7 usages
MetaStreamEvent (io.siddhi.core.event.stream.MetaStreamEvent): 5 usages
StreamEvent (io.siddhi.core.event.stream.StreamEvent): 5 usages
Attribute (io.siddhi.query.api.definition.Attribute): 5 usages
ReturnAttribute (io.siddhi.annotation.ReturnAttribute): 4 usages
SiddhiAppCreationException (io.siddhi.core.exception.SiddhiAppCreationException): 4 usages
Expression (io.siddhi.query.api.expression.Expression): 4 usages
Map (java.util.Map): 4 usages
ComplexEventChunk (io.siddhi.core.event.ComplexEventChunk): 3 usages
MetaStateEvent (io.siddhi.core.event.state.MetaStateEvent): 3 usages
Set (java.util.Set): 3 usages
SiddhiOnDemandQueryContext (io.siddhi.core.config.SiddhiOnDemandQueryContext): 2 usages
SiddhiQueryContext (io.siddhi.core.config.SiddhiQueryContext): 2 usages
ComplexEvent (io.siddhi.core.event.ComplexEvent): 2 usages
StateEvent (io.siddhi.core.event.state.StateEvent): 2 usages
ExpressionExecutor (io.siddhi.core.executor.ExpressionExecutor): 2 usages
TreeMap (java.util.TreeMap): 2 usages
IncrementalDataAggregator (io.siddhi.core.aggregation.IncrementalDataAggregator): 1 usage
OutOfOrderEventsDataAggregator (io.siddhi.core.aggregation.OutOfOrderEventsDataAggregator): 1 usage