
Example 21 with ExpressionExecutor

Use of io.siddhi.core.executor.ExpressionExecutor in project siddhi by wso2.

The class AbstractRecordTable, method update().

@Override
public void update(ComplexEventChunk<StateEvent> updatingEventChunk, CompiledCondition compiledCondition, CompiledUpdateSet compiledUpdateSet) {
    RecordStoreCompiledCondition recordStoreCompiledCondition = ((RecordStoreCompiledCondition) compiledCondition);
    RecordTableCompiledUpdateSet recordTableCompiledUpdateSet = (RecordTableCompiledUpdateSet) compiledUpdateSet;
    List<Map<String, Object>> updateConditionParameterMaps = new ArrayList<>();
    List<Map<String, Object>> updateSetParameterMaps = new ArrayList<>();
    updatingEventChunk.reset();
    long timestamp = 0L;
    while (updatingEventChunk.hasNext()) {
        StateEvent stateEvent = updatingEventChunk.next();
        Map<String, Object> variableMap = new HashMap<>();
        for (Map.Entry<String, ExpressionExecutor> entry : recordStoreCompiledCondition.variableExpressionExecutorMap.entrySet()) {
            variableMap.put(entry.getKey(), entry.getValue().execute(stateEvent));
        }
        updateConditionParameterMaps.add(variableMap);
        Map<String, Object> variableMapForUpdateSet = new HashMap<>();
        for (Map.Entry<String, ExpressionExecutor> entry : recordTableCompiledUpdateSet.getExpressionExecutorMap().entrySet()) {
            variableMapForUpdateSet.put(entry.getKey(), entry.getValue().execute(stateEvent));
        }
        updateSetParameterMaps.add(variableMapForUpdateSet);
        timestamp = stateEvent.getTimestamp();
    }
    try {
        if (recordTableHandler != null) {
            recordTableHandler.update(timestamp, recordStoreCompiledCondition.getCompiledCondition(), updateConditionParameterMaps, recordTableCompiledUpdateSet.getUpdateSetMap(), updateSetParameterMaps);
        } else {
            update(recordStoreCompiledCondition.getCompiledCondition(), updateConditionParameterMaps, recordTableCompiledUpdateSet.getUpdateSetMap(), updateSetParameterMaps);
        }
    } catch (ConnectionUnavailableException | DatabaseRuntimeException e) {
        onUpdateError(updatingEventChunk, compiledCondition, compiledUpdateSet, e);
    }
}
Also used : VariableExpressionExecutor(io.siddhi.core.executor.VariableExpressionExecutor) ExpressionExecutor(io.siddhi.core.executor.ExpressionExecutor) HashMap(java.util.HashMap) DatabaseRuntimeException(io.siddhi.core.exception.DatabaseRuntimeException) ArrayList(java.util.ArrayList) StateEvent(io.siddhi.core.event.state.StateEvent) Map(java.util.Map) ConnectionUnavailableException(io.siddhi.core.exception.ConnectionUnavailableException)
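
Both loops above follow the same pattern: evaluate every named ExpressionExecutor against the current StateEvent and collect the results into a parameter map. A minimal standalone sketch of that pattern (the helper name resolveParameters and its call shape are illustrative, not part of Siddhi):

import io.siddhi.core.event.state.StateEvent;
import io.siddhi.core.executor.ExpressionExecutor;
import java.util.HashMap;
import java.util.Map;

// Hypothetical helper: resolves each named executor against a single event.
static Map<String, Object> resolveParameters(Map<String, ExpressionExecutor> executors, StateEvent event) {
    Map<String, Object> resolved = new HashMap<>();
    for (Map.Entry<String, ExpressionExecutor> entry : executors.entrySet()) {
        // ExpressionExecutor.execute(...) evaluates the compiled expression against this event.
        resolved.put(entry.getKey(), entry.getValue().execute(event));
    }
    return resolved;
}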

Example 22 with ExpressionExecutor

Use of io.siddhi.core.executor.ExpressionExecutor in project siddhi by wso2.

The class AbstractRecordTable, method delete().

@Override
public void delete(ComplexEventChunk<StateEvent> deletingEventChunk, CompiledCondition compiledCondition) {
    RecordStoreCompiledCondition recordStoreCompiledCondition = ((RecordStoreCompiledCondition) compiledCondition);
    List<Map<String, Object>> deleteConditionParameterMaps = new ArrayList<>();
    deletingEventChunk.reset();
    long timestamp = 0L;
    while (deletingEventChunk.hasNext()) {
        StateEvent stateEvent = deletingEventChunk.next();
        Map<String, Object> variableMap = new HashMap<>();
        for (Map.Entry<String, ExpressionExecutor> entry : recordStoreCompiledCondition.variableExpressionExecutorMap.entrySet()) {
            variableMap.put(entry.getKey(), entry.getValue().execute(stateEvent));
        }
        deleteConditionParameterMaps.add(variableMap);
        timestamp = stateEvent.getTimestamp();
    }
    try {
        if (recordTableHandler != null) {
            recordTableHandler.delete(timestamp, deleteConditionParameterMaps, recordStoreCompiledCondition.getCompiledCondition());
        } else {
            delete(deleteConditionParameterMaps, recordStoreCompiledCondition.getCompiledCondition());
        }
    } catch (ConnectionUnavailableException | DatabaseRuntimeException e) {
        onDeleteError(deletingEventChunk, compiledCondition, e);
    }
}
Also used : VariableExpressionExecutor(io.siddhi.core.executor.VariableExpressionExecutor) ExpressionExecutor(io.siddhi.core.executor.ExpressionExecutor) HashMap(java.util.HashMap) DatabaseRuntimeException(io.siddhi.core.exception.DatabaseRuntimeException) ArrayList(java.util.ArrayList) StateEvent(io.siddhi.core.event.state.StateEvent) Map(java.util.Map) ConnectionUnavailableException(io.siddhi.core.exception.ConnectionUnavailableException)
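
The protected delete(...) overload called on the non-handler branch is the extension point that a concrete record table implements; it receives one parameter map per deleted event, keyed by the variable names used when the condition was compiled. A hedged sketch of such an override (the signature is inferred from the call above, and the body is deliberately left abstract):

// Sketch of a concrete store's override; details depend on the underlying store technology.
@Override
protected void delete(List<Map<String, Object>> deleteConditionParameterMaps, CompiledCondition compiledCondition) throws ConnectionUnavailableException {
    for (Map<String, Object> conditionParameters : deleteConditionParameterMaps) {
        // Bind conditionParameters into the store-specific form of compiledCondition
        // and execute the delete against the backing store.
    }
}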

Example 23 with ExpressionExecutor

Use of io.siddhi.core.executor.ExpressionExecutor in project siddhi by wso2.

The class IncrementalAggregateCompileCondition, method find().

public StreamEvent find(StateEvent matchingEvent, Map<TimePeriod.Duration, Executor> incrementalExecutorMap, Map<TimePeriod.Duration, List<ExpressionExecutor>> aggregateProcessingExecutorsMap, Map<TimePeriod.Duration, GroupByKeyGenerator> groupByKeyGeneratorMap, ExpressionExecutor shouldUpdateTimestamp, String timeZone) {
    ComplexEventChunk<StreamEvent> complexEventChunkToHoldWithinMatches = new ComplexEventChunk<>();
    // Create a matching event if this is an on-demand query
    int additionTimestampAttributesSize = this.timestampFilterExecutors.size() + 2;
    Long[] timestampFilters = new Long[additionTimestampAttributesSize];
    if (matchingEvent.getStreamEvent(0) == null) {
        StreamEvent streamEvent = new StreamEvent(0, additionTimestampAttributesSize, 0);
        matchingEvent.addEvent(0, streamEvent);
    }
    Long[] startTimeEndTime = (Long[]) startTimeEndTimeExpressionExecutor.execute(matchingEvent);
    if (startTimeEndTime == null) {
        throw new SiddhiAppRuntimeException("Start and end times for within duration cannot be retrieved");
    }
    timestampFilters[0] = startTimeEndTime[0];
    timestampFilters[1] = startTimeEndTime[1];
    if (isDistributed) {
        for (int i = 0; i < additionTimestampAttributesSize - 2; i++) {
            timestampFilters[i + 2] = ((Long) this.timestampFilterExecutors.get(i).execute(matchingEvent));
        }
    }
    complexEventPopulater.populateComplexEvent(matchingEvent.getStreamEvent(0), timestampFilters);
    // Get all the aggregates within the given duration, from table corresponding to "per" duration
    // Retrieve per value
    String perValueAsString = perExpressionExecutor.execute(matchingEvent).toString();
    TimePeriod.Duration perValue;
    try {
        // Per time function verification
        perValue = normalizeDuration(perValueAsString);
    } catch (SiddhiAppValidationException e) {
        throw new SiddhiAppRuntimeException("Aggregation Query's per value is expected to be of a valid time function of the " + "following " + TimePeriod.Duration.SECONDS + ", " + TimePeriod.Duration.MINUTES + ", " + TimePeriod.Duration.HOURS + ", " + TimePeriod.Duration.DAYS + ", " + TimePeriod.Duration.MONTHS + ", " + TimePeriod.Duration.YEARS + ".");
    }
    if (!incrementalExecutorMap.keySet().contains(perValue)) {
        throw new SiddhiAppRuntimeException("The aggregate values for " + perValue.toString() + " granularity cannot be provided since aggregation definition " + aggregationName + " does not contain " + perValue.toString() + " duration");
    }
    Table tableForPerDuration = aggregationTableMap.get(perValue);
    StreamEvent withinMatchFromPersistedEvents;
    if (isOptimisedLookup) {
        withinMatchFromPersistedEvents = query(tableForPerDuration, matchingEvent, withinTableCompiledConditions.get(perValue), withinTableCompiledSelection.get(perValue), tableMetaStreamEvent.getLastInputDefinition().getAttributeList().toArray(new Attribute[0]));
    } else {
        withinMatchFromPersistedEvents = tableForPerDuration.find(matchingEvent, withinTableCompiledConditions.get(perValue));
    }
    complexEventChunkToHoldWithinMatches.add(withinMatchFromPersistedEvents);
    // Optimization step.
    long oldestInMemoryEventTimestamp = getOldestInMemoryEventTimestamp(incrementalExecutorMap, activeIncrementalDurations, perValue);
    // If processing on external time, the in-memory data also needs to be queried
    if (isProcessingOnExternalTime || requiresAggregatingInMemoryData(oldestInMemoryEventTimestamp, startTimeEndTime)) {
        if (isDistributed) {
            int perValueIndex = this.activeIncrementalDurations.indexOf(perValue);
            if (perValueIndex != 0) {
                Map<TimePeriod.Duration, CompiledCondition> lowerGranularityLookups = new HashMap<>();
                for (int i = 0; i < perValueIndex; i++) {
                    TimePeriod.Duration key = this.activeIncrementalDurations.get(i);
                    lowerGranularityLookups.put(key, withinTableLowerGranularityCompileCondition.get(key));
                }
                List<StreamEvent> eventChunks = lowerGranularityLookups.entrySet().stream().map((entry) -> {
                    Table table = aggregationTableMap.get(entry.getKey());
                    if (isOptimisedLookup) {
                        return query(table, matchingEvent, entry.getValue(), withinTableCompiledSelection.get(entry.getKey()), tableMetaStreamEvent.getLastInputDefinition().getAttributeList().toArray(new Attribute[0]));
                    } else {
                        return table.find(matchingEvent, entry.getValue());
                    }
                }).filter(Objects::nonNull).collect(Collectors.toList());
                eventChunks.forEach(complexEventChunkToHoldWithinMatches::add);
            }
        } else {
            TimePeriod.Duration rootDuration = activeIncrementalDurations.get(0);
            IncrementalDataAggregator incrementalDataAggregator = new IncrementalDataAggregator(activeIncrementalDurations, perValue, oldestInMemoryEventTimestamp, aggregateProcessingExecutorsMap.get(rootDuration), shouldUpdateTimestamp, groupByKeyGeneratorMap.get(rootDuration) != null, tableMetaStreamEvent, timeZone);
            ComplexEventChunk<StreamEvent> aggregatedInMemoryEventChunk;
            // Aggregate in-memory data and create an event chunk out of it
            aggregatedInMemoryEventChunk = incrementalDataAggregator.aggregateInMemoryData(incrementalExecutorMap);
            // Get the in-memory aggregate data, which is within given duration
            StreamEvent withinMatchFromInMemory = ((Operator) inMemoryStoreCompileCondition).find(matchingEvent, aggregatedInMemoryEventChunk, tableEventCloner);
            complexEventChunkToHoldWithinMatches.add(withinMatchFromInMemory);
        }
    }
    ComplexEventChunk<StreamEvent> processedEvents;
    if (isDistributed || isProcessingOnExternalTime) {
        List<ExpressionExecutor> expressionExecutors = aggregateProcessingExecutorsMap.get(perValue);
        GroupByKeyGenerator groupByKeyGenerator = groupByKeyGeneratorMap.get(perValue);
        OutOfOrderEventsDataAggregator outOfOrderEventsDataAggregator = new OutOfOrderEventsDataAggregator(expressionExecutors, shouldUpdateTimestamp, groupByKeyGenerator, tableMetaStreamEvent);
        processedEvents = outOfOrderEventsDataAggregator.aggregateData(complexEventChunkToHoldWithinMatches);
    } else {
        processedEvents = complexEventChunkToHoldWithinMatches;
    }
    // Get the final event chunk from the data which is within given duration. This event chunk contains the values
    // in the select clause of an aggregate definition.
    ComplexEventChunk<StreamEvent> aggregateSelectionComplexEventChunk = createAggregateSelectionEventChunk(processedEvents, outputExpressionExecutors);
    // Execute the on compile condition
    return ((Operator) onCompiledCondition).find(matchingEvent, aggregateSelectionComplexEventChunk, aggregateEventCloner);
}
Also used : ComplexEventChunk(io.siddhi.core.event.ComplexEventChunk) HashMap(java.util.HashMap) Attribute(io.siddhi.query.api.definition.Attribute) GroupByKeyGenerator(io.siddhi.core.query.selector.GroupByKeyGenerator) SiddhiAppRuntimeException(io.siddhi.core.exception.SiddhiAppRuntimeException) OutOfOrderEventsDataAggregator(io.siddhi.core.aggregation.OutOfOrderEventsDataAggregator) Table(io.siddhi.core.table.Table) VariableExpressionExecutor(io.siddhi.core.executor.VariableExpressionExecutor) ExpressionExecutor(io.siddhi.core.executor.ExpressionExecutor) TimePeriod(io.siddhi.query.api.aggregation.TimePeriod) StreamEvent(io.siddhi.core.event.stream.StreamEvent) MetaStreamEvent(io.siddhi.core.event.stream.MetaStreamEvent) SiddhiAppValidationException(io.siddhi.query.api.exception.SiddhiAppValidationException) Time.normalizeDuration(io.siddhi.query.api.expression.Expression.Time.normalizeDuration) IncrementalDataAggregator(io.siddhi.core.aggregation.IncrementalDataAggregator)
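
For context, find() is reached through an on-demand query with within and per clauses against an aggregation definition; perExpressionExecutor evaluates the per value and startTimeEndTimeExpressionExecutor resolves the within range. A rough end-to-end sketch, assuming the Siddhi 5.x SiddhiManager / SiddhiAppRuntime API (stream, aggregation, and attribute names are illustrative):

import io.siddhi.core.SiddhiAppRuntime;
import io.siddhi.core.SiddhiManager;
import io.siddhi.core.event.Event;

public class AggregationFindSketch {
    public static void main(String[] args) throws InterruptedException {
        String app = "define stream StockStream (symbol string, price float); " +
                "define aggregation stockAggregation " +
                "from StockStream " +
                "select symbol, avg(price) as avgPrice " +
                "group by symbol " +
                "aggregate every sec ... year;";
        SiddhiManager siddhiManager = new SiddhiManager();
        SiddhiAppRuntime runtime = siddhiManager.createSiddhiAppRuntime(app);
        runtime.start();
        runtime.getInputHandler("StockStream").send(new Object[]{"WSO2", 100f});
        // 'days' is the per value normalized by normalizeDuration(); the within string
        // supplies the start/end timestamp filters evaluated in find().
        Event[] events = runtime.query("from stockAggregation " +
                "within '2017-06-** **:**:**' " +
                "per 'days' " +
                "select symbol, avgPrice");
        System.out.println(events == null ? 0 : events.length);
        siddhiManager.shutdown();
    }
}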

Example 24 with ExpressionExecutor

Use of io.siddhi.core.executor.ExpressionExecutor in project siddhi by wso2.

The class EventChunkOperator, method update().

@Override
public void update(ComplexEventChunk<StateEvent> updatingEventChunk, Object storeEvents, InMemoryCompiledUpdateSet compiledUpdateSet) {
    ComplexEventChunk<StreamEvent> storeEventChunk = (ComplexEventChunk<StreamEvent>) storeEvents;
    updatingEventChunk.reset();
    while (updatingEventChunk.hasNext()) {
        StateEvent updatingEvent = updatingEventChunk.next();
        try {
            storeEventChunk.reset();
            while (storeEventChunk.hasNext()) {
                StreamEvent storeEvent = storeEventChunk.next();
                updatingEvent.setEvent(storeEventPosition, storeEvent);
                if ((Boolean) expressionExecutor.execute(updatingEvent)) {
                    for (Map.Entry<Integer, ExpressionExecutor> entry : compiledUpdateSet.getExpressionExecutorMap().entrySet()) {
                        storeEvent.setOutputData(entry.getValue().execute(updatingEvent), entry.getKey());
                    }
                }
            }
        } finally {
            updatingEvent.setEvent(storeEventPosition, null);
        }
    }
}
Also used : ComplexEventChunk(io.siddhi.core.event.ComplexEventChunk) ExpressionExecutor(io.siddhi.core.executor.ExpressionExecutor) StreamEvent(io.siddhi.core.event.stream.StreamEvent) StateEvent(io.siddhi.core.event.state.StateEvent) Map(java.util.Map)
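
Here the compiled update set maps output-data positions (Integer keys) to the executors that produce the new values, and each matched store event is rewritten in place. The same assignment, isolated into a small illustrative helper (applyUpdateSet is not a Siddhi method):

import io.siddhi.core.event.state.StateEvent;
import io.siddhi.core.event.stream.StreamEvent;
import io.siddhi.core.executor.ExpressionExecutor;
import java.util.Map;

// Hypothetical helper: applies a position -> executor update map to one matched store event.
static void applyUpdateSet(Map<Integer, ExpressionExecutor> updateExecutors, StateEvent updatingEvent, StreamEvent storeEvent) {
    for (Map.Entry<Integer, ExpressionExecutor> entry : updateExecutors.entrySet()) {
        // Key: index into the store event's output data; value: executor producing the new value.
        storeEvent.setOutputData(entry.getValue().execute(updatingEvent), entry.getKey());
    }
}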

Example 25 with ExpressionExecutor

Use of io.siddhi.core.executor.ExpressionExecutor in project siddhi by wso2.

The class IndexOperator, method update().

private void update(IndexedEventHolder storeEvents, InMemoryCompiledUpdateSet compiledUpdateSet, StateEvent overwritingOrAddingEvent, ComplexEventChunk<StreamEvent> foundEventChunk) {
    // for cases when an indexed attribute is also updated but its value has not changed,
    // to reduce the number of passes needed to update the events
    boolean doDeleteUpdate = false;
    boolean fail = false;
    for (Map.Entry<Integer, ExpressionExecutor> entry : compiledUpdateSet.getExpressionExecutorMap().entrySet()) {
        if (doDeleteUpdate || fail) {
            break;
        }
        if (storeEvents.isAttributeIndexed(entry.getKey())) {
            // TODO: decide how much checking is needed before falling back to delete-and-then-update
            foundEventChunk.reset();
            Set<Object> keys = null;
            PrimaryKeyReferenceHolder[] primaryKeyReferenceHolders = storeEvents.getPrimaryKeyReferenceHolders();
            if (primaryKeyReferenceHolders != null && primaryKeyReferenceHolders.length == 1 && entry.getKey() == primaryKeyReferenceHolders[0].getPrimaryKeyPosition()) {
                keys = new HashSet<>(storeEvents.getAllPrimaryKeyValues());
            }
            while (foundEventChunk.hasNext()) {
                StreamEvent streamEvent = foundEventChunk.next();
                Object updatingData = entry.getValue().execute(overwritingOrAddingEvent);
                Object storeEventData = streamEvent.getOutputData()[entry.getKey()];
                if (updatingData != null && storeEventData != null && !updatingData.equals(storeEventData)) {
                    doDeleteUpdate = true;
                    if (keys == null || keys.size() == 0) {
                        break;
                    } else {
                        keys.remove(storeEventData);
                        if (!keys.add(updatingData)) {
                            log.error("Update failed for event :" + overwritingOrAddingEvent + ", as there is " + "already an event stored with primary key '" + updatingData + "' at '" + queryName + "'");
                            fail = true;
                            break;
                        }
                    }
                }
            }
        }
    }
    foundEventChunk.reset();
    if (!fail) {
        if (doDeleteUpdate) {
            collectionExecutor.delete(overwritingOrAddingEvent, storeEvents);
            ComplexEventChunk<StreamEvent> toUpdateEventChunk = new ComplexEventChunk<>();
            while (foundEventChunk.hasNext()) {
                StreamEvent streamEvent = foundEventChunk.next();
                foundEventChunk.remove();
                // to make the chained state back to normal
                streamEvent.setNext(null);
                for (Map.Entry<Integer, ExpressionExecutor> entry : compiledUpdateSet.getExpressionExecutorMap().entrySet()) {
                    streamEvent.setOutputData(entry.getValue().execute(overwritingOrAddingEvent), entry.getKey());
                }
                toUpdateEventChunk.add(streamEvent);
            }
            storeEvents.add(toUpdateEventChunk);
        } else {
            StreamEvent first = foundEventChunk.getFirst();
            while (first != null) {
                StreamEvent streamEvent = first;
                handleCachePolicyAttributeUpdate(streamEvent);
                for (Map.Entry<Integer, ExpressionExecutor> entry : compiledUpdateSet.getExpressionExecutorMap().entrySet()) {
                    streamEvent.setOutputData(entry.getValue().execute(overwritingOrAddingEvent), entry.getKey());
                }
                StreamEvent next = first.getNext();
                // to make the chained state back to normal
                first.setNext(null);
                first = next;
            }
        }
    }
}
Also used : ExpressionExecutor(io.siddhi.core.executor.ExpressionExecutor) ComplexEventChunk(io.siddhi.core.event.ComplexEventChunk) StreamEvent(io.siddhi.core.event.stream.StreamEvent) PrimaryKeyReferenceHolder(io.siddhi.core.table.holder.PrimaryKeyReferenceHolder) Map(java.util.Map)
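
The primary-key safety check above can be read in isolation: copy the full key set, remove the key this event currently holds, and try to add the new value; a failed add means another stored event already owns it. A small standalone sketch of just that check (primaryKeyCollides is an illustrative helper, not part of Siddhi):

import java.util.HashSet;
import java.util.Set;

// Hypothetical helper: true when changing a stored primary-key value would collide with a
// key already owned by a different stored event.
static boolean primaryKeyCollides(Set<Object> allPrimaryKeyValues, Object currentValue, Object newValue) {
    Set<Object> keys = new HashSet<>(allPrimaryKeyValues);
    // Drop this event's own key, then attempt to claim the new one.
    keys.remove(currentValue);
    return !keys.add(newValue);
}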

Aggregations

ExpressionExecutor (io.siddhi.core.executor.ExpressionExecutor): 53
VariableExpressionExecutor (io.siddhi.core.executor.VariableExpressionExecutor): 43
ConstantExpressionExecutor (io.siddhi.core.executor.ConstantExpressionExecutor): 26
Map (java.util.Map): 22
Attribute (io.siddhi.query.api.definition.Attribute): 21
StreamEvent (io.siddhi.core.event.stream.StreamEvent): 20
HashMap (java.util.HashMap): 20
MetaStreamEvent (io.siddhi.core.event.stream.MetaStreamEvent): 19
ComplexEventChunk (io.siddhi.core.event.ComplexEventChunk): 14
ArrayList (java.util.ArrayList): 13
StateEvent (io.siddhi.core.event.state.StateEvent): 12
Variable (io.siddhi.query.api.expression.Variable): 11
SiddhiAppCreationException (io.siddhi.core.exception.SiddhiAppCreationException): 10
OutputAttribute (io.siddhi.query.api.execution.query.selection.OutputAttribute): 10
Expression (io.siddhi.query.api.expression.Expression): 10
MetaStateEvent (io.siddhi.core.event.state.MetaStateEvent): 8
AbstractDefinition (io.siddhi.query.api.definition.AbstractDefinition): 8
AndConditionExpressionExecutor (io.siddhi.core.executor.condition.AndConditionExpressionExecutor): 5
ConditionExpressionExecutor (io.siddhi.core.executor.condition.ConditionExpressionExecutor): 5
QueryableProcessor (io.siddhi.core.query.processor.stream.window.QueryableProcessor): 4