Use of org.wso2.siddhi.core.executor.ExpressionExecutor in project siddhi by wso2.
The class AbstractRecordTable, method compileCondition.
@Override
public CompiledCondition compileCondition(Expression condition, MatchingMetaInfoHolder matchingMetaInfoHolder,
                                          SiddhiAppContext siddhiAppContext,
                                          List<VariableExpressionExecutor> variableExpressionExecutors,
                                          Map<String, Table> tableMap, String queryName) {
    ExpressionBuilder expressionBuilder = new ExpressionBuilder(condition, matchingMetaInfoHolder,
            siddhiAppContext, variableExpressionExecutors, tableMap, queryName);
    CompiledCondition compileCondition = compileCondition(expressionBuilder);
    // The builder has collected one executor per stream variable in the condition;
    // keep them alongside the store-native compiled condition.
    Map<String, ExpressionExecutor> expressionExecutorMap = expressionBuilder.getVariableExpressionExecutorMap();
    return new RecordStoreCompiledCondition(expressionExecutorMap, compileCondition);
}
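What compileCondition sets up: the ExpressionBuilder visits the condition tree on behalf of the store-specific compileCondition(ExpressionBuilder), recording one ExpressionExecutor per stream variable it meets, keyed by placeholder name; the returned RecordStoreCompiledCondition carries both the store-native condition and that executor map. A minimal self-contained sketch of such a holder, using hypothetical stand-in types (Event, ValueExecutor, StoreCompiledCondition) rather than the real Siddhi classes:

import java.util.Map;

// Hypothetical stand-ins; the real types are StateEvent, ExpressionExecutor,
// and RecordStoreCompiledCondition from the Siddhi core.
interface Event { }

interface ValueExecutor {
    Object execute(Event event);
}

// Pairs the store-native condition (e.g. a parameterised WHERE clause) with
// the executors that compute its parameter values for each incoming event.
final class StoreCompiledCondition {
    final Map<String, ValueExecutor> parameterExecutors;
    final Object nativeCondition;

    StoreCompiledCondition(Map<String, ValueExecutor> parameterExecutors, Object nativeCondition) {
        this.parameterExecutors = parameterExecutors;
        this.nativeCondition = nativeCondition;
    }
}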
Use of org.wso2.siddhi.core.executor.ExpressionExecutor in project siddhi by wso2.
The class AbstractRecordTable, method updateOrAdd.
@Override
public void updateOrAdd(ComplexEventChunk<StateEvent> updateOrAddingEventChunk, CompiledCondition compiledCondition,
                        CompiledUpdateSet compiledUpdateSet, AddingStreamEventExtractor addingStreamEventExtractor)
        throws ConnectionUnavailableException {
    RecordStoreCompiledCondition recordStoreCompiledCondition = (RecordStoreCompiledCondition) compiledCondition;
    RecordTableCompiledUpdateSet recordTableCompiledUpdateSet = (RecordTableCompiledUpdateSet) compiledUpdateSet;
    List<Map<String, Object>> updateConditionParameterMaps = new ArrayList<>();
    List<Map<String, Object>> updateSetParameterMaps = new ArrayList<>();
    List<Object[]> addingRecords = new ArrayList<>();
    updateOrAddingEventChunk.reset();
    long timestamp = 0L;
    while (updateOrAddingEventChunk.hasNext()) {
        StateEvent stateEvent = updateOrAddingEventChunk.next();
        // Evaluate the condition's variable executors against this event to
        // build the parameter map for the update condition.
        Map<String, Object> variableMap = new HashMap<>();
        for (Map.Entry<String, ExpressionExecutor> entry :
                recordStoreCompiledCondition.variableExpressionExecutorMap.entrySet()) {
            variableMap.put(entry.getKey(), entry.getValue().execute(stateEvent));
        }
        updateConditionParameterMaps.add(variableMap);
        // Likewise for the SET-clause executors.
        Map<String, Object> variableMapForUpdateSet = new HashMap<>();
        for (Map.Entry<String, ExpressionExecutor> entry :
                recordTableCompiledUpdateSet.getExpressionExecutorMap().entrySet()) {
            variableMapForUpdateSet.put(entry.getKey(), entry.getValue().execute(stateEvent));
        }
        updateSetParameterMaps.add(variableMapForUpdateSet);
        // Keep the raw record so it can be inserted if the update matches nothing.
        addingRecords.add(stateEvent.getStreamEvent(0).getOutputData());
        timestamp = stateEvent.getTimestamp();
    }
    if (recordTableHandler != null) {
        recordTableHandler.updateOrAdd(timestamp, recordStoreCompiledCondition.compiledCondition,
                updateConditionParameterMaps, recordTableCompiledUpdateSet.getUpdateSetMap(),
                updateSetParameterMaps, addingRecords);
    } else {
        updateOrAdd(recordStoreCompiledCondition.compiledCondition, updateConditionParameterMaps,
                recordTableCompiledUpdateSet.getUpdateSetMap(), updateSetParameterMaps, addingRecords);
    }
}
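The heart of updateOrAdd is the per-event evaluation loop: every ExpressionExecutor in a named map is run against the current StateEvent, yielding one parameter map per event. A self-contained sketch of just that step, with a hypothetical Executor stand-in for ExpressionExecutor:

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

// Hypothetical stand-in for org.wso2.siddhi.core.executor.ExpressionExecutor.
interface Executor<E> {
    Object execute(E event);
}

final class ParameterMaps {
    // For each event, evaluate every named executor and collect the results:
    // one parameter map per event, the same shape updateOrAdd hands to the
    // record store (or the RecordTableHandler).
    static <E> List<Map<String, Object>> build(Iterable<E> events, Map<String, Executor<E>> executors) {
        List<Map<String, Object>> maps = new ArrayList<>();
        for (E event : events) {
            Map<String, Object> variableMap = new HashMap<>();
            for (Map.Entry<String, Executor<E>> entry : executors.entrySet()) {
                variableMap.put(entry.getKey(), entry.getValue().execute(event));
            }
            maps.add(variableMap);
        }
        return maps;
    }
}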
Use of org.wso2.siddhi.core.executor.ExpressionExecutor in project siddhi by wso2.
The class AbstractRecordTable, method delete.
@Override
public void delete(ComplexEventChunk<StateEvent> deletingEventChunk, CompiledCondition compiledCondition)
        throws ConnectionUnavailableException {
    RecordStoreCompiledCondition recordStoreCompiledCondition = (RecordStoreCompiledCondition) compiledCondition;
    List<Map<String, Object>> deleteConditionParameterMaps = new ArrayList<>();
    deletingEventChunk.reset();
    long timestamp = 0L;
    while (deletingEventChunk.hasNext()) {
        StateEvent stateEvent = deletingEventChunk.next();
        // One parameter map per deleting event, built from the condition's executors.
        Map<String, Object> variableMap = new HashMap<>();
        for (Map.Entry<String, ExpressionExecutor> entry :
                recordStoreCompiledCondition.variableExpressionExecutorMap.entrySet()) {
            variableMap.put(entry.getKey(), entry.getValue().execute(stateEvent));
        }
        deleteConditionParameterMaps.add(variableMap);
        timestamp = stateEvent.getTimestamp();
    }
    if (recordTableHandler != null) {
        recordTableHandler.delete(timestamp, deleteConditionParameterMaps,
                recordStoreCompiledCondition.compiledCondition);
    } else {
        delete(deleteConditionParameterMaps, recordStoreCompiledCondition.compiledCondition);
    }
}
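delete follows the same dispatch shape as updateOrAdd: when a RecordTableHandler is registered, the call is routed through it together with the batch timestamp, so the operation can be intercepted or instrumented; otherwise the store-native method runs directly. A small sketch of that shape; Handler and Store are hypothetical, and the handler receives the store explicitly only to keep the example self-contained:

import java.util.List;
import java.util.Map;

// Hypothetical interfaces mirroring the handler-or-direct dispatch in delete().
interface Store {
    void delete(List<Map<String, Object>> conditionParams, Object compiledCondition);
}

interface Handler {
    // Receives the batch timestamp so instrumentation can order operations.
    void delete(long timestamp, List<Map<String, Object>> conditionParams,
                Object compiledCondition, Store store);
}

final class DeleteDispatch {
    static void run(Handler handler, Store store, long timestamp,
                    List<Map<String, Object>> params, Object condition) {
        if (handler != null) {
            handler.delete(timestamp, params, condition, store); // wrapped call
        } else {
            store.delete(params, condition);                     // direct call
        }
    }
}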
Use of org.wso2.siddhi.core.executor.ExpressionExecutor in project siddhi by wso2.
The class CollectionOperator, method tryUpdate.
@Override
public ComplexEventChunk<StreamEvent> tryUpdate(ComplexEventChunk<StateEvent> updatingOrAddingEventChunk,
                                                Object storeEvents, InMemoryCompiledUpdateSet compiledUpdateSet,
                                                AddingStreamEventExtractor addingStreamEventExtractor) {
    updatingOrAddingEventChunk.reset();
    ComplexEventChunk<StreamEvent> failedEventChunk =
            new ComplexEventChunk<StreamEvent>(updatingOrAddingEventChunk.isBatch());
    while (updatingOrAddingEventChunk.hasNext()) {
        StateEvent updateOrAddingEvent = updatingOrAddingEventChunk.next();
        try {
            boolean updated = false;
            if (((Collection<StreamEvent>) storeEvents).size() > 0) {
                for (StreamEvent storeEvent : (Collection<StreamEvent>) storeEvents) {
                    // Pair the incoming event with the candidate store event so the
                    // compiled condition can see both sides.
                    updateOrAddingEvent.setEvent(storeEventPosition, storeEvent);
                    if ((Boolean) expressionExecutor.execute(updateOrAddingEvent)) {
                        // Apply every SET-clause executor to the matched store event.
                        for (Map.Entry<Integer, ExpressionExecutor> entry :
                                compiledUpdateSet.getExpressionExecutorMap().entrySet()) {
                            storeEvent.setOutputData(entry.getValue().execute(updateOrAddingEvent), entry.getKey());
                        }
                        updated = true;
                    }
                }
            }
            if (!updated) {
                // No store event matched: hand the event back so it can be added instead.
                failedEventChunk.add(addingStreamEventExtractor.getAddingStreamEvent(updateOrAddingEvent));
            }
        } finally {
            // Always detach the store event so it cannot leak into later iterations.
            updateOrAddingEvent.setEvent(storeEventPosition, null);
        }
    }
    return failedEventChunk;
}
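The control flow worth noting in tryUpdate: each incoming event is temporarily joined to every candidate store row so a single boolean executor can evaluate both sides; matching rows get the SET executors applied, and events that match nothing are collected for insertion, while the try/finally guarantees the pairing is undone even on an exception. A compact generic sketch of that loop (all names hypothetical, condition and update application as plain functional interfaces):

import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.function.BiConsumer;
import java.util.function.BiPredicate;

final class TryUpdate {
    // Returns the events that matched no store row; the caller inserts them.
    static <E, R> List<E> run(Iterable<E> incoming, Collection<R> storeRows,
                              BiPredicate<E, R> condition, BiConsumer<E, R> applyUpdateSet) {
        List<E> failed = new ArrayList<>();
        for (E event : incoming) {
            boolean updated = false;
            for (R row : storeRows) {
                if (condition.test(event, row)) {      // compiled condition over (event, row)
                    applyUpdateSet.accept(event, row); // SET-clause executors mutate the row
                    updated = true;
                }
            }
            if (!updated) {
                failed.add(event); // no match: fall through to "add"
            }
        }
        return failed;
    }
}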
Use of org.wso2.siddhi.core.executor.ExpressionExecutor in project siddhi by wso2.
The class IncrementalAggregateCompileCondition, method find.
public StreamEvent find(StateEvent matchingEvent, AggregationDefinition aggregationDefinition,
                        Map<TimePeriod.Duration, IncrementalExecutor> incrementalExecutorMap,
                        Map<TimePeriod.Duration, Table> aggregationTables,
                        List<TimePeriod.Duration> incrementalDurations,
                        List<ExpressionExecutor> baseExecutors, ExpressionExecutor timestampExecutor,
                        List<ExpressionExecutor> outputExpressionExecutors, SiddhiAppContext siddhiAppContext) {
    ComplexEventChunk<StreamEvent> complexEventChunkToHoldWithinMatches = new ComplexEventChunk<>(true);
    // Retrieve the "per" value
    String perValueAsString = perExpressionExecutor.execute(matchingEvent).toString();
    TimePeriod.Duration perValue = TimePeriod.Duration.valueOf(perValueAsString.toUpperCase());
    if (!incrementalExecutorMap.containsKey(perValue)) {
        throw new SiddhiAppRuntimeException("The aggregate values for " + perValue.toString()
                + " granularity cannot be provided since aggregation definition "
                + aggregationDefinition.getId() + " does not contain " + perValue.toString() + " duration");
    }
    Table tableForPerDuration = aggregationTables.get(perValue);
    Long[] startTimeEndTime = (Long[]) startTimeEndTimeExpressionExecutor.execute(matchingEvent);
    if (startTimeEndTime == null) {
        throw new SiddhiAppRuntimeException("Start and end times for within duration cannot be retrieved");
    }
    complexEventPopulater.populateComplexEvent(matchingEvent.getStreamEvent(0), startTimeEndTime);
    // Get all the aggregates within the given duration, from the table corresponding to the "per" duration
    StreamEvent withinMatchFromPersistedEvents =
            tableForPerDuration.find(matchingEvent, withinTableCompiledConditions.get(perValue));
    complexEventChunkToHoldWithinMatches.add(withinMatchFromPersistedEvents);
    // Optimization step.
    // Get the newest and oldest event timestamps from in-memory data, and check whether the in-memory
    // range falls entirely outside the given time range. If so, there is no need to iterate
    // through the in-memory data.
    long oldestInMemoryEventTimestamp =
            getOldestInMemoryEventTimestamp(incrementalExecutorMap, incrementalDurations, perValue);
    long newestInMemoryEventTimestamp =
            getNewestInMemoryEventTimestamp(incrementalExecutorMap, incrementalDurations, perValue);
    if (requiresAggregatingInMemoryData(newestInMemoryEventTimestamp, oldestInMemoryEventTimestamp,
            startTimeEndTime)) {
        IncrementalDataAggregator incrementalDataAggregator = new IncrementalDataAggregator(incrementalDurations,
                perValue, baseExecutors, timestampExecutor, tableMetaStreamEvent, siddhiAppContext);
        // Aggregate in-memory data and create an event chunk out of it
        ComplexEventChunk<StreamEvent> aggregatedInMemoryEventChunk =
                incrementalDataAggregator.aggregateInMemoryData(incrementalExecutorMap);
        // Get the in-memory aggregate data that is within the given duration
        StreamEvent withinMatchFromInMemory = ((Operator) inMemoryStoreCompileCondition).find(matchingEvent,
                aggregatedInMemoryEventChunk, tableEventCloner);
        complexEventChunkToHoldWithinMatches.add(withinMatchFromInMemory);
    }
    // Get the final event chunk from the data within the given duration. This event chunk contains the values
    // in the select clause of the aggregation definition.
    ComplexEventChunk<StreamEvent> aggregateSelectionComplexEventChunk = createAggregateSelectionEventChunk(
            complexEventChunkToHoldWithinMatches, outputExpressionExecutors);
    // Execute the on compile condition
    return ((Operator) onCompiledCondition).find(matchingEvent, aggregateSelectionComplexEventChunk,
            aggregateEventCloner);
}
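The in-memory aggregation step is skipped unless the timestamps still held in memory can overlap the queried window. A plausible sketch of that guard, matching the argument order used above; this is an assumption about the shape of requiresAggregatingInMemoryData, whose actual edge-case handling in Siddhi may differ:

// Hypothetical guard: aggregate in-memory data only if its timestamp range
// [oldest, newest] can intersect the queried range [start, end).
final class InMemoryGuard {
    static boolean requiresAggregatingInMemoryData(long newestInMemoryEventTimestamp,
                                                   long oldestInMemoryEventTimestamp,
                                                   Long[] startTimeEndTime) {
        if (newestInMemoryEventTimestamp == -1 || oldestInMemoryEventTimestamp == -1) {
            return false; // nothing buffered in memory yet
        }
        long start = startTimeEndTime[0];
        long end = startTimeEndTime[1];
        return newestInMemoryEventTimestamp >= start && oldestInMemoryEventTimestamp < end;
    }
}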