Use of io.siddhi.core.event.stream.StreamEvent in project siddhi by wso2.
The class UpdateOrInsertReducer, method reduceEventsForInsert.
public List<Object[]> reduceEventsForInsert(List<Object[]> failedRecords,
                                            Map<String, ExpressionExecutor> inMemorySetExecutors) {
    ComplexEventChunk<StreamEvent> toInsertEventChunk = new ComplexEventChunk<>();
    StateEvent joinEvent = stateEventFactory.newInstance();
    for (Object[] data : failedRecords) {
        // Wrap each failed record in a StreamEvent and join it against the
        // events already queued for insertion.
        StreamEvent failedEvent = streamEventFactory.newInstance();
        failedEvent.setOutputData(data);
        joinEvent.setEvent(streamEventIndex, failedEvent);
        boolean updated = false;
        toInsertEventChunk.reset();
        while (toInsertEventChunk.hasNext()) {
            StreamEvent toInsertEvent = toInsertEventChunk.next();
            joinEvent.setEvent(storeEventIndex, toInsertEvent);
            if ((Boolean) inMemoryCompiledCondition.execute(joinEvent)) {
                // A queued record already matches: apply the SET executors to it
                // instead of inserting a duplicate.
                for (Map.Entry<String, ExpressionExecutor> entry : inMemorySetExecutors.entrySet()) {
                    toInsertEvent.setOutputData(entry.getValue().execute(failedEvent),
                            attributeMap.get(entry.getKey()));
                }
                updated = true;
            }
        }
        if (!updated) {
            toInsertEventChunk.add(failedEvent);
        }
    }
    List<Object[]> toInsertRecords = new LinkedList<>();
    toInsertEventChunk.reset();
    while (toInsertEventChunk.hasNext()) {
        StreamEvent streamEvent = toInsertEventChunk.next();
        toInsertRecords.add(streamEvent.getOutputData());
    }
    return toInsertRecords;
}
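The event factories and the compiled condition above are Siddhi internals. As a rough standalone sketch of the same reduce idea, a plain BiPredicate stands in for inMemoryCompiledCondition; all names here are hypothetical:

import java.util.ArrayList;
import java.util.List;
import java.util.function.BiPredicate;

public class UpdateOrInsertReduceSketch {

    // Reduce a batch of failed update-or-insert records so that a later record
    // matching an earlier queued one updates it instead of becoming a duplicate insert.
    static List<Object[]> reduceEventsForInsert(List<Object[]> failedRecords,
                                                BiPredicate<Object[], Object[]> condition) {
        List<Object[]> toInsert = new ArrayList<>();
        for (Object[] record : failedRecords) {
            boolean updated = false;
            for (int i = 0; i < toInsert.size(); i++) {
                if (condition.test(record, toInsert.get(i))) {
                    // The real reducer applies per-attribute SET executors here;
                    // replacing the whole record keeps this sketch short.
                    toInsert.set(i, record);
                    updated = true;
                }
            }
            if (!updated) {
                toInsert.add(record);
            }
        }
        return toInsert;
    }

    public static void main(String[] args) {
        List<Object[]> failed = new ArrayList<>();
        failed.add(new Object[]{"IBM", 100.0});
        failed.add(new Object[]{"WSO2", 50.0});
        failed.add(new Object[]{"IBM", 110.0}); // matches the queued IBM record
        reduceEventsForInsert(failed, (a, b) -> a[0].equals(b[0]))
                .forEach(r -> System.out.println(r[0] + " -> " + r[1]));
        // Prints: IBM -> 110.0, WSO2 -> 50.0
    }
}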
Use of io.siddhi.core.event.stream.StreamEvent in project siddhi by wso2.
The class IncrementalAggregateCompileCondition, method find.
public StreamEvent find(StateEvent matchingEvent,
                        Map<TimePeriod.Duration, Executor> incrementalExecutorMap,
                        Map<TimePeriod.Duration, List<ExpressionExecutor>> aggregateProcessingExecutorsMap,
                        Map<TimePeriod.Duration, GroupByKeyGenerator> groupByKeyGeneratorMap,
                        ExpressionExecutor shouldUpdateTimestamp, String timeZone) {
    ComplexEventChunk<StreamEvent> complexEventChunkToHoldWithinMatches = new ComplexEventChunk<>();
    // Create the matching event if this is an on-demand query
    int additionTimestampAttributesSize = this.timestampFilterExecutors.size() + 2;
    Long[] timestampFilters = new Long[additionTimestampAttributesSize];
    if (matchingEvent.getStreamEvent(0) == null) {
        StreamEvent streamEvent = new StreamEvent(0, additionTimestampAttributesSize, 0);
        matchingEvent.addEvent(0, streamEvent);
    }
    Long[] startTimeEndTime = (Long[]) startTimeEndTimeExpressionExecutor.execute(matchingEvent);
    if (startTimeEndTime == null) {
        throw new SiddhiAppRuntimeException("Start and end times for within duration cannot be retrieved");
    }
    timestampFilters[0] = startTimeEndTime[0];
    timestampFilters[1] = startTimeEndTime[1];
    if (isDistributed) {
        for (int i = 0; i < additionTimestampAttributesSize - 2; i++) {
            timestampFilters[i + 2] = ((Long) this.timestampFilterExecutors.get(i).execute(matchingEvent));
        }
    }
    complexEventPopulater.populateComplexEvent(matchingEvent.getStreamEvent(0), timestampFilters);
    // Get all the aggregates within the given duration, from the table corresponding to the "per" duration
    // Retrieve the per value
    String perValueAsString = perExpressionExecutor.execute(matchingEvent).toString();
    TimePeriod.Duration perValue;
    try {
        // Per time function verification
        perValue = normalizeDuration(perValueAsString);
    } catch (SiddhiAppValidationException e) {
        throw new SiddhiAppRuntimeException("Aggregation Query's per value is expected to be of a valid "
                + "time function of the following " + TimePeriod.Duration.SECONDS + ", "
                + TimePeriod.Duration.MINUTES + ", " + TimePeriod.Duration.HOURS + ", "
                + TimePeriod.Duration.DAYS + ", " + TimePeriod.Duration.MONTHS + ", "
                + TimePeriod.Duration.YEARS + ".");
    }
    if (!incrementalExecutorMap.keySet().contains(perValue)) {
        throw new SiddhiAppRuntimeException("The aggregate values for " + perValue.toString()
                + " granularity cannot be provided since aggregation definition " + aggregationName
                + " does not contain " + perValue.toString() + " duration");
    }
    Table tableForPerDuration = aggregationTableMap.get(perValue);
    StreamEvent withinMatchFromPersistedEvents;
    if (isOptimisedLookup) {
        withinMatchFromPersistedEvents = query(tableForPerDuration, matchingEvent,
                withinTableCompiledConditions.get(perValue), withinTableCompiledSelection.get(perValue),
                tableMetaStreamEvent.getLastInputDefinition().getAttributeList().toArray(new Attribute[0]));
    } else {
        withinMatchFromPersistedEvents = tableForPerDuration.find(matchingEvent,
                withinTableCompiledConditions.get(perValue));
    }
    complexEventChunkToHoldWithinMatches.add(withinMatchFromPersistedEvents);
    // Optimization step.
    long oldestInMemoryEventTimestamp = getOldestInMemoryEventTimestamp(incrementalExecutorMap,
            activeIncrementalDurations, perValue);
    // If processing on external time, the in-memory data also needs to be queried
    if (isProcessingOnExternalTime
            || requiresAggregatingInMemoryData(oldestInMemoryEventTimestamp, startTimeEndTime)) {
        if (isDistributed) {
            int perValueIndex = this.activeIncrementalDurations.indexOf(perValue);
            if (perValueIndex != 0) {
                Map<TimePeriod.Duration, CompiledCondition> lowerGranularityLookups = new HashMap<>();
                for (int i = 0; i < perValueIndex; i++) {
                    TimePeriod.Duration key = this.activeIncrementalDurations.get(i);
                    lowerGranularityLookups.put(key, withinTableLowerGranularityCompileCondition.get(key));
                }
                List<StreamEvent> eventChunks = lowerGranularityLookups.entrySet().stream().map((entry) -> {
                    Table table = aggregationTableMap.get(entry.getKey());
                    if (isOptimisedLookup) {
                        return query(table, matchingEvent, entry.getValue(),
                                withinTableCompiledSelection.get(entry.getKey()),
                                tableMetaStreamEvent.getLastInputDefinition().getAttributeList()
                                        .toArray(new Attribute[0]));
                    } else {
                        return table.find(matchingEvent, entry.getValue());
                    }
                }).filter(Objects::nonNull).collect(Collectors.toList());
                eventChunks.forEach(complexEventChunkToHoldWithinMatches::add);
            }
        } else {
            TimePeriod.Duration rootDuration = activeIncrementalDurations.get(0);
            IncrementalDataAggregator incrementalDataAggregator = new IncrementalDataAggregator(
                    activeIncrementalDurations, perValue, oldestInMemoryEventTimestamp,
                    aggregateProcessingExecutorsMap.get(rootDuration), shouldUpdateTimestamp,
                    groupByKeyGeneratorMap.get(rootDuration) != null, tableMetaStreamEvent, timeZone);
            ComplexEventChunk<StreamEvent> aggregatedInMemoryEventChunk;
            // Aggregate in-memory data and create an event chunk out of it
            aggregatedInMemoryEventChunk = incrementalDataAggregator.aggregateInMemoryData(incrementalExecutorMap);
            // Get the in-memory aggregate data which is within the given duration
            StreamEvent withinMatchFromInMemory = ((Operator) inMemoryStoreCompileCondition).find(matchingEvent,
                    aggregatedInMemoryEventChunk, tableEventCloner);
            complexEventChunkToHoldWithinMatches.add(withinMatchFromInMemory);
        }
    }
    ComplexEventChunk<StreamEvent> processedEvents;
    if (isDistributed || isProcessingOnExternalTime) {
        List<ExpressionExecutor> expressionExecutors = aggregateProcessingExecutorsMap.get(perValue);
        GroupByKeyGenerator groupByKeyGenerator = groupByKeyGeneratorMap.get(perValue);
        OutOfOrderEventsDataAggregator outOfOrderEventsDataAggregator = new OutOfOrderEventsDataAggregator(
                expressionExecutors, shouldUpdateTimestamp, groupByKeyGenerator, tableMetaStreamEvent);
        processedEvents = outOfOrderEventsDataAggregator.aggregateData(complexEventChunkToHoldWithinMatches);
    } else {
        processedEvents = complexEventChunkToHoldWithinMatches;
    }
    // Get the final event chunk from the data which is within the given duration. This event chunk
    // contains the values in the select clause of the aggregation definition.
    ComplexEventChunk<StreamEvent> aggregateSelectionComplexEventChunk = createAggregateSelectionEventChunk(
            processedEvents, outputExpressionExecutors);
    // Execute the on compile condition
    return ((Operator) onCompiledCondition).find(matchingEvent, aggregateSelectionComplexEventChunk,
            aggregateEventCloner);
}
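In practice this find method is driven by a within/per lookup on an aggregation, typically issued as an on-demand query. A minimal sketch, assuming the Siddhi 5 API and made-up stream, aggregation, and attribute names; the within range and per duration here are what feed the startTimeEndTime and perValue handling above:

import io.siddhi.core.SiddhiAppRuntime;
import io.siddhi.core.SiddhiManager;
import io.siddhi.core.event.Event;

public class AggregationLookupSketch {
    public static void main(String[] args) throws InterruptedException {
        // Hypothetical app: names and schema are made up for illustration.
        String app = "define stream TradeStream (symbol string, price double, volume long); "
                + "define aggregation TradeAggregation "
                + "from TradeStream "
                + "select symbol, avg(price) as avgPrice, sum(volume) as total "
                + "group by symbol "
                + "aggregate every sec ... year;";
        SiddhiManager manager = new SiddhiManager();
        SiddhiAppRuntime runtime = manager.createSiddhiAppRuntime(app);
        runtime.start();
        runtime.getInputHandler("TradeStream").send(new Object[]{"IBM", 100.0, 10L});
        // The within/per clause below is what eventually reaches
        // IncrementalAggregateCompileCondition#find on the lookup path.
        Event[] events = runtime.query("from TradeAggregation "
                + "within \"2014-01-01 00:00:00 +05:30\", \"2034-01-01 00:00:00 +05:30\" "
                + "per \"days\" "
                + "select symbol, avgPrice, total");
        if (events != null) {
            for (Event event : events) {
                System.out.println(java.util.Arrays.toString(event.getData()));
            }
        }
        manager.shutdown();
    }
}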
Use of io.siddhi.core.event.stream.StreamEvent in project siddhi by wso2.
The class SnapshotableEventQueueOperator, method contains.
@Override
public boolean contains(StateEvent matchingEvent, Object storeEvents) {
    SnapshotableStreamEventQueue storeEventQueue = (SnapshotableStreamEventQueue) storeEvents;
    try {
        storeEventQueue.reset();
        while (storeEventQueue.hasNext()) {
            StreamEvent storeEvent = storeEventQueue.next();
            matchingEvent.setEvent(storeEventPosition, storeEvent);
            if ((Boolean) expressionExecutor.execute(matchingEvent)) {
                return true;
            }
        }
        return false;
    } finally {
        // Always detach the store event so the matching event can be reused.
        matchingEvent.setEvent(storeEventPosition, null);
    }
}
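The scan-and-restore shape here is worth noting: the matching event is a reusable context whose store-event slot is borrowed for each candidate row, and the finally block clears it even on the early return. A hypothetical standalone sketch of the same pattern, with made-up names:

import java.util.Arrays;
import java.util.List;
import java.util.function.Predicate;

public class ContainsSketch {
    static final class MatchingContext {
        Object[] candidate; // slot that temporarily points at the row under test
    }

    static boolean contains(MatchingContext ctx, List<Object[]> store,
                            Predicate<MatchingContext> condition) {
        try {
            for (Object[] row : store) {
                ctx.candidate = row;  // attach the store row, like setEvent(...)
                if (condition.test(ctx)) {
                    return true;      // the finally block still clears the slot
                }
            }
            return false;
        } finally {
            ctx.candidate = null;     // like setEvent(storeEventPosition, null)
        }
    }

    public static void main(String[] args) {
        List<Object[]> store = Arrays.asList(new Object[]{"IBM", 100.0}, new Object[]{"WSO2", 50.0});
        MatchingContext ctx = new MatchingContext();
        System.out.println(contains(ctx, store, c -> "WSO2".equals(c.candidate[0]))); // true
        System.out.println(ctx.candidate); // null: the slot was restored
    }
}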
Use of io.siddhi.core.event.stream.StreamEvent in project siddhi by wso2.
The class Scheduler, method sendTimerEvents.
/**
 * Go through the timestamps stored in the toNotifyQueue and send TIMER events for the expired timestamps.
 *
 * @param state current state
 */
private void sendTimerEvents(SchedulerState state) {
    Long toNotifyTime = state.toNotifyQueue.peek();
    long currentTime = siddhiQueryContext.getSiddhiAppContext().getTimestampGenerator().currentTime();
    while (toNotifyTime != null && toNotifyTime - currentTime <= 0) {
        // The head of the queue is due: emit one TIMER event for it.
        state.toNotifyQueue.poll();
        StreamEvent timerEvent = streamEventFactory.newInstance();
        timerEvent.setType(StreamEvent.Type.TIMER);
        timerEvent.setTimestamp(toNotifyTime);
        if (lockWrapper != null) {
            lockWrapper.lock();
        }
        threadBarrier.enter();
        try {
            ComplexEventChunk<StreamEvent> streamEventChunk = new ComplexEventChunk<>();
            streamEventChunk.add(timerEvent);
            if (Level.BASIC.compareTo(siddhiQueryContext.getSiddhiAppContext().getRootMetricsLevel()) <= 0
                    && latencyTracker != null) {
                try {
                    latencyTracker.markIn();
                    singleThreadEntryValve.process(streamEventChunk);
                } finally {
                    latencyTracker.markOut();
                }
            } else {
                singleThreadEntryValve.process(streamEventChunk);
            }
        } catch (Throwable t) {
            log.error("Error while sending timer events, " + t.getMessage(), t);
        } finally {
            if (lockWrapper != null) {
                lockWrapper.unlock();
            }
            threadBarrier.exit();
        }
        toNotifyTime = state.toNotifyQueue.peek();
        currentTime = siddhiQueryContext.getSiddhiAppContext().getTimestampGenerator().currentTime();
    }
}
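Stripped of the locking, thread barrier, and latency tracking, the core of this method is a drain loop over an ordered queue of due timestamps. A hypothetical standalone sketch (names are made up; in Siddhi each tick becomes a StreamEvent of type TIMER pushed into the query):

import java.util.PriorityQueue;
import java.util.Queue;

public class TimerDrainSketch {
    // Ordered queue of future notification timestamps, smallest first.
    private final Queue<Long> toNotifyQueue = new PriorityQueue<>();

    public void schedule(long timestamp) {
        toNotifyQueue.add(timestamp);
    }

    // Pop every timestamp that is already due and emit one timer tick per entry.
    public void sendTimerEvents() {
        Long toNotifyTime = toNotifyQueue.peek();
        long now = System.currentTimeMillis();
        while (toNotifyTime != null && toNotifyTime - now <= 0) {
            toNotifyQueue.poll();
            onTimer(toNotifyTime);          // stand-in for processing the TIMER event
            toNotifyTime = toNotifyQueue.peek();
            now = System.currentTimeMillis();
        }
    }

    protected void onTimer(long timestamp) {
        System.out.println("TIMER @ " + timestamp);
    }

    public static void main(String[] args) {
        TimerDrainSketch s = new TimerDrainSketch();
        s.schedule(System.currentTimeMillis() - 10);     // already expired
        s.schedule(System.currentTimeMillis() + 60_000); // stays queued
        s.sendTimerEvents(); // prints one TIMER line
    }
}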
Use of io.siddhi.core.event.stream.StreamEvent in project siddhi by wso2.
The class EventChunkOperator, method update.
@Override
public void update(ComplexEventChunk<StateEvent> updatingEventChunk, Object storeEvents,
                   InMemoryCompiledUpdateSet compiledUpdateSet) {
    ComplexEventChunk<StreamEvent> storeEventChunk = (ComplexEventChunk<StreamEvent>) storeEvents;
    updatingEventChunk.reset();
    while (updatingEventChunk.hasNext()) {
        StateEvent updatingEvent = updatingEventChunk.next();
        try {
            storeEventChunk.reset();
            while (storeEventChunk.hasNext()) {
                StreamEvent storeEvent = storeEventChunk.next();
                updatingEvent.setEvent(storeEventPosition, storeEvent);
                if ((Boolean) expressionExecutor.execute(updatingEvent)) {
                    // Condition matched: apply each compiled SET assignment to the store event.
                    for (Map.Entry<Integer, ExpressionExecutor> entry
                            : compiledUpdateSet.getExpressionExecutorMap().entrySet()) {
                        storeEvent.setOutputData(entry.getValue().execute(updatingEvent), entry.getKey());
                    }
                }
            }
        } finally {
            updatingEvent.setEvent(storeEventPosition, null);
        }
    }
}
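This operator is what runs behind an update statement against an in-memory table. A minimal runnable sketch of the SiddhiQL that exercises it, assuming the Siddhi 5 API; the stream, table, and attribute names are made up for illustration:

import io.siddhi.core.SiddhiAppRuntime;
import io.siddhi.core.SiddhiManager;

public class TableUpdateSketch {
    public static void main(String[] args) throws InterruptedException {
        // Each event on UpdateStream updates the matching StockTable row; the
        // "set" clause compiles into the SET executors applied above.
        String app = "define stream AddStream (symbol string, price double); "
                + "define stream UpdateStream (symbol string, price double); "
                + "define table StockTable (symbol string, price double); "
                + "from AddStream insert into StockTable; "
                + "from UpdateStream "
                + "select symbol, price "
                + "update StockTable "
                + "    set StockTable.price = price "
                + "    on StockTable.symbol == symbol;";
        SiddhiManager manager = new SiddhiManager();
        SiddhiAppRuntime runtime = manager.createSiddhiAppRuntime(app);
        runtime.start();
        runtime.getInputHandler("AddStream").send(new Object[]{"IBM", 100.0});
        runtime.getInputHandler("UpdateStream").send(new Object[]{"IBM", 105.5}); // updates the IBM row
        manager.shutdown();
    }
}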