Use of org.wso2.siddhi.core.event.ComplexEventChunk in project siddhi by wso2.
Class IncrementalAggregationProcessor, method process().
@Override
public void process(ComplexEventChunk complexEventChunk) {
    ComplexEventChunk<StreamEvent> streamEventChunk = new ComplexEventChunk<>(complexEventChunk.isBatch());
    try {
        int noOfEvents = 0;
        if (latencyTrackerInsert != null && siddhiAppContext.isStatsEnabled()) {
            latencyTrackerInsert.markIn();
        }
        while (complexEventChunk.hasNext()) {
            ComplexEvent complexEvent = complexEventChunk.next();
            StreamEvent borrowedEvent = streamEventPool.borrowEvent();
            for (int i = 0; i < incomingExpressionExecutors.size(); i++) {
                ExpressionExecutor expressionExecutor = incomingExpressionExecutors.get(i);
                Object outputData = expressionExecutor.execute(complexEvent);
                if (expressionExecutor instanceof IncrementalUnixTimeFunctionExecutor && outputData == null) {
                    throw new SiddhiAppRuntimeException("Cannot retrieve the timestamp of event");
                }
                borrowedEvent.setOutputData(outputData, i);
            }
            streamEventChunk.add(borrowedEvent);
            noOfEvents++;
        }
        incrementalExecutor.execute(streamEventChunk);
        if (throughputTrackerInsert != null && siddhiAppContext.isStatsEnabled()) {
            throughputTrackerInsert.eventsIn(noOfEvents);
        }
    } finally {
        if (latencyTrackerInsert != null && siddhiAppContext.isStatsEnabled()) {
            latencyTrackerInsert.markOut();
        }
    }
}
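To see the ComplexEventChunk handling above in isolation, here is a minimal standalone sketch that builds a chunk by hand and traverses it the same way process() does. The class name, the two-attribute layout, and the values are invented for illustration; only ComplexEventChunk, StreamEvent, and the constructor/setter calls already visible above are assumed from the Siddhi 4.x API (org.wso2.siddhi packages).

import org.wso2.siddhi.core.event.ComplexEventChunk;
import org.wso2.siddhi.core.event.stream.StreamEvent;

public class ChunkBuildingSketch {
    public static void main(String[] args) {
        // Non-batch chunk, mirroring new ComplexEventChunk<>(complexEventChunk.isBatch()) above.
        ComplexEventChunk<StreamEvent> chunk = new ComplexEventChunk<>(false);
        for (int i = 0; i < 3; i++) {
            // 0 before-window attributes, 0 on-after-window attributes, 2 output attributes (hypothetical layout).
            StreamEvent event = new StreamEvent(0, 0, 2);
            event.setTimestamp(System.currentTimeMillis());
            event.setOutputData(System.currentTimeMillis(), 0); // e.g. an AGG_TIMESTAMP-style value
            event.setOutputData((double) i, 1);                 // e.g. a value to be aggregated
            chunk.add(event);
        }
        // Traverse the chunk the same way process() walks its incoming chunk.
        chunk.reset();
        while (chunk.hasNext()) {
            StreamEvent event = chunk.next();
            System.out.println(event);
        }
    }
}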
Use of org.wso2.siddhi.core.event.ComplexEventChunk in project siddhi by wso2.
Class IncrementalExecutor, method sendTimerEvent().
private void sendTimerEvent(String timeZone) {
    if (getNextExecutor() != null) {
        StreamEvent timerEvent = streamEventPool.borrowEvent();
        timerEvent.setType(ComplexEvent.Type.TIMER);
        timerEvent.setTimestamp(
                IncrementalTimeConverterUtil.getPreviousStartTime(startTimeOfAggregates, this.duration, timeZone));
        ComplexEventChunk<StreamEvent> timerStreamEventChunk = new ComplexEventChunk<>(true);
        timerStreamEventChunk.add(timerEvent);
        next.execute(timerStreamEventChunk);
    }
}
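A hypothetical consumer of such a timer chunk could look like the sketch below: it drains the chunk with reset()/hasNext()/next() and branches on ComplexEvent.Type, which is how the execute() method shown next separates timer ticks from data events. The class and the println output are invented; the event API calls are the ones used above (Siddhi 4.x, org.wso2.siddhi packages assumed).

import org.wso2.siddhi.core.event.ComplexEvent;
import org.wso2.siddhi.core.event.ComplexEventChunk;
import org.wso2.siddhi.core.event.stream.StreamEvent;

public class TimerChunkConsumerSketch {
    // Hypothetical consumer: separates TIMER ticks from CURRENT data events in a chunk.
    static void consume(ComplexEventChunk<StreamEvent> chunk) {
        chunk.reset();
        while (chunk.hasNext()) {
            StreamEvent event = chunk.next();
            if (event.getType() == ComplexEvent.Type.TIMER) {
                System.out.println("timer tick at " + event.getTimestamp());
            } else if (event.getType() == ComplexEvent.Type.CURRENT) {
                System.out.println("data event: " + event);
            }
        }
    }

    public static void main(String[] args) {
        ComplexEventChunk<StreamEvent> timerChunk = new ComplexEventChunk<>(true); // batch chunk, as above
        StreamEvent timerEvent = new StreamEvent(0, 0, 0);
        timerEvent.setType(ComplexEvent.Type.TIMER);
        timerEvent.setTimestamp(System.currentTimeMillis());
        timerChunk.add(timerEvent);
        consume(timerChunk);
    }
}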
Use of org.wso2.siddhi.core.event.ComplexEventChunk in project siddhi by wso2.
Class IncrementalExecutor, method execute().
@Override
public void execute(ComplexEventChunk streamEventChunk) {
    LOG.debug("Event Chunk received by " + this.duration + " incremental executor: " + streamEventChunk.toString());
    streamEventChunk.reset();
    while (streamEventChunk.hasNext()) {
        StreamEvent streamEvent = (StreamEvent) streamEventChunk.next();
        streamEventChunk.remove();
        String timeZone = getTimeZone(streamEvent);
        long timestamp = getTimestamp(streamEvent, timeZone);
        startTimeOfAggregates = IncrementalTimeConverterUtil.getStartTimeOfAggregates(timestamp, duration, timeZone);
        if (isRootAndLoadedFromTable) {
            // The root executor's state was recreated from the table; events older than the next emit time
            // can arise when replaying data, so skip them to avoid aggregating them twice.
            if (timestamp < nextEmitTime) {
                continue;
            } else {
                isRootAndLoadedFromTable = false;
            }
        }
        if (bufferSize > 0 && isRoot) {
            try {
                mutex.acquire();
                dispatchBufferedAggregateEvents(startTimeOfAggregates);
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
                throw new SiddhiAppRuntimeException("Error when dispatching events from buffer", e);
            } finally {
                mutex.release();
            }
            if (streamEvent.getType() == ComplexEvent.Type.CURRENT) {
                if (!eventOlderThanBuffer) {
                    processAggregates(streamEvent);
                } else if (!ignoreEventsOlderThanBuffer) {
                    // Incoming event is older than buffer
                    startTimeOfAggregates = minTimestampInBuffer;
                    processAggregates(streamEvent);
                }
            }
        } else {
            if (timestamp >= nextEmitTime) {
                nextEmitTime = IncrementalTimeConverterUtil.getNextEmitTime(timestamp, duration, timeZone);
                dispatchAggregateEvents(startTimeOfAggregates);
                if (!isProcessingOnExternalTime) {
                    sendTimerEvent(timeZone);
                }
            }
            if (streamEvent.getType() == ComplexEvent.Type.CURRENT) {
                if (nextEmitTime == IncrementalTimeConverterUtil.getNextEmitTime(timestamp, duration, timeZone)) {
                    // This condition checks whether incoming event belongs to current processing event's time slot
                    processAggregates(streamEvent);
                } else if (!ignoreEventsOlderThanBuffer) {
                    // Incoming event is older than current processing event.
                    startTimeOfAggregates = minTimestampInBuffer;
                    processAggregates(streamEvent);
                }
            }
        }
    }
}
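To make the bucket and emit-time arithmetic concrete, consider second-level aggregation: an event is mapped to the start of its one-second bucket, and that bucket is flushed once a timestamp at or beyond nextEmitTime arrives. The sketch below hard-codes the arithmetic for the SECONDS case only; it is a simplified stand-in for IncrementalTimeConverterUtil, which additionally handles the other durations and time zones.

public class BucketArithmeticSketch {
    // Floor-to-bucket arithmetic for second-level aggregation (simplified stand-in for
    // IncrementalTimeConverterUtil; months and years need calendar-aware, time-zone-aware logic).
    static long startTimeOfAggregates(long timestamp) {
        return timestamp - timestamp % 1000;
    }

    static long nextEmitTime(long timestamp) {
        return startTimeOfAggregates(timestamp) + 1000;
    }

    public static void main(String[] args) {
        long t = 1496289950123L;
        System.out.println(startTimeOfAggregates(t)); // 1496289950000 -> bucket this event aggregates into
        System.out.println(nextEmitTime(t));          // 1496289951000 -> first timestamp that triggers a flush
    }
}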
Use of org.wso2.siddhi.core.event.ComplexEventChunk in project siddhi by wso2.
Class RecreateInMemoryData, method recreateInMemoryData().
public void recreateInMemoryData() {
    if (incrementalExecutorMap.get(incrementalDurations.get(0)).getNextEmitTime() != -1) {
        // The root executor already has an emit time set, which means the in-memory data has already been
        // created. Hence this method does not have to be executed.
        return;
    }
    Event[] events;
    Long latestEventTimestamp = null;
    // Get all events from table corresponding to max duration
    Table tableForMaxDuration = aggregationTables.get(incrementalDurations.get(incrementalDurations.size() - 1));
    StoreQuery storeQuery = StoreQuery.query()
            .from(InputStore.store(tableForMaxDuration.getTableDefinition().getId()))
            .select(Selector.selector().orderBy(Expression.variable("AGG_TIMESTAMP")));
    StoreQueryRuntime storeQueryRuntime = StoreQueryParser.parse(storeQuery, siddhiAppContext, tableMap, windowMap,
            aggregationMap);
    // Get latest event timestamp in tableForMaxDuration
    events = storeQueryRuntime.execute();
    if (events != null) {
        latestEventTimestamp = (Long) events[events.length - 1].getData(0);
    }
    for (int i = incrementalDurations.size() - 1; i > 0; i--) {
        TimePeriod.Duration recreateForDuration = incrementalDurations.get(i);
        IncrementalExecutor incrementalExecutor = incrementalExecutorMap.get(recreateForDuration);
        // Get the table previous to the duration for which we need to recreate (e.g. if we want to recreate
        // for minute duration, take the second table [provided that aggregation is done for seconds])
        Table recreateFromTable = aggregationTables.get(incrementalDurations.get(i - 1));
        storeQuery = StoreQuery.query()
                .from(InputStore.store(recreateFromTable.getTableDefinition().getId()))
                .select(Selector.selector().orderBy(Expression.variable("AGG_TIMESTAMP")));
        storeQueryRuntime = StoreQueryParser.parse(storeQuery, siddhiAppContext, tableMap, windowMap, aggregationMap);
        events = storeQueryRuntime.execute();
        if (events != null) {
            long referenceToNextLatestEvent = (Long) events[events.length - 1].getData(0);
            String timeZoneOfNextLatestEvent = events[events.length - 1].getData(1).toString();
            if (latestEventTimestamp != null) {
                List<Event> eventsNewerThanLatestEventOfRecreateForDuration = new ArrayList<>();
                for (Event event : events) {
                    // For each event taken from recreateFromTable, compute the time bucket it would belong to
                    // if it were aggregated for recreateForDuration. This identifies the events which must be
                    // processed in-memory for recreateForDuration.
                    long timeBucketForNextDuration = IncrementalTimeConverterUtil.getStartTimeOfAggregates(
                            (Long) event.getData(0), recreateForDuration, event.getData(1).toString());
                    if (timeBucketForNextDuration > latestEventTimestamp) {
                        eventsNewerThanLatestEventOfRecreateForDuration.add(event);
                    }
                }
                events = eventsNewerThanLatestEventOfRecreateForDuration.toArray(
                        new Event[eventsNewerThanLatestEventOfRecreateForDuration.size()]);
            }
            latestEventTimestamp = referenceToNextLatestEvent;
            ComplexEventChunk<StreamEvent> complexEventChunk = new ComplexEventChunk<>(false);
            for (Event event : events) {
                StreamEvent streamEvent = streamEventPool.borrowEvent();
                streamEvent.setOutputData(event.getData());
                complexEventChunk.add(streamEvent);
            }
            incrementalExecutor.execute(complexEventChunk);
            if (i == 1) {
                TimePeriod.Duration rootDuration = incrementalDurations.get(0);
                IncrementalExecutor rootIncrementalExecutor = incrementalExecutorMap.get(rootDuration);
                long emitTimeOfLatestEventInTable = IncrementalTimeConverterUtil.getNextEmitTime(
                        latestEventTimestamp, rootDuration, timeZoneOfNextLatestEvent);
                rootIncrementalExecutor.setValuesForInMemoryRecreateFromTable(true, emitTimeOfLatestEventInTable);
            }
        }
    }
}
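The central step above is the filter that keeps only rows from the finer-grained table whose time bucket, projected onto the coarser duration, is strictly newer than the latest row already stored for that duration. The sketch below reproduces that filter with plain timestamps and a floor-to-minute stand-in for IncrementalTimeConverterUtil.getStartTimeOfAggregates; the class and method names are invented for illustration.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class ReplayFilterSketch {
    // Stand-in for getStartTimeOfAggregates(timestamp, MINUTES, timeZone): floor to the start of the minute.
    static long minuteBucket(long timestamp) {
        return timestamp - timestamp % 60_000;
    }

    // Keep only second-level rows whose minute bucket is newer than the latest minute-level AGG_TIMESTAMP.
    static List<Long> newerThanLatest(List<Long> secondLevelTimestamps, long latestMinuteLevelTimestamp) {
        List<Long> result = new ArrayList<>();
        for (long timestamp : secondLevelTimestamps) {
            if (minuteBucket(timestamp) > latestMinuteLevelTimestamp) {
                result.add(timestamp);
            }
        }
        return result;
    }

    public static void main(String[] args) {
        // The latest minute-level row starts at 300 000 ms; rows in that minute or earlier are already aggregated,
        // so only the row at 365 000 ms needs to be replayed in-memory.
        List<Long> secondLevel = Arrays.asList(295_000L, 305_000L, 365_000L);
        System.out.println(newerThanLatest(secondLevel, 300_000L)); // prints [365000]
    }
}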
Use of org.wso2.siddhi.core.event.ComplexEventChunk in project siddhi by wso2.
Class FindStoreQueryRuntime, method generateResetComplexEventChunk().
private ComplexEventChunk<ComplexEvent> generateResetComplexEventChunk(MetaStreamEvent metaStreamEvent) {
    StreamEvent streamEvent = new StreamEvent(metaStreamEvent.getBeforeWindowData().size(),
            metaStreamEvent.getOnAfterWindowData().size(), metaStreamEvent.getOutputData().size());
    streamEvent.setType(ComplexEvent.Type.RESET);
    StateEvent stateEvent = stateEventPool.borrowEvent();
    if (eventType == MetaStreamEvent.EventType.AGGREGATE) {
        stateEvent.addEvent(1, streamEvent);
    } else {
        stateEvent.addEvent(0, streamEvent);
    }
    stateEvent.setType(ComplexEvent.Type.RESET);
    ComplexEventChunk<ComplexEvent> complexEventChunk = new ComplexEventChunk<>(true);
    complexEventChunk.add(stateEvent);
    return complexEventChunk;
}
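RESET events like the one built above act as control signals: when a downstream aggregator sees one, it clears the state accumulated for the window that just closed. The following hypothetical consumer illustrates that contract with an invented running-sum field; Siddhi's own attribute aggregators handle RESET internally, so this is only a sketch of the idea (class name and attribute layout are assumptions).

import org.wso2.siddhi.core.event.ComplexEvent;
import org.wso2.siddhi.core.event.ComplexEventChunk;
import org.wso2.siddhi.core.event.stream.StreamEvent;

public class ResetAwareConsumerSketch {
    private double runningSum; // invented state, standing in for an aggregator's internal value

    // Processes a chunk, clearing the state whenever a RESET control event is seen.
    void consume(ComplexEventChunk<ComplexEvent> chunk) {
        chunk.reset();
        while (chunk.hasNext()) {
            ComplexEvent event = chunk.next();
            if (event.getType() == ComplexEvent.Type.RESET) {
                runningSum = 0;                                  // drop state from the closed window
            } else if (event.getType() == ComplexEvent.Type.CURRENT) {
                runningSum += (Double) event.getOutputData()[0]; // assumes a single numeric output attribute
            }
        }
        System.out.println("running sum: " + runningSum);
    }

    public static void main(String[] args) {
        ComplexEventChunk<ComplexEvent> chunk = new ComplexEventChunk<>(true);
        StreamEvent data = new StreamEvent(0, 0, 1);
        data.setType(ComplexEvent.Type.CURRENT);
        data.setOutputData(5.0, 0);
        chunk.add(data);
        StreamEvent reset = new StreamEvent(0, 0, 1);
        reset.setType(ComplexEvent.Type.RESET);
        chunk.add(reset);
        new ResetAwareConsumerSketch().consume(chunk); // prints 0.0: the RESET cleared the accumulated sum
    }
}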