Usage of io.siddhi.core.event.ComplexEventChunk in the siddhi project by WSO2.
Example: method generateDeleteEventChunk of class CacheExpirer.
/**
 * Builds a single-event chunk used to trigger deletion of expired cache entries.
 * The state event carries one stream event whose only output attribute is the
 * current timestamp (used downstream as the expiry threshold).
 *
 * @return a chunk containing exactly one {@link StateEvent} prepared for deletion
 */
private ComplexEventChunk<StateEvent> generateDeleteEventChunk() {
    // State event with capacity for 2 stream events and 0 output data slots.
    StateEvent deletionState = new StateEvent(2, 0);
    StreamEvent expiryMarker = streamEventFactory.newInstance();
    // Current time becomes the sole output attribute of the deleting event.
    expiryMarker.setOutputData(new Object[] { siddhiAppContext.getTimestampGenerator().currentTime() });
    deletionState.addEvent(0, expiryMarker);
    ComplexEventChunk<StateEvent> chunk = new ComplexEventChunk<>();
    chunk.add(deletionState);
    return chunk;
}
Usage of io.siddhi.core.event.ComplexEventChunk in the siddhi project by WSO2.
Example: method getProcessedEventChunk of class IncrementalDataAggregator.
/**
 * Converts the currently-held per-group aggregation states into a chunk of
 * stream events, one event per group.
 * <p>
 * The borrowed states are always returned to the holder, even if event
 * construction fails part-way through.
 *
 * @return a chunk with one {@link StreamEvent} per group-by state
 */
private synchronized ComplexEventChunk<StreamEvent> getProcessedEventChunk() {
    ComplexEventChunk<StreamEvent> processedChunk = new ComplexEventChunk<>();
    Map<String, State> groupStates = this.valueStateHolder.getAllGroupByStates();
    try {
        for (State groupState : groupStates.values()) {
            ValueState valueState = (ValueState) groupState;
            StreamEvent groupEvent = streamEventFactory.newInstance();
            long lastTs = valueState.lastTimestamp;
            groupEvent.setTimestamp(lastTs);
            // Slot 0 of the state's value array carries the timestamp.
            valueState.setValue(lastTs, 0);
            groupEvent.setOutputData(valueState.values);
            processedChunk.add(groupEvent);
        }
    } finally {
        // Hand the borrowed states back regardless of success.
        this.valueStateHolder.returnGroupByStates(groupStates);
    }
    return processedChunk;
}
Usage of io.siddhi.core.event.ComplexEventChunk in the siddhi project by WSO2.
Example: method sendTimerEvent of class IncrementalExecutor.
/**
 * Forwards a TIMER event, stamped with the start time of the current
 * aggregation window, to the next executor in the incremental chain.
 * Does nothing when there is no downstream executor.
 *
 * @param executorState state carrying the start time of the aggregates in flight
 */
private void sendTimerEvent(ExecutorState executorState) {
    if (getNextExecutor() == null) {
        return; // no downstream executor to notify
    }
    StreamEvent timer = streamEventFactory.newInstance();
    timer.setType(ComplexEvent.Type.TIMER);
    timer.setTimestamp(executorState.startTimeOfAggregates);
    ComplexEventChunk<StreamEvent> timerChunk = new ComplexEventChunk<>();
    timerChunk.add(timer);
    next.execute(timerChunk);
}
Usage of io.siddhi.core.event.ComplexEventChunk in the siddhi project by WSO2.
Example: method dispatchEvent of class IncrementalExecutor.
/**
 * Dispatches the aggregated events of the given value store: the grouped events
 * are written to the aggregation table (asynchronously on {@code executorService}
 * when this node is the processing executor) and forwarded to the next executor
 * in the chain, after which the value store is reset for the new window.
 *
 * @param startTimeOfNewAggregates   start time of the window now beginning
 * @param aBaseIncrementalValueStore store holding the aggregates of the closed window
 */
private void dispatchEvent(long startTimeOfNewAggregates, BaseIncrementalValueStore aBaseIncrementalValueStore) {
    AtomicBoolean isProcessFinished = new AtomicBoolean(false);
    if (aBaseIncrementalValueStore.isProcessed()) {
        // Two independent chunks over the same grouped events: one for the
        // table write, one for downstream processing.
        Map<String, StreamEvent> streamEventMap = aBaseIncrementalValueStore.getGroupedByEvents();
        ComplexEventChunk<StreamEvent> eventChunk = new ComplexEventChunk<>();
        for (StreamEvent event : streamEventMap.values()) {
            eventChunk.add(event);
        }
        Map<String, StreamEvent> tableStreamEventMap = aBaseIncrementalValueStore.getGroupedByEvents();
        ComplexEventChunk<StreamEvent> tableEventChunk = new ComplexEventChunk<>();
        for (StreamEvent event : tableStreamEventMap.values()) {
            tableEventChunk.add(event);
        }
        if (LOG.isDebugEnabled()) {
            LOG.debug("Event dispatched by aggregation " + aggregatorName + " for duration " + this.duration);
        }
        if (isProcessingExecutor) {
            executorService.execute(() -> {
                try {
                    table.addEvents(tableEventChunk, streamEventMap.size());
                } catch (Throwable t) {
                    // Best-effort table write: log and drop rather than break the chain.
                    LOG.error("Exception occurred at siddhi app '" + this.siddhiAppName + "' when performing table writes of aggregation '" + this.aggregatorName + "' for duration '" + this.duration + "'. This should be investigated as this " + "can cause accuracy loss.", t);
                    if (LOG.isDebugEnabled()) {
                        LOG.debug("Dropping Event chunk - \"" + eventChunk.toString() + "\"");
                    }
                } finally {
                    isProcessFinished.set(true);
                }
            });
        }
        // Only wait when a table-write task was actually submitted; otherwise
        // isProcessFinished would never be set and this loop would spin forever.
        if (waitUntillprocessFinish && isProcessingExecutor) {
            try {
                while (!isProcessFinished.get()) {
                    Thread.sleep(1000);
                }
            } catch (InterruptedException e) {
                // Restore the interrupt status so callers can observe it.
                Thread.currentThread().interrupt();
                LOG.error("Error occurred while waiting until table update task finishes for duration " + duration + " in aggregation " + aggregatorName, e);
            }
        }
        if (getNextExecutor() != null) {
            next.execute(eventChunk);
        }
    }
    // Reset the store for the window starting at startTimeOfNewAggregates.
    cleanBaseIncrementalValueStore(startTimeOfNewAggregates, aBaseIncrementalValueStore);
}
Usage of io.siddhi.core.event.ComplexEventChunk in the siddhi project by WSO2.
Example: method createEventChunkFromAggregatedData of class OutOfOrderEventsDataAggregator.
/**
 * Materializes the aggregated per-group values into a chunk of stream events,
 * one event per group, returning the borrowed states to the holder when done.
 *
 * @return a chunk with one {@link StreamEvent} per aggregated group
 */
private synchronized ComplexEventChunk<StreamEvent> createEventChunkFromAggregatedData() {
    ComplexEventChunk<StreamEvent> resultChunk = new ComplexEventChunk<>();
    Map<String, State> borrowedStates = this.valueStateHolder.getAllGroupByStates();
    try {
        for (State borrowed : borrowedStates.values()) {
            ValueState aggregated = (ValueState) borrowed;
            long eventTime = aggregated.lastTimestamp;
            // Slot 0 of the value array carries the timestamp.
            aggregated.setValue(eventTime, 0);
            StreamEvent outEvent = streamEventFactory.newInstance();
            outEvent.setTimestamp(eventTime);
            outEvent.setOutputData(aggregated.values);
            resultChunk.add(outEvent);
        }
    } finally {
        // Always return the states, even on a partial failure above.
        this.valueStateHolder.returnGroupByStates(borrowedStates);
    }
    return resultChunk;
}
Aggregations