Use of org.wso2.siddhi.core.event.ComplexEventChunk in project siddhi by wso2.
The class AllPerTimeOutputRateLimiter, method process:
@Override
public void process(ComplexEventChunk complexEventChunk) {
    ArrayList<ComplexEventChunk<ComplexEvent>> outputEventChunks = new ArrayList<ComplexEventChunk<ComplexEvent>>();
    complexEventChunk.reset();
    synchronized (this) {
        while (complexEventChunk.hasNext()) {
            ComplexEvent event = complexEventChunk.next();
            if (event.getType() == ComplexEvent.Type.TIMER) {
                if (event.getTimestamp() >= scheduledTime) {
                    ComplexEvent first = allComplexEventChunk.getFirst();
                    if (first != null) {
                        // flush everything buffered since the last tick; adding the head of the
                        // chain carries all linked events into the output chunk
                        allComplexEventChunk.clear();
                        ComplexEventChunk<ComplexEvent> outputEventChunk = new ComplexEventChunk<ComplexEvent>(complexEventChunk.isBatch());
                        outputEventChunk.add(first);
                        outputEventChunks.add(outputEventChunk);
                    }
                    scheduledTime = scheduledTime + value;
                    scheduler.notifyAt(scheduledTime);
                }
            } else if (event.getType() == ComplexEvent.Type.CURRENT || event.getType() == ComplexEvent.Type.EXPIRED) {
                // buffer incoming events until the next scheduled emission
                complexEventChunk.remove();
                allComplexEventChunk.add(event);
            }
        }
    }
    for (ComplexEventChunk eventChunk : outputEventChunks) {
        sendToCallBacks(eventChunk);
    }
}
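The flush in the TIMER branch works because events appended to a ComplexEventChunk are linked into a chain: adding only the head returned by getFirst() pulls the whole buffered chain into the output chunk. A minimal sketch of that behaviour, assuming the Siddhi 4.x event API (the StreamEvent constructor arguments and the output data used here are illustrative assumptions):

import org.wso2.siddhi.core.event.ComplexEvent;
import org.wso2.siddhi.core.event.ComplexEventChunk;
import org.wso2.siddhi.core.event.stream.StreamEvent;

public class ChunkChainingSketch {
    public static void main(String[] args) {
        // Two events appended to the same chunk get chained via their next pointers.
        ComplexEventChunk<ComplexEvent> buffer = new ComplexEventChunk<ComplexEvent>(false);
        StreamEvent e1 = new StreamEvent(0, 0, 1);   // before-window, on-after-window, output data sizes (assumed)
        e1.setOutputData(new Object[]{"first"});
        StreamEvent e2 = new StreamEvent(0, 0, 1);
        e2.setOutputData(new Object[]{"second"});
        buffer.add(e1);
        buffer.add(e2);

        // Taking the head and clearing the holder leaves the e1 -> e2 chain intact,
        // so adding just the head moves every buffered event into the output chunk.
        ComplexEvent head = buffer.getFirst();
        buffer.clear();
        ComplexEventChunk<ComplexEvent> out = new ComplexEventChunk<ComplexEvent>(false);
        out.add(head);

        out.reset();
        while (out.hasNext()) {
            System.out.println(out.next());   // prints both events
        }
    }
}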
Use of org.wso2.siddhi.core.event.ComplexEventChunk in project siddhi by wso2.
The class FirstGroupByPerTimeOutputRateLimiter, method process:
@Override
public void process(ComplexEventChunk complexEventChunk) {
    ArrayList<ComplexEventChunk<ComplexEvent>> outputEventChunks = new ArrayList<ComplexEventChunk<ComplexEvent>>();
    complexEventChunk.reset();
    synchronized (this) {
        while (complexEventChunk.hasNext()) {
            ComplexEvent event = complexEventChunk.next();
            if (event.getType() == ComplexEvent.Type.TIMER) {
                if (event.getTimestamp() >= scheduledTime) {
                    if (allComplexEventChunk.getFirst() != null) {
                        ComplexEventChunk<ComplexEvent> eventChunk = new ComplexEventChunk<ComplexEvent>(complexEventChunk.isBatch());
                        eventChunk.add(allComplexEventChunk.getFirst());
                        allComplexEventChunk.clear();
                        groupByKeys.clear();
                        outputEventChunks.add(eventChunk);
                    } else {
                        groupByKeys.clear();
                    }
                    scheduledTime = scheduledTime + value;
                    scheduler.notifyAt(scheduledTime);
                }
            } else if (event.getType() == ComplexEvent.Type.CURRENT || event.getType() == ComplexEvent.Type.EXPIRED) {
                GroupedComplexEvent groupedComplexEvent = ((GroupedComplexEvent) event);
                // keep only the first event seen for each group key within the current period
                if (!groupByKeys.contains(groupedComplexEvent.getGroupKey())) {
                    complexEventChunk.remove();
                    groupByKeys.add(groupedComplexEvent.getGroupKey());
                    allComplexEventChunk.add(groupedComplexEvent.getComplexEvent());
                }
            }
        }
    }
    for (ComplexEventChunk eventChunk : outputEventChunks) {
        sendToCallBacks(eventChunk);
    }
}
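Compared to the previous limiter, this one forwards only the first event per group key in each period; groupByKeys is the dedup guard and is cleared on every tick. A Siddhi-free sketch of the same rule (class and method names here are hypothetical, not part of the Siddhi API):

import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

public class FirstPerGroupSketch {
    private final Set<String> seenKeys = new HashSet<String>();
    private final List<String> buffered = new ArrayList<String>();

    // Keep an event only if its group key has not been seen since the last flush.
    public void onEvent(String groupKey, String payload) {
        if (seenKeys.add(groupKey)) {        // add() returns false when the key was already present
            buffered.add(payload);
        }
    }

    // Flush the buffered "first per group" events and start a new period,
    // mirroring what the TIMER branch above does on each scheduled tick.
    public List<String> onTimerTick() {
        List<String> out = new ArrayList<String>(buffered);
        buffered.clear();
        seenKeys.clear();
        return out;
    }
}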
Use of org.wso2.siddhi.core.event.ComplexEventChunk in project siddhi by wso2.
The class FilterProcessor, method process:
@Override
public void process(ComplexEventChunk complexEventChunk) {
    complexEventChunk.reset();
    while (complexEventChunk.hasNext()) {
        ComplexEvent complexEvent = complexEventChunk.next();
        Object result = conditionExecutor.execute(complexEvent);
        if (result == null || !(Boolean) result) {
            complexEventChunk.remove();
        }
    }
    if (complexEventChunk.getFirst() != null) {
        this.next.process(complexEventChunk);
    }
}
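FilterProcessor treats a null condition result the same as false, and remove() unlinks the current event in place while iterating. A small sketch of that iterate-and-remove pattern against the same chunk API (the numeric filter condition and the StreamEvent construction are assumptions for illustration):

import org.wso2.siddhi.core.event.ComplexEvent;
import org.wso2.siddhi.core.event.ComplexEventChunk;
import org.wso2.siddhi.core.event.stream.StreamEvent;

public class ChunkFilterSketch {
    // Drop every event whose first output attribute is not a positive number,
    // mirroring FilterProcessor's "null or false means remove" rule.
    static void filterPositive(ComplexEventChunk<ComplexEvent> chunk) {
        chunk.reset();
        while (chunk.hasNext()) {
            ComplexEvent event = chunk.next();
            Object value = event.getOutputData()[0];
            Boolean keep = (value instanceof Number) ? ((Number) value).doubleValue() > 0 : null;
            if (keep == null || !keep) {
                chunk.remove();   // unlink the current event from the chunk
            }
        }
    }

    public static void main(String[] args) {
        ComplexEventChunk<ComplexEvent> chunk = new ComplexEventChunk<ComplexEvent>(false);
        for (double value : new double[]{-1.0, 2.5, 0.0}) {
            StreamEvent event = new StreamEvent(0, 0, 1);   // assumed Siddhi 4.x constructor
            event.setOutputData(new Object[]{value});
            chunk.add(event);
        }
        filterPositive(chunk);   // only the 2.5 event remains linked in the chunk
    }
}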
Use of org.wso2.siddhi.core.event.ComplexEventChunk in project siddhi by wso2.
The class LengthWindowProcessor, method process:
@Override
protected void process(ComplexEventChunk<StreamEvent> streamEventChunk, Processor nextProcessor, StreamEventCloner streamEventCloner) {
    synchronized (this) {
        long currentTime = siddhiAppContext.getTimestampGenerator().currentTime();
        while (streamEventChunk.hasNext()) {
            StreamEvent streamEvent = streamEventChunk.next();
            StreamEvent clonedEvent = streamEventCloner.copyStreamEvent(streamEvent);
            clonedEvent.setType(StreamEvent.Type.EXPIRED);
            if (count < length) {
                // window not yet full; retain the clone as a future expired event
                count++;
                this.expiredEventChunk.add(clonedEvent);
            } else {
                StreamEvent firstEvent = this.expiredEventChunk.poll();
                if (firstEvent != null) {
                    // window is full; expire the oldest event alongside the new current event
                    firstEvent.setTimestamp(currentTime);
                    streamEventChunk.insertBeforeCurrent(firstEvent);
                    this.expiredEventChunk.add(clonedEvent);
                } else {
                    StreamEvent resetEvent = streamEventCloner.copyStreamEvent(streamEvent);
                    resetEvent.setType(ComplexEvent.Type.RESET);
                    // add resetEvent and clonedEvent to the streamEventChunk;
                    // because insertAfterCurrent() is used, the final order is
                    // currentEvent > clonedEvent (the expired event) > resetEvent
                    streamEventChunk.insertAfterCurrent(resetEvent);
                    streamEventChunk.insertAfterCurrent(clonedEvent);
                    // skip the two manually inserted events so they are not processed
                    // again in the next loop iteration; hence next() is called twice
                    streamEventChunk.next();
                    streamEventChunk.next();
                }
            }
        }
    }
    nextProcessor.process(streamEventChunk);
}
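Ignoring the zero-length special case handled by the RESET branch, the window behaves like a fixed-size FIFO: every arrival is forwarded as a current event, and once the window holds 'length' events the oldest one is emitted as an expired event in the same batch. A Siddhi-free model of that sliding behaviour (class and method names are hypothetical):

import java.util.ArrayDeque;
import java.util.Arrays;
import java.util.Deque;
import java.util.List;

public class LengthWindowSketch {
    private final int length;
    private final Deque<String> window = new ArrayDeque<String>();

    public LengthWindowSketch(int length) {
        this.length = length;
    }

    // Returns what the processor would forward downstream for one arrival:
    // always the current event, plus the expired oldest event once the window is full.
    public List<String> onArrival(String event) {
        String expired = null;
        if (window.size() == length) {
            expired = window.poll();   // oldest event leaves the window
        }
        window.add(event);
        return expired == null
                ? Arrays.asList("CURRENT:" + event)
                : Arrays.asList("CURRENT:" + event, "EXPIRED:" + expired);
    }
}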
Use of org.wso2.siddhi.core.event.ComplexEventChunk in project siddhi by wso2.
The class LossyFrequentWindowProcessor, method process:
@Override
protected void process(ComplexEventChunk<StreamEvent> streamEventChunk, Processor nextProcessor, StreamEventCloner streamEventCloner) {
    synchronized (this) {
        long currentTime = siddhiAppContext.getTimestampGenerator().currentTime();
        StreamEvent streamEvent = streamEventChunk.getFirst();
        streamEventChunk.clear();
        while (streamEvent != null) {
            StreamEvent next = streamEvent.getNext();
            streamEvent.setNext(null);
            StreamEvent clonedEvent = streamEventCloner.copyStreamEvent(streamEvent);
            clonedEvent.setType(StreamEvent.Type.EXPIRED);
            totalCount++;
            if (totalCount != 1) {
                currentBucketId = Math.ceil(totalCount / windowWidth);
            }
            String currentKey = generateKey(streamEvent);
            StreamEvent oldEvent = map.put(currentKey, clonedEvent);
            if (oldEvent != null) {
                // this event is already in the store
                countMap.put(currentKey, countMap.get(currentKey).incrementCount());
            } else {
                // this is a new event
                LossyCount lCount = new LossyCount(1, (int) currentBucketId - 1);
                countMap.put(currentKey, lCount);
            }
            // collect all the events in the system that satisfy the support
            // threshold provided by the user
            List<String> keys = new ArrayList<String>();
            keys.addAll(countMap.keySet());
            for (String key : keys) {
                LossyCount lossyCount = countMap.get(key);
                if (lossyCount.getCount() >= ((support - error) * totalCount)) {
                    // among the selected events, if the newly arrived event is present, mark it as an in-event
                    if (key.equals(currentKey)) {
                        streamEventChunk.add(streamEvent);
                    }
                }
            }
            if (totalCount % windowWidth == 0) {
                // time to run the data-structure pruning pass
                keys = new ArrayList<String>();
                keys.addAll(countMap.keySet());
                for (String key : keys) {
                    LossyCount lossyCount = countMap.get(key);
                    if (lossyCount.getCount() + lossyCount.getBucketId() <= currentBucketId) {
                        log.info("Removing the Event: " + key + " from the window");
                        countMap.remove(key);
                        StreamEvent expiredEvent = map.remove(key);
                        expiredEvent.setTimestamp(currentTime);
                        streamEventChunk.add(expiredEvent);
                    }
                }
            }
            streamEvent = next;
        }
    }
    nextProcessor.process(streamEventChunk);
}
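The numbers driving the emit and prune decisions above come from the standard lossy-counting algorithm: assuming windowWidth is the bucket width derived from the error parameter (roughly ceil(1/error)), an entry is reported while its count stays at or above (support - error) * totalCount, and it is pruned once count + bucketId falls to or below the current bucket id. A small worked sketch of that arithmetic with assumed parameter values:

public class LossyCountingMath {
    public static void main(String[] args) {
        double support = 0.1;                          // minimum frequency requested by the user
        double error = 0.01;                           // allowed estimation error
        double windowWidth = Math.ceil(1 / error);     // bucket width: 100 events per bucket (assumed derivation)

        int totalCount = 1000;                         // events seen so far
        double currentBucketId = Math.ceil(totalCount / windowWidth);   // bucket 10

        int countForKey = 95;                          // observed count for one key
        int bucketIdOfKey = 1;                         // bucket in which the key first appeared

        // emit: 95 >= (0.1 - 0.01) * 1000 = 90, so the key is reported as frequent
        boolean frequentEnough = countForKey >= (support - error) * totalCount;
        // prune: 95 + 1 = 96 is not <= 10, so the key survives the cleanup pass
        boolean prune = countForKey + bucketIdOfKey <= currentBucketId;

        System.out.println("emit=" + frequentEnough + " prune=" + prune);   // emit=true prune=false
    }
}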