Use of io.siddhi.core.event.stream.StreamEvent in project siddhi by wso2.
The class EventChunkOperator, method delete:
@Override
public void delete(ComplexEventChunk<StateEvent> deletingEventChunk, Object storeEvents) {
    ComplexEventChunk<StreamEvent> storeEventChunk = (ComplexEventChunk<StreamEvent>) storeEvents;
    deletingEventChunk.reset();
    while (deletingEventChunk.hasNext()) {
        StateEvent deletingEvent = deletingEventChunk.next();
        try {
            storeEventChunk.reset();
            while (storeEventChunk.hasNext()) {
                StreamEvent storeEvent = storeEventChunk.next();
                deletingEvent.setEvent(storeEventPosition, storeEvent);
                if ((Boolean) expressionExecutor.execute(deletingEvent)) {
                    storeEventChunk.remove();
                }
            }
        } finally {
            deletingEvent.setEvent(storeEventPosition, null);
        }
    }
}
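For context, the sketch below shows the kind of Siddhi application that exercises this delete path: deletes issued against a plain in-memory table are matched against the stored events with a compiled condition, much as EventChunkOperator.delete does above. This is a minimal sketch assuming the standard SiddhiManager, SiddhiAppRuntime and InputHandler APIs; the stream, table and attribute names (StockStream, DeleteStockStream, StockTable) are illustrative only.

import io.siddhi.core.SiddhiAppRuntime;
import io.siddhi.core.SiddhiManager;
import io.siddhi.core.stream.input.InputHandler;

public class TableDeleteExample {
    public static void main(String[] args) throws InterruptedException {
        String app = "define stream StockStream (symbol string, price float, volume long); " +
                "define stream DeleteStockStream (symbol string); " +
                "define table StockTable (symbol string, price float, volume long); " +
                "from StockStream insert into StockTable; " +
                "from DeleteStockStream " +
                "delete StockTable on StockTable.symbol == symbol; ";

        SiddhiManager siddhiManager = new SiddhiManager();
        SiddhiAppRuntime runtime = siddhiManager.createSiddhiAppRuntime(app);
        runtime.start();

        InputHandler stockStream = runtime.getInputHandler("StockStream");
        InputHandler deleteStream = runtime.getInputHandler("DeleteStockStream");

        stockStream.send(new Object[]{"IBM", 75.6f, 100L});
        stockStream.send(new Object[]{"WSO2", 57.6f, 200L});
        // The 'on StockTable.symbol == symbol' condition is compiled into the
        // expressionExecutor evaluated per stored event in the delete operator.
        deleteStream.send(new Object[]{"IBM"});

        runtime.shutdown();
        siddhiManager.shutdown();
    }
}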
Use of io.siddhi.core.event.stream.StreamEvent in project siddhi by wso2.
The class IndexOperator, method update:
private void update(IndexedEventHolder storeEvents, InMemoryCompiledUpdateSet compiledUpdateSet,
                    StateEvent overwritingOrAddingEvent, ComplexEventChunk<StreamEvent> foundEventChunk) {
    // Handles the case where an indexed attribute is updated but its value has not changed,
    // to reduce the number of passes needed to update the events.
    boolean doDeleteUpdate = false;
    boolean fail = false;
    for (Map.Entry<Integer, ExpressionExecutor> entry : compiledUpdateSet.getExpressionExecutorMap().entrySet()) {
        if (doDeleteUpdate || fail) {
            break;
        }
        if (storeEvents.isAttributeIndexed(entry.getKey())) {
            // TODO: how much checking is needed before falling back to delete-then-update?
            foundEventChunk.reset();
            Set<Object> keys = null;
            PrimaryKeyReferenceHolder[] primaryKeyReferenceHolders = storeEvents.getPrimaryKeyReferenceHolders();
            if (primaryKeyReferenceHolders != null && primaryKeyReferenceHolders.length == 1
                    && entry.getKey() == primaryKeyReferenceHolders[0].getPrimaryKeyPosition()) {
                keys = new HashSet<>(storeEvents.getAllPrimaryKeyValues());
            }
            while (foundEventChunk.hasNext()) {
                StreamEvent streamEvent = foundEventChunk.next();
                Object updatingData = entry.getValue().execute(overwritingOrAddingEvent);
                Object storeEventData = streamEvent.getOutputData()[entry.getKey()];
                if (updatingData != null && storeEventData != null && !updatingData.equals(storeEventData)) {
                    doDeleteUpdate = true;
                    if (keys == null || keys.size() == 0) {
                        break;
                    } else {
                        keys.remove(storeEventData);
                        if (!keys.add(updatingData)) {
                            log.error("Update failed for event :" + overwritingOrAddingEvent + ", as there is "
                                    + "already an event stored with primary key '" + updatingData
                                    + "' at '" + queryName + "'");
                            fail = true;
                            break;
                        }
                    }
                }
            }
        }
    }
    foundEventChunk.reset();
    if (!fail) {
        if (doDeleteUpdate) {
            collectionExecutor.delete(overwritingOrAddingEvent, storeEvents);
            ComplexEventChunk<StreamEvent> toUpdateEventChunk = new ComplexEventChunk<>();
            while (foundEventChunk.hasNext()) {
                StreamEvent streamEvent = foundEventChunk.next();
                foundEventChunk.remove();
                // detach the event to restore the chained state back to normal
                streamEvent.setNext(null);
                for (Map.Entry<Integer, ExpressionExecutor> entry : compiledUpdateSet.getExpressionExecutorMap().entrySet()) {
                    streamEvent.setOutputData(entry.getValue().execute(overwritingOrAddingEvent), entry.getKey());
                }
                toUpdateEventChunk.add(streamEvent);
            }
            storeEvents.add(toUpdateEventChunk);
        } else {
            StreamEvent first = foundEventChunk.getFirst();
            while (first != null) {
                StreamEvent streamEvent = first;
                handleCachePolicyAttributeUpdate(streamEvent);
                for (Map.Entry<Integer, ExpressionExecutor> entry : compiledUpdateSet.getExpressionExecutorMap().entrySet()) {
                    streamEvent.setOutputData(entry.getValue().execute(overwritingOrAddingEvent), entry.getKey());
                }
                StreamEvent next = first.getNext();
                // detach the event to restore the chained state back to normal
                first.setNext(null);
                first = next;
            }
        }
    }
}
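The interesting part of IndexOperator.update is the fallback: when an update would change an indexed attribute (doDeleteUpdate), the matched events are deleted and re-added rather than mutated in place, and a single-attribute primary key is pre-checked for collisions before anything is committed. The stand-alone sketch below, written against plain Java collections rather than Siddhi classes, illustrates why that fallback exists; the class IndexedUpdateSketch and its map-based index are hypothetical.

import java.util.HashMap;
import java.util.Map;

// Hypothetical sketch (not Siddhi API): updating an indexed attribute moves the entry's
// index key, so it must be removed under the old key and stored under the new one.
public class IndexedUpdateSketch {

    private final Map<String, Object[]> indexBySymbol = new HashMap<>();

    public void add(Object[] row) {
        indexBySymbol.put((String) row[0], row);
    }

    public boolean update(String oldSymbol, Object[] newRow) {
        String newSymbol = (String) newRow[0];
        if (newSymbol.equals(oldSymbol)) {
            // Indexed value unchanged: safe to update in place.
            indexBySymbol.put(oldSymbol, newRow);
            return true;
        }
        if (indexBySymbol.containsKey(newSymbol)) {
            // Mirrors the primary-key collision check that makes IndexOperator log an error and fail.
            return false;
        }
        // Indexed value changed: delete under the old key, then re-add under the new key.
        indexBySymbol.remove(oldSymbol);
        indexBySymbol.put(newSymbol, newRow);
        return true;
    }
}

Deleting and re-adding keeps the index consistent at the cost of an extra pass, which is exactly the trade-off the TODO in the original method is asking about.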
Use of io.siddhi.core.event.stream.StreamEvent in project siddhi by wso2.
The class NotCollectionExecutor, method delete:
@Override
public void delete(StateEvent deletingEvent, IndexedEventHolder indexedEventHolder) {
    Collection<StreamEvent> notStreamEvents = notCollectionExecutor.findEvents(deletingEvent, indexedEventHolder);
    if (notStreamEvents == null) {
        exhaustiveCollectionExecutor.delete(deletingEvent, indexedEventHolder);
    } else if (notStreamEvents.size() == 0) {
        indexedEventHolder.deleteAll();
    } else if (notStreamEvents.size() != indexedEventHolder.getAllEvents().size()) {
        Collection<StreamEvent> allEvents = indexedEventHolder.getAllEvents();
        Set<StreamEvent> returnSet = new HashSet<StreamEvent>();
        for (StreamEvent aEvent : allEvents) {
            if (!notStreamEvents.contains(aEvent)) {
                returnSet.add(aEvent);
            }
        }
        indexedEventHolder.deleteAll(returnSet);
    }
}
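This is a complement operation: the inner executor finds the events matched by the negated condition, and everything else is deleted. A null result falls back to the exhaustive executor, an empty result deletes the whole store, and a full match deletes nothing. A hypothetical stand-alone equivalent on plain collections (not Siddhi classes) might look like this:

import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.function.Predicate;

// Hypothetical sketch of the complement logic in NotCollectionExecutor.delete:
// delete every stored event that the inner condition does NOT match.
public class NotDeleteSketch {

    public static <T> void deleteNotMatching(Collection<T> store, Predicate<T> innerCondition) {
        List<T> matched = new ArrayList<>();
        for (T event : store) {
            if (innerCondition.test(event)) {
                matched.add(event);
            }
        }
        if (matched.isEmpty()) {
            // nothing matches the inner condition, so NOT matches everything: delete all
            store.clear();
        } else if (matched.size() != store.size()) {
            Set<T> toDelete = new HashSet<>(store);
            toDelete.removeAll(matched);   // complement of the matched set
            store.removeAll(toDelete);
        }
        // if every stored event matches the inner condition, the complement is empty: delete nothing
    }
}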
Use of io.siddhi.core.event.stream.StreamEvent in project siddhi by wso2.
The class CompareCollectionExecutor, method find:
public StreamEvent find(StateEvent matchingEvent, IndexedEventHolder indexedEventHolder, StreamEventCloner storeEventCloner) {
    ComplexEventChunk<StreamEvent> returnEventChunk = new ComplexEventChunk<StreamEvent>();
    Collection<StreamEvent> storeEventSet = findEvents(matchingEvent, indexedEventHolder);
    if (storeEventSet == null) {
        // triggering sequential scan
        Collection<StreamEvent> storeEvents = indexedEventHolder.getAllEvents();
        for (StreamEvent storeEvent : storeEvents) {
            matchingEvent.setEvent(storeEventIndex, storeEvent);
            if ((Boolean) expressionExecutor.execute(matchingEvent)) {
                if (storeEventCloner != null) {
                    returnEventChunk.add(storeEventCloner.copyStreamEvent(storeEvent));
                } else {
                    returnEventChunk.add(storeEvent);
                }
            }
            matchingEvent.setEvent(storeEventIndex, null);
        }
        return returnEventChunk.getFirst();
    } else {
        for (StreamEvent storeEvent : storeEventSet) {
            if (cacheTable != null) {
                cacheTable.updateCachePolicyAttribute(storeEvent);
            }
            if (storeEventCloner != null) {
                returnEventChunk.add(storeEventCloner.copyStreamEvent(storeEvent));
            } else {
                returnEventChunk.add(storeEvent);
            }
        }
        return returnEventChunk.getFirst();
    }
}
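The pattern here is "try the index first, then fall back to a scan": findEvents returns null when the comparison cannot be answered from the index, in which case every stored event is checked against the compiled expression, and each hit is cloned when a StreamEventCloner is supplied so callers cannot mutate the store. A simplified, hypothetical version of the same strategy on generic collections could be sketched as follows:

import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.function.Predicate;
import java.util.function.UnaryOperator;

// Hypothetical sketch of the lookup strategy in CompareCollectionExecutor.find:
// answer from an index when possible, otherwise fall back to a sequential scan.
public class IndexedFindSketch<T> {

    public List<T> find(Map<Object, Collection<T>> index, Object key,
                        Collection<T> allEvents, Predicate<T> condition,
                        UnaryOperator<T> cloner) {
        List<T> result = new ArrayList<>();
        Collection<T> candidates = (index != null) ? index.get(key) : null;
        if (candidates == null) {
            // no usable index: sequential scan, evaluating the condition per event
            for (T event : allEvents) {
                if (condition.test(event)) {
                    result.add(cloner != null ? cloner.apply(event) : event);
                }
            }
        } else {
            // index answered: return the candidate events directly (optionally cloned)
            for (T event : candidates) {
                result.add(cloner != null ? cloner.apply(event) : event);
            }
        }
        return result;
    }
}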
Use of io.siddhi.core.event.stream.StreamEvent in project siddhi by wso2.
The class ExhaustiveCollectionExecutor, method find:
public StreamEvent find(StateEvent matchingEvent, IndexedEventHolder indexedEventHolder, StreamEventCloner storeEventCloner) {
    ComplexEventChunk<StreamEvent> returnEventChunk = new ComplexEventChunk<StreamEvent>();
    Collection<StreamEvent> storeEvents = indexedEventHolder.getAllEvents();
    for (StreamEvent storeEvent : storeEvents) {
        matchingEvent.setEvent(storeEventIndex, storeEvent);
        if ((Boolean) expressionExecutor.execute(matchingEvent)) {
            if (storeEventCloner != null) {
                returnEventChunk.add(storeEventCloner.copyStreamEvent(storeEvent));
            } else {
                returnEventChunk.add(storeEvent);
            }
        }
        matchingEvent.setEvent(storeEventIndex, null);
    }
    return returnEventChunk.getFirst();
}
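ExhaustiveCollectionExecutor.find is the last-resort variant: every stored event is attached to the matching StateEvent, the compiled condition is evaluated, and matches are (optionally cloned and) linked into the returned chunk. The sketch below shows a table join whose condition uses a non-indexed attribute, which is the kind of lookup that tends to land on this exhaustive path. It assumes the standard SiddhiManager, StreamCallback and InputHandler APIs; the names are illustrative, and whether the runtime actually selects this executor depends on the table's indexes and the compiled condition.

import io.siddhi.core.SiddhiAppRuntime;
import io.siddhi.core.SiddhiManager;
import io.siddhi.core.event.Event;
import io.siddhi.core.stream.input.InputHandler;
import io.siddhi.core.stream.output.StreamCallback;

public class TableFindExample {
    public static void main(String[] args) throws InterruptedException {
        String app = "define stream StockStream (symbol string, price float, volume long); " +
                "define stream CheckStockStream (minPrice float); " +
                "define table StockTable (symbol string, price float, volume long); " +
                "from StockStream insert into StockTable; " +
                "from CheckStockStream join StockTable " +
                "on StockTable.price > CheckStockStream.minPrice " +
                "select StockTable.symbol, StockTable.price " +
                "insert into MatchingStockStream; ";

        SiddhiManager siddhiManager = new SiddhiManager();
        SiddhiAppRuntime runtime = siddhiManager.createSiddhiAppRuntime(app);
        runtime.addCallback("MatchingStockStream", new StreamCallback() {
            @Override
            public void receive(Event[] events) {
                for (Event event : events) {
                    System.out.println("Matched: " + event);
                }
            }
        });
        runtime.start();

        InputHandler stockStream = runtime.getInputHandler("StockStream");
        InputHandler checkStream = runtime.getInputHandler("CheckStockStream");
        stockStream.send(new Object[]{"IBM", 75.6f, 100L});
        stockStream.send(new Object[]{"WSO2", 57.6f, 200L});
        // 'price' is not declared as an index here, so the join condition is evaluated
        // against every stored event, i.e. a full scan like ExhaustiveCollectionExecutor.find.
        checkStream.send(new Object[]{60.0f});

        runtime.shutdown();
        siddhiManager.shutdown();
    }
}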