
Example 51 with ComplexEventChunk

Use of org.wso2.siddhi.core.event.ComplexEventChunk in project siddhi by wso2.

Class OverwriteTableIndexOperator, method tryUpdate.

@Override
public ComplexEventChunk<StreamEvent> tryUpdate(ComplexEventChunk<StateEvent> updatingOrAddingEventChunk, Object storeEvents, InMemoryCompiledUpdateSet compiledUpdateSet, AddingStreamEventExtractor addingStreamEventExtractor) {
    updatingOrAddingEventChunk.reset();
    while (updatingOrAddingEventChunk.hasNext()) {
        StateEvent overwritingOrAddingEvent = updatingOrAddingEventChunk.next();
        // Overwrite the matching entry in the indexed store with the incoming stream event.
        ((IndexedEventHolder) storeEvents).overwrite(addingStreamEventExtractor.getAddingStreamEvent(overwritingOrAddingEvent));
    }
    // The overwrite operator updates or inserts every event, so there is nothing left to return for addition.
    return null;
}
Also used : IndexedEventHolder(org.wso2.siddhi.core.table.holder.IndexedEventHolder) StateEvent(org.wso2.siddhi.core.event.state.StateEvent)
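
For orientation, here is a minimal, hypothetical sketch of the chunk handling that tryUpdate relies on: a ComplexEventChunk<StateEvent> is filled and then walked with the same reset()/hasNext()/next() pattern. The class name, attribute sizes and output values are invented, and the StreamEvent(before, onAfter, output) and StateEvent(streamEvents, output) constructors, setOutputData, setEvent and getStreamEvent are assumed from the Siddhi 4.x event API.

import org.wso2.siddhi.core.event.ComplexEventChunk;
import org.wso2.siddhi.core.event.state.StateEvent;
import org.wso2.siddhi.core.event.stream.StreamEvent;

// Hypothetical illustration, not part of the Siddhi code base.
public class StateEventChunkSketch {
    public static void main(String[] args) {
        // A non-batch chunk of StateEvents, mirroring updatingOrAddingEventChunk above.
        ComplexEventChunk<StateEvent> chunk = new ComplexEventChunk<>(false);
        for (int i = 0; i < 3; i++) {
            // One stream event with no window data and a single output attribute (sizes are made up).
            StreamEvent streamEvent = new StreamEvent(0, 0, 1);
            streamEvent.setOutputData(new Object[]{"value-" + i});
            // A state event holding one stream event and one output attribute.
            StateEvent stateEvent = new StateEvent(1, 1);
            stateEvent.setEvent(0, streamEvent);
            chunk.add(stateEvent);
        }
        // The same reset/hasNext/next walk used in tryUpdate.
        chunk.reset();
        while (chunk.hasNext()) {
            StateEvent event = chunk.next();
            System.out.println(event.getStreamEvent(0).getOutputData()[0]);
        }
    }
}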

Example 52 with ComplexEventChunk

Use of org.wso2.siddhi.core.event.ComplexEventChunk in project siddhi by wso2.

Class Window, method add.

/**
 * Add the given ComplexEventChunk to the Window.
 *
 * @param complexEventChunk the event chunk to be added
 */
public void add(ComplexEventChunk complexEventChunk) {
    try {
        this.lockWrapper.lock();
        complexEventChunk.reset();
        // Convert all events to StreamEvent because StateEvents can be passed if directly received from a join
        ComplexEvent complexEvents = complexEventChunk.getFirst();
        StreamEvent firstEvent = streamEventPool.borrowEvent();
        eventConverter.convertComplexEvent(complexEvents, firstEvent);
        StreamEvent currentEvent = firstEvent;
        complexEvents = complexEvents.getNext();
        int numberOfEvents = 0;
        while (complexEvents != null) {
            numberOfEvents++;
            StreamEvent nextEvent = streamEventPool.borrowEvent();
            eventConverter.convertComplexEvent(complexEvents, nextEvent);
            currentEvent.setNext(nextEvent);
            currentEvent = nextEvent;
            complexEvents = complexEvents.getNext();
        }
        try {
            if (throughputTrackerInsert != null && siddhiAppContext.isStatsEnabled()) {
                throughputTrackerInsert.eventsIn(numberOfEvents);
                latencyTrackerInsert.markIn();
            }
            // Send to the window windowProcessor
            windowProcessor.process(new ComplexEventChunk<StreamEvent>(firstEvent, currentEvent, complexEventChunk.isBatch()));
        } finally {
            if (throughputTrackerInsert != null && siddhiAppContext.isStatsEnabled()) {
                latencyTrackerInsert.markOut();
            }
        }
    } finally {
        this.lockWrapper.unlock();
    }
}
Also used : ComplexEvent(org.wso2.siddhi.core.event.ComplexEvent) MetaStreamEvent(org.wso2.siddhi.core.event.stream.MetaStreamEvent) StreamEvent(org.wso2.siddhi.core.event.stream.StreamEvent)
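
The core of Window.add is the manual linking of converted events followed by wrapping them with the (first, last, isBatch) ComplexEventChunk constructor before the processor call. The sketch below reproduces just that pattern with plain StreamEvents; the class name, attribute sizes and values are invented, and the StreamEvent constructor and setOutputData call are assumed from the Siddhi 4.x API.

import org.wso2.siddhi.core.event.ComplexEventChunk;
import org.wso2.siddhi.core.event.stream.StreamEvent;

// Hypothetical illustration, not part of the Siddhi code base.
public class ChainedChunkSketch {
    public static void main(String[] args) {
        StreamEvent first = null;
        StreamEvent current = null;
        for (int i = 0; i < 3; i++) {
            // No window data, one output attribute (sizes are made up).
            StreamEvent next = new StreamEvent(0, 0, 1);
            next.setOutputData(new Object[]{i});
            if (first == null) {
                // Head of the chain.
                first = next;
            } else {
                // Link events exactly as Window.add does with currentEvent.setNext(nextEvent).
                current.setNext(next);
            }
            current = next;
        }
        // Wrap the linked list the same way Window.add does before calling windowProcessor.process(...).
        ComplexEventChunk<StreamEvent> chunk = new ComplexEventChunk<StreamEvent>(first, current, false);
        chunk.reset();
        while (chunk.hasNext()) {
            System.out.println(chunk.next().getOutputData()[0]);
        }
    }
}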

Example 53 with ComplexEventChunk

Use of org.wso2.siddhi.core.event.ComplexEventChunk in project carbon-apimgt by wso2.

Class EmitOnStateChange, method process.

@Override
protected void process(ComplexEventChunk<StreamEvent> streamEventChunk, Processor processor, StreamEventCloner streamEventCloner, ComplexEventPopulater complexEventPopulater) {
    while (streamEventChunk.hasNext()) {
        StreamEvent event = streamEventChunk.next();
        Boolean currentThrottleState = (Boolean) isThrottledExpressionExecutor.execute(event);
        String key = (String) keyExpressionExecutor.execute(event);
        Boolean lastThrottleState = (Boolean) throttleStateMap.get(key);
        // Suppress the event when the throttle state is unchanged and the key is not currently throttled;
        // otherwise record the latest state and let the event through.
        if (currentThrottleState.equals(lastThrottleState) && !currentThrottleState) {
            streamEventChunk.remove();
        } else {
            throttleStateMap.put(key, currentThrottleState);
        }
    }
    nextProcessor.process(streamEventChunk);
}
Also used : StreamEvent(org.wso2.siddhi.core.event.stream.StreamEvent)
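
EmitOnStateChange depends on ComplexEventChunk's remove-while-iterating behaviour: next() advances the cursor and remove() unlinks the event the cursor just returned. A minimal, hypothetical sketch of that pattern follows (class name, sizes and values are invented; the StreamEvent constructor and setOutputData are assumed from the Siddhi 4.x API).

import org.wso2.siddhi.core.event.ComplexEventChunk;
import org.wso2.siddhi.core.event.stream.StreamEvent;

// Hypothetical illustration, not part of Siddhi or carbon-apimgt.
public class ChunkFilterSketch {
    public static void main(String[] args) {
        ComplexEventChunk<StreamEvent> chunk = new ComplexEventChunk<>(false);
        for (int i = 0; i < 5; i++) {
            StreamEvent event = new StreamEvent(0, 0, 1);
            event.setOutputData(new Object[]{i});
            chunk.add(event);
        }
        // Same remove-while-iterating pattern as EmitOnStateChange.process:
        // drop events that fail the condition, keep the rest linked in place.
        chunk.reset();
        while (chunk.hasNext()) {
            StreamEvent event = chunk.next();
            int value = (Integer) event.getOutputData()[0];
            if (value % 2 == 0) {
                // Unlinks the event the cursor just returned.
                chunk.remove();
            }
        }
        chunk.reset();
        while (chunk.hasNext()) {
            // Prints 1 and 3.
            System.out.println(chunk.next().getOutputData()[0]);
        }
    }
}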

Example 54 with ComplexEventChunk

Use of org.wso2.siddhi.core.event.ComplexEventChunk in project carbon-apimgt by wso2.

Class ThrottleStreamProcessor, method process.

@Override
protected void process(ComplexEventChunk<StreamEvent> streamEventChunk, Processor nextProcessor, StreamEventCloner streamEventCloner, ComplexEventPopulater complexEventPopulater) {
    synchronized (this) {
        if (expireEventTime == -1) {
            long currentTime = siddhiAppContext.getTimestampGenerator().currentTime();
            if (startTime != -1) {
                expireEventTime = addTimeShift(currentTime);
            } else {
                expireEventTime = siddhiAppContext.getTimestampGenerator().currentTime() + timeInMilliSeconds;
            }
            if (scheduler != null) {
                scheduler.notifyAt(expireEventTime);
            } else {
                log.error("scheduler is not initiated");
            }
        }
        long currentTime = siddhiAppContext.getTimestampGenerator().currentTime();
        boolean sendEvents;
        if (currentTime >= expireEventTime) {
            expireEventTime += timeInMilliSeconds;
            if (scheduler != null) {
                scheduler.notifyAt(expireEventTime);
            } else {
                log.error("scheduler is not initiated");
            }
            sendEvents = true;
        } else {
            sendEvents = false;
        }
        while (streamEventChunk.hasNext()) {
            StreamEvent streamEvent = streamEventChunk.next();
            // Only CURRENT events are tracked; timer and expired events are skipped.
            if (streamEvent.getType() != ComplexEvent.Type.CURRENT) {
                continue;
            }
            // Attach the window expiry timestamp and buffer an EXPIRED copy for the next emission.
            complexEventPopulater.populateComplexEvent(streamEvent, new Object[] { expireEventTime });
            StreamEvent clonedStreamEvent = streamEventCloner.copyStreamEvent(streamEvent);
            clonedStreamEvent.setType(StreamEvent.Type.EXPIRED);
            clonedStreamEvent.setTimestamp(expireEventTime);
            expiredEventChunk.add(clonedStreamEvent);
        }
        if (sendEvents) {
            expiredEventChunk.reset();
            if (expiredEventChunk.getFirst() != null) {
                streamEventChunk.add(expiredEventChunk.getFirst());
            }
            expiredEventChunk.clear();
        }
    }
    if (streamEventChunk.getFirst() != null) {
        streamEventChunk.setBatch(true);
        nextProcessor.process(streamEventChunk);
        streamEventChunk.setBatch(false);
    }
}
Also used : StreamEvent(org.wso2.siddhi.core.event.stream.StreamEvent)
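
The flush at the end of process() is a recurring chunk idiom: the buffered expired events are moved wholesale into the outgoing chunk via getFirst(), the buffer is cleared, and the chunk is flagged as a batch only around the downstream call. A minimal, hypothetical sketch of that flush (class name, sizes and values are invented; the StreamEvent constructor and setOutputData call are assumed, the chunk methods are the ones used above):

import org.wso2.siddhi.core.event.ComplexEventChunk;
import org.wso2.siddhi.core.event.stream.StreamEvent;

// Hypothetical illustration, not part of carbon-apimgt.
public class BatchFlushSketch {
    public static void main(String[] args) {
        ComplexEventChunk<StreamEvent> expiredEventChunk = new ComplexEventChunk<>(false);
        ComplexEventChunk<StreamEvent> streamEventChunk = new ComplexEventChunk<>(false);
        // Accumulate a few EXPIRED copies, as the processor does between emits.
        for (int i = 0; i < 2; i++) {
            StreamEvent expired = new StreamEvent(0, 0, 1);
            expired.setOutputData(new Object[]{i});
            expired.setType(StreamEvent.Type.EXPIRED);
            expiredEventChunk.add(expired);
        }
        // Flush: move the whole linked list into the outgoing chunk, then clear the buffer.
        expiredEventChunk.reset();
        if (expiredEventChunk.getFirst() != null) {
            streamEventChunk.add(expiredEventChunk.getFirst());
        }
        expiredEventChunk.clear();
        // Mark the outgoing chunk as a batch only around the downstream call, as process() does.
        if (streamEventChunk.getFirst() != null) {
            streamEventChunk.setBatch(true);
            // nextProcessor.process(streamEventChunk) would be invoked here in the real processor.
            streamEventChunk.setBatch(false);
        }
    }
}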

Example 55 with ComplexEventChunk

Use of org.wso2.siddhi.core.event.ComplexEventChunk in project siddhi by wso2.

Class AggregationRuntime, method compileExpression.

public CompiledCondition compileExpression(Expression expression, Within within, Expression per, MatchingMetaInfoHolder matchingMetaInfoHolder, List<VariableExpressionExecutor> variableExpressionExecutors, Map<String, Table> tableMap, String queryName, SiddhiAppContext siddhiAppContext) {
    Map<TimePeriod.Duration, CompiledCondition> withinTableCompiledConditions = new HashMap<>();
    CompiledCondition withinInMemoryCompileCondition;
    CompiledCondition onCompiledCondition;
    List<Attribute> additionalAttributes = new ArrayList<>();
    // Define additional attribute list
    additionalAttributes.add(new Attribute("_START", Attribute.Type.LONG));
    additionalAttributes.add(new Attribute("_END", Attribute.Type.LONG));
    // Get table definition. Table definitions for all the tables used to persist aggregates are similar.
    // Therefore it's enough to get the definition from one table.
    AbstractDefinition tableDefinition = ((Table) aggregationTables.values().toArray()[0]).getTableDefinition();
    // Alter existing meta stream event or create new one if a meta stream doesn't exist
    // After calling this method the original MatchingMetaInfoHolder's meta stream event would be altered
    MetaStreamEvent newMetaStreamEventWithStartEnd = createNewMetaStreamEventWithStartEnd(matchingMetaInfoHolder, additionalAttributes);
    MatchingMetaInfoHolder alteredMatchingMetaInfoHolder = null;
    // Alter meta info holder to contain stream event and aggregate both when it's a store query
    if (matchingMetaInfoHolder.getMetaStateEvent().getMetaStreamEvents().length == 1) {
        matchingMetaInfoHolder = alterMetaInfoHolderForStoreQuery(newMetaStreamEventWithStartEnd, matchingMetaInfoHolder);
        alteredMatchingMetaInfoHolder = matchingMetaInfoHolder;
    }
    // Create new MatchingMetaInfoHolder containing newMetaStreamEventWithStartEnd and table meta event
    MatchingMetaInfoHolder streamTableMetaInfoHolderWithStartEnd = createNewStreamTableMetaInfoHolder(newMetaStreamEventWithStartEnd, tableDefinition);
    // Create per expression executor
    ExpressionExecutor perExpressionExecutor = ExpressionParser.parseExpression(per, matchingMetaInfoHolder.getMetaStateEvent(), matchingMetaInfoHolder.getCurrentState(), tableMap, variableExpressionExecutors, siddhiAppContext, false, 0, queryName);
    if (perExpressionExecutor.getReturnType() != Attribute.Type.STRING) {
        throw new SiddhiAppCreationException("Query " + queryName + "'s per value expected a string but found " + perExpressionExecutor.getReturnType(), per.getQueryContextStartIndex(), per.getQueryContextEndIndex());
    }
    // Create within expression
    Expression withinExpression;
    Expression start = Expression.variable(additionalAttributes.get(0).getName());
    Expression end = Expression.variable(additionalAttributes.get(1).getName());
    Expression compareWithStartTime = Compare.compare(start, Compare.Operator.LESS_THAN_EQUAL, Expression.variable("AGG_TIMESTAMP"));
    Expression compareWithEndTime = Compare.compare(Expression.variable("AGG_TIMESTAMP"), Compare.Operator.LESS_THAN, end);
    withinExpression = Expression.and(compareWithStartTime, compareWithEndTime);
    // Create start and end time expression
    Expression startEndTimeExpression;
    if (within.getTimeRange().size() == 1) {
        startEndTimeExpression = new AttributeFunction("incrementalAggregator", "startTimeEndTime", within.getTimeRange().get(0));
    } else {
        // within.getTimeRange().size() == 2
        startEndTimeExpression = new AttributeFunction("incrementalAggregator", "startTimeEndTime", within.getTimeRange().get(0), within.getTimeRange().get(1));
    }
    ExpressionExecutor startTimeEndTimeExpressionExecutor = ExpressionParser.parseExpression(startEndTimeExpression, matchingMetaInfoHolder.getMetaStateEvent(), matchingMetaInfoHolder.getCurrentState(), tableMap, variableExpressionExecutors, siddhiAppContext, false, 0, queryName);
    // These compile conditions are used to check whether the aggregates in tables are within the given duration.
    for (Map.Entry<TimePeriod.Duration, Table> entry : aggregationTables.entrySet()) {
        CompiledCondition withinTableCompileCondition = entry.getValue().compileCondition(withinExpression, streamTableMetaInfoHolderWithStartEnd, siddhiAppContext, variableExpressionExecutors, tableMap, queryName);
        withinTableCompiledConditions.put(entry.getKey(), withinTableCompileCondition);
    }
    // Create compile condition for in-memory data.
    // This compile condition is used to check whether the running aggregates (in-memory data)
    // are within given duration
    withinInMemoryCompileCondition = OperatorParser.constructOperator(new ComplexEventChunk<>(true), withinExpression, streamTableMetaInfoHolderWithStartEnd, siddhiAppContext, variableExpressionExecutors, tableMap, queryName);
    // On compile condition.
    // After finding all the aggregates belonging to within duration, the final on condition (given as
    // "on stream1.name == aggregator.nickName ..." in the join query) must be executed on that data.
    // This condition is used for that purpose.
    onCompiledCondition = OperatorParser.constructOperator(new ComplexEventChunk<>(true), expression, matchingMetaInfoHolder, siddhiAppContext, variableExpressionExecutors, tableMap, queryName);
    return new IncrementalAggregateCompileCondition(withinTableCompiledConditions, withinInMemoryCompileCondition, onCompiledCondition, tableMetaStreamEvent, aggregateMetaSteamEvent, additionalAttributes, alteredMatchingMetaInfoHolder, perExpressionExecutor, startTimeEndTimeExpressionExecutor);
}
Also used : Table(org.wso2.siddhi.core.table.Table) VariableExpressionExecutor(org.wso2.siddhi.core.executor.VariableExpressionExecutor) ExpressionExecutor(org.wso2.siddhi.core.executor.ExpressionExecutor) ComplexEventChunk(org.wso2.siddhi.core.event.ComplexEventChunk) HashMap(java.util.HashMap) Attribute(org.wso2.siddhi.query.api.definition.Attribute) SiddhiAppCreationException(org.wso2.siddhi.core.exception.SiddhiAppCreationException) ArrayList(java.util.ArrayList) IncrementalAggregateCompileCondition(org.wso2.siddhi.core.util.collection.operator.IncrementalAggregateCompileCondition) AbstractDefinition(org.wso2.siddhi.query.api.definition.AbstractDefinition) AttributeFunction(org.wso2.siddhi.query.api.expression.AttributeFunction) CompiledCondition(org.wso2.siddhi.core.util.collection.operator.CompiledCondition) Expression(org.wso2.siddhi.query.api.expression.Expression) MatchingMetaInfoHolder(org.wso2.siddhi.core.util.collection.operator.MatchingMetaInfoHolder) Map(java.util.Map) MetaStreamEvent(org.wso2.siddhi.core.event.stream.MetaStreamEvent)
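
The within condition built above can be reproduced on its own with the siddhi-query-api expression DSL; the sketch below uses only calls that appear in compileExpression (Expression.variable, Compare.compare, Expression.and). The class name is invented and the import path for Compare is assumed.

import org.wso2.siddhi.query.api.expression.Expression;
import org.wso2.siddhi.query.api.expression.condition.Compare;

// Hypothetical illustration, not part of the Siddhi code base.
public class WithinExpressionSketch {
    public static void main(String[] args) {
        // Rebuild the within condition used above: _START <= AGG_TIMESTAMP AND AGG_TIMESTAMP < _END.
        Expression start = Expression.variable("_START");
        Expression end = Expression.variable("_END");
        Expression compareWithStartTime = Compare.compare(start, Compare.Operator.LESS_THAN_EQUAL, Expression.variable("AGG_TIMESTAMP"));
        Expression compareWithEndTime = Compare.compare(Expression.variable("AGG_TIMESTAMP"), Compare.Operator.LESS_THAN, end);
        Expression withinExpression = Expression.and(compareWithStartTime, compareWithEndTime);
        // The resulting Expression is what the aggregation tables' compileCondition receives.
        System.out.println(withinExpression);
    }
}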

Aggregations

StreamEvent (org.wso2.siddhi.core.event.stream.StreamEvent) 72
ComplexEventChunk (org.wso2.siddhi.core.event.ComplexEventChunk) 69
ComplexEvent (org.wso2.siddhi.core.event.ComplexEvent) 44
ArrayList (java.util.ArrayList) 30
StateEvent (org.wso2.siddhi.core.event.state.StateEvent) 26
MetaStreamEvent (org.wso2.siddhi.core.event.stream.MetaStreamEvent) 19
GroupedComplexEvent (org.wso2.siddhi.core.event.GroupedComplexEvent) 17
ExpressionExecutor (org.wso2.siddhi.core.executor.ExpressionExecutor) 13
Map (java.util.Map) 12
VariableExpressionExecutor (org.wso2.siddhi.core.executor.VariableExpressionExecutor) 8
HashMap (java.util.HashMap) 6
ConstantExpressionExecutor (org.wso2.siddhi.core.executor.ConstantExpressionExecutor) 5
AttributeProcessor (org.wso2.siddhi.core.query.selector.attribute.processor.AttributeProcessor) 4
SiddhiAppValidationException (org.wso2.siddhi.query.api.exception.SiddhiAppValidationException) 4
Collection (java.util.Collection) 3
Event (org.wso2.siddhi.core.event.Event) 3
SiddhiAppRuntimeException (org.wso2.siddhi.core.exception.SiddhiAppRuntimeException) 3
Table (org.wso2.siddhi.core.table.Table) 3
Iterator (java.util.Iterator) 2
StreamPreStateProcessor (org.wso2.siddhi.core.query.input.stream.state.StreamPreStateProcessor) 2