
Example 81 with Within

Use of org.wso2.siddhi.query.api.aggregation.Within in project siddhi by wso2.

Class AggregationRuntime, method compileExpression:

public CompiledCondition compileExpression(Expression expression, Within within, Expression per, MatchingMetaInfoHolder matchingMetaInfoHolder, List<VariableExpressionExecutor> variableExpressionExecutors, Map<String, Table> tableMap, String queryName, SiddhiAppContext siddhiAppContext) {
    Map<TimePeriod.Duration, CompiledCondition> withinTableCompiledConditions = new HashMap<>();
    CompiledCondition withinInMemoryCompileCondition;
    CompiledCondition onCompiledCondition;
    List<Attribute> additionalAttributes = new ArrayList<>();
    // Define additional attribute list
    additionalAttributes.add(new Attribute("_START", Attribute.Type.LONG));
    additionalAttributes.add(new Attribute("_END", Attribute.Type.LONG));
    // Get table definition. Table definitions for all the tables used to persist aggregates are similar.
    // Therefore it's enough to get the definition from one table.
    AbstractDefinition tableDefinition = ((Table) aggregationTables.values().toArray()[0]).getTableDefinition();
    // Alter existing meta stream event or create new one if a meta stream doesn't exist
    // After calling this method the original MatchingMetaInfoHolder's meta stream event would be altered
    MetaStreamEvent newMetaStreamEventWithStartEnd = createNewMetaStreamEventWithStartEnd(matchingMetaInfoHolder, additionalAttributes);
    MatchingMetaInfoHolder alteredMatchingMetaInfoHolder = null;
    // Alter meta info holder to contain stream event and aggregate both when it's a store query
    if (matchingMetaInfoHolder.getMetaStateEvent().getMetaStreamEvents().length == 1) {
        matchingMetaInfoHolder = alterMetaInfoHolderForStoreQuery(newMetaStreamEventWithStartEnd, matchingMetaInfoHolder);
        alteredMatchingMetaInfoHolder = matchingMetaInfoHolder;
    }
    // Create new MatchingMetaInfoHolder containing newMetaStreamEventWithStartEnd and table meta event
    MatchingMetaInfoHolder streamTableMetaInfoHolderWithStartEnd = createNewStreamTableMetaInfoHolder(newMetaStreamEventWithStartEnd, tableDefinition);
    // Create per expression executor
    ExpressionExecutor perExpressionExecutor = ExpressionParser.parseExpression(per, matchingMetaInfoHolder.getMetaStateEvent(), matchingMetaInfoHolder.getCurrentState(), tableMap, variableExpressionExecutors, siddhiAppContext, false, 0, queryName);
    if (perExpressionExecutor.getReturnType() != Attribute.Type.STRING) {
        throw new SiddhiAppCreationException("Query " + queryName + "'s per value expected a string but found " + perExpressionExecutor.getReturnType(), per.getQueryContextStartIndex(), per.getQueryContextEndIndex());
    }
    // Create within expression
    Expression withinExpression;
    Expression start = Expression.variable(additionalAttributes.get(0).getName());
    Expression end = Expression.variable(additionalAttributes.get(1).getName());
    Expression compareWithStartTime = Compare.compare(start, Compare.Operator.LESS_THAN_EQUAL, Expression.variable("AGG_TIMESTAMP"));
    Expression compareWithEndTime = Compare.compare(Expression.variable("AGG_TIMESTAMP"), Compare.Operator.LESS_THAN, end);
    withinExpression = Expression.and(compareWithStartTime, compareWithEndTime);
    // Create start and end time expression
    Expression startEndTimeExpression;
    if (within.getTimeRange().size() == 1) {
        startEndTimeExpression = new AttributeFunction("incrementalAggregator", "startTimeEndTime", within.getTimeRange().get(0));
    } else {
        // within.getTimeRange().size() == 2
        startEndTimeExpression = new AttributeFunction("incrementalAggregator", "startTimeEndTime", within.getTimeRange().get(0), within.getTimeRange().get(1));
    }
    ExpressionExecutor startTimeEndTimeExpressionExecutor = ExpressionParser.parseExpression(startEndTimeExpression, matchingMetaInfoHolder.getMetaStateEvent(), matchingMetaInfoHolder.getCurrentState(), tableMap, variableExpressionExecutors, siddhiAppContext, false, 0, queryName);
    // These compile conditions are used to check whether the aggregates in tables are within the given duration.
    for (Map.Entry<TimePeriod.Duration, Table> entry : aggregationTables.entrySet()) {
        CompiledCondition withinTableCompileCondition = entry.getValue().compileCondition(withinExpression, streamTableMetaInfoHolderWithStartEnd, siddhiAppContext, variableExpressionExecutors, tableMap, queryName);
        withinTableCompiledConditions.put(entry.getKey(), withinTableCompileCondition);
    }
    // Create compile condition for in-memory data.
    // This compile condition is used to check whether the running aggregates (in-memory data)
    // are within given duration
    withinInMemoryCompileCondition = OperatorParser.constructOperator(new ComplexEventChunk<>(true), withinExpression, streamTableMetaInfoHolderWithStartEnd, siddhiAppContext, variableExpressionExecutors, tableMap, queryName);
    // On compile condition.
    // After finding all the aggregates belonging to within duration, the final on condition (given as
    // "on stream1.name == aggregator.nickName ..." in the join query) must be executed on that data.
    // This condition is used for that purpose.
    onCompiledCondition = OperatorParser.constructOperator(new ComplexEventChunk<>(true), expression, matchingMetaInfoHolder, siddhiAppContext, variableExpressionExecutors, tableMap, queryName);
    return new IncrementalAggregateCompileCondition(withinTableCompiledConditions, withinInMemoryCompileCondition, onCompiledCondition, tableMetaStreamEvent, aggregateMetaSteamEvent, additionalAttributes, alteredMatchingMetaInfoHolder, perExpressionExecutor, startTimeEndTimeExpressionExecutor);
}
Also used : Table(org.wso2.siddhi.core.table.Table) VariableExpressionExecutor(org.wso2.siddhi.core.executor.VariableExpressionExecutor) ExpressionExecutor(org.wso2.siddhi.core.executor.ExpressionExecutor) ComplexEventChunk(org.wso2.siddhi.core.event.ComplexEventChunk) HashMap(java.util.HashMap) Attribute(org.wso2.siddhi.query.api.definition.Attribute) SiddhiAppCreationException(org.wso2.siddhi.core.exception.SiddhiAppCreationException) ArrayList(java.util.ArrayList) IncrementalAggregateCompileCondition(org.wso2.siddhi.core.util.collection.operator.IncrementalAggregateCompileCondition) AbstractDefinition(org.wso2.siddhi.query.api.definition.AbstractDefinition) AttributeFunction(org.wso2.siddhi.query.api.expression.AttributeFunction) CompiledCondition(org.wso2.siddhi.core.util.collection.operator.CompiledCondition) Expression(org.wso2.siddhi.query.api.expression.Expression) MatchingMetaInfoHolder(org.wso2.siddhi.core.util.collection.operator.MatchingMetaInfoHolder) HashMap(java.util.HashMap) Map(java.util.Map) MetaStreamEvent(org.wso2.siddhi.core.event.stream.MetaStreamEvent)
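
The within condition assembled above reduces to _START <= AGG_TIMESTAMP < _END, and the caller supplies the Within and per arguments. Below is a minimal sketch of building those arguments with the Siddhi query API; the Within.within(...) factories, Expression.value(...) overloads, and the Compare import path are assumed from the 4.x API layout and should be checked against your version.

import org.wso2.siddhi.query.api.aggregation.Within;
import org.wso2.siddhi.query.api.expression.Expression;
import org.wso2.siddhi.query.api.expression.condition.Compare;

public class WithinArgumentsSketch {
    public static void main(String[] args) {
        // Two-value form: explicit start and end timestamps, as in incrementalStreamProcessorTest9.
        Within absoluteRange = Within.within(
                Expression.value(1496200000000L),
                Expression.value(1596434876000L));
        // Single-value form: a wildcard pattern string, as in incrementalStreamProcessorTest43.
        Within patternRange = Within.within(Expression.value("2017-06-01 04:05:**"));
        // The per duration at which aggregates are read back.
        Expression per = Expression.value("seconds");
        // The same _START <= AGG_TIMESTAMP < _END condition that compileExpression builds internally.
        Expression withinExpression = Expression.and(
                Compare.compare(Expression.variable("_START"),
                        Compare.Operator.LESS_THAN_EQUAL, Expression.variable("AGG_TIMESTAMP")),
                Compare.compare(Expression.variable("AGG_TIMESTAMP"),
                        Compare.Operator.LESS_THAN, Expression.variable("_END")));
        System.out.println(absoluteRange + " / " + patternRange + " / " + per + " / " + withinExpression);
    }
}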

Example 82 with Within

Use of org.wso2.siddhi.query.api.aggregation.Within in project siddhi by wso2.

Class JoinInputStreamParser, method setStreamRuntimeProcessorChain:

private static void setStreamRuntimeProcessorChain(MetaStreamEvent metaStreamEvent, SingleStreamRuntime streamRuntime, String inputStreamId, Map<String, Table> tableMap, Map<String, Window> windowMap, Map<String, AggregationRuntime> aggregationMap, List<VariableExpressionExecutor> variableExpressionExecutors, boolean outputExpectsExpiredEvents, String queryName, Within within, Expression per, SiddhiAppContext siddhiAppContext, InputStream inputStream) {
    switch(metaStreamEvent.getEventType()) {
        case TABLE:
            TableWindowProcessor tableWindowProcessor = new TableWindowProcessor(tableMap.get(inputStreamId));
            tableWindowProcessor.initProcessor(metaStreamEvent.getLastInputDefinition(), new ExpressionExecutor[0], null, siddhiAppContext, outputExpectsExpiredEvents, queryName, inputStream);
            streamRuntime.setProcessorChain(tableWindowProcessor);
            break;
        case WINDOW:
            WindowWindowProcessor windowWindowProcessor = new WindowWindowProcessor(windowMap.get(inputStreamId));
            windowWindowProcessor.initProcessor(metaStreamEvent.getLastInputDefinition(), variableExpressionExecutors.toArray(new ExpressionExecutor[0]), null, siddhiAppContext, outputExpectsExpiredEvents, queryName, inputStream);
            streamRuntime.setProcessorChain(windowWindowProcessor);
            break;
        case AGGREGATE:
            AggregationRuntime aggregationRuntime = aggregationMap.get(inputStreamId);
            AggregateWindowProcessor aggregateWindowProcessor = new AggregateWindowProcessor(aggregationRuntime, within, per);
            aggregateWindowProcessor.initProcessor(metaStreamEvent.getLastInputDefinition(), variableExpressionExecutors.toArray(new ExpressionExecutor[0]), null, siddhiAppContext, outputExpectsExpiredEvents, queryName, inputStream);
            streamRuntime.setProcessorChain(aggregateWindowProcessor);
            break;
        case DEFAULT:
            break;
    }
}
Also used : AggregateWindowProcessor(org.wso2.siddhi.core.query.processor.stream.window.AggregateWindowProcessor) TableWindowProcessor(org.wso2.siddhi.core.query.processor.stream.window.TableWindowProcessor) VariableExpressionExecutor(org.wso2.siddhi.core.executor.VariableExpressionExecutor) ExpressionExecutor(org.wso2.siddhi.core.executor.ExpressionExecutor) ConstantExpressionExecutor(org.wso2.siddhi.core.executor.ConstantExpressionExecutor) WindowWindowProcessor(org.wso2.siddhi.core.query.processor.stream.window.WindowWindowProcessor) AggregationRuntime(org.wso2.siddhi.core.aggregation.AggregationRuntime)
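
The AGGREGATE branch is taken when a query joins a stream with a defined aggregation; the parsed within clause and per expression are what get handed to AggregateWindowProcessor. An illustrative SiddhiQL app, written as a Java string and mirroring the test cases below (stream and attribute names are made up):

String illustrativeApp =
        "define stream stockStream (symbol string, price float, quantity int, timestamp long); " +
        "define aggregation stockAggregation " +
        "from stockStream " +
        "select symbol, avg(price) as avgPrice, sum(price) as totalPrice " +
        "group by symbol " +
        "aggregate by timestamp every sec...year; " +
        "define stream inputStream (symbol string, perValue string); " +
        "@info(name = 'query1') " +
        // The within/per pair below becomes the Within and per Expression passed in above.
        "from inputStream as i join stockAggregation as s " +
        "within \"2017-06-01 04:05:**\" " +
        "per \"seconds\" " +
        "select AGG_TIMESTAMP, s.symbol, s.avgPrice, s.totalPrice " +
        "insert into outputStream;";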

Example 83 with Within

Use of org.wso2.siddhi.query.api.aggregation.Within in project siddhi by wso2.

Class AggregationTestCase, method incrementalStreamProcessorTest35:

@Test(dependsOnMethods = { "incrementalStreamProcessorTest34" })
public void incrementalStreamProcessorTest35() throws InterruptedException {
    LOG.info("incrementalStreamProcessorTest35");
    SiddhiManager siddhiManager = new SiddhiManager();
    String stockStream = "define stream stockStream (symbol string, price float, lastClosingPrice float, volume long , " + "quantity int, timestamp long);";
    String query = " define aggregation stockAggregation " + "from stockStream " + "select symbol, avg(price) as avgPrice, sum(price) as totalPrice, (price * quantity) " + "as lastTradeValue  " + "group by symbol " + "aggregate by timestamp every sec...hour ;";
    SiddhiAppRuntime siddhiAppRuntime = siddhiManager.createSiddhiAppRuntime(stockStream + query);
    InputHandler stockStreamInputHandler = siddhiAppRuntime.getInputHandler("stockStream");
    siddhiAppRuntime.start();
    stockStreamInputHandler.send(new Object[] { "WSO2", 50f, 60f, 90L, 6, 1496289950000L });
    stockStreamInputHandler.send(new Object[] { "WSO2", 70f, null, 40L, 10, 1496289950000L });
    stockStreamInputHandler.send(new Object[] { "WSO2", 60f, 44f, 200L, 56, 1496289952000L });
    stockStreamInputHandler.send(new Object[] { "WSO2", 100f, null, 200L, 16, 1496289952000L });
    stockStreamInputHandler.send(new Object[] { "IBM", 100f, null, 200L, 26, 1496289954000L });
    stockStreamInputHandler.send(new Object[] { "IBM", 100f, null, 200L, 96, 1496289954000L });
    stockStreamInputHandler.send(new Object[] { "CISCO", 100f, null, 200L, 26, 1513578087000L });
    stockStreamInputHandler.send(new Object[] { "CISCO", 100f, null, 200L, 96, 1513578087000L });
    Thread.sleep(100);
    Event[] events = siddhiAppRuntime.query("from stockAggregation " + "within \"2017-12-18 11:51:27 +05:30\" " + "per \"seconds\" " + "select * " + "order by AGG_TIMESTAMP ;");
    EventPrinter.print(events);
    AssertJUnit.assertEquals(1, events.length);
    for (int i = 0; i < events.length; i++) {
        switch(i) {
            case 1:
                AssertJUnit.assertArrayEquals(new Object[] { 1513578087000L, "CISCO", 100.0, 200.0, 9600f }, events[i].getData());
                break;
            default:
                AssertJUnit.assertEquals(1, events.length);
        }
    }
    Thread.sleep(100);
    siddhiAppRuntime.shutdown();
}
Also used : InputHandler(org.wso2.siddhi.core.stream.input.InputHandler) SiddhiAppRuntime(org.wso2.siddhi.core.SiddhiAppRuntime) Event(org.wso2.siddhi.core.event.Event) SiddhiManager(org.wso2.siddhi.core.SiddhiManager) Test(org.testng.annotations.Test)
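
For context, the within string in the store query above pins down a single second: "2017-12-18 11:51:27 +05:30" is epoch millisecond 1513578087000, the exact timestamp of the two CISCO events, which is why exactly one second-level aggregate (sum 200.0, lastTradeValue 100 * 96 = 9600f) is returned. A quick standalone check of that conversion, using plain java.time rather than Siddhi's own parser:

import java.time.OffsetDateTime;
import java.time.format.DateTimeFormatter;

public class WithinInstantCheck {
    public static void main(String[] args) {
        OffsetDateTime odt = OffsetDateTime.parse("2017-12-18 11:51:27 +05:30",
                DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss XXX"));
        // Prints 1513578087000, matching the CISCO events' timestamp.
        System.out.println(odt.toInstant().toEpochMilli());
    }
}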

Example 84 with Within

Use of org.wso2.siddhi.query.api.aggregation.Within in project siddhi by wso2.

Class AggregationTestCase, method incrementalStreamProcessorTest9:

@Test(dependsOnMethods = { "incrementalStreamProcessorTest8" })
public void incrementalStreamProcessorTest9() throws InterruptedException {
    LOG.info("incrementalStreamProcessorTest9");
    SiddhiManager siddhiManager = new SiddhiManager();
    String stockStream = "define stream stockStream (symbol string, price float, lastClosingPrice float, volume long , " + "quantity int, timestamp long);";
    String query = "define aggregation stockAggregation " + "from stockStream " + "select avg(price) as avgPrice, sum(price) as totalPrice, (price * quantity) as lastTradeValue, " + "count() as count " + "aggregate by timestamp every min, day, year ;" + "define stream inputStream (symbol string, value int, startTime string, " + "endTime string, perValue string); " + "@info(name = 'query1') " + "from inputStream as i join stockAggregation as s " + "within 1496200000000L, 1596434876000L " + "per \"days\" " + "select AGG_TIMESTAMP, s.avgPrice, totalPrice, lastTradeValue, count " + "order by AGG_TIMESTAMP " + "insert all events into outputStream; ";
    SiddhiAppRuntime siddhiAppRuntime = siddhiManager.createSiddhiAppRuntime(stockStream + query);
    try {
        siddhiAppRuntime.addCallback("query1", new QueryCallback() {

            @Override
            public void receive(long timestamp, Event[] inEvents, Event[] removeEvents) {
                if (inEvents != null) {
                    EventPrinter.print(timestamp, inEvents, removeEvents);
                    for (Event event : inEvents) {
                        inEventsList.add(event.getData());
                        inEventCount.incrementAndGet();
                    }
                    eventArrived = true;
                }
                if (removeEvents != null) {
                    EventPrinter.print(timestamp, inEvents, removeEvents);
                    for (Event event : removeEvents) {
                        removeEventsList.add(event.getData());
                        removeEventCount.incrementAndGet();
                    }
                }
                eventArrived = true;
            }
        });
        InputHandler stockStreamInputHandler = siddhiAppRuntime.getInputHandler("stockStream");
        InputHandler inputStreamInputHandler = siddhiAppRuntime.getInputHandler("inputStream");
        siddhiAppRuntime.start();
        // Thursday, June 1, 2017 4:05:50 AM
        stockStreamInputHandler.send(new Object[] { "WSO2", 50f, 60f, 90L, 6, 1496289950000L });
        stockStreamInputHandler.send(new Object[] { "WSO2", 70f, null, 40L, 10, 1496289950000L });
        // Thursday, June 1, 2017 4:05:52 AM
        stockStreamInputHandler.send(new Object[] { "WSO2", 60f, 44f, 200L, 56, 1496289952000L });
        stockStreamInputHandler.send(new Object[] { "WSO2", 100f, null, 200L, 16, 1496289952000L });
        // Thursday, June 1, 2017 4:05:54 AM
        stockStreamInputHandler.send(new Object[] { "IBM", 100f, null, 200L, 26, 1496289954000L });
        stockStreamInputHandler.send(new Object[] { "IBM", 100f, null, 200L, 96, 1496289954000L });
        // Thursday, June 1, 2017 4:05:56 AM
        stockStreamInputHandler.send(new Object[] { "IBM", 900f, null, 200L, 60, 1496289956000L });
        stockStreamInputHandler.send(new Object[] { "IBM", 500f, null, 200L, 7, 1496289956000L });
        // Thursday, June 1, 2017 4:06:56 AM
        stockStreamInputHandler.send(new Object[] { "IBM", 400f, null, 200L, 9, 1496290016000L });
        // Thursday, June 1, 2017 4:07:56 AM
        stockStreamInputHandler.send(new Object[] { "IBM", 600f, null, 200L, 6, 1496290076000L });
        // Thursday, June 1, 2017 5:07:56 AM
        stockStreamInputHandler.send(new Object[] { "CISCO", 700f, null, 200L, 20, 1496293676000L });
        // Thursday, June 1, 2017 6:07:56 AM
        stockStreamInputHandler.send(new Object[] { "WSO2", 60f, 44f, 200L, 56, 1496297276000L });
        // Friday, June 2, 2017 6:07:56 AM
        stockStreamInputHandler.send(new Object[] { "CISCO", 800f, null, 100L, 10, 1496383676000L });
        // Saturday, June 3, 2017 6:07:56 AM
        stockStreamInputHandler.send(new Object[] { "CISCO", 900f, null, 100L, 15, 1496470076000L });
        // Monday, July 3, 2017 6:07:56 AM
        stockStreamInputHandler.send(new Object[] { "IBM", 100f, null, 200L, 96, 1499062076000L });
        // Thursday, August 3, 2017 6:07:56 AM
        stockStreamInputHandler.send(new Object[] { "IBM", 400f, null, 200L, 9, 1501740476000L });
        // Friday, August 3, 2018 6:07:56 AM
        stockStreamInputHandler.send(new Object[] { "WSO2", 60f, 44f, 200L, 6, 1533276476000L });
        // Saturday, August 3, 2019 6:07:56 AM
        stockStreamInputHandler.send(new Object[] { "WSO2", 260f, 44f, 200L, 16, 1564812476000L });
        // Monday, August 3, 2020 6:07:56 AM
        stockStreamInputHandler.send(new Object[] { "CISCO", 260f, 44f, 200L, 16, 1596434876000L });
        Thread.sleep(100);
        inputStreamInputHandler.send(new Object[] { "IBM", 1, "2017-06-01 09:35:51 +05:30", "2017-06-01 09:35:52 +05:30", "seconds" });
        Thread.sleep(100);
        List<Object[]> expected = Arrays.asList(new Object[] { 1496275200000L, 303.3333333333333, 3640.0, 3360f, 12L }, new Object[] { 1496361600000L, 800.0, 800.0, 8000f, 1L }, new Object[] { 1496448000000L, 900.0, 900.0, 13500f, 1L }, new Object[] { 1499040000000L, 100.0, 100.0, 9600f, 1L }, new Object[] { 1501718400000L, 400.0, 400.0, 3600f, 1L }, new Object[] { 1533254400000L, 60.0, 60.0, 360f, 1L }, new Object[] { 1564790400000L, 260.0, 260.0, 4160f, 1L }, new Object[] { 1596412800000L, 260.0, 260.0, 4160f, 1L });
        SiddhiTestHelper.waitForEvents(100, 8, inEventCount, 60000);
        AssertJUnit.assertEquals("In events matched", true, SiddhiTestHelper.isEventsMatch(inEventsList, expected));
        AssertJUnit.assertEquals("Remove events matched", true, SiddhiTestHelper.isEventsMatch(removeEventsList, expected));
        AssertJUnit.assertEquals("Number of success events", 8, inEventCount.get());
        AssertJUnit.assertEquals("Number of remove events", 8, removeEventCount.get());
        AssertJUnit.assertEquals("Event arrived", true, eventArrived);
    } finally {
        siddhiAppRuntime.shutdown();
    }
}
Also used : InputHandler(org.wso2.siddhi.core.stream.input.InputHandler) SiddhiAppRuntime(org.wso2.siddhi.core.SiddhiAppRuntime) Event(org.wso2.siddhi.core.event.Event) SiddhiManager(org.wso2.siddhi.core.SiddhiManager) QueryCallback(org.wso2.siddhi.core.query.output.callback.QueryCallback) Test(org.testng.annotations.Test)
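
The first expected row is the day bucket 1496275200000L (2017-06-01 00:00:00 UTC): the twelve June 1 events have prices summing to 3640, so avgPrice is 303.33..., and the day's last trade is the 6:07:56 AM WSO2 event, 60f * 56 = 3360f. A standalone arithmetic check of that row:

public class DayBucketCheck {
    public static void main(String[] args) {
        // Prices of the twelve events sent with June 1, 2017 timestamps.
        float[] pricesOnJune1 = {50f, 70f, 60f, 100f, 100f, 100f, 900f, 500f, 400f, 600f, 700f, 60f};
        double totalPrice = 0;
        for (float p : pricesOnJune1) {
            totalPrice += p;
        }
        double avgPrice = totalPrice / pricesOnJune1.length;
        // Last event of the day: WSO2 at 6:07:56 AM, price 60f, quantity 56.
        float lastTradeValue = 60f * 56;
        // Prints 3640.0, 303.3333..., 3360.0, 12 as in the first expected row.
        System.out.println(totalPrice + ", " + avgPrice + ", " + lastTradeValue + ", " + pricesOnJune1.length);
    }
}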

Example 85 with Within

Use of org.wso2.siddhi.query.api.aggregation.Within in project siddhi by wso2.

Class AggregationTestCase, method incrementalStreamProcessorTest43:

@Test(dependsOnMethods = { "incrementalStreamProcessorTest42" })
public void incrementalStreamProcessorTest43() throws InterruptedException {
    LOG.info("incrementalStreamProcessorTest43");
    SiddhiManager siddhiManager = new SiddhiManager();
    String stockStream = "define stream stockStream (symbol string, price float, lastClosingPrice float, volume long , " + "quantity int, timestamp long);";
    String query = "@BufferSize('3') " + "define aggregation stockAggregation " + "from stockStream " + "select avg(price) as avgPrice, sum(price) as totalPrice, (price * quantity) as " + "lastTradeValue, " + "count() as count " + "group by symbol " + "aggregate by timestamp every sec...year ;" + "define stream inputStream (symbol string, value int, startTime string, " + "endTime string, perValue string); " + "@info(name = 'query1') " + "from inputStream as i join stockAggregation as s " + "within \"2017-06-01 04:05:**\" " + "per \"seconds\" " + "select AGG_TIMESTAMP, s.avgPrice, totalPrice, lastTradeValue, count " + "order by AGG_TIMESTAMP " + "insert all events into outputStream; ";
    SiddhiAppRuntime siddhiAppRuntime = siddhiManager.createSiddhiAppRuntime(stockStream + query);
    try {
        siddhiAppRuntime.addCallback("query1", new QueryCallback() {

            @Override
            public void receive(long timestamp, Event[] inEvents, Event[] removeEvents) {
                if (inEvents != null) {
                    EventPrinter.print(timestamp, inEvents, removeEvents);
                    for (Event event : inEvents) {
                        inEventsList.add(event.getData());
                        inEventCount.incrementAndGet();
                    }
                    eventArrived = true;
                }
                if (removeEvents != null) {
                    EventPrinter.print(timestamp, inEvents, removeEvents);
                    for (Event event : removeEvents) {
                        removeEventsList.add(event.getData());
                        removeEventCount.incrementAndGet();
                    }
                }
                eventArrived = true;
            }
        });
        InputHandler stockStreamInputHandler = siddhiAppRuntime.getInputHandler("stockStream");
        InputHandler inputStreamInputHandler = siddhiAppRuntime.getInputHandler("inputStream");
        siddhiAppRuntime.start();
        // Thursday, June 1, 2017 4:05:50 AM
        stockStreamInputHandler.send(new Object[] { "WSO2", 50f, 60f, 90L, 6, 1496289950000L });
        stockStreamInputHandler.send(new Object[] { "WSO2", 70f, null, 40L, 10, 1496289950000L });
        // Thursday, June 1, 2017 4:05:51 AM
        stockStreamInputHandler.send(new Object[] { "WSO2", 60f, 44f, 200L, 56, 1496289951000L });
        stockStreamInputHandler.send(new Object[] { "WSO2", 100f, null, 200L, 16, 1496289951011L });
        // Thursday, June 1, 2017 4:05:52 AM
        stockStreamInputHandler.send(new Object[] { "IBM", 400f, null, 200L, 9, 1496289952000L });
        // Thursday, June 1, 2017 4:05:49 AM
        stockStreamInputHandler.send(new Object[] { "IBM", 900f, null, 200L, 60, 1496289949000L });
        stockStreamInputHandler.send(new Object[] { "IBM", 500f, null, 200L, 7, 1496289949000L });
        // Thursday, June 1, 2017 4:05:53 AM
        stockStreamInputHandler.send(new Object[] { "IBM", 100f, null, 200L, 26, 1496289953000L });
        stockStreamInputHandler.send(new Object[] { "IBM", 100f, null, 200L, 96, 1496289953000L });
        Thread.sleep(100);
        inputStreamInputHandler.send(new Object[] { "IBM", 1, "2017-06-01 09:35:51 +05:30", "2017-06-01 09:35:52 +05:30", "seconds" });
        Thread.sleep(100);
        List<Object[]> expected = Arrays.asList(new Object[] { 1496289949000L, 700.0, 1400.0, 3500f, 2L }, new Object[] { 1496289950000L, 60.0, 120.0, 700f, 2L }, new Object[] { 1496289951000L, 80.0, 160.0, 1600f, 2L }, new Object[] { 1496289952000L, 400.0, 400.0, 3600f, 1L }, new Object[] { 1496289953000L, 100.0, 200.0, 9600f, 2L });
        SiddhiTestHelper.waitForEvents(100, 5, inEventCount, 60000);
        AssertJUnit.assertEquals("In events matched", true, SiddhiTestHelper.isEventsMatch(inEventsList, expected));
        AssertJUnit.assertEquals("Remove events matched", true, SiddhiTestHelper.isEventsMatch(removeEventsList, expected));
        AssertJUnit.assertEquals("Number of success events", 5, inEventCount.get());
        AssertJUnit.assertEquals("Number of remove events", 5, removeEventCount.get());
        AssertJUnit.assertEquals("Event arrived", true, eventArrived);
    } finally {
        siddhiAppRuntime.shutdown();
    }
}
Also used : InputHandler(org.wso2.siddhi.core.stream.input.InputHandler) SiddhiAppRuntime(org.wso2.siddhi.core.SiddhiAppRuntime) Event(org.wso2.siddhi.core.event.Event) SiddhiManager(org.wso2.siddhi.core.SiddhiManager) QueryCallback(org.wso2.siddhi.core.query.output.callback.QueryCallback) Test(org.testng.annotations.Test)
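
Note that the two 4:05:49 AM events are sent after the 4:05:52 AM event, yet they still get their own second bucket in the expected output (900 + 500 = 1400, avg 700, lastTradeValue 500f * 7 = 3500f). The wildcard within "2017-06-01 04:05:**" covers one minute; assuming it is interpreted in GMT (which is consistent with the expected rows), that minute is [1496289900000, 1496289960000), all nine events fall inside it, and five second-level buckets result. A quick standalone check:

import java.time.OffsetDateTime;
import java.time.ZoneOffset;
import java.util.TreeSet;

public class WildcardWithinCheck {
    public static void main(String[] args) {
        long minuteStart = OffsetDateTime.of(2017, 6, 1, 4, 5, 0, 0, ZoneOffset.UTC)
                .toInstant().toEpochMilli();
        // Prints 1496289900000 .. 1496289960000
        System.out.println(minuteStart + " .. " + (minuteStart + 60_000L));
        // Timestamps of the nine stockStream events sent in the test.
        long[] sent = {1496289950000L, 1496289950000L, 1496289951000L, 1496289951011L,
                1496289952000L, 1496289949000L, 1496289949000L, 1496289953000L, 1496289953000L};
        TreeSet<Long> secondBuckets = new TreeSet<>();
        for (long t : sent) {
            secondBuckets.add(t - (t % 1000));
        }
        // Prints the five second buckets matching the five expected rows.
        System.out.println(secondBuckets);
    }
}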
