Use of org.wso2.siddhi.core.executor.ExpressionExecutor in project siddhi by wso2.
In class EventTestCase, method testConditionExpressionExecutors.
@Test
public void testConditionExpressionExecutors() {
    // StreamDefinition streamDefinition = StreamDefinition.id("cseEventStream").attribute("symbol", Attribute
    // .Type.STRING).attribute("price", Attribute.Type.FLOAT).attribute("volume", Attribute.Type.INT);
    VariableExpressionExecutor priceVariableExpressionExecutor = new VariableExpressionExecutor(
            new Attribute("price", Attribute.Type.FLOAT), 0, 0);
    priceVariableExpressionExecutor.setPosition(new int[] { 0, SiddhiConstants.UNKNOWN_STATE,
            SiddhiConstants.OUTPUT_DATA_INDEX, 1 });
    VariableExpressionExecutor volumeVariableExpressionExecutor = new VariableExpressionExecutor(
            new Attribute("volume", Attribute.Type.INT), 0, 0);
    volumeVariableExpressionExecutor.setPosition(new int[] { 0, SiddhiConstants.UNKNOWN_STATE,
            SiddhiConstants.OUTPUT_DATA_INDEX, 2 });
    // Condition being evaluated: (10 < price) AND (10 > volume)
    ExpressionExecutor compareLessThanExecutor = new LessThanCompareConditionExpressionExecutorFloatFloat(
            new ConstantExpressionExecutor(10f, Attribute.Type.FLOAT), priceVariableExpressionExecutor);
    ExpressionExecutor compareGreaterThanExecutor = new GreaterThanCompareConditionExpressionExecutorIntInt(
            new ConstantExpressionExecutor(10, Attribute.Type.INT), volumeVariableExpressionExecutor);
    ExpressionExecutor andExecutor = new AndConditionExpressionExecutor(compareLessThanExecutor,
            compareGreaterThanExecutor);
    int count = 0;
    for (int i = 0; i < 3; i++) {
        StreamEvent event = new StreamEvent(0, 0, 3);
        event.setOutputData(new Object[] { "WSO2", i * 11f, 5 });
        if ((Boolean) andExecutor.execute(event)) {
            count++;
        }
    }
    AssertJUnit.assertEquals("Two events should pass through executor", 2, count);
}
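The same executor classes compose without the AND wrapper as well. The following is a minimal sketch, not taken from the test case, that evaluates a single 10 < price check against one event; it assumes the same { symbol, price, volume } output-data layout used above and omits imports, as the snippets on this page do.

    // Sketch (assumed event layout {symbol, price, volume}): does 10 < price hold for this event?
    VariableExpressionExecutor priceExecutor = new VariableExpressionExecutor(
            new Attribute("price", Attribute.Type.FLOAT), 0, 0);
    priceExecutor.setPosition(new int[] { 0, SiddhiConstants.UNKNOWN_STATE,
            SiddhiConstants.OUTPUT_DATA_INDEX, 1 });
    ExpressionExecutor lessThanTen = new LessThanCompareConditionExpressionExecutorFloatFloat(
            new ConstantExpressionExecutor(10f, Attribute.Type.FLOAT), priceExecutor);
    StreamEvent streamEvent = new StreamEvent(0, 0, 3);
    streamEvent.setOutputData(new Object[] { "WSO2", 55.6f, 100 });
    boolean matches = (Boolean) lessThanTen.execute(streamEvent); // true, since 10 < 55.6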
Use of org.wso2.siddhi.core.executor.ExpressionExecutor in project siddhi by wso2.
In class EventTestCase, method testConditionExpressionExecutorValidation.
@Test(expectedExceptions = OperationNotSupportedException.class)
public void testConditionExpressionExecutorValidation() {
    // StreamDefinition streamDefinition = StreamDefinition.id("cseEventStream").attribute("symbol", Attribute
    // .Type.STRING).attribute("price", Attribute.Type.FLOAT).attribute("volume", Attribute.Type.INT);
    VariableExpressionExecutor volumeVariableExpressionExecutor = new VariableExpressionExecutor(
            new Attribute("volume", Attribute.Type.INT), 0, 0);
    volumeVariableExpressionExecutor.setPosition(new int[] { 0, SiddhiConstants.UNKNOWN_STATE,
            SiddhiConstants.OUTPUT_DATA_INDEX, 2 });
    ConstantExpressionExecutor constantExpressionExecutor =
            new ConstantExpressionExecutor(10f, Attribute.Type.FLOAT);
    ExpressionExecutor compareGreaterThanExecutor = new GreaterThanCompareConditionExpressionExecutorIntInt(
            new ConstantExpressionExecutor(10, Attribute.Type.INT), volumeVariableExpressionExecutor);
    // Passing a non-boolean executor (a FLOAT constant) as an AND operand should throw
    // OperationNotSupportedException at construction time.
    ExpressionExecutor andExecutor = new AndConditionExpressionExecutor(constantExpressionExecutor,
            compareGreaterThanExecutor);
}
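The same validation can be observed without TestNG's expectedExceptions attribute; a rough sketch using only the classes from the test above, where constructing the AND with a FLOAT constant as an operand is expected to fail:

    // Sketch: construction should fail because the first operand does not return BOOL.
    try {
        new AndConditionExpressionExecutor(
                new ConstantExpressionExecutor(10f, Attribute.Type.FLOAT),
                compareGreaterThanExecutor);
    } catch (OperationNotSupportedException e) {
        // expected: AND requires boolean-returning operands
    }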
Use of org.wso2.siddhi.core.executor.ExpressionExecutor in project carbon-apimgt by wso2.
In class ThrottleStreamProcessor, method init.
@Override
protected List<Attribute> init(AbstractDefinition abstractDefinition, ExpressionExecutor[] expressionExecutors,
                               ConfigReader configReader, SiddhiAppContext siddhiAppContext) {
    this.siddhiAppContext = siddhiAppContext;
    if (attributeExpressionExecutors.length == 1) {
        if (attributeExpressionExecutors[0] instanceof ConstantExpressionExecutor) {
            if (attributeExpressionExecutors[0].getReturnType() == Attribute.Type.INT) {
                timeInMilliSeconds =
                        (Integer) ((ConstantExpressionExecutor) attributeExpressionExecutors[0]).getValue();
            } else if (attributeExpressionExecutors[0].getReturnType() == Attribute.Type.LONG) {
                timeInMilliSeconds =
                        (Long) ((ConstantExpressionExecutor) attributeExpressionExecutors[0]).getValue();
            } else {
                throw new SiddhiAppValidationException("Throttle batch window's 1st parameter attribute should be "
                        + "either int or long, but found " + attributeExpressionExecutors[0].getReturnType());
            }
        } else {
            throw new SiddhiAppValidationException("Throttle batch window 1st parameter needs to be a constant "
                    + "parameter attribute but found a dynamic attribute "
                    + attributeExpressionExecutors[0].getClass().getCanonicalName());
        }
    } else if (attributeExpressionExecutors.length == 2) {
        if (attributeExpressionExecutors[0] instanceof ConstantExpressionExecutor) {
            if (attributeExpressionExecutors[0].getReturnType() == Attribute.Type.INT) {
                timeInMilliSeconds =
                        (Integer) ((ConstantExpressionExecutor) attributeExpressionExecutors[0]).getValue();
            } else if (attributeExpressionExecutors[0].getReturnType() == Attribute.Type.LONG) {
                timeInMilliSeconds =
                        (Long) ((ConstantExpressionExecutor) attributeExpressionExecutors[0]).getValue();
            } else {
                throw new SiddhiAppValidationException("Throttle batch window's 1st parameter attribute should be "
                        + "either int or long, but found " + attributeExpressionExecutors[0].getReturnType());
            }
        } else {
            throw new SiddhiAppValidationException("Throttle batch window 1st parameter needs to be a constant "
                    + "attribute but found a dynamic attribute "
                    + attributeExpressionExecutors[0].getClass().getCanonicalName());
        }
        if (attributeExpressionExecutors[1].getReturnType() == Attribute.Type.INT) {
            startTime = Integer.parseInt(
                    String.valueOf(((ConstantExpressionExecutor) attributeExpressionExecutors[1]).getValue()));
        } else if (attributeExpressionExecutors[1].getReturnType() == Attribute.Type.LONG) {
            startTime = Long.parseLong(
                    String.valueOf(((ConstantExpressionExecutor) attributeExpressionExecutors[1]).getValue()));
        } else {
            // Report the 2nd parameter's type (index 1); index 2 would be out of bounds here.
            throw new SiddhiAppValidationException("Throttle batch window 2nd parameter needs to be a Long "
                    + "or Int type but found a " + attributeExpressionExecutors[1].getReturnType());
        }
    } else {
        throw new SiddhiAppValidationException("Throttle batch window should only have one/two parameters "
                + "(<int|long|time> windowTime (and <int|long> startTime)), but found "
                + attributeExpressionExecutors.length + " input attributes");
    }
    List<Attribute> attributeList = new ArrayList<Attribute>();
    attributeList.add(new Attribute(EXPIRY_TIME_STAMP, Attribute.Type.LONG));
    return attributeList;
}
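The int/long constant handling above is repeated across the one-parameter and two-parameter branches. A hypothetical helper like the sketch below (readConstantAsLong is not part of the original class) could factor it out, using only the ConstantExpressionExecutor calls already shown:

    // Hypothetical helper, not in the original class: extract an int/long constant parameter as a long.
    private long readConstantAsLong(ExpressionExecutor executor, String position) {
        if (!(executor instanceof ConstantExpressionExecutor)) {
            throw new SiddhiAppValidationException("Throttle batch window " + position
                    + " parameter needs to be a constant, but found a dynamic attribute "
                    + executor.getClass().getCanonicalName());
        }
        Object value = ((ConstantExpressionExecutor) executor).getValue();
        if (executor.getReturnType() == Attribute.Type.INT) {
            return (Integer) value;
        } else if (executor.getReturnType() == Attribute.Type.LONG) {
            return (Long) value;
        }
        throw new SiddhiAppValidationException("Throttle batch window " + position
                + " parameter needs to be int or long, but found " + executor.getReturnType());
    }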
Use of org.wso2.siddhi.core.executor.ExpressionExecutor in project siddhi by wso2.
In class EventTestCase, method testExpressionExecutors.
@Test
public void testExpressionExecutors() {
    // StreamDefinition streamDefinition = StreamDefinition.id("cseEventStream").attribute("symbol", Attribute
    // .Type.STRING).attribute("price", Attribute.Type.FLOAT).attribute("volume", Attribute.Type.INT);
    VariableExpressionExecutor priceVariableExpressionExecutor = new VariableExpressionExecutor(
            new Attribute("price", Attribute.Type.FLOAT), 0, 0);
    priceVariableExpressionExecutor.setPosition(new int[] { 0, SiddhiConstants.UNKNOWN_STATE,
            SiddhiConstants.OUTPUT_DATA_INDEX, 1 });
    ExpressionExecutor addExecutor = new AddExpressionExecutorFloat(
            new ConstantExpressionExecutor(10f, Attribute.Type.FLOAT), priceVariableExpressionExecutor);
    StreamEvent event = new StreamEvent(0, 0, 3);
    event.setOutputData(new Object[] { "WSO2", 10f, 5 });
    AssertJUnit.assertEquals("Result of adding should be 20.0", 20f, addExecutor.execute(event));
}
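Since AddExpressionExecutorFloat takes two ExpressionExecutors, arithmetic executors nest. A small illustrative continuation of the test above (values chosen for illustration, reusing addExecutor and event from the snippet):

    // Sketch: nest another addition on top of addExecutor, i.e. 5 + (10 + price) = 25 for price = 10.
    ExpressionExecutor nestedAddExecutor = new AddExpressionExecutorFloat(
            new ConstantExpressionExecutor(5f, Attribute.Type.FLOAT), addExecutor);
    AssertJUnit.assertEquals("Nested addition should be 25.0", 25f, nestedAddExecutor.execute(event));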
Use of org.wso2.siddhi.core.executor.ExpressionExecutor in project siddhi by wso2.
In class AggregationRuntime, method compileExpression.
public CompiledCondition compileExpression(Expression expression, Within within, Expression per,
                                           MatchingMetaInfoHolder matchingMetaInfoHolder,
                                           List<VariableExpressionExecutor> variableExpressionExecutors,
                                           Map<String, Table> tableMap, String queryName,
                                           SiddhiAppContext siddhiAppContext) {
    Map<TimePeriod.Duration, CompiledCondition> withinTableCompiledConditions = new HashMap<>();
    CompiledCondition withinInMemoryCompileCondition;
    CompiledCondition onCompiledCondition;
    List<Attribute> additionalAttributes = new ArrayList<>();
    // Define the additional attribute list
    additionalAttributes.add(new Attribute("_START", Attribute.Type.LONG));
    additionalAttributes.add(new Attribute("_END", Attribute.Type.LONG));
    // Get the table definition. Table definitions for all the tables used to persist aggregates are similar,
    // so it's enough to get the definition from one table.
    AbstractDefinition tableDefinition = ((Table) aggregationTables.values().toArray()[0]).getTableDefinition();
    // Alter the existing meta stream event, or create a new one if a meta stream event doesn't exist.
    // After calling this method the original MatchingMetaInfoHolder's meta stream event is altered.
    MetaStreamEvent newMetaStreamEventWithStartEnd =
            createNewMetaStreamEventWithStartEnd(matchingMetaInfoHolder, additionalAttributes);
    MatchingMetaInfoHolder alteredMatchingMetaInfoHolder = null;
    // Alter the meta info holder to contain both the stream event and the aggregate when this is a store query
    if (matchingMetaInfoHolder.getMetaStateEvent().getMetaStreamEvents().length == 1) {
        matchingMetaInfoHolder =
                alterMetaInfoHolderForStoreQuery(newMetaStreamEventWithStartEnd, matchingMetaInfoHolder);
        alteredMatchingMetaInfoHolder = matchingMetaInfoHolder;
    }
    // Create a new MatchingMetaInfoHolder containing newMetaStreamEventWithStartEnd and the table meta event
    MatchingMetaInfoHolder streamTableMetaInfoHolderWithStartEnd =
            createNewStreamTableMetaInfoHolder(newMetaStreamEventWithStartEnd, tableDefinition);
    // Create the per expression executor
    ExpressionExecutor perExpressionExecutor = ExpressionParser.parseExpression(per,
            matchingMetaInfoHolder.getMetaStateEvent(), matchingMetaInfoHolder.getCurrentState(), tableMap,
            variableExpressionExecutors, siddhiAppContext, false, 0, queryName);
    if (perExpressionExecutor.getReturnType() != Attribute.Type.STRING) {
        throw new SiddhiAppCreationException("Query " + queryName + "'s per value expected a string but found "
                + perExpressionExecutor.getReturnType(), per.getQueryContextStartIndex(),
                per.getQueryContextEndIndex());
    }
    // Create the within expression: _START <= AGG_TIMESTAMP < _END
    Expression withinExpression;
    Expression start = Expression.variable(additionalAttributes.get(0).getName());
    Expression end = Expression.variable(additionalAttributes.get(1).getName());
    Expression compareWithStartTime = Compare.compare(start, Compare.Operator.LESS_THAN_EQUAL,
            Expression.variable("AGG_TIMESTAMP"));
    Expression compareWithEndTime = Compare.compare(Expression.variable("AGG_TIMESTAMP"),
            Compare.Operator.LESS_THAN, end);
    withinExpression = Expression.and(compareWithStartTime, compareWithEndTime);
    // Create the start and end time expression
    Expression startEndTimeExpression;
    if (within.getTimeRange().size() == 1) {
        startEndTimeExpression = new AttributeFunction("incrementalAggregator", "startTimeEndTime",
                within.getTimeRange().get(0));
    } else {
        // within.getTimeRange().size() == 2
        startEndTimeExpression = new AttributeFunction("incrementalAggregator", "startTimeEndTime",
                within.getTimeRange().get(0), within.getTimeRange().get(1));
    }
    ExpressionExecutor startTimeEndTimeExpressionExecutor = ExpressionParser.parseExpression(startEndTimeExpression,
            matchingMetaInfoHolder.getMetaStateEvent(), matchingMetaInfoHolder.getCurrentState(), tableMap,
            variableExpressionExecutors, siddhiAppContext, false, 0, queryName);
    // These compiled conditions are used to check whether the aggregates in the tables are within the given duration.
    for (Map.Entry<TimePeriod.Duration, Table> entry : aggregationTables.entrySet()) {
        CompiledCondition withinTableCompileCondition = entry.getValue().compileCondition(withinExpression,
                streamTableMetaInfoHolderWithStartEnd, siddhiAppContext, variableExpressionExecutors, tableMap,
                queryName);
        withinTableCompiledConditions.put(entry.getKey(), withinTableCompileCondition);
    }
    // Create the compiled condition for in-memory data.
    // This compiled condition is used to check whether the running aggregates (in-memory data)
    // are within the given duration.
    withinInMemoryCompileCondition = OperatorParser.constructOperator(new ComplexEventChunk<>(true),
            withinExpression, streamTableMetaInfoHolderWithStartEnd, siddhiAppContext, variableExpressionExecutors,
            tableMap, queryName);
    // The "on" compiled condition.
    // After finding all the aggregates belonging to the within duration, the final on condition (given as
    // "on stream1.name == aggregator.nickName ..." in the join query) must be executed on that data.
    // This condition is used for that purpose.
    onCompiledCondition = OperatorParser.constructOperator(new ComplexEventChunk<>(true), expression,
            matchingMetaInfoHolder, siddhiAppContext, variableExpressionExecutors, tableMap, queryName);
    return new IncrementalAggregateCompileCondition(withinTableCompiledConditions, withinInMemoryCompileCondition,
            onCompiledCondition, tableMetaStreamEvent, aggregateMetaSteamEvent, additionalAttributes,
            alteredMatchingMetaInfoHolder, perExpressionExecutor, startTimeEndTimeExpressionExecutor);
}
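The _START/_END range check built in the middle of compileExpression is plain Siddhi query-API construction. A condensed sketch of just that step, with the attribute and timestamp names taken from the method above:

    // Sketch: build the condition _START <= AGG_TIMESTAMP AND AGG_TIMESTAMP < _END.
    Expression start = Expression.variable("_START");
    Expression end = Expression.variable("_END");
    Expression withinExpression = Expression.and(
            Compare.compare(start, Compare.Operator.LESS_THAN_EQUAL, Expression.variable("AGG_TIMESTAMP")),
            Compare.compare(Expression.variable("AGG_TIMESTAMP"), Compare.Operator.LESS_THAN, end));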