use of org.wso2.siddhi.query.api.definition.AbstractDefinition in project siddhi by wso2.
the class DefinitionParserHelper method validateDefinition.
public static void validateDefinition(AbstractDefinition definition,
                                      ConcurrentMap<String, AbstractDefinition> streamDefinitionMap,
                                      ConcurrentMap<String, AbstractDefinition> tableDefinitionMap,
                                      ConcurrentMap<String, AbstractDefinition> windowDefinitionMap,
                                      ConcurrentMap<String, AbstractDefinition> aggregationDefinitionMap) {
    AbstractDefinition existingTableDefinition = tableDefinitionMap.get(definition.getId());
    if (existingTableDefinition != null
            && (!existingTableDefinition.equals(definition) || definition instanceof StreamDefinition)) {
        throw new DuplicateDefinitionException("Table Definition with same Stream Id '" + definition.getId()
                + "' already exist : " + existingTableDefinition + ", hence cannot add " + definition,
                definition.getQueryContextStartIndex(), definition.getQueryContextEndIndex());
    }
    AbstractDefinition existingStreamDefinition = streamDefinitionMap.get(definition.getId());
    if (existingStreamDefinition != null
            && (!existingStreamDefinition.equals(definition) || definition instanceof TableDefinition)) {
        throw new DuplicateDefinitionException("Stream Definition with same Stream Id '" + definition.getId()
                + "' already exist : " + existingStreamDefinition + ", hence cannot add " + definition,
                definition.getQueryContextStartIndex(), definition.getQueryContextEndIndex());
    }
    AbstractDefinition existingWindowDefinition = windowDefinitionMap.get(definition.getId());
    if (existingWindowDefinition != null
            && (!existingWindowDefinition.equals(definition) || definition instanceof WindowDefinition)) {
        throw new DuplicateDefinitionException("Window Definition with same Window Id '" + definition.getId()
                + "' already exist : " + existingWindowDefinition + ", hence cannot add " + definition,
                definition.getQueryContextStartIndex(), definition.getQueryContextEndIndex());
    }
    AbstractDefinition existingAggregationDefinition = aggregationDefinitionMap.get(definition.getId());
    if (existingAggregationDefinition != null
            && (!existingAggregationDefinition.equals(definition) || definition instanceof AggregationDefinition)) {
        throw new DuplicateDefinitionException("Aggregation Definition with same Aggregation Id '" + definition.getId()
                + "' already exist : " + existingAggregationDefinition + ", hence cannot add " + definition,
                definition.getQueryContextStartIndex(), definition.getQueryContextEndIndex());
    }
}
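For illustration, here is a minimal sketch of how this validation rejects an id reused across namespaces. The map and definition names are hypothetical, and the TableDefinition factory and chaining methods are assumed to mirror the StreamDefinition ones used elsewhere in this listing.
ConcurrentMap<String, AbstractDefinition> streamDefinitionMap = new ConcurrentHashMap<>();
ConcurrentMap<String, AbstractDefinition> tableDefinitionMap = new ConcurrentHashMap<>();
ConcurrentMap<String, AbstractDefinition> windowDefinitionMap = new ConcurrentHashMap<>();
ConcurrentMap<String, AbstractDefinition> aggregationDefinitionMap = new ConcurrentHashMap<>();

StreamDefinition stream = StreamDefinition.id("StockStream").attribute("symbol", Attribute.Type.STRING);
streamDefinitionMap.put(stream.getId(), stream);

// A table reusing the stream's id is rejected: the instanceof check fires even though
// the attribute lists happen to match.
TableDefinition table = TableDefinition.id("StockStream").attribute("symbol", Attribute.Type.STRING);
DefinitionParserHelper.validateDefinition(table, streamDefinitionMap, tableDefinitionMap,
        windowDefinitionMap, aggregationDefinitionMap);   // throws DuplicateDefinitionException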
use of org.wso2.siddhi.query.api.definition.AbstractDefinition in project siddhi by wso2.
the class SiddhiAppRuntimeBuilder method defineWindow.
public void defineWindow(WindowDefinition windowDefinition) {
    DefinitionParserHelper.validateDefinition(windowDefinition, streamDefinitionMap, tableDefinitionMap,
            windowDefinitionMap, aggregationDefinitionMap);
    DefinitionParserHelper.addStreamJunction(windowDefinition, streamJunctionMap, siddhiAppContext);
    AbstractDefinition currentDefinition = windowDefinitionMap.putIfAbsent(windowDefinition.getId(), windowDefinition);
    if (currentDefinition != null) {
        windowDefinition = (WindowDefinition) currentDefinition;
    }
    DefinitionParserHelper.addWindow(windowDefinition, windowMap, siddhiAppContext);
    // defineStream(windowDefinition);
    // DefinitionParserHelper.addStreamJunction(windowDefinition, streamJunctionMap, siddhiAppContext);
}
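For context, a hedged sketch of a Siddhi app whose compilation exercises this path: when the parser encounters the named window declared below, defineWindow(...) is invoked for it. The stream and window names are illustrative, and the syntax assumes Siddhi 4.x SiddhiQL.
SiddhiManager siddhiManager = new SiddhiManager();
String app = "define stream StockStream (symbol string, price float); "
        + "define window StockWindow (symbol string, price float) length(10) output all events; "
        + "from StockStream insert into StockWindow;";
SiddhiAppRuntime runtime = siddhiManager.createSiddhiAppRuntime(app);
runtime.start();
runtime.shutdown();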
use of org.wso2.siddhi.query.api.definition.AbstractDefinition in project siddhi by wso2.
the class SiddhiAppRuntimeBuilder method defineTable.
public void defineTable(TableDefinition tableDefinition) {
    DefinitionParserHelper.validateDefinition(tableDefinition, streamDefinitionMap, tableDefinitionMap,
            windowDefinitionMap, aggregationDefinitionMap);
    AbstractDefinition currentDefinition = tableDefinitionMap.putIfAbsent(tableDefinition.getId(), tableDefinition);
    if (currentDefinition != null) {
        tableDefinition = (TableDefinition) currentDefinition;
    }
    DefinitionParserHelper.addTable(tableDefinition, tableMap, siddhiAppContext);
}
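A minimal sketch of building a table definition with the query API and handing it to the builder. The siddhiAppRuntimeBuilder instance and the identifiers are hypothetical, and the TableDefinition chaining methods are assumed to mirror StreamDefinition's; in Siddhi itself this call is normally driven by the app parser rather than written by hand.
TableDefinition tableDefinition = TableDefinition.id("StockTable")
        .attribute("symbol", Attribute.Type.STRING)
        .attribute("price", Attribute.Type.FLOAT)
        .attribute("volume", Attribute.Type.LONG);
// Hypothetical builder instance, shown only to illustrate the call.
siddhiAppRuntimeBuilder.defineTable(tableDefinition);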
use of org.wso2.siddhi.query.api.definition.AbstractDefinition in project siddhi by wso2.
the class EventTestCase method testQueryParser.
@Test
public void testQueryParser() {
    StreamDefinition streamDefinition = StreamDefinition.id("cseEventStream")
            .attribute("symbol", Attribute.Type.STRING)
            .attribute("price", Attribute.Type.FLOAT)
            .attribute("volume", Attribute.Type.INT);
    StreamDefinition outStreamDefinition = StreamDefinition.id("outputStream")
            .attribute("symbol", Attribute.Type.STRING)
            .attribute("price", Attribute.Type.FLOAT);
    Query query = new Query();
    query.annotation(Annotation.annotation("info").element("name", "query1"));
    query.from(InputStream.stream("cseEventStream")
            .filter(Expression.compare(Expression.variable("volume"), Compare.Operator.NOT_EQUAL, Expression.value(50))));
    query.select(Selector.selector()
            .select("symbol", Expression.variable("symbol"))
            .select("price", Expression.variable("price")));
    query.insertInto("outputStream");
    Map<String, AbstractDefinition> tableDefinitionMap = new HashMap<>();
    Map<String, AbstractDefinition> windowDefinitionMap = new HashMap<>();
    Map<String, AbstractDefinition> aggregationDefinitionMap = new HashMap<>();
    Map<String, Table> tableMap = new HashMap<String, Table>();
    Map<String, Window> eventWindowMap = new HashMap<String, Window>();
    Map<String, AggregationRuntime> aggregationMap = new HashMap<String, AggregationRuntime>();
    Map<String, List<Source>> eventSourceMap = new HashMap<String, List<Source>>();
    Map<String, List<Sink>> eventSinkMap = new HashMap<String, List<Sink>>();
    Map<String, AbstractDefinition> streamDefinitionMap = new HashMap<String, AbstractDefinition>();
    LockSynchronizer lockSynchronizer = new LockSynchronizer();
    streamDefinitionMap.put("cseEventStream", streamDefinition);
    streamDefinitionMap.put("outputStream", outStreamDefinition);
    SiddhiContext siddhicontext = new SiddhiContext();
    SiddhiAppContext context = new SiddhiAppContext();
    context.setSiddhiContext(siddhicontext);
    context.setElementIdGenerator(new ElementIdGenerator(context.getName()));
    context.setSnapshotService(new SnapshotService(context));
    QueryRuntime runtime = QueryParser.parse(query, context, streamDefinitionMap, tableDefinitionMap,
            windowDefinitionMap, aggregationDefinitionMap, tableMap, aggregationMap, eventWindowMap,
            lockSynchronizer, "1");
    AssertJUnit.assertNotNull(runtime);
    AssertJUnit.assertTrue(runtime.getStreamRuntime() instanceof SingleStreamRuntime);
    AssertJUnit.assertNotNull(runtime.getSelector());
    AssertJUnit.assertTrue(runtime.getMetaComplexEvent() instanceof MetaStreamEvent);
}
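For readers more familiar with SiddhiQL, here is a hedged sketch of roughly the same query expressed as an app string and compiled through SiddhiManager. This is not part of the test above; the syntax assumes Siddhi 4.x.
SiddhiManager siddhiManager = new SiddhiManager();
String app = "define stream cseEventStream (symbol string, price float, volume int); "
        + "@info(name = 'query1') "
        + "from cseEventStream[volume != 50] "
        + "select symbol, price "
        + "insert into outputStream;";
SiddhiAppRuntime runtime = siddhiManager.createSiddhiAppRuntime(app);
runtime.start();
runtime.shutdown();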
use of org.wso2.siddhi.query.api.definition.AbstractDefinition in project carbon-apimgt by wso2.
the class ThrottleStreamProcessor method init.
@Override
protected List<Attribute> init(AbstractDefinition abstractDefinition, ExpressionExecutor[] expressionExecutors,
                               ConfigReader configReader, SiddhiAppContext siddhiAppContext) {
    this.siddhiAppContext = siddhiAppContext;
    if (attributeExpressionExecutors.length == 1) {
        if (attributeExpressionExecutors[0] instanceof ConstantExpressionExecutor) {
            if (attributeExpressionExecutors[0].getReturnType() == Attribute.Type.INT) {
                timeInMilliSeconds = (Integer) ((ConstantExpressionExecutor) attributeExpressionExecutors[0]).getValue();
            } else if (attributeExpressionExecutors[0].getReturnType() == Attribute.Type.LONG) {
                timeInMilliSeconds = (Long) ((ConstantExpressionExecutor) attributeExpressionExecutors[0]).getValue();
            } else {
                throw new SiddhiAppValidationException("Throttle batch window's 1st parameter attribute should be "
                        + "either int or long, but found " + attributeExpressionExecutors[0].getReturnType());
            }
        } else {
            throw new SiddhiAppValidationException("Throttle batch window's 1st parameter needs to be a constant "
                    + "attribute but found a dynamic attribute "
                    + attributeExpressionExecutors[0].getClass().getCanonicalName());
        }
    } else if (attributeExpressionExecutors.length == 2) {
        if (attributeExpressionExecutors[0] instanceof ConstantExpressionExecutor) {
            if (attributeExpressionExecutors[0].getReturnType() == Attribute.Type.INT) {
                timeInMilliSeconds = (Integer) ((ConstantExpressionExecutor) attributeExpressionExecutors[0]).getValue();
            } else if (attributeExpressionExecutors[0].getReturnType() == Attribute.Type.LONG) {
                timeInMilliSeconds = (Long) ((ConstantExpressionExecutor) attributeExpressionExecutors[0]).getValue();
            } else {
                throw new SiddhiAppValidationException("Throttle batch window's 1st parameter attribute should be "
                        + "either int or long, but found " + attributeExpressionExecutors[0].getReturnType());
            }
        } else {
            throw new SiddhiAppValidationException("Throttle batch window's 1st parameter needs to be a constant "
                    + "attribute but found a dynamic attribute "
                    + attributeExpressionExecutors[0].getClass().getCanonicalName());
        }
        if (attributeExpressionExecutors[1].getReturnType() == Attribute.Type.INT) {
            startTime = Integer.parseInt(String.valueOf(((ConstantExpressionExecutor) attributeExpressionExecutors[1]).getValue()));
        } else if (attributeExpressionExecutors[1].getReturnType() == Attribute.Type.LONG) {
            startTime = Long.parseLong(String.valueOf(((ConstantExpressionExecutor) attributeExpressionExecutors[1]).getValue()));
        } else {
            throw new SiddhiAppValidationException("Throttle batch window's 2nd parameter needs to be of long "
                    + "or int type, but found " + attributeExpressionExecutors[1].getReturnType());
        }
    } else {
        throw new SiddhiAppValidationException("Throttle batch window should only have one or two parameters "
                + "(<int|long|time> windowTime and optionally <int|long> startTime), but found "
                + attributeExpressionExecutors.length + " input attributes");
    }
    List<Attribute> attributeList = new ArrayList<Attribute>();
    attributeList.add(new Attribute(EXPIRY_TIME_STAMP, Attribute.Type.LONG));
    return attributeList;
}
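For context, a sketch of how a stream-processor extension like this can be registered and invoked from a Siddhi app. The "throttler:timeBatch" extension name, the stream definitions, and the output stream are assumptions made for illustration; consult the carbon-apimgt throttling Siddhi apps for the real names.
SiddhiManager siddhiManager = new SiddhiManager();
// Assumed extension name; carbon-apimgt registers its own name for ThrottleStreamProcessor.
siddhiManager.setExtension("throttler:timeBatch", ThrottleStreamProcessor.class);
String app = "define stream RequestStream (messageID string, apiKey string); "
        + "from RequestStream#throttler:timeBatch(60000, 0) "   // 60-second batch starting at offset 0
        + "select apiKey "
        + "insert into EligibilityStream;";
SiddhiAppRuntime runtime = siddhiManager.createSiddhiAppRuntime(app);
runtime.start();
runtime.shutdown();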