Use of io.siddhi.core.config.SiddhiContext in project siddhi by wso2.
The class JunctionTestCase, method init.
@BeforeMethod
public void init() {
    count = 0;
    eventArrived = false;
    executorService = Executors.newCachedThreadPool();
    // Engine-wide context, shared by the app-scoped context below.
    SiddhiContext siddhiContext = new SiddhiContext();
    siddhiAppContext = new SiddhiAppContext();
    siddhiAppContext.setSiddhiContext(siddhiContext);
}
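For context, this SiddhiContext/SiddhiAppContext pairing is the standard test-harness setup in siddhi-core: the SiddhiContext carries engine-wide services (extensions, data sources, the ConfigManager), and the SiddhiAppContext scopes them to a single app. A minimal sketch of a reusable test helper; the class and method names here are ours, not part of the project:

import io.siddhi.core.config.SiddhiAppContext;
import io.siddhi.core.config.SiddhiContext;

final class TestContextFactory {

    private TestContextFactory() {
    }

    // Builds the context pair the way the core test cases do:
    // engine-wide SiddhiContext first, then an app-scoped
    // SiddhiAppContext that points back at it.
    static SiddhiAppContext newAppContext(String appName) {
        SiddhiContext siddhiContext = new SiddhiContext();
        SiddhiAppContext siddhiAppContext = new SiddhiAppContext();
        siddhiAppContext.setSiddhiContext(siddhiContext);
        siddhiAppContext.setName(appName);
        return siddhiAppContext;
    }
}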
Use of io.siddhi.core.config.SiddhiContext in project siddhi by wso2.
The class EventTestCase, method testQueryParser.
@Test
public void testQueryParser() {
    StreamDefinition streamDefinition = StreamDefinition.id("cseEventStream")
            .attribute("symbol", Attribute.Type.STRING)
            .attribute("price", Attribute.Type.FLOAT)
            .attribute("volume", Attribute.Type.INT);
    StreamDefinition outStreamDefinition = StreamDefinition.id("outputStream")
            .attribute("symbol", Attribute.Type.STRING)
            .attribute("price", Attribute.Type.FLOAT);
    // Programmatic equivalent of:
    //   @info(name = 'query1')
    //   from cseEventStream[volume != 50] select symbol, price insert into outputStream;
    Query query = new Query();
    query.annotation(Annotation.annotation("info").element("name", "query1"));
    query.from(InputStream.stream("cseEventStream")
            .filter(Expression.compare(Expression.variable("volume"),
                    Compare.Operator.NOT_EQUAL, Expression.value(50))));
    query.select(Selector.selector()
            .select("symbol", Expression.variable("symbol"))
            .select("price", Expression.variable("price")));
    query.insertInto("outputStream");
    Map<String, AbstractDefinition> tableDefinitionMap = new HashMap<>();
    Map<String, AbstractDefinition> windowDefinitionMap = new HashMap<>();
    Map<String, AbstractDefinition> aggregationDefinitionMap = new HashMap<>();
    Map<String, Table> tableMap = new HashMap<>();
    Map<String, Window> eventWindowMap = new HashMap<>();
    Map<String, AggregationRuntime> aggregationMap = new HashMap<>();
    Map<String, List<Source>> eventSourceMap = new HashMap<>();
    Map<String, List<Sink>> eventSinkMap = new HashMap<>();
    Map<String, AbstractDefinition> streamDefinitionMap = new HashMap<>();
    LockSynchronizer lockSynchronizer = new LockSynchronizer();
    streamDefinitionMap.put("cseEventStream", streamDefinition);
    streamDefinitionMap.put("outputStream", outStreamDefinition);
    // Minimal context pair needed by QueryParser outside a SiddhiManager.
    SiddhiContext siddhiContext = new SiddhiContext();
    SiddhiAppContext context = new SiddhiAppContext();
    context.setSiddhiContext(siddhiContext);
    context.setIdGenerator(new IdGenerator());
    context.setSnapshotService(new SnapshotService(context));
    QueryRuntimeImpl runtime = QueryParser.parse(query, context, streamDefinitionMap,
            tableDefinitionMap, windowDefinitionMap, aggregationDefinitionMap, tableMap,
            aggregationMap, eventWindowMap, lockSynchronizer, "1", false,
            SiddhiConstants.PARTITION_ID_DEFAULT);
    AssertJUnit.assertNotNull(runtime);
    AssertJUnit.assertTrue(runtime.getStreamRuntime() instanceof SingleStreamRuntime);
    AssertJUnit.assertNotNull(runtime.getSelector());
    AssertJUnit.assertTrue(runtime.getMetaComplexEvent() instanceof MetaStreamEvent);
}
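The programmatically built Query above corresponds to a one-line SiddhiQL app. Outside of parser tests there is no need to assemble contexts and definition maps by hand: SiddhiManager creates the SiddhiContext internally and parses the app string. A sketch using the public API (the sample event values are illustrative):

import io.siddhi.core.SiddhiAppRuntime;
import io.siddhi.core.SiddhiManager;
import io.siddhi.core.event.Event;
import io.siddhi.core.stream.output.StreamCallback;

public class QueryParserSketch {

    public static void main(String[] args) throws InterruptedException {
        SiddhiManager siddhiManager = new SiddhiManager();
        // Same query as in testQueryParser, expressed in SiddhiQL.
        String app = "define stream cseEventStream (symbol string, price float, volume int); "
                + "@info(name = 'query1') "
                + "from cseEventStream[volume != 50] "
                + "select symbol, price insert into outputStream;";
        SiddhiAppRuntime runtime = siddhiManager.createSiddhiAppRuntime(app);
        runtime.addCallback("outputStream", new StreamCallback() {
            @Override
            public void receive(Event[] events) {
                // Events with volume != 50 arrive here.
            }
        });
        runtime.start();
        runtime.getInputHandler("cseEventStream").send(new Object[]{"WSO2", 55.6f, 100});
        runtime.shutdown();
        siddhiManager.shutdown();
    }
}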
Use of io.siddhi.core.config.SiddhiContext in project siddhi by wso2.
The class AggregationParser, method initAggregateQueryExecutor.
private static Map<TimePeriod.Duration, Processor> initAggregateQueryExecutor(
        List<TimePeriod.Duration> aggregationDurations,
        Map<TimePeriod.Duration, List<ExpressionExecutor>> processExpressionExecutorsMap,
        StreamDefinition incomingOutputStreamDefinition, boolean isDistributed, String shardID,
        boolean isProcessingOnExternalTime, SiddhiQueryContext siddhiQueryContext,
        AggregationDefinition aggregationDefinition, ConfigManager configManager,
        Map<TimePeriod.Duration, Table> aggregationTables, List<Variable> groupByVariableList) {
    Map<TimePeriod.Duration, Processor> cudProcessors = new LinkedHashMap<>();
    String datasourceName = AnnotationHelper.getAnnotationElement(SiddhiConstants.NAMESPACE_STORE,
            "datasource", aggregationDefinition.getAnnotations()).getValue();
    if (datasourceName == null || datasourceName.isEmpty()) {
        throw new SiddhiAppCreationException("Datasource configuration must be provided in order "
                + "to use persisted aggregation mode");
    }
    Database databaseType = getDatabaseType(configManager, datasourceName);
    if (log.isDebugEnabled()) {
        log.debug("Database type " + databaseType);
    }
    // Standalone context pair for the CUD (create/update/delete) processors,
    // carrying only the ConfigManager they need to resolve the datasource.
    SiddhiAppContext cudSiddhiAppContext = new SiddhiAppContext();
    SiddhiContext context = new SiddhiContext();
    context.setConfigManager(configManager);
    cudSiddhiAppContext.setSiddhiContext(context);
    StringConstant datasource = new StringConstant(datasourceName);
    ConstantExpressionExecutor datasourceExecutor =
            new ConstantExpressionExecutor(datasource.getValue(), Attribute.Type.STRING);
    Expression[] streamHandler;
    ExpressionExecutor[] cudStreamProcessorInputVariables;
    // External-time processing carries two extra timestamp attributes.
    if (isProcessingOnExternalTime) {
        streamHandler = new Expression[7];
    } else {
        streamHandler = new Expression[5];
    }
    try {
        DBAggregationQueryConfigurationEntry dbAggregationQueryConfigurationEntry =
                DBAggregationQueryUtil.lookupCurrentQueryConfigurationEntry(databaseType);
        if (log.isDebugEnabled()) {
            log.debug("CUD queries for aggregation " + aggregationDefinition.getId());
        }
        // Walk the durations from coarsest to finest; only durations of DAYS
        // and coarser (ordinal >= 3) are aggregated through the database.
        for (int i = aggregationDurations.size() - 1; i > 0; i--) {
            if (aggregationDurations.get(i).ordinal() >= 3) {
                if (log.isDebugEnabled()) {
                    log.debug("Initializing cudProcessors for duration " + aggregationDurations.get(i));
                }
                String databaseSelectQuery = generateDatabaseQuery(
                        processExpressionExecutorsMap.get(aggregationDurations.get(i)),
                        dbAggregationQueryConfigurationEntry, incomingOutputStreamDefinition,
                        isDistributed, shardID, isProcessingOnExternalTime,
                        aggregationTables.get(aggregationDurations.get(i)),
                        aggregationTables.get(aggregationDurations.get(i - 1)),
                        groupByVariableList, aggregationDurations.get(i));
                StringConstant selectQuery = new StringConstant(databaseSelectQuery);
                if (log.isDebugEnabled()) {
                    log.debug(selectQuery);
                }
                ConstantExpressionExecutor selectExecutor =
                        new ConstantExpressionExecutor(selectQuery.getValue(), Attribute.Type.STRING);
                Map<Attribute, int[]> cudInputStreamAttributesMap =
                        generateCUDInputStreamAttributes(isProcessingOnExternalTime);
                if (isProcessingOnExternalTime) {
                    cudStreamProcessorInputVariables = new ExpressionExecutor[7];
                } else {
                    cudStreamProcessorInputVariables = new ExpressionExecutor[5];
                }
                // Slots 0 and 1 are always the datasource name and the select query.
                cudStreamProcessorInputVariables[0] = datasourceExecutor;
                cudStreamProcessorInputVariables[1] = selectExecutor;
                streamHandler[0] = datasource;
                streamHandler[1] = selectQuery;
                MetaStreamEvent metaStreamEvent = generateCUDMetaStreamEvent(isProcessingOnExternalTime);
                StreamDefinition outputStream = new StreamDefinition();
                VariableExpressionExecutor variableExpressionExecutor;
                int j = 0;
                for (Map.Entry<Attribute, int[]> entry : cudInputStreamAttributesMap.entrySet()) {
                    Attribute attribute = entry.getKey();
                    Variable timestampVariable = new Variable(attribute.getName());
                    for (int position : entry.getValue()) {
                        streamHandler[position + 2] = timestampVariable;
                        variableExpressionExecutor = new VariableExpressionExecutor(attribute, 0, 0);
                        variableExpressionExecutor.setPosition(new int[]{2, j});
                        cudStreamProcessorInputVariables[position + 2] = variableExpressionExecutor;
                    }
                    outputStream.attribute(attribute.getName(), attribute.getType());
                    j++;
                }
                // Wrap everything as a call to the rdbms:cud() stream function.
                StreamFunction cudStreamFunction =
                        new StreamFunction(NAMESPACE_RDBMS, FUNCTION_NAME_CUD, streamHandler);
                cudProcessors.put(aggregationDurations.get(i),
                        getCudProcessor(cudStreamFunction, siddhiQueryContext, metaStreamEvent,
                                cudStreamProcessorInputVariables, aggregationDurations.get(i)));
            }
        }
        return cudProcessors;
    } catch (CannotLoadConfigurationException e) {
        throw new SiddhiAppCreationException("Error occurred while initializing the persisted "
                + "incremental aggregation. Could not load the DB queries for database type "
                + databaseType, e);
    }
}
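This method only runs for aggregations declared with a @store datasource (hence the SiddhiAppCreationException above when it is absent), and only durations of DAYS and coarser are pushed down to the database. A hedged sketch of the kind of SiddhiQL definition that exercises this path; the datasource name and stream are illustrative, a real deployment also needs the rdbms extension and a matching datasource registered with the SiddhiManager, and depending on the Siddhi version the persisted mode may additionally need to be switched on via configuration:

import io.siddhi.core.SiddhiAppRuntime;
import io.siddhi.core.SiddhiManager;

public class PersistedAggregationSketch {

    public static void main(String[] args) {
        // @store(datasource = ...) is the annotation element that
        // initAggregateQueryExecutor reads via NAMESPACE_STORE.
        String app = "define stream TradeStream (symbol string, price double, "
                + "volume long, timestamp long); "
                + "@store(type = 'rdbms', datasource = 'AGG_DB') "
                + "define aggregation TradeAggregation "
                + "from TradeStream "
                + "select symbol, avg(price) as avgPrice, sum(volume) as totalVolume "
                + "group by symbol "
                + "aggregate by timestamp every sec ... year;";
        SiddhiManager siddhiManager = new SiddhiManager();
        SiddhiAppRuntime runtime = siddhiManager.createSiddhiAppRuntime(app);
        runtime.start();
        runtime.shutdown();
        siddhiManager.shutdown();
    }
}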