use of org.wso2.siddhi.query.api.execution.query.input.stream.InputStream in project siddhi by wso2.
the class StreamPartitioner method createSingleInputStreamExecutors.
private void createSingleInputStreamExecutors(SingleInputStream inputStream, Partition partition, MetaStreamEvent metaEvent, List<VariableExpressionExecutor> executors, Map<String, Table> tableMap, SiddhiAppContext siddhiAppContext, String queryName) {
    List<PartitionExecutor> executorList = new ArrayList<PartitionExecutor>();
    partitionExecutorLists.add(executorList);
    if (!inputStream.isInnerStream()) {
        for (PartitionType partitionType : partition.getPartitionTypeMap().values()) {
            if (partitionType instanceof ValuePartitionType) {
                if (partitionType.getStreamId().equals(inputStream.getStreamId())) {
                    executorList.add(new ValuePartitionExecutor(ExpressionParser.parseExpression(((ValuePartitionType) partitionType).getExpression(), metaEvent, SiddhiConstants.UNKNOWN_STATE, tableMap, executors, siddhiAppContext, false, 0, queryName)));
                }
            } else {
                for (RangePartitionType.RangePartitionProperty rangePartitionProperty : ((RangePartitionType) partitionType).getRangePartitionProperties()) {
                    if (partitionType.getStreamId().equals(inputStream.getStreamId())) {
                        executorList.add(new RangePartitionExecutor((ConditionExpressionExecutor) ExpressionParser.parseExpression(rangePartitionProperty.getCondition(), metaEvent, SiddhiConstants.UNKNOWN_STATE, tableMap, executors, siddhiAppContext, false, 0, queryName), rangePartitionProperty.getPartitionKey()));
                    }
                }
            }
        }
    }
}
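For context, the two branches above mirror the two kinds of partition a Siddhi app can declare. The following is a minimal, hypothetical sketch (stream, attribute, and query names are invented for illustration, not taken from the snippet): the value partition would be compiled into a ValuePartitionExecutor and the range partition into one RangePartitionExecutor per condition/key pair.

import org.wso2.siddhi.core.SiddhiAppRuntime;
import org.wso2.siddhi.core.SiddhiManager;

public class PartitionSketch {
    public static void main(String[] args) throws InterruptedException {
        SiddhiManager siddhiManager = new SiddhiManager();
        // Hypothetical definitions used only to illustrate the two PartitionType
        // branches handled by createSingleInputStreamExecutors.
        String siddhiApp =
                "define stream tradeStream (symbol string, price double, volume long); " +
                // value partition -> ValuePartitionExecutor keyed by symbol
                "partition with (symbol of tradeStream) begin " +
                "  @info(name = 'perSymbol') from tradeStream " +
                "  select symbol, sum(volume) as totalVolume insert into symbolTotals; " +
                "end; " +
                // range partition -> RangePartitionExecutor per condition and partition key
                "partition with (price < 100 as 'small' or price >= 100 as 'large' of tradeStream) begin " +
                "  @info(name = 'perRange') from tradeStream " +
                "  select symbol, price insert into rangedTrades; " +
                "end;";
        SiddhiAppRuntime runtime = siddhiManager.createSiddhiAppRuntime(siddhiApp);
        runtime.start();
        runtime.getInputHandler("tradeStream").send(new Object[]{"WSO2", 55.0, 10L});
        Thread.sleep(100);
        runtime.shutdown();
    }
}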
use of org.wso2.siddhi.query.api.execution.query.input.stream.InputStream in project siddhi by wso2.
the class JoinInputStreamParser method insertJoinProcessorsAndGetFindable.
private static FindableProcessor insertJoinProcessorsAndGetFindable(JoinProcessor preJoinProcessor, JoinProcessor postJoinProcessor, SingleStreamRuntime streamRuntime, SiddhiAppContext siddhiAppContext, boolean outputExpectsExpiredEvents, String queryName, InputStream inputStream) {
    Processor lastProcessor = streamRuntime.getProcessorChain();
    Processor prevLastProcessor = null;
    if (lastProcessor != null) {
        while (lastProcessor.getNextProcessor() != null) {
            prevLastProcessor = lastProcessor;
            lastProcessor = lastProcessor.getNextProcessor();
        }
    }
    if (lastProcessor == null) {
        try {
            WindowProcessor windowProcessor = new LengthWindowProcessor();
            ExpressionExecutor[] expressionExecutors = new ExpressionExecutor[1];
            expressionExecutors[0] = new ConstantExpressionExecutor(0, Attribute.Type.INT);
            ConfigReader configReader = siddhiAppContext.getSiddhiContext().getConfigManager().generateConfigReader("", "length");
            windowProcessor.initProcessor(((MetaStreamEvent) streamRuntime.getMetaComplexEvent()).getLastInputDefinition(), expressionExecutors, configReader, siddhiAppContext, outputExpectsExpiredEvents, queryName, inputStream);
            lastProcessor = windowProcessor;
        } catch (Throwable t) {
            throw new SiddhiAppCreationException(t);
        }
    }
    if (lastProcessor instanceof FindableProcessor) {
        if (prevLastProcessor != null) {
            prevLastProcessor.setNextProcessor(preJoinProcessor);
        } else {
            streamRuntime.setProcessorChain(preJoinProcessor);
        }
        preJoinProcessor.setNextProcessor(lastProcessor);
        lastProcessor.setNextProcessor(postJoinProcessor);
        return (FindableProcessor) lastProcessor;
    } else {
        throw new OperationNotSupportedException("Stream " + ((MetaStreamEvent) streamRuntime.getMetaComplexEvent()).getLastInputDefinition().getId() + "'s last processor " + lastProcessor.getClass().getCanonicalName() + " is not an instance of " + FindableProcessor.class.getCanonicalName() + " hence join cannot be proceed");
    }
}
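The length(0) fallback above is what a window-less join side relies on. The sketch below is illustrative only (stream and query names are assumptions, not taken from the Siddhi sources): the side declared without a window would have an empty processor chain, so it is the one that receives the default zero-length LengthWindowProcessor, keeping a FindableProcessor available to the join.

import org.wso2.siddhi.core.SiddhiAppRuntime;
import org.wso2.siddhi.core.SiddhiManager;
import org.wso2.siddhi.core.stream.input.InputHandler;

public class JoinFallbackSketch {
    public static void main(String[] args) throws InterruptedException {
        SiddhiManager siddhiManager = new SiddhiManager();
        String siddhiApp =
                "define stream orderStream (symbol string, qty int); " +
                "define stream priceStream (symbol string, price double); " +
                // orderStream declares no window; priceStream keeps its last 10 events.
                // The window-less side corresponds to the length(0) fallback above.
                "@info(name = 'joinQuery') " +
                "from orderStream join priceStream#window.length(10) " +
                "on orderStream.symbol == priceStream.symbol " +
                "select orderStream.symbol, qty, price " +
                "insert into enrichedOrders;";
        SiddhiAppRuntime runtime = siddhiManager.createSiddhiAppRuntime(siddhiApp);
        runtime.start();
        InputHandler prices = runtime.getInputHandler("priceStream");
        InputHandler orders = runtime.getInputHandler("orderStream");
        prices.send(new Object[]{"WSO2", 55.0});
        orders.send(new Object[]{"WSO2", 100});
        Thread.sleep(100);
        runtime.shutdown();
    }
}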
use of org.wso2.siddhi.query.api.execution.query.input.stream.InputStream in project siddhi by wso2.
the class SingleInputStreamParser method parseInputStream.
/**
* Parse single InputStream and return SingleStreamRuntime
*
* @param inputStream single input stream to be parsed
* @param siddhiAppContext SiddhiAppContext of the Siddhi app the query belongs to
* @param variableExpressionExecutors List to hold VariableExpressionExecutors to update after query parsing
* @param streamDefinitionMap Stream Definition Map
* @param tableDefinitionMap Table Definition Map
* @param windowDefinitionMap window definition map
* @param aggregationDefinitionMap aggregation definition map
* @param tableMap Table Map
* @param metaComplexEvent MetaComplexEvent
* @param processStreamReceiver ProcessStreamReceiver
* @param supportsBatchProcessing whether batch processing is supported
* @param outputExpectsExpiredEvents whether the output expects expired events
* @param queryName name of the query the single input stream belongs to
*
* @return SingleStreamRuntime
*/
public static SingleStreamRuntime parseInputStream(SingleInputStream inputStream, SiddhiAppContext siddhiAppContext, List<VariableExpressionExecutor> variableExpressionExecutors, Map<String, AbstractDefinition> streamDefinitionMap, Map<String, AbstractDefinition> tableDefinitionMap, Map<String, AbstractDefinition> windowDefinitionMap, Map<String, AbstractDefinition> aggregationDefinitionMap, Map<String, Table> tableMap, MetaComplexEvent metaComplexEvent, ProcessStreamReceiver processStreamReceiver, boolean supportsBatchProcessing, boolean outputExpectsExpiredEvents, String queryName) {
    Processor processor = null;
    EntryValveProcessor entryValveProcessor = null;
    boolean first = true;
    MetaStreamEvent metaStreamEvent;
    if (metaComplexEvent instanceof MetaStateEvent) {
        metaStreamEvent = new MetaStreamEvent();
        ((MetaStateEvent) metaComplexEvent).addEvent(metaStreamEvent);
        initMetaStreamEvent(inputStream, streamDefinitionMap, tableDefinitionMap, windowDefinitionMap, aggregationDefinitionMap, metaStreamEvent);
    } else {
        metaStreamEvent = (MetaStreamEvent) metaComplexEvent;
        initMetaStreamEvent(inputStream, streamDefinitionMap, tableDefinitionMap, windowDefinitionMap, aggregationDefinitionMap, metaStreamEvent);
    }
    // A window cannot be defined for a window stream
    if (!inputStream.getStreamHandlers().isEmpty() && windowDefinitionMap != null && windowDefinitionMap.containsKey(inputStream.getStreamId())) {
        for (StreamHandler handler : inputStream.getStreamHandlers()) {
            if (handler instanceof Window) {
                throw new OperationNotSupportedException("Cannot create " + ((Window) handler).getName() + " " + "window for the window stream " + inputStream.getStreamId());
            }
        }
    }
    if (!inputStream.getStreamHandlers().isEmpty()) {
        for (StreamHandler handler : inputStream.getStreamHandlers()) {
            Processor currentProcessor = generateProcessor(handler, metaComplexEvent, variableExpressionExecutors, siddhiAppContext, tableMap, supportsBatchProcessing, outputExpectsExpiredEvents, queryName);
            if (currentProcessor instanceof SchedulingProcessor) {
                if (entryValveProcessor == null) {
                    entryValveProcessor = new EntryValveProcessor(siddhiAppContext);
                    if (first) {
                        processor = entryValveProcessor;
                        first = false;
                    } else {
                        processor.setToLast(entryValveProcessor);
                    }
                }
                Scheduler scheduler = SchedulerParser.parse(siddhiAppContext.getScheduledExecutorService(), entryValveProcessor, siddhiAppContext);
                ((SchedulingProcessor) currentProcessor).setScheduler(scheduler);
            }
            if (first) {
                processor = currentProcessor;
                first = false;
            } else {
                processor.setToLast(currentProcessor);
            }
        }
    }
    metaStreamEvent.initializeAfterWindowData();
    return new SingleStreamRuntime(processStreamReceiver, processor, metaComplexEvent);
}
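As a rough illustration of the handler chain this method builds, consider a hypothetical query with a filter and a time window (the stream and query names below are invented for this sketch): each stream handler becomes a Processor in the chain, and because the time window is a SchedulingProcessor it is wired to a Scheduler through the EntryValveProcessor.

import org.wso2.siddhi.core.SiddhiAppRuntime;
import org.wso2.siddhi.core.SiddhiManager;

public class HandlerChainSketch {
    public static void main(String[] args) throws InterruptedException {
        SiddhiManager siddhiManager = new SiddhiManager();
        // Illustrative stream and query: the filter and the time window are both
        // stream handlers of the single input stream, so parseInputStream chains
        // one Processor per handler; window.time is scheduling-based.
        String siddhiApp =
                "define stream sensorStream (deviceId string, temperature double); " +
                "@info(name = 'hotReadings') " +
                "from sensorStream[temperature > 40.0]#window.time(5 sec) " +
                "select deviceId, avg(temperature) as avgTemp " +
                "insert into alertStream;";
        SiddhiAppRuntime runtime = siddhiManager.createSiddhiAppRuntime(siddhiApp);
        runtime.start();
        runtime.getInputHandler("sensorStream").send(new Object[]{"dev-1", 42.5});
        Thread.sleep(100);
        runtime.shutdown();
    }
}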
use of org.wso2.siddhi.query.api.execution.query.input.stream.InputStream in project siddhi by wso2.
the class SingleInputStreamParser method initMetaStreamEvent.
/**
* Method to generate a MetaStreamEvent relevant to the given input stream. An empty definition will be
* created, and the definition and reference will be set accordingly in this method.
*
* @param inputStream InputStream
* @param streamDefinitionMap StreamDefinition Map
* @param tableDefinitionMap TableDefinition Map
* @param windowDefinitionMap WindowDefinition Map
* @param aggregationDefinitionMap AggregationDefinition Map
* @param metaStreamEvent MetaStreamEvent
*/
private static void initMetaStreamEvent(SingleInputStream inputStream, Map<String, AbstractDefinition> streamDefinitionMap, Map<String, AbstractDefinition> tableDefinitionMap, Map<String, AbstractDefinition> windowDefinitionMap, Map<String, AbstractDefinition> aggregationDefinitionMap, MetaStreamEvent metaStreamEvent) {
    String streamId = inputStream.getStreamId();
    if (!inputStream.isInnerStream() && windowDefinitionMap != null && windowDefinitionMap.containsKey(streamId)) {
        AbstractDefinition inputDefinition = windowDefinitionMap.get(streamId);
        if (!metaStreamEvent.getInputDefinitions().contains(inputDefinition)) {
            metaStreamEvent.addInputDefinition(inputDefinition);
        }
    } else if (streamDefinitionMap != null && streamDefinitionMap.containsKey(streamId)) {
        AbstractDefinition inputDefinition = streamDefinitionMap.get(streamId);
        metaStreamEvent.addInputDefinition(inputDefinition);
    } else if (!inputStream.isInnerStream() && tableDefinitionMap != null && tableDefinitionMap.containsKey(streamId)) {
        AbstractDefinition inputDefinition = tableDefinitionMap.get(streamId);
        metaStreamEvent.addInputDefinition(inputDefinition);
    } else if (!inputStream.isInnerStream() && aggregationDefinitionMap != null && aggregationDefinitionMap.containsKey(streamId)) {
        AbstractDefinition inputDefinition = aggregationDefinitionMap.get(streamId);
        metaStreamEvent.addInputDefinition(inputDefinition);
    } else {
        throw new SiddhiAppCreationException("Stream/table/window/aggregation definition with ID '" + inputStream.getStreamId() + "' has not been defined", inputStream.getQueryContextStartIndex(), inputStream.getQueryContextEndIndex());
    }
    if ((inputStream.getStreamReferenceId() != null) && !(inputStream.getStreamId()).equals(inputStream.getStreamReferenceId())) {
        // if ref id is provided
        metaStreamEvent.setInputReferenceId(inputStream.getStreamReferenceId());
    }
}
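To illustrate the reference-id branch at the end of the method, here is a hypothetical join (stream names and aliases are assumptions) whose "as" aliases differ from the stream ids; for each such single input stream, initMetaStreamEvent would record the alias via setInputReferenceId. A stream id missing from all four definition maps would instead hit the SiddhiAppCreationException branch.

import org.wso2.siddhi.core.SiddhiAppRuntime;
import org.wso2.siddhi.core.SiddhiManager;

public class ReferenceIdSketch {
    public static void main(String[] args) {
        SiddhiManager siddhiManager = new SiddhiManager();
        // "o" and "p" are stream reference ids distinct from the stream ids, so each
        // side's MetaStreamEvent would get its input reference id set accordingly.
        String siddhiApp =
                "define stream orderStream (symbol string, qty int); " +
                "define stream priceStream (symbol string, price double); " +
                "@info(name = 'aliasJoin') " +
                "from orderStream#window.length(1) as o " +
                "join priceStream#window.length(1) as p " +
                "on o.symbol == p.symbol " +
                "select o.symbol, o.qty, p.price insert into outStream;";
        SiddhiAppRuntime runtime = siddhiManager.createSiddhiAppRuntime(siddhiApp);
        runtime.start();
        runtime.shutdown();
    }
}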
use of org.wso2.siddhi.query.api.execution.query.input.stream.InputStream in project siddhi by wso2.
the class MaximumFunctionExtensionTestCase method testMaxFunctionExtension10.
@Test
public void testMaxFunctionExtension10() throws InterruptedException {
    log.info("MaximumFunctionExecutor TestCase 10");
    SiddhiManager siddhiManager = new SiddhiManager();
    String inStreamDefinition = "define stream inputStream (price1 long,price2 long, price3 long);";
    String query = ("@info(name = 'query1') from inputStream " + "select maximum(price1, price2, price3) as max " + "insert into outputStream;");
    SiddhiAppRuntime siddhiAppRuntime = siddhiManager.createSiddhiAppRuntime(inStreamDefinition + query);
    siddhiAppRuntime.addCallback("query1", new QueryCallback() {
        @Override
        public void receive(long timeStamp, Event[] inEvents, Event[] removeEvents) {
            EventPrinter.print(timeStamp, inEvents, removeEvents);
            eventArrived = true;
            for (Event event : inEvents) {
                count++;
                switch (count) {
                    case 1:
                        AssertJUnit.assertEquals(3675L, event.getData(0));
                        break;
                    case 2:
                        AssertJUnit.assertEquals(3812L, event.getData(0));
                        break;
                    case 3:
                        AssertJUnit.assertEquals(3925L, event.getData(0));
                        break;
                    case 4:
                        AssertJUnit.assertEquals(3775L, event.getData(0));
                        break;
                    case 5:
                        AssertJUnit.assertEquals(3812L, event.getData(0));
                        break;
                    case 6:
                        AssertJUnit.assertEquals(3812L, event.getData(0));
                        break;
                    default:
                        org.junit.Assert.fail();
                }
            }
        }
    });
    InputHandler inputHandler = siddhiAppRuntime.getInputHandler("inputStream");
    siddhiAppRuntime.start();
    inputHandler.send(new Object[] { 36L, 3675L, 3575L });
    inputHandler.send(new Object[] { 3788L, 3812L, 3762L });
    inputHandler.send(new Object[] { 3900L, 3925L, 3862L });
    inputHandler.send(new Object[] { 3688L, 3775L, 3675L });
    inputHandler.send(new Object[] { 3812L, 3812L, 3775L });
    inputHandler.send(new Object[] { 3812L, 40L, 3775L });
    Thread.sleep(300);
    AssertJUnit.assertEquals(6, count);
    AssertJUnit.assertTrue(eventArrived);
    siddhiAppRuntime.shutdown();
}