Use of org.wso2.siddhi.query.api.expression.condition.And in project jaggery by wso2.
The class DatabaseHostObject, method executeBatch.
@edu.umd.cs.findbugs.annotations.SuppressFBWarnings("SQL_INJECTION_JDBC")
private static Object executeBatch(Context cx, final DatabaseHostObject db, NativeArray queries, NativeArray params, final Function callback) throws ScriptException, SQLException {
if (params != null && (queries.getLength() != params.getLength())) {
String msg = "Query array and values array should be in the same size. HostObject : " + hostObjectName + ", Method : query";
log.warn(msg);
throw new ScriptException(msg);
}
final Statement stmt = db.conn.createStatement();
for (Object id : queries.getIds()) {
int index = (Integer) id;
Object obj = queries.get(index, db);
if (!(obj instanceof String)) {
String msg = "Invalid query type : " + obj.toString() + ". Query should be a string";
log.warn(msg);
throw new ScriptException(msg);
}
String query = (String) obj;
if (params != null) {
Object valObj = params.get(index, db);
if (!(valObj instanceof NativeArray)) {
String msg = "Invalid value type : " + obj.toString() + " for the query " + query;
log.warn(msg);
throw new ScriptException(msg);
}
query = replaceWildcards(db, query, (NativeArray) valObj);
}
stmt.addBatch(query);
}
if (callback != null) {
final ContextFactory factory = cx.getFactory();
final ExecutorService es = Executors.newSingleThreadExecutor();
es.submit(new Callable() {
public Object call() throws Exception {
Context ctx = RhinoEngine.enterContext(factory);
try {
int[] result = stmt.executeBatch();
callback.call(ctx, db, db, new Object[] { result });
} catch (SQLException e) {
log.warn(e);
} finally {
es.shutdown();
RhinoEngine.exitContext();
}
return null;
}
});
return null;
} else {
return stmt.executeBatch();
}
}
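Stripped of the Rhino plumbing, the pattern above is: collect the statements into a JDBC batch, then either run stmt.executeBatch() inline or hand it to a single-thread executor and pass the resulting update counts to the callback. A minimal sketch of that pattern with plain JDBC, assuming an H2 in-memory database on the classpath; the connection URL, table, and BatchSketch class are illustrative, not part of Jaggery:

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.Statement;
import java.util.Arrays;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.function.Consumer;

public class BatchSketch {
    public static void main(String[] args) throws Exception {
        // Illustrative in-memory database; assumes the H2 driver is on the classpath.
        Connection conn = DriverManager.getConnection("jdbc:h2:mem:test");
        final Statement stmt = conn.createStatement();
        stmt.addBatch("CREATE TABLE t (id INT)");
        stmt.addBatch("INSERT INTO t VALUES (1)");

        // Stand-in for the Rhino callback Function: receives the per-statement update counts.
        Consumer<int[]> callback = counts ->
                System.out.println("update counts: " + Arrays.toString(counts));

        // Same pattern as above: run the batch off the caller's thread, then fire the callback.
        final ExecutorService es = Executors.newSingleThreadExecutor();
        es.submit(() -> {
            try {
                callback.accept(stmt.executeBatch());
            } finally {
                es.shutdown();
            }
            return null;
        });
    }
}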
Use of org.wso2.siddhi.query.api.expression.condition.And in project siddhi by wso2.
The class SiddhiDebuggerClient, method start.
/**
* Start the {@link SiddhiDebuggerClient} and configure the breakpoints.
*
* @param siddhiApp the Siddhi app to debug
* @param input the input to feed, as a single block of text
*/
public void start(final String siddhiApp, String input) {
SiddhiManager siddhiManager = new SiddhiManager();
info("Deploying the siddhi app");
final SiddhiAppRuntime siddhiAppRuntime = siddhiManager.createSiddhiAppRuntime(siddhiApp);
// Add callbacks for all the streams
final Set<String> streamNames = SiddhiCompiler.parse(siddhiApp).getStreamDefinitionMap().keySet();
for (String streamName : streamNames) {
final String stream = streamName;
siddhiAppRuntime.addCallback(stream, new StreamCallback() {
@Override
public void receive(Event[] events) {
info("@Receive: Stream: " + stream + ", Event: " + Arrays.deepToString(events));
}
});
}
SiddhiDebugger siddhiDebugger = siddhiAppRuntime.debug();
final InputFeeder inputFeeder = new InputFeeder(siddhiAppRuntime, input);
System.out.println("Configure the breakpoints.\nYou can use the following commands:\n - " + ADD_BREAKPOINT + "<query name>:<IN/OUT>\n - " + REMOVE_BREAKPOINT + "<query name>:<IN/OUT>\n - " + START + "\n - " + STOP);
printNextLine();
final Scanner scanner = new Scanner(System.in, "UTF-8");
while (scanner.hasNextLine()) {
String userInput = scanner.nextLine().trim();
String command = userInput.toLowerCase();
if (command.startsWith(ADD_BREAKPOINT)) {
if (!command.contains(QUERY_DELIMITER)) {
error("Invalid add query. The query must be " + ADD_BREAKPOINT + "<query " + "name>:<IN/OUT>. Please try again");
printNextLine();
continue;
}
String[] components = userInput.substring(ADD_BREAKPOINT.length(), userInput.length()).split(QUERY_DELIMITER);
String queryName = components[0];
String terminal = components[1].toLowerCase();
if (IN.equals(terminal)) {
siddhiDebugger.acquireBreakPoint(queryName, SiddhiDebugger.QueryTerminal.IN);
info("Added a breakpoint at the IN terminal of " + queryName);
printNextLine();
} else if (OUT.equals(terminal)) {
siddhiDebugger.acquireBreakPoint(queryName, SiddhiDebugger.QueryTerminal.OUT);
info("Added a breakpoint at the OUT terminal of " + queryName);
printNextLine();
} else {
error("The terminal must be either IN or OUT but found: " + terminal.toUpperCase() + ". Please try again");
printNextLine();
}
} else if (command.startsWith(REMOVE_BREAKPOINT)) {
if (!command.contains(QUERY_DELIMITER)) {
error("Invalid add query. The query must be " + REMOVE_BREAKPOINT + "<query " + "name>:<IN/OUT>. Please try again");
printNextLine();
continue;
}
String[] components = userInput.substring(REMOVE_BREAKPOINT.length(), userInput.length()).split(QUERY_DELIMITER);
String queryName = components[0];
String terminal = components[1].toLowerCase();
if (IN.equals(terminal)) {
siddhiDebugger.releaseBreakPoint(queryName, SiddhiDebugger.QueryTerminal.IN);
info("Removed the breakpoint at the IN terminal of " + queryName);
printNextLine();
} else if (OUT.equals(terminal)) {
siddhiDebugger.releaseBreakPoint(queryName, SiddhiDebugger.QueryTerminal.OUT);
info("Removed the breakpoint at the OUT terminal of " + queryName);
printNextLine();
} else {
error("The terminal must be either IN or OUT but found: " + terminal.toUpperCase());
printNextLine();
}
} else if (STOP.equals(command)) {
inputFeeder.stop();
siddhiAppRuntime.shutdown();
break;
} else if (START.equals(command)) {
inputFeeder.start();
info("Siddhi Debugger starts sending input to Siddhi");
System.out.println("You can use the following commands:\n - " + NEXT + "\n - " + PLAY + "\n - " + STATE + ":<query name>\n - " + STOP);
break;
} else {
error("Invalid command: " + command);
printNextLine();
}
}
siddhiDebugger.setDebuggerCallback(new SiddhiDebuggerCallback() {
@Override
public void debugEvent(ComplexEvent event, String queryName, SiddhiDebugger.QueryTerminal queryTerminal, SiddhiDebugger debugger) {
info("@Debug: Query: " + queryName + ", Terminal: " + queryTerminal + ", Event: " + event);
printNextLine();
while (scanner.hasNextLine()) {
String command = scanner.nextLine().trim().toLowerCase();
if (STOP.equals(command)) {
debugger.releaseAllBreakPoints();
debugger.play();
inputFeeder.stop();
siddhiAppRuntime.shutdown();
break;
} else if (NEXT.equals(command)) {
debugger.next();
break;
} else if (PLAY.equals(command)) {
debugger.play();
break;
} else if (command.startsWith(STATE)) {
if (!command.contains(QUERY_DELIMITER)) {
error("Invalid get state request. The query must be " + STATE + ":<query " + "name>. Please try again");
printNextLine();
continue;
}
String[] components = command.split(QUERY_DELIMITER);
String requestQueryName = components[1];
Map<String, Object> state = debugger.getQueryState(requestQueryName.trim());
System.out.println("Query '" + requestQueryName + "' state : ");
for (Map.Entry<String, Object> entry : state.entrySet()) {
System.out.println(" '" + entry.getKey() + "' : " + entry.getValue());
}
printNextLine();
continue;
} else {
error("Invalid command: " + command);
printNextLine();
}
}
}
});
inputFeeder.join();
if (inputFeeder.isRunning()) {
info("Input feeder has sopped sending all inputs. If you want to stop the execution, use " + "the STOP command");
printNextLine();
while (scanner.hasNextLine()) {
String command = scanner.nextLine().trim().toLowerCase();
if (STOP.equals(command)) {
inputFeeder.stop();
siddhiAppRuntime.shutdown();
break;
} else {
error("Invalid command: " + command);
printNextLine();
}
}
}
scanner.close();
info("Siddhi Debugger is stopped successfully");
}
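The same debugger API can be driven programmatically instead of from standard input. A minimal sketch, using only calls that appear in the client above plus SiddhiAppRuntime.getInputHandler(); the stream definition, query name, and event values are illustrative:

import org.wso2.siddhi.core.SiddhiAppRuntime;
import org.wso2.siddhi.core.SiddhiManager;
import org.wso2.siddhi.core.debugger.SiddhiDebugger;
import org.wso2.siddhi.core.debugger.SiddhiDebuggerCallback;
import org.wso2.siddhi.core.event.ComplexEvent;
import org.wso2.siddhi.core.stream.input.InputHandler;

public class DebuggerSketch {
    public static void main(String[] args) throws Exception {
        String siddhiApp = "define stream StockStream (symbol string, price float); " +
                "@info(name = 'query1') " +
                "from StockStream[price > 50] select symbol, price insert into OutputStream;";

        SiddhiManager siddhiManager = new SiddhiManager();
        SiddhiAppRuntime runtime = siddhiManager.createSiddhiAppRuntime(siddhiApp);

        // debug() starts the runtime in debug mode and returns the debugger handle.
        SiddhiDebugger debugger = runtime.debug();
        debugger.acquireBreakPoint("query1", SiddhiDebugger.QueryTerminal.IN);
        debugger.setDebuggerCallback(new SiddhiDebuggerCallback() {
            @Override
            public void debugEvent(ComplexEvent event, String queryName,
                                   SiddhiDebugger.QueryTerminal queryTerminal, SiddhiDebugger debugger) {
                System.out.println(queryName + "@" + queryTerminal + ": " + event);
                // Mirrors the NEXT command above: release the held event to the next checkpoint.
                debugger.next();
            }
        });

        InputHandler inputHandler = runtime.getInputHandler("StockStream");
        inputHandler.send(new Object[]{"WSO2", 70f});
        Thread.sleep(100);
        runtime.shutdown();
        siddhiManager.shutdown();
    }
}

Calling debugger.next() inside debugEvent is the same design as the interactive client: the callback fires at each checkpoint and decides whether to step, play, or stop before the event continues.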
Use of org.wso2.siddhi.query.api.expression.condition.And in project siddhi by wso2.
The class ExternalTimeBatchWindowProcessor, method process.
/**
* Assumption made by this window:
* Parameter 'timestamp' is the time which the window treats as the current time and acts upon;
* the value of this parameter must be monotonically increasing.
* See https://docs.wso2.com/display/CEP400/Inbuilt+Windows#InbuiltWindows-externalTime
*/
@Override
protected void process(ComplexEventChunk<StreamEvent> streamEventChunk, Processor nextProcessor, StreamEventCloner streamEventCloner) {
// event incoming trigger process. No events means no action
if (streamEventChunk.getFirst() == null) {
return;
}
List<ComplexEventChunk<StreamEvent>> complexEventChunks = new ArrayList<ComplexEventChunk<StreamEvent>>();
synchronized (this) {
initTiming(streamEventChunk.getFirst());
StreamEvent nextStreamEvent = streamEventChunk.getFirst();
while (nextStreamEvent != null) {
StreamEvent currStreamEvent = nextStreamEvent;
nextStreamEvent = nextStreamEvent.getNext();
if (currStreamEvent.getType() == ComplexEvent.Type.TIMER) {
if (lastScheduledTime <= currStreamEvent.getTimestamp()) {
// implies that no further events have arrived since this schedule was set.
if (!flushed) {
flushToOutputChunk(streamEventCloner, complexEventChunks, lastCurrentEventTime, true);
flushed = true;
} else {
if (currentEventChunk.getFirst() != null) {
appendToOutputChunk(streamEventCloner, complexEventChunks, lastCurrentEventTime, true);
}
}
// rescheduling to emit the current batch after expiring it if no further events arrive.
lastScheduledTime = siddhiAppContext.getTimestampGenerator().currentTime() + schedulerTimeout;
scheduler.notifyAt(lastScheduledTime);
}
continue;
} else if (currStreamEvent.getType() != ComplexEvent.Type.CURRENT) {
continue;
}
long currentEventTime = (Long) timestampExpressionExecutor.execute(currStreamEvent);
if (lastCurrentEventTime < currentEventTime) {
lastCurrentEventTime = currentEventTime;
}
if (currentEventTime < endTime) {
cloneAppend(streamEventCloner, currStreamEvent);
} else {
if (flushed) {
appendToOutputChunk(streamEventCloner, complexEventChunks, lastCurrentEventTime, false);
flushed = false;
} else {
flushToOutputChunk(streamEventCloner, complexEventChunks, lastCurrentEventTime, false);
}
// update timestamp, call next processor
endTime = findEndTime(lastCurrentEventTime, startTime, timeToKeep);
cloneAppend(streamEventCloner, currStreamEvent);
// triggering the last batch expiration.
if (schedulerTimeout > 0) {
lastScheduledTime = siddhiAppContext.getTimestampGenerator().currentTime() + schedulerTimeout;
scheduler.notifyAt(lastScheduledTime);
}
}
}
}
for (ComplexEventChunk<StreamEvent> complexEventChunk : complexEventChunks) {
nextProcessor.process(complexEventChunk);
}
}
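For context, this processor backs the externalTimeBatch window: the first window parameter is the event-time attribute described in the Javadoc above, and a batch is emitted once an incoming event's timestamp crosses the current batch's end time. A minimal usage sketch; the stream names, timestamps, and the 1-second batch length are illustrative:

import org.wso2.siddhi.core.SiddhiAppRuntime;
import org.wso2.siddhi.core.SiddhiManager;
import org.wso2.siddhi.core.event.Event;
import org.wso2.siddhi.core.stream.input.InputHandler;
import org.wso2.siddhi.core.stream.output.StreamCallback;
import org.wso2.siddhi.core.util.EventPrinter;

public class ExternalTimeBatchSketch {
    public static void main(String[] args) throws Exception {
        // The driving 'timestamp' attribute must be monotonically increasing.
        String siddhiApp = "define stream LoginEvents (timestamp long, ip string); " +
                "@info(name = 'query1') " +
                "from LoginEvents#window.externalTimeBatch(timestamp, 1 sec) " +
                "select timestamp, ip, count() as total " +
                "insert into UniqueIps;";

        SiddhiManager siddhiManager = new SiddhiManager();
        SiddhiAppRuntime runtime = siddhiManager.createSiddhiAppRuntime(siddhiApp);
        runtime.addCallback("UniqueIps", new StreamCallback() {
            @Override
            public void receive(Event[] events) {
                EventPrinter.print(events);
            }
        });
        runtime.start();

        InputHandler input = runtime.getInputHandler("LoginEvents");
        long t = 1366335804341L;
        input.send(new Object[]{t, "192.10.1.3"});
        input.send(new Object[]{t + 500, "192.10.1.4"});
        // Crosses the 1 sec batch boundary, so the first batch is flushed downstream.
        input.send(new Object[]{t + 1100, "192.10.1.5"});
        Thread.sleep(200);
        runtime.shutdown();
        siddhiManager.shutdown();
    }
}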
Use of org.wso2.siddhi.query.api.expression.condition.And in project siddhi by wso2.
The class AggregationParser, method parse.
public static AggregationRuntime parse(AggregationDefinition aggregationDefinition, SiddhiAppContext siddhiAppContext, Map<String, AbstractDefinition> streamDefinitionMap, Map<String, AbstractDefinition> tableDefinitionMap, Map<String, AbstractDefinition> windowDefinitionMap, Map<String, AbstractDefinition> aggregationDefinitionMap, Map<String, Table> tableMap, Map<String, Window> windowMap, Map<String, AggregationRuntime> aggregationMap, SiddhiAppRuntimeBuilder siddhiAppRuntimeBuilder) {
if (aggregationDefinition == null) {
throw new SiddhiAppCreationException("AggregationDefinition instance is null. " + "Hence, can't create the siddhi app '" + siddhiAppContext.getName() + "'");
}
if (aggregationDefinition.getTimePeriod() == null) {
throw new SiddhiAppCreationException("AggregationDefinition '" + aggregationDefinition.getId() + "'s timePeriod is null. " + "Hence, can't create the siddhi app '" + siddhiAppContext.getName() + "'", aggregationDefinition.getQueryContextStartIndex(), aggregationDefinition.getQueryContextEndIndex());
}
if (aggregationDefinition.getSelector() == null) {
throw new SiddhiAppCreationException("AggregationDefinition '" + aggregationDefinition.getId() + "'s selection is not defined. " + "Hence, can't create the siddhi app '" + siddhiAppContext.getName() + "'", aggregationDefinition.getQueryContextStartIndex(), aggregationDefinition.getQueryContextEndIndex());
}
if (streamDefinitionMap.get(aggregationDefinition.getBasicSingleInputStream().getStreamId()) == null) {
throw new SiddhiAppCreationException("Stream " + aggregationDefinition.getBasicSingleInputStream().getStreamId() + " has not been defined");
}
try {
List<VariableExpressionExecutor> incomingVariableExpressionExecutors = new ArrayList<>();
String aggregatorName = aggregationDefinition.getId();
StreamRuntime streamRuntime = InputStreamParser.parse(aggregationDefinition.getBasicSingleInputStream(), siddhiAppContext, streamDefinitionMap, tableDefinitionMap, windowDefinitionMap, aggregationDefinitionMap, tableMap, windowMap, aggregationMap, incomingVariableExpressionExecutors, null, false, aggregatorName);
// Get original meta for later use.
MetaStreamEvent incomingMetaStreamEvent = (MetaStreamEvent) streamRuntime.getMetaComplexEvent();
// Create new meta stream event.
// This must hold the timestamp, group by attributes (if given) and the incremental attributes, in
// onAfterWindowData array
// Example format: AGG_TIMESTAMP, groupByAttribute1, groupByAttribute2, AGG_incAttribute1, AGG_incAttribute2
// AGG_incAttribute1, AGG_incAttribute2 would have the same attribute names as in
// finalListOfIncrementalAttributes
// To enter data as onAfterWindowData
incomingMetaStreamEvent.initializeAfterWindowData();
List<ExpressionExecutor> incomingExpressionExecutors = new ArrayList<>();
List<IncrementalAttributeAggregator> incrementalAttributeAggregators = new ArrayList<>();
List<Variable> groupByVariableList = aggregationDefinition.getSelector().getGroupByList();
boolean isProcessingOnExternalTime = aggregationDefinition.getAggregateAttribute() != null;
// Expressions to get final aggregate outputs, e.g. avg = sum / count
List<Expression> outputExpressions = new ArrayList<>();
// Expression executors to get final aggregate outputs, e.g. avg = sum / count
List<ExpressionExecutor> outputExpressionExecutors = new ArrayList<>();
populateIncomingAggregatorsAndExecutors(aggregationDefinition, siddhiAppContext, tableMap, incomingVariableExpressionExecutors, aggregatorName, incomingMetaStreamEvent, incomingExpressionExecutors, incrementalAttributeAggregators, groupByVariableList, outputExpressions);
int baseAggregatorBeginIndex = incomingMetaStreamEvent.getOutputData().size();
List<Expression> finalBaseAggregators = getFinalBaseAggregators(siddhiAppContext, tableMap, incomingVariableExpressionExecutors, aggregatorName, incomingMetaStreamEvent, incomingExpressionExecutors, incrementalAttributeAggregators);
StreamDefinition incomingOutputStreamDefinition = StreamDefinition.id("");
incomingOutputStreamDefinition.setQueryContextStartIndex(aggregationDefinition.getQueryContextStartIndex());
incomingOutputStreamDefinition.setQueryContextEndIndex(aggregationDefinition.getQueryContextEndIndex());
MetaStreamEvent processedMetaStreamEvent = new MetaStreamEvent();
for (Attribute attribute : incomingMetaStreamEvent.getOutputData()) {
incomingOutputStreamDefinition.attribute(attribute.getName(), attribute.getType());
processedMetaStreamEvent.addOutputData(attribute);
}
incomingMetaStreamEvent.setOutputDefinition(incomingOutputStreamDefinition);
processedMetaStreamEvent.addInputDefinition(incomingOutputStreamDefinition);
processedMetaStreamEvent.setOutputDefinition(incomingOutputStreamDefinition);
// Executors of processing meta
List<VariableExpressionExecutor> processVariableExpressionExecutors = new ArrayList<>();
boolean groupBy = aggregationDefinition.getSelector().getGroupByList().size() != 0;
List<ExpressionExecutor> processExpressionExecutors = constructProcessExpressionExecutors(siddhiAppContext, tableMap, aggregatorName, baseAggregatorBeginIndex, finalBaseAggregators, incomingOutputStreamDefinition, processedMetaStreamEvent, processVariableExpressionExecutors, groupBy);
outputExpressionExecutors.addAll(outputExpressions.stream().map(expression -> ExpressionParser.parseExpression(expression, processedMetaStreamEvent, 0, tableMap, processVariableExpressionExecutors, siddhiAppContext, groupBy, 0, aggregatorName)).collect(Collectors.toList()));
// Create group by key generator
GroupByKeyGenerator groupByKeyGenerator = null;
if (groupBy) {
groupByKeyGenerator = new GroupByKeyGenerator(groupByVariableList, processedMetaStreamEvent, SiddhiConstants.UNKNOWN_STATE, tableMap, processVariableExpressionExecutors, siddhiAppContext, aggregatorName);
}
// Create new scheduler
EntryValveExecutor entryValveExecutor = new EntryValveExecutor(siddhiAppContext);
LockWrapper lockWrapper = new LockWrapper(aggregatorName);
lockWrapper.setLock(new ReentrantLock());
Scheduler scheduler = SchedulerParser.parse(siddhiAppContext.getScheduledExecutorService(), entryValveExecutor, siddhiAppContext);
scheduler.init(lockWrapper, aggregatorName);
scheduler.setStreamEventPool(new StreamEventPool(processedMetaStreamEvent, 10));
QueryParserHelper.reduceMetaComplexEvent(incomingMetaStreamEvent);
QueryParserHelper.reduceMetaComplexEvent(processedMetaStreamEvent);
QueryParserHelper.updateVariablePosition(incomingMetaStreamEvent, incomingVariableExpressionExecutors);
QueryParserHelper.updateVariablePosition(processedMetaStreamEvent, processVariableExpressionExecutors);
List<TimePeriod.Duration> incrementalDurations = getSortedPeriods(aggregationDefinition.getTimePeriod());
Map<TimePeriod.Duration, Table> aggregationTables = initDefaultTables(aggregatorName, incrementalDurations, processedMetaStreamEvent.getOutputStreamDefinition(), siddhiAppRuntimeBuilder, aggregationDefinition.getAnnotations(), groupByVariableList);
int bufferSize = 0;
Element element = AnnotationHelper.getAnnotationElement(SiddhiConstants.ANNOTATION_BUFFER_SIZE, null, aggregationDefinition.getAnnotations());
if (element != null) {
try {
bufferSize = Integer.parseInt(element.getValue());
} catch (NumberFormatException e) {
throw new SiddhiAppCreationException(e.getMessage() + ": BufferSize must be an integer");
}
}
if (bufferSize > 0) {
TimePeriod.Duration rootDuration = incrementalDurations.get(0);
if (rootDuration == TimePeriod.Duration.MONTHS || rootDuration == TimePeriod.Duration.YEARS) {
throw new SiddhiAppCreationException("A buffer size greater than 0 can be provided, only when the " + "first duration value is seconds, minutes, hours or days");
}
if (!isProcessingOnExternalTime) {
throw new SiddhiAppCreationException("Buffer size cannot be specified when events are aggregated " + "based on event arrival time.");
// Buffer size is used to process out of order events. However, events would never be out of
// order if they are processed based on event arrival time.
}
} else if (bufferSize < 0) {
throw new SiddhiAppCreationException("Expected a positive integer as the buffer size, but found " + bufferSize + " as the provided value");
}
boolean ignoreEventsOlderThanBuffer = false;
element = AnnotationHelper.getAnnotationElement(SiddhiConstants.ANNOTATION_IGNORE_EVENTS_OLDER_THAN_BUFFER, null, aggregationDefinition.getAnnotations());
if (element != null) {
if (element.getValue().equalsIgnoreCase("true")) {
ignoreEventsOlderThanBuffer = true;
} else if (!element.getValue().equalsIgnoreCase("false")) {
throw new SiddhiAppCreationException("IgnoreEventsOlderThanBuffer value must " + "be true or false");
}
}
Map<TimePeriod.Duration, IncrementalExecutor> incrementalExecutorMap = buildIncrementalExecutors(isProcessingOnExternalTime, processedMetaStreamEvent, processExpressionExecutors, groupByKeyGenerator, bufferSize, ignoreEventsOlderThanBuffer, incrementalDurations, aggregationTables, siddhiAppContext, aggregatorName);
// Recreate in-memory data from tables
RecreateInMemoryData recreateInMemoryData = new RecreateInMemoryData(incrementalDurations, aggregationTables, incrementalExecutorMap, siddhiAppContext, processedMetaStreamEvent, tableMap, windowMap, aggregationMap);
IncrementalExecutor rootIncrementalExecutor = incrementalExecutorMap.get(incrementalDurations.get(0));
rootIncrementalExecutor.setScheduler(scheduler);
// Connect entry valve to root incremental executor
entryValveExecutor.setNextExecutor(rootIncrementalExecutor);
QueryParserHelper.initStreamRuntime(streamRuntime, incomingMetaStreamEvent, lockWrapper, aggregatorName);
LatencyTracker latencyTrackerFind = null;
LatencyTracker latencyTrackerInsert = null;
ThroughputTracker throughputTrackerFind = null;
ThroughputTracker throughputTrackerInsert = null;
if (siddhiAppContext.getStatisticsManager() != null) {
latencyTrackerFind = QueryParserHelper.createLatencyTracker(siddhiAppContext, aggregationDefinition.getId(), SiddhiConstants.METRIC_INFIX_WINDOWS, SiddhiConstants.METRIC_TYPE_FIND);
latencyTrackerInsert = QueryParserHelper.createLatencyTracker(siddhiAppContext, aggregationDefinition.getId(), SiddhiConstants.METRIC_INFIX_WINDOWS, SiddhiConstants.METRIC_TYPE_INSERT);
throughputTrackerFind = QueryParserHelper.createThroughputTracker(siddhiAppContext, aggregationDefinition.getId(), SiddhiConstants.METRIC_INFIX_WINDOWS, SiddhiConstants.METRIC_TYPE_FIND);
throughputTrackerInsert = QueryParserHelper.createThroughputTracker(siddhiAppContext, aggregationDefinition.getId(), SiddhiConstants.METRIC_INFIX_WINDOWS, SiddhiConstants.METRIC_TYPE_INSERT);
}
streamRuntime.setCommonProcessor(new IncrementalAggregationProcessor(rootIncrementalExecutor, incomingExpressionExecutors, processedMetaStreamEvent, latencyTrackerInsert, throughputTrackerInsert, siddhiAppContext));
List<ExpressionExecutor> baseExecutors = cloneExpressionExecutors(processExpressionExecutors);
ExpressionExecutor timestampExecutor = baseExecutors.remove(0);
return new AggregationRuntime(aggregationDefinition, incrementalExecutorMap, aggregationTables, ((SingleStreamRuntime) streamRuntime), entryValveExecutor, incrementalDurations, siddhiAppContext, baseExecutors, timestampExecutor, processedMetaStreamEvent, outputExpressionExecutors, latencyTrackerFind, throughputTrackerFind, recreateInMemoryData);
} catch (Throwable t) {
ExceptionUtil.populateQueryContext(t, aggregationDefinition, siddhiAppContext);
throw t;
}
}
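For orientation, a hedged sketch of the kind of definition this parser consumes. The aggregation text, stream, and attribute names are illustrative; the @BufferSize annotation name is assumed from SiddhiConstants.ANNOTATION_BUFFER_SIZE, and per the checks above it is only valid together with an external-time 'aggregate by' attribute:

import org.wso2.siddhi.core.SiddhiAppRuntime;
import org.wso2.siddhi.core.SiddhiManager;

public class AggregationDefinitionSketch {
    public static void main(String[] args) {
        String siddhiApp =
                "define stream TradeStream (symbol string, price double, volume long, timestamp long); " +
                // 'aggregate by timestamp' makes this an external-time aggregation, which is the
                // precondition the parser enforces for a non-zero @BufferSize.
                "@BufferSize('3') " +
                "define aggregation TradeAggregation " +
                "from TradeStream " +
                "select symbol, avg(price) as avgPrice, sum(volume) as totalVolume " +
                "group by symbol " +
                "aggregate by timestamp every sec ... year;";

        SiddhiManager siddhiManager = new SiddhiManager();
        // AggregationParser.parse(...) runs while this runtime is being built.
        SiddhiAppRuntime runtime = siddhiManager.createSiddhiAppRuntime(siddhiApp);
        runtime.start();
        // ... send events to TradeStream; query TradeAggregation with a 'within ... per ...' join ...
        runtime.shutdown();
        siddhiManager.shutdown();
    }
}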
Use of org.wso2.siddhi.query.api.expression.condition.And in project siddhi by wso2.
The class CollectionExpressionParser, method parseInternalCollectionExpression.
/**
* Parse the given expression and create the appropriate CollectionExpression by recursively traversing the expression.
*
* @param expression expression to be parsed
* @param matchingMetaInfoHolder matching meta info holder
* @param indexedEventHolder indexed event holder
* @return CollectionExpression
*/
private static CollectionExpression parseInternalCollectionExpression(Expression expression, MatchingMetaInfoHolder matchingMetaInfoHolder, IndexedEventHolder indexedEventHolder) {
if (expression instanceof And) {
CollectionExpression leftCollectionExpression = parseInternalCollectionExpression(((And) expression).getLeftExpression(), matchingMetaInfoHolder, indexedEventHolder);
CollectionExpression rightCollectionExpression = parseInternalCollectionExpression(((And) expression).getRightExpression(), matchingMetaInfoHolder, indexedEventHolder);
if (leftCollectionExpression.getCollectionScope() == NON && rightCollectionExpression.getCollectionScope() == NON) {
return new BasicCollectionExpression(expression, NON);
} else if ((leftCollectionExpression.getCollectionScope() == PRIMARY_KEY_ATTRIBUTE || leftCollectionExpression.getCollectionScope() == PARTIAL_PRIMARY_KEY_ATTRIBUTE || leftCollectionExpression.getCollectionScope() == PRIMARY_KEY_RESULT_SET || leftCollectionExpression.getCollectionScope() == PARTIAL_PRIMARY_KEY_RESULT_SET) && (rightCollectionExpression.getCollectionScope() == PRIMARY_KEY_ATTRIBUTE || rightCollectionExpression.getCollectionScope() == PARTIAL_PRIMARY_KEY_ATTRIBUTE || rightCollectionExpression.getCollectionScope() == PRIMARY_KEY_RESULT_SET || rightCollectionExpression.getCollectionScope() == PARTIAL_PRIMARY_KEY_RESULT_SET)) {
Set<String> primaryKeys = new HashSet<>();
primaryKeys.addAll(leftCollectionExpression.getMultiPrimaryKeys());
primaryKeys.addAll(rightCollectionExpression.getMultiPrimaryKeys());
if (indexedEventHolder.getPrimaryKeyReferenceHolders() != null && primaryKeys.size() == indexedEventHolder.getPrimaryKeyReferenceHolders().length) {
return new AndMultiPrimaryKeyCollectionExpression(expression, PRIMARY_KEY_RESULT_SET, leftCollectionExpression, rightCollectionExpression);
} else {
return new AndCollectionExpression(expression, PARTIAL_PRIMARY_KEY_RESULT_SET, leftCollectionExpression, rightCollectionExpression);
}
// TODO support query rewriting to group all PARTIAL_PRIMARY_KEY_RESULT_SETs together such that it can
// build AndMultiPrimaryKeyCollectionExpression.
} else if ((leftCollectionExpression.getCollectionScope() == PARTIAL_PRIMARY_KEY_ATTRIBUTE || leftCollectionExpression.getCollectionScope() == PARTIAL_PRIMARY_KEY_RESULT_SET || leftCollectionExpression.getCollectionScope() == NON || leftCollectionExpression.getCollectionScope() == EXHAUSTIVE) && (rightCollectionExpression.getCollectionScope() == PARTIAL_PRIMARY_KEY_ATTRIBUTE || rightCollectionExpression.getCollectionScope() == PARTIAL_PRIMARY_KEY_RESULT_SET || rightCollectionExpression.getCollectionScope() == NON || rightCollectionExpression.getCollectionScope() == EXHAUSTIVE)) {
return new BasicCollectionExpression(expression, EXHAUSTIVE);
} else {
return new AndCollectionExpression(expression, OPTIMISED_PRIMARY_KEY_OR_INDEXED_RESULT_SET, leftCollectionExpression, rightCollectionExpression);
}
} else if (expression instanceof Or) {
CollectionExpression leftCollectionExpression = parseInternalCollectionExpression(((Or) expression).getLeftExpression(), matchingMetaInfoHolder, indexedEventHolder);
CollectionExpression rightCollectionExpression = parseInternalCollectionExpression(((Or) expression).getRightExpression(), matchingMetaInfoHolder, indexedEventHolder);
if (leftCollectionExpression.getCollectionScope() == NON && rightCollectionExpression.getCollectionScope() == NON) {
return new BasicCollectionExpression(expression, NON);
} else if (leftCollectionExpression.getCollectionScope() == EXHAUSTIVE || leftCollectionExpression.getCollectionScope() == PARTIAL_PRIMARY_KEY_ATTRIBUTE || leftCollectionExpression.getCollectionScope() == PARTIAL_PRIMARY_KEY_RESULT_SET || rightCollectionExpression.getCollectionScope() == EXHAUSTIVE || rightCollectionExpression.getCollectionScope() == PARTIAL_PRIMARY_KEY_ATTRIBUTE || rightCollectionExpression.getCollectionScope() == PARTIAL_PRIMARY_KEY_RESULT_SET) {
return new BasicCollectionExpression(expression, EXHAUSTIVE);
} else {
return new OrCollectionExpression(expression, OPTIMISED_PRIMARY_KEY_OR_INDEXED_RESULT_SET, leftCollectionExpression, rightCollectionExpression);
}
} else if (expression instanceof Not) {
CollectionExpression notCollectionExpression = parseInternalCollectionExpression(((Not) expression).getExpression(), matchingMetaInfoHolder, indexedEventHolder);
switch(notCollectionExpression.getCollectionScope()) {
case NON:
return new BasicCollectionExpression(expression, NON);
case PRIMARY_KEY_ATTRIBUTE:
return new NotCollectionExpression(expression, PRIMARY_KEY_RESULT_SET, notCollectionExpression);
case INDEXED_ATTRIBUTE:
return new NotCollectionExpression(expression, INDEXED_RESULT_SET, notCollectionExpression);
case PRIMARY_KEY_RESULT_SET:
case INDEXED_RESULT_SET:
case OPTIMISED_PRIMARY_KEY_OR_INDEXED_RESULT_SET:
return new NotCollectionExpression(expression, OPTIMISED_PRIMARY_KEY_OR_INDEXED_RESULT_SET, notCollectionExpression);
case PARTIAL_PRIMARY_KEY_ATTRIBUTE:
case PARTIAL_PRIMARY_KEY_RESULT_SET:
case EXHAUSTIVE:
return new BasicCollectionExpression(expression, EXHAUSTIVE);
}
} else if (expression instanceof Compare) {
CollectionExpression leftCollectionExpression = parseInternalCollectionExpression(((Compare) expression).getLeftExpression(), matchingMetaInfoHolder, indexedEventHolder);
CollectionExpression rightCollectionExpression = parseInternalCollectionExpression(((Compare) expression).getRightExpression(), matchingMetaInfoHolder, indexedEventHolder);
if (leftCollectionExpression.getCollectionScope() == NON && rightCollectionExpression.getCollectionScope() == NON) {
// comparing two stream attributes with O(1) time complexity
return new BasicCollectionExpression(expression, NON);
} else if ((leftCollectionExpression.getCollectionScope() == INDEXED_ATTRIBUTE || leftCollectionExpression.getCollectionScope() == PRIMARY_KEY_ATTRIBUTE || leftCollectionExpression.getCollectionScope() == PARTIAL_PRIMARY_KEY_ATTRIBUTE) && rightCollectionExpression.getCollectionScope() == NON) {
switch(leftCollectionExpression.getCollectionScope()) {
case INDEXED_ATTRIBUTE:
return new CompareCollectionExpression((Compare) expression, INDEXED_RESULT_SET, leftCollectionExpression, ((Compare) expression).getOperator(), rightCollectionExpression);
case PRIMARY_KEY_ATTRIBUTE:
return new CompareCollectionExpression((Compare) expression, PRIMARY_KEY_RESULT_SET, leftCollectionExpression, ((Compare) expression).getOperator(), rightCollectionExpression);
case PARTIAL_PRIMARY_KEY_ATTRIBUTE:
return new CompareCollectionExpression((Compare) expression, PARTIAL_PRIMARY_KEY_RESULT_SET, leftCollectionExpression, ((Compare) expression).getOperator(), rightCollectionExpression);
}
} else if (leftCollectionExpression.getCollectionScope() == NON && (rightCollectionExpression.getCollectionScope() == INDEXED_ATTRIBUTE || rightCollectionExpression.getCollectionScope() == PRIMARY_KEY_ATTRIBUTE || rightCollectionExpression.getCollectionScope() == PARTIAL_PRIMARY_KEY_ATTRIBUTE)) {
Compare.Operator operator = ((Compare) expression).getOperator();
// moving left to right, so the comparison operator is flipped
switch(operator) {
case LESS_THAN:
operator = Compare.Operator.GREATER_THAN;
break;
case GREATER_THAN:
operator = Compare.Operator.LESS_THAN;
break;
case LESS_THAN_EQUAL:
operator = Compare.Operator.GREATER_THAN_EQUAL;
break;
case GREATER_THAN_EQUAL:
operator = Compare.Operator.LESS_THAN_EQUAL;
break;
case EQUAL:
break;
case NOT_EQUAL:
break;
}
switch(rightCollectionExpression.getCollectionScope()) {
case INDEXED_ATTRIBUTE:
return new CompareCollectionExpression((Compare) expression, INDEXED_RESULT_SET, rightCollectionExpression, operator, leftCollectionExpression);
case PRIMARY_KEY_ATTRIBUTE:
return new CompareCollectionExpression((Compare) expression, PRIMARY_KEY_RESULT_SET, rightCollectionExpression, operator, leftCollectionExpression);
case PARTIAL_PRIMARY_KEY_ATTRIBUTE:
return new CompareCollectionExpression((Compare) expression, PARTIAL_PRIMARY_KEY_RESULT_SET, rightCollectionExpression, operator, leftCollectionExpression);
}
} else {
// comparing a non-indexed table attribute with stream attributes or another table attribute
return new BasicCollectionExpression(expression, EXHAUSTIVE);
}
} else if (expression instanceof Constant) {
return new BasicCollectionExpression(expression, NON);
} else if (expression instanceof Variable) {
if (isCollectionVariable(matchingMetaInfoHolder, (Variable) expression)) {
if (indexedEventHolder.isAttributeIndexed(((Variable) expression).getAttributeName())) {
return new AttributeCollectionExpression(expression, ((Variable) expression).getAttributeName(), INDEXED_ATTRIBUTE);
} else if (indexedEventHolder.isMultiPrimaryKeyAttribute(((Variable) expression).getAttributeName())) {
if (indexedEventHolder.getPrimaryKeyReferenceHolders() != null && indexedEventHolder.getPrimaryKeyReferenceHolders().length == 1) {
return new AttributeCollectionExpression(expression, ((Variable) expression).getAttributeName(), PRIMARY_KEY_ATTRIBUTE);
} else {
return new AttributeCollectionExpression(expression, ((Variable) expression).getAttributeName(), PARTIAL_PRIMARY_KEY_ATTRIBUTE);
}
} else {
return new BasicCollectionExpression(expression, EXHAUSTIVE);
}
} else {
return new BasicCollectionExpression(expression, NON);
}
} else if (expression instanceof Multiply) {
CollectionExpression left = parseInternalCollectionExpression(((Multiply) expression).getLeftValue(), matchingMetaInfoHolder, indexedEventHolder);
CollectionExpression right = parseInternalCollectionExpression(((Multiply) expression).getRightValue(), matchingMetaInfoHolder, indexedEventHolder);
if (left.getCollectionScope() == NON && right.getCollectionScope() == NON) {
return new BasicCollectionExpression(expression, NON);
} else {
return new BasicCollectionExpression(expression, EXHAUSTIVE);
}
} else if (expression instanceof Add) {
CollectionExpression left = parseInternalCollectionExpression(((Add) expression).getLeftValue(), matchingMetaInfoHolder, indexedEventHolder);
CollectionExpression right = parseInternalCollectionExpression(((Add) expression).getRightValue(), matchingMetaInfoHolder, indexedEventHolder);
if (left.getCollectionScope() == NON && right.getCollectionScope() == NON) {
return new BasicCollectionExpression(expression, NON);
} else {
return new BasicCollectionExpression(expression, EXHAUSTIVE);
}
} else if (expression instanceof Subtract) {
CollectionExpression left = parseInternalCollectionExpression(((Subtract) expression).getLeftValue(), matchingMetaInfoHolder, indexedEventHolder);
CollectionExpression right = parseInternalCollectionExpression(((Subtract) expression).getRightValue(), matchingMetaInfoHolder, indexedEventHolder);
if (left.getCollectionScope() == NON && right.getCollectionScope() == NON) {
return new BasicCollectionExpression(expression, NON);
} else {
return new BasicCollectionExpression(expression, EXHAUSTIVE);
}
} else if (expression instanceof Mod) {
CollectionExpression left = parseInternalCollectionExpression(((Mod) expression).getLeftValue(), matchingMetaInfoHolder, indexedEventHolder);
CollectionExpression right = parseInternalCollectionExpression(((Mod) expression).getRightValue(), matchingMetaInfoHolder, indexedEventHolder);
if (left.getCollectionScope() == NON && right.getCollectionScope() == NON) {
return new BasicCollectionExpression(expression, NON);
} else {
return new BasicCollectionExpression(expression, EXHAUSTIVE);
}
} else if (expression instanceof Divide) {
CollectionExpression left = parseInternalCollectionExpression(((Divide) expression).getLeftValue(), matchingMetaInfoHolder, indexedEventHolder);
CollectionExpression right = parseInternalCollectionExpression(((Divide) expression).getRightValue(), matchingMetaInfoHolder, indexedEventHolder);
if (left.getCollectionScope() == NON && right.getCollectionScope() == NON) {
return new BasicCollectionExpression(expression, NON);
} else {
return new BasicCollectionExpression(expression, EXHAUSTIVE);
}
} else if (expression instanceof AttributeFunction) {
Expression[] innerExpressions = ((AttributeFunction) expression).getParameters();
for (Expression aExpression : innerExpressions) {
CollectionExpression aCollectionExpression = parseInternalCollectionExpression(aExpression, matchingMetaInfoHolder, indexedEventHolder);
if (aCollectionExpression.getCollectionScope() != NON) {
return new BasicCollectionExpression(expression, EXHAUSTIVE);
}
}
return new BasicCollectionExpression(expression, NON);
} else if (expression instanceof In) {
CollectionExpression inCollectionExpression = parseInternalCollectionExpression(((In) expression).getExpression(), matchingMetaInfoHolder, indexedEventHolder);
if (inCollectionExpression.getCollectionScope() != NON) {
return new BasicCollectionExpression(expression, EXHAUSTIVE);
}
return new BasicCollectionExpression(expression, NON);
} else if (expression instanceof IsNull) {
CollectionExpression nullCollectionExpression = parseInternalCollectionExpression(((IsNull) expression).getExpression(), matchingMetaInfoHolder, indexedEventHolder);
if (nullCollectionExpression.getCollectionScope() == NON) {
return new BasicCollectionExpression(expression, NON);
} else if (nullCollectionExpression.getCollectionScope() == INDEXED_ATTRIBUTE) {
return new NullCollectionExpression(expression, INDEXED_RESULT_SET, ((AttributeCollectionExpression) nullCollectionExpression).getAttribute());
} else if (nullCollectionExpression.getCollectionScope() == PRIMARY_KEY_ATTRIBUTE) {
return new NullCollectionExpression(expression, PRIMARY_KEY_RESULT_SET, ((AttributeCollectionExpression) nullCollectionExpression).getAttribute());
} else {
return new BasicCollectionExpression(expression, EXHAUSTIVE);
}
}
throw new UnsupportedOperationException(expression.toString() + " not supported!");
}
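The And handled in the first branch above is the siddhi-query-api condition object, typically produced by the SiddhiQL compiler from an 'on' clause such as StockTable.symbol == symbol and StockTable.volume == volume. A minimal sketch of building the same condition directly with the query API (table and attribute names are illustrative); whether each side then resolves to a PRIMARY_KEY/INDEXED scope or falls back to EXHAUSTIVE depends on the table's @PrimaryKey and @Index annotations seen by the IndexedEventHolder:

import org.wso2.siddhi.query.api.expression.Expression;
import org.wso2.siddhi.query.api.expression.condition.Compare;

public class AndConditionSketch {
    public static void main(String[] args) {
        // Equivalent to: StockTable.symbol == symbol and StockTable.volume == volume
        Expression condition = Expression.and(
                Expression.compare(
                        Expression.variable("symbol").ofStream("StockTable"),
                        Compare.Operator.EQUAL,
                        Expression.variable("symbol")),
                Expression.compare(
                        Expression.variable("volume").ofStream("StockTable"),
                        Compare.Operator.EQUAL,
                        Expression.variable("volume")));
        System.out.println(condition);
    }
}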