Example 1 with OnDemandQueryRuntime

Use of io.siddhi.core.query.OnDemandQueryRuntime in project siddhi by wso2.

The class IncrementalExecutorsInitialiser, method recreateState.

private void recreateState(Long lastData, TimePeriod.Duration recreateForDuration, Table recreateFromTable, boolean isBeforeRoot) {
    Executor incrementalExecutor = incrementalExecutorMap.get(recreateForDuration);
    if (lastData != null) {
        endOFLatestEventTimestamp = IncrementalTimeConverterUtil.getNextEmitTime(lastData, recreateForDuration, timeZone);
    }
    OnDemandQuery onDemandQuery = getOnDemandQuery(recreateFromTable, false, endOFLatestEventTimestamp);
    onDemandQuery.setType(OnDemandQuery.OnDemandQueryType.FIND);
    OnDemandQueryRuntime onDemandQueryRuntime = OnDemandQueryParser.parse(onDemandQuery, null, siddhiAppContext, tableMap, windowMap, aggregationMap);
    Event[] events = onDemandQueryRuntime.execute();
    if (events != null) {
        long referenceToNextLatestEvent = (Long) events[events.length - 1].getData(0);
        ComplexEventChunk<StreamEvent> complexEventChunk = new ComplexEventChunk<>();
        for (Event event : events) {
            StreamEvent streamEvent = streamEventFactory.newInstance();
            streamEvent.setOutputData(event.getData());
            complexEventChunk.add(streamEvent);
        }
        incrementalExecutor.execute(complexEventChunk);
        if (isBeforeRoot) {
            TimePeriod.Duration rootDuration = incrementalDurations.get(0);
            Executor rootIncrementalExecutor = incrementalExecutorMap.get(rootDuration);
            long emitTimeOfLatestEventInTable = IncrementalTimeConverterUtil.getNextEmitTime(referenceToNextLatestEvent, rootDuration, timeZone);
            rootIncrementalExecutor.setEmitTime(emitTimeOfLatestEventInTable);
        }
    }
}
Also used : ComplexEventChunk(io.siddhi.core.event.ComplexEventChunk) OnDemandQueryRuntime(io.siddhi.core.query.OnDemandQueryRuntime) TimePeriod(io.siddhi.query.api.aggregation.TimePeriod) StreamEvent(io.siddhi.core.event.stream.StreamEvent) MetaStreamEvent(io.siddhi.core.event.stream.MetaStreamEvent) Event(io.siddhi.core.event.Event) OnDemandQuery(io.siddhi.query.api.execution.query.OnDemandQuery)
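
The example above reaches OnDemandQueryRuntime through the internal OnDemandQueryParser. For comparison, here is a minimal, self-contained sketch (not taken from the Siddhi sources above) of the public API that wraps the same machinery: SiddhiAppRuntime.query(...) parses the string into an OnDemandQuery and executes the resulting OnDemandQueryRuntime. Stream, table, and attribute names are illustrative.

import io.siddhi.core.SiddhiAppRuntime;
import io.siddhi.core.SiddhiManager;
import io.siddhi.core.event.Event;

public class OnDemandQuerySketch {
    public static void main(String[] args) throws InterruptedException {
        SiddhiManager siddhiManager = new SiddhiManager();
        String app = "define stream StockStream (symbol string, price float); " +
                "define table StockTable (symbol string, price float); " +
                "from StockStream insert into StockTable;";
        SiddhiAppRuntime runtime = siddhiManager.createSiddhiAppRuntime(app);
        runtime.start();
        runtime.getInputHandler("StockStream").send(new Object[]{"WSO2", 55.6f});
        Thread.sleep(100); // allow the event to reach the table
        // Internally this builds an OnDemandQuery and executes the resulting
        // OnDemandQueryRuntime, as the examples on this page do by hand.
        Event[] events = runtime.query("from StockTable select symbol, price");
        System.out.println(events == null ? 0 : events.length);
        siddhiManager.shutdown();
    }
}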

Example 2 with OnDemandQueryRuntime

Use of io.siddhi.core.query.OnDemandQueryRuntime in project siddhi by wso2 (the identical method also appears in project siddhi by siddhi-io).

The class IncrementalDataPurger, method isSafeToPurgeTheDuration.

private Map<String, Boolean> isSafeToPurgeTheDuration(long purgeTime, Table parentTable, Table currentTable, TimePeriod.Duration duration, String timeZone) {
    Event[] dataToDelete;
    Event[] dataInParentTable = null;
    Map<String, Boolean> purgingCheckState = new HashMap<>();
    try {
        dataToDelete = dataToDelete(purgeTime, currentTable);
        if (dataToDelete != null && dataToDelete.length != 0) {
            Map<String, Long> purgingValidationTimeDurations = getPurgingValidationTimeDurations(duration, (Long) dataToDelete[0].getData()[0], timeZone);
            OnDemandQuery onDemandQuery = getOnDemandQuery(parentTable, purgingValidationTimeDurations.get(AGGREGATION_START_TIME), purgingValidationTimeDurations.get(AGGREGATION_NEXT_EMIT_TIME));
            onDemandQuery.setType(OnDemandQuery.OnDemandQueryType.FIND);
            OnDemandQueryRuntime onDemandQueryRuntime = OnDemandQueryParser.parse(onDemandQuery, null, siddhiQueryContext.getSiddhiAppContext(), tableMap, windowMap, aggregationMap);
            dataInParentTable = onDemandQueryRuntime.execute();
        }
        purgingCheckState.put(IS_DATA_AVAILABLE_TO_PURGE, dataToDelete != null && dataToDelete.length > 0);
        purgingCheckState.put(IS_PARENT_TABLE_HAS_AGGREGATED_DATA, dataInParentTable != null && dataInParentTable.length > 0);
    } catch (Exception e) {
        if (e.getMessage().contains("deadlocked")) {
            errorMessage = "Deadlock observed while checking whether the data is safe to purge from aggregation " + "tables for the aggregation " + aggregationDefinition.getId() + ". If this occurred in an Active Active deployment, this error can be ignored if other node " + "doesn't have this error";
        } else {
            errorMessage = "Error occurred while checking whether the data is safe to purge from aggregation " + "tables for the aggregation " + aggregationDefinition.getId();
        }
        LOG.error(errorMessage, e);
        purgingCheckState.put(IS_DATA_AVAILABLE_TO_PURGE, false);
        purgingCheckState.put(IS_PARENT_TABLE_HAS_AGGREGATED_DATA, false);
        errorMessage = "Error occurred while checking whether the data is safe to purge from aggregation tables" + " for the aggregation " + aggregationDefinition.getId();
        purgingHalted = true;
    }
    return purgingCheckState;
}
Also used : HashMap(java.util.HashMap) OnDemandQueryRuntime(io.siddhi.core.query.OnDemandQueryRuntime) Time.timeToLong(io.siddhi.query.api.expression.Expression.Time.timeToLong) MetaStateEvent(io.siddhi.core.event.state.MetaStateEvent) StreamEvent(io.siddhi.core.event.stream.StreamEvent) MetaStreamEvent(io.siddhi.core.event.stream.MetaStreamEvent) Event(io.siddhi.core.event.Event) StateEvent(io.siddhi.core.event.state.StateEvent) OnDemandQuery(io.siddhi.query.api.execution.query.OnDemandQuery) SiddhiAppCreationException(io.siddhi.core.exception.SiddhiAppCreationException) DataPurgingException(io.siddhi.core.exception.DataPurgingException)
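
getOnDemandQuery is a private helper whose body is not shown on this page. As a hedged reconstruction of what such a helper plausibly builds, the sketch below uses the Siddhi query API to assemble a FIND query over the parent table, filtered to the validation window. AGG_TIMESTAMP matches the timestamp column of Siddhi aggregation tables, but the exact selector and condition here are assumptions for illustration.

import io.siddhi.query.api.execution.query.OnDemandQuery;
import io.siddhi.query.api.execution.query.input.store.InputStore;
import io.siddhi.query.api.execution.query.selection.Selector;
import io.siddhi.query.api.expression.Expression;
import io.siddhi.query.api.expression.condition.Compare;

public final class OnDemandQueryBuilderSketch {
    // Builds: from <tableId> on AGG_TIMESTAMP >= startTime and AGG_TIMESTAMP < endTime
    //         select AGG_TIMESTAMP
    public static OnDemandQuery rangeQuery(String tableId, long startTime, long endTime) {
        OnDemandQuery onDemandQuery = OnDemandQuery.query()
                .from(InputStore.store(tableId).on(Expression.and(
                        Expression.compare(Expression.variable("AGG_TIMESTAMP"),
                                Compare.Operator.GREATER_THAN_EQUAL,
                                Expression.value(startTime)),
                        Expression.compare(Expression.variable("AGG_TIMESTAMP"),
                                Compare.Operator.LESS_THAN,
                                Expression.value(endTime)))))
                .select(Selector.selector().select(Expression.variable("AGG_TIMESTAMP")));
        onDemandQuery.setType(OnDemandQuery.OnDemandQueryType.FIND);
        return onDemandQuery;
    }
}

The runtime would then be obtained via OnDemandQueryParser.parse(...) and executed exactly as in the method above.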

Example 3 with OnDemandQueryRuntime

Use of io.siddhi.core.query.OnDemandQueryRuntime in project siddhi by siddhi-io.

The class IncrementalExecutorsInitialiser, method recreateState. Unlike the wso2 variant in Example 1, this version replays the fetched events only when no restored state already exists for the duration.

private void recreateState(Long lastData, TimePeriod.Duration recreateForDuration, Table recreateFromTable, boolean isBeforeRoot) {
    if (lastData != null) {
        endOFLatestEventTimestamp = IncrementalTimeConverterUtil.getNextEmitTime(lastData, recreateForDuration, timeZone);
    }
    OnDemandQuery onDemandQuery = getOnDemandQuery(recreateFromTable, false, endOFLatestEventTimestamp);
    onDemandQuery.setType(OnDemandQuery.OnDemandQueryType.FIND);
    OnDemandQueryRuntime onDemandQueryRuntime = OnDemandQueryParser.parse(onDemandQuery, null, siddhiAppContext, tableMap, windowMap, aggregationMap);
    Event[] events = onDemandQueryRuntime.execute();
    if (events != null) {
        long referenceToNextLatestEvent = (Long) events[events.length - 1].getData(0);
        if (!isStatePresentForAggregationDuration(recreateForDuration)) {
            ComplexEventChunk<StreamEvent> complexEventChunk = new ComplexEventChunk<>();
            for (Event event : events) {
                StreamEvent streamEvent = streamEventFactory.newInstance();
                streamEvent.setOutputData(event.getData());
                complexEventChunk.add(streamEvent);
            }
            Executor incrementalExecutor = incrementalExecutorMap.get(recreateForDuration);
            incrementalExecutor.execute(complexEventChunk);
        }
        if (isBeforeRoot) {
            TimePeriod.Duration rootDuration = incrementalDurations.get(0);
            Executor rootIncrementalExecutor = incrementalExecutorMap.get(rootDuration);
            long emitTimeOfLatestEventInTable = IncrementalTimeConverterUtil.getNextEmitTime(referenceToNextLatestEvent, rootDuration, timeZone);
            rootIncrementalExecutor.setEmitTime(emitTimeOfLatestEventInTable);
        }
    }
}
Also used : ComplexEventChunk(io.siddhi.core.event.ComplexEventChunk) OnDemandQueryRuntime(io.siddhi.core.query.OnDemandQueryRuntime) TimePeriod(io.siddhi.query.api.aggregation.TimePeriod) StreamEvent(io.siddhi.core.event.stream.StreamEvent) MetaStreamEvent(io.siddhi.core.event.stream.MetaStreamEvent) Event(io.siddhi.core.event.Event) OnDemandQuery(io.siddhi.query.api.execution.query.OnDemandQuery)
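
Both recreateState variants convert the fetched Event[] into a ComplexEventChunk of StreamEvents before handing it to the incremental executor. Factored out, that recurring pattern looks like the sketch below (a standalone illustration, not a helper from the Siddhi sources; the StreamEventFactory must be created for the table's output attributes).

import io.siddhi.core.event.ComplexEventChunk;
import io.siddhi.core.event.Event;
import io.siddhi.core.event.stream.StreamEvent;
import io.siddhi.core.event.stream.StreamEventFactory;

final class EventChunkUtil {
    // Wraps each table Event as a StreamEvent and links them into a single chunk.
    static ComplexEventChunk<StreamEvent> toChunk(Event[] events, StreamEventFactory factory) {
        ComplexEventChunk<StreamEvent> chunk = new ComplexEventChunk<>();
        for (Event event : events) {
            StreamEvent streamEvent = factory.newInstance();
            streamEvent.setOutputData(event.getData());
            chunk.add(streamEvent);
        }
        return chunk;
    }
}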

Example 4 with OnDemandQueryRuntime

Use of io.siddhi.core.query.OnDemandQueryRuntime in project siddhi by siddhi-io.

The class IncrementalExecutorsInitialiser, method initialiseExecutors.

public synchronized void initialiseExecutors() {
    if (this.isInitialised || isReadOnly) {
        // Only cleared when executors change from reading to processing state in one node deployment
        return;
    }
    Event[] events;
    Long lastData = null;
    // Get max(AGG_TIMESTAMP) from table corresponding to max duration
    Table tableForMaxDuration = aggregationTables.get(incrementalDurations.get(incrementalDurations.size() - 1));
    OnDemandQuery onDemandQuery = getOnDemandQuery(tableForMaxDuration, true, endOFLatestEventTimestamp);
    onDemandQuery.setType(OnDemandQuery.OnDemandQueryType.FIND);
    OnDemandQueryRuntime onDemandQueryRuntime = OnDemandQueryParser.parse(onDemandQuery, null, siddhiAppContext, tableMap, windowMap, aggregationMap);
    // Get latest event timestamp in tableForMaxDuration and get the end time of the aggregation record
    events = onDemandQueryRuntime.execute();
    if (events != null) {
        lastData = (Long) events[events.length - 1].getData(0);
        endOFLatestEventTimestamp = IncrementalTimeConverterUtil.getNextEmitTime(lastData, incrementalDurations.get(incrementalDurations.size() - 1), timeZone);
    }
    if (isPersistedAggregation) {
        for (int i = incrementalDurations.size() - 1; i > 0; i--) {
            if (lastData != null && !IncrementalTimeConverterUtil.isAggregationDataComplete(lastData, incrementalDurations.get(i), timeZone)) {
                recreateState(lastData, incrementalDurations.get(i), aggregationTables.get(incrementalDurations.get(i - 1)), i == 1);
            } else if (lastData == null) {
                recreateState(null, incrementalDurations.get(i), aggregationTables.get(incrementalDurations.get(i - 1)), i == 1);
            }
            if (i > 1) {
                onDemandQuery = getOnDemandQuery(aggregationTables.get(incrementalDurations.get(i - 1)), true, endOFLatestEventTimestamp);
                onDemandQuery.setType(OnDemandQuery.OnDemandQueryType.FIND);
                onDemandQueryRuntime = OnDemandQueryParser.parse(onDemandQuery, null, siddhiAppContext, tableMap, windowMap, aggregationMap);
                events = onDemandQueryRuntime.execute();
                if (events != null) {
                    lastData = (Long) events[events.length - 1].getData(0);
                } else {
                    lastData = null;
                }
            }
        }
    } else {
        for (int i = incrementalDurations.size() - 1; i > 0; i--) {
            // Get the table previous to the duration for which we need to recreate (e.g. if we want to recreate
            // for minute duration, take the second table [provided that aggregation is done for seconds])
            // This lookup is filtered by endOFLatestEventTimestamp
            Table recreateFromTable = aggregationTables.get(incrementalDurations.get(i - 1));
            onDemandQuery = getOnDemandQuery(recreateFromTable, false, endOFLatestEventTimestamp);
            onDemandQuery.setType(OnDemandQuery.OnDemandQueryType.FIND);
            onDemandQueryRuntime = OnDemandQueryParser.parse(onDemandQuery, null, siddhiAppContext, tableMap, windowMap, aggregationMap);
            events = onDemandQueryRuntime.execute();
            if (events != null) {
                long referenceToNextLatestEvent = (Long) events[events.length - 1].getData(0);
                endOFLatestEventTimestamp = IncrementalTimeConverterUtil.getNextEmitTime(referenceToNextLatestEvent, incrementalDurations.get(i - 1), timeZone);
                TimePeriod.Duration recreateForDuration = incrementalDurations.get(i);
                if (!isStatePresentForAggregationDuration(recreateForDuration)) {
                    ComplexEventChunk<StreamEvent> complexEventChunk = new ComplexEventChunk<>();
                    for (Event event : events) {
                        StreamEvent streamEvent = streamEventFactory.newInstance();
                        streamEvent.setOutputData(event.getData());
                        complexEventChunk.add(streamEvent);
                    }
                    Executor incrementalExecutor = incrementalExecutorMap.get(recreateForDuration);
                    incrementalExecutor.execute(complexEventChunk);
                }
                if (i == 1) {
                    TimePeriod.Duration rootDuration = incrementalDurations.get(0);
                    Executor rootIncrementalExecutor = incrementalExecutorMap.get(rootDuration);
                    long emitTimeOfLatestEventInTable = IncrementalTimeConverterUtil.getNextEmitTime(referenceToNextLatestEvent, rootDuration, timeZone);
                    rootIncrementalExecutor.setEmitTime(emitTimeOfLatestEventInTable);
                }
            }
        }
    }
    this.isInitialised = true;
}
Also used : Table(io.siddhi.core.table.Table) ComplexEventChunk(io.siddhi.core.event.ComplexEventChunk) TimePeriod(io.siddhi.query.api.aggregation.TimePeriod) StreamEvent(io.siddhi.core.event.stream.StreamEvent) MetaStreamEvent(io.siddhi.core.event.stream.MetaStreamEvent) OnDemandQuery(io.siddhi.query.api.execution.query.OnDemandQuery) OnDemandQueryRuntime(io.siddhi.core.query.OnDemandQueryRuntime) Event(io.siddhi.core.event.Event)
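
The recreation loops in initialiseExecutors walk incrementalDurations from the coarsest duration down to index 1, reading each duration's state from the next-finer table; index 0 is the root duration, whose emit time is set last. A minimal illustration of that iteration order, assuming a root-first ordered list (the duration values are illustrative):

import io.siddhi.query.api.aggregation.TimePeriod;

import java.util.Arrays;
import java.util.List;

public class DurationChainSketch {
    public static void main(String[] args) {
        // Assumed ordering: root (finest) duration first, as initialiseExecutors expects.
        List<TimePeriod.Duration> durations = Arrays.asList(
                TimePeriod.Duration.SECONDS, TimePeriod.Duration.MINUTES,
                TimePeriod.Duration.HOURS, TimePeriod.Duration.DAYS);
        for (int i = durations.size() - 1; i > 0; i--) {
            System.out.println("recreate " + durations.get(i)
                    + " state from the " + durations.get(i - 1) + " table"
                    + (i == 1 ? " (and set the root emit time)" : ""));
        }
    }
}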

Aggregations

OnDemandQueryRuntime (io.siddhi.core.query.OnDemandQueryRuntime): 14
OnDemandQuery (io.siddhi.query.api.execution.query.OnDemandQuery): 8
Event (io.siddhi.core.event.Event): 6
MetaStreamEvent (io.siddhi.core.event.stream.MetaStreamEvent): 6
StreamEvent (io.siddhi.core.event.stream.StreamEvent): 6
ComplexEventChunk (io.siddhi.core.event.ComplexEventChunk): 4
OnDemandQueryCreationException (io.siddhi.core.exception.OnDemandQueryCreationException): 4
SiddhiAppRuntimeException (io.siddhi.core.exception.SiddhiAppRuntimeException): 4
TimePeriod (io.siddhi.query.api.aggregation.TimePeriod): 4
SiddhiAppContextException (io.siddhi.query.api.exception.SiddhiAppContextException): 4
MetaStateEvent (io.siddhi.core.event.state.MetaStateEvent): 2
StateEvent (io.siddhi.core.event.state.StateEvent): 2
StateEventFactory (io.siddhi.core.event.state.StateEventFactory): 2
DataPurgingException (io.siddhi.core.exception.DataPurgingException): 2
SiddhiAppCreationException (io.siddhi.core.exception.SiddhiAppCreationException): 2
DeleteOnDemandQueryRuntime (io.siddhi.core.query.DeleteOnDemandQueryRuntime): 2
FindOnDemandQueryRuntime (io.siddhi.core.query.FindOnDemandQueryRuntime): 2
InsertOnDemandQueryRuntime (io.siddhi.core.query.InsertOnDemandQueryRuntime): 2
SelectOnDemandQueryRuntime (io.siddhi.core.query.SelectOnDemandQueryRuntime): 2
UpdateOnDemandQueryRuntime (io.siddhi.core.query.UpdateOnDemandQueryRuntime): 2
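
The typed runtimes in this list (FindOnDemandQueryRuntime, InsertOnDemandQueryRuntime, UpdateOnDemandQueryRuntime, DeleteOnDemandQueryRuntime, SelectOnDemandQueryRuntime) are the concrete subtypes OnDemandQueryParser produces depending on the query form. A hedged sketch of the corresponding on-demand query strings issued through the public API; table and attribute names are illustrative, and which subtype the parser returns is an internal detail.

import io.siddhi.core.SiddhiAppRuntime;
import io.siddhi.core.event.Event;

final class QueryTypeSketch {
    static void runEachQueryType(SiddhiAppRuntime runtime) {
        // FIND: read from a table without modifying it.
        Event[] found = runtime.query("from StockTable select symbol, price");
        System.out.println(found == null ? 0 : found.length);
        // INSERT: select literal values into a table.
        runtime.query("select 'IBM' as symbol, 75.6f as price insert into StockTable");
        // UPDATE: modify matching rows in place.
        runtime.query("update StockTable set StockTable.price = 57.6f "
                + "on StockTable.symbol == 'IBM'");
        // DELETE: remove matching rows.
        runtime.query("delete StockTable on StockTable.symbol == 'IBM'");
    }
}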