Search in sources :

Example 1 with DataPurgingException

Use of io.siddhi.core.exception.DataPurgingException in the siddhi project by WSO2.

The run method of the IncrementalDataPurger class:

// Scheduled purge task: for every active incremental duration whose retention is not
// RETAIN_ALL, delete aggregation rows older than (now - retention) — but only after
// confirming the parent (next-coarser) table already contains aggregated data for that
// period, so purging cannot create a data gap.
// NOTE(review): presumably invoked by a scheduler (Runnable.run with no args) — confirm
// against the class that registers this task.
@Override
public void run() {
    boolean isNeededToExecutePurgeTask = false;
    Map<String, Boolean> purgingCheckState;
    boolean isSafeToRunPurgingTask = false;
    long currentTime = System.currentTimeMillis();
    long purgeTime;
    // Single-element payload handed to createStreamEvent; slot [0] holds the cutoff time.
    Object[] purgeTimeArray = new Object[1];
    // i starts at 1 so activeIncrementalDurations.get(i) is the PARENT (next-coarser)
    // duration of the one currently being iterated.
    int i = 1;
    // A previous run detected an unsafe state; do nothing until it is investigated.
    if (purgingHalted) {
        LOG.error(errorMessage);
        return;
    }
    for (TimePeriod.Duration duration : activeIncrementalDurations) {
        // RETAIN_ALL marks a duration whose table must never be purged.
        if (!retentionPeriods.get(duration).equals(RETAIN_ALL)) {
            // eventChunk is a reused instance field — cleared per duration.
            eventChunk.clear();
            // Cutoff timestamp: anything older than this is eligible for deletion.
            purgeTime = currentTime - retentionPeriods.get(duration);
            purgeTimeArray[0] = purgeTime;
            // Safety check only possible when a parent duration exists (i within bounds).
            // NOTE(review): when retentionPeriods.size() <= i (the last duration), both
            // flags retain their values from the PREVIOUS iteration — confirm this
            // carry-over is intended rather than resetting them per duration.
            if (retentionPeriods.size() > i) {
                purgingCheckState = isSafeToPurgeTheDuration(purgeTime, aggregationTables.get(activeIncrementalDurations.get(i)), aggregationTables.get(duration), duration, timeZone);
                if (purgingCheckState.get(IS_DATA_AVAILABLE_TO_PURGE)) {
                    isNeededToExecutePurgeTask = true;
                    if (purgingCheckState.get(IS_PARENT_TABLE_HAS_AGGREGATED_DATA)) {
                        // Parent table holds the rolled-up data: safe to delete here.
                        isSafeToRunPurgingTask = true;
                    } else {
                        // Data exists to purge but the parent has no aggregate for it:
                        // halt all future runs until investigated (see errorMessage below).
                        isSafeToRunPurgingTask = false;
                        purgingHalted = true;
                    }
                } else {
                    // Nothing old enough to purge for this duration.
                    isNeededToExecutePurgeTask = false;
                }
            }
            if (isNeededToExecutePurgeTask) {
                if (isSafeToRunPurgingTask) {
                    // Wrap the cutoff time in a StateEvent and issue the delete.
                    StateEvent secEvent = createStreamEvent(purgeTimeArray, currentTime);
                    eventChunk.add(secEvent);
                    Table table = aggregationTables.get(duration);
                    try {
                        if (LOG.isDebugEnabled()) {
                            LOG.debug("Purging data of table: " + table.getTableDefinition().getId() + " with a" + " retention of timestamp : " + purgeTime);
                        }
                        table.deleteEvents(eventChunk, compiledConditionsHolder.get(duration), 1);
                    } catch (RuntimeException e) {
                        // Logged and rethrown with cause preserved so the scheduler sees the failure.
                        LOG.error("Exception occurred while deleting events from " + table.getTableDefinition().getId() + " table", e);
                        throw new DataPurgingException("Exception occurred while deleting events from " + table.getTableDefinition().getId() + " table", e);
                    }
                } else {
                    // Unsafe state: record why and stop the whole task (purgingHalted was
                    // set above, so subsequent runs log this message and return early).
                    // NOTE(review): this halts purging permanently yet is logged at INFO —
                    // consider whether ERROR/WARN is the intended level.
                    errorMessage = "Purging task halted!!!. Data purging for table: " + aggregationTables.get(duration).getTableDefinition().getId() + " with a retention" + " of timestamp : " + purgeTime + " didn't executed since parent " + aggregationTables.get(activeIncrementalDurations.get(i)).getTableDefinition().getId() + " table does not contain values of above period. This has to be investigate since" + " this may lead to an aggregation data mismatch";
                    LOG.info(errorMessage);
                    return;
                }
            }
        }
        i++;
    }
}
Also used : Table(io.siddhi.core.table.Table) TimePeriod(io.siddhi.query.api.aggregation.TimePeriod) MetaStateEvent(io.siddhi.core.event.state.MetaStateEvent) StateEvent(io.siddhi.core.event.state.StateEvent) DataPurgingException(io.siddhi.core.exception.DataPurgingException)

Aggregations

MetaStateEvent (io.siddhi.core.event.state.MetaStateEvent)1 StateEvent (io.siddhi.core.event.state.StateEvent)1 DataPurgingException (io.siddhi.core.exception.DataPurgingException)1 Table (io.siddhi.core.table.Table)1 TimePeriod (io.siddhi.query.api.aggregation.TimePeriod)1